Skip to content

Commit da6a8d8

Browse files
committed
feat(continue-watsonx-ai-provider): add native tool support with call processing
1 parent 7867cf8 commit da6a8d8

1 file changed

Lines changed: 64 additions & 9 deletions

File tree

src/api/providers/ibm-watsonx.ts

Lines changed: 64 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -111,21 +111,77 @@ export class WatsonxAIHandler extends BaseProvider implements SingleCompletionHa
111111
* @param projectId - The IBM watsonx project ID
112112
* @param modelId - The model ID to use
113113
* @param messages - The messages to send
114+
* @param metadata - Optional metadata for tool support
114115
* @returns The parameters object for the API call
115116
*/
116-
private createTextChatParams(
	projectId: string,
	modelId: string,
	messages: any[],
	metadata?: ApiHandlerCreateMessageMetadata,
): {
	projectId: string
	modelId: string
	messages: any[]
	maxTokens: number
	temperature: number
	maxCompletionTokens: number
	tools?: any[]
	toolChoice?: any
} {
	// `0` max tokens is meaningless, so `||` intentionally maps both 0 and
	// undefined to the default.
	const maxTokens = this.options.modelMaxTokens || 2048
	// Use ?? (not ||) here: a caller-set temperature of 0 is valid and must
	// not be silently replaced by the 0.7 default.
	const temperature = this.options.modelTemperature ?? 0.7
	// Set to 0 for the model's configured max generated tokens
	const maxCompletionTokens = 0

	const params = {
		projectId,
		modelId,
		messages,
		maxTokens,
		temperature,
		maxCompletionTokens,
	}

	// Add native tool support only when the caller actually supplied tools;
	// toolChoice is forwarded only if it was set.
	if (metadata?.tools && metadata.tools.length > 0) {
		return {
			...params,
			tools: this.convertToolsForOpenAI(metadata.tools),
			...(metadata.tool_choice && { toolChoice: metadata.tool_choice }),
		}
	}

	return params
}
157+
158+
/**
159+
* Processes watsonx response message and yields appropriate chunks
160+
*
161+
* @param message - The message from watsonx response
162+
*/
163+
private *processResponseMessage(message: any): Generator<any> {
164+
// Handle text content
165+
if (message.content) {
166+
yield {
167+
type: "text",
168+
text: message.content,
169+
}
170+
}
171+
172+
// Handle tool calls
173+
if (message.tool_calls && message.tool_calls.length > 0) {
174+
for (const toolCall of message.tool_calls) {
175+
if (toolCall.type === "function") {
176+
yield {
177+
type: "tool_call",
178+
id: toolCall.id,
179+
name: toolCall.function.name,
180+
arguments: toolCall.function.arguments,
181+
}
182+
}
183+
}
184+
}
129185
}
130186

131187
/**
@@ -147,19 +203,18 @@ export class WatsonxAIHandler extends BaseProvider implements SingleCompletionHa
147203
// Convert messages to WatsonX format with system prompt
148204
const watsonxMessages = [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)]
149205

150-
const params = this.createTextChatParams(this.projectId!, modelId, watsonxMessages)
206+
const params = this.createTextChatParams(this.projectId!, modelId, watsonxMessages, metadata)
207+
151208
const response = await this.service.textChat(params)
152209

153-
if (!response?.result?.choices?.[0]?.message?.content) {
210+
if (!response?.result?.choices?.[0]?.message) {
154211
throw new Error("Invalid or empty response from IBM watsonx API")
155212
}
156213

157-
const responseText = response.result.choices[0].message.content
214+
const message = response.result.choices[0].message
158215

159-
yield {
160-
type: "text",
161-
text: responseText,
162-
}
216+
// Process response message (text and tool calls)
217+
yield* this.processResponseMessage(message)
163218

164219
const usageInfo = response.result.usage || {}
165220
const inputTokens = usageInfo.prompt_tokens || 0

0 commit comments

Comments
 (0)