Skip to content

Commit 9e46d3e

Browse files
0xMink and daniel-lxs authored
Fix provider 400s: strip reasoning_details from messages, $ref from tool schemas (#11431)
Co-authored-by: daniel-lxs <ricciodaniel98@gmail.com>
1 parent 6c9ff49 commit 9e46d3e

24 files changed

Lines changed: 419 additions & 60 deletions

src/api/providers/__tests__/anthropic-vertex.spec.ts

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -246,20 +246,21 @@ describe("AnthropicVertexHandler", () => {
246246
)
247247
})
248248

249-
it("should pass messages directly to streamText as ModelMessage[]", async () => {
249+
it("should sanitize and pass messages to streamText as ModelMessage[]", async () => {
250250
mockStreamText.mockReturnValue(createMockStreamResult([]))
251251

252252
const stream = handler.createMessage(systemPrompt, mockMessages)
253253
for await (const _chunk of stream) {
254254
// consume
255255
}
256256

257-
// Messages are now already in ModelMessage format, passed directly to streamText
258-
expect(mockStreamText).toHaveBeenCalledWith(
259-
expect.objectContaining({
260-
messages: mockMessages,
261-
}),
262-
)
257+
// Messages are sanitized (allowlist: role, content, providerOptions) before passing to streamText
258+
const callArgs = mockStreamText.mock.calls[0]![0]
259+
expect(callArgs.messages).toHaveLength(2)
260+
expect(callArgs.messages[0].role).toBe("user")
261+
expect(callArgs.messages[0].content).toBe("Hello")
262+
expect(callArgs.messages[1].role).toBe("assistant")
263+
expect(callArgs.messages[1].content).toBe("Hi there!")
263264
})
264265

265266
it("should pass tools through AI SDK conversion pipeline", async () => {

src/api/providers/__tests__/anthropic.spec.ts

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -399,6 +399,45 @@ describe("AnthropicHandler", () => {
399399
expect(endChunk).toBeDefined()
400400
})
401401

402+
it("should strip reasoning_details and reasoning_content from messages before sending to API", async () => {
403+
setupStreamTextMock([{ type: "text-delta", text: "test" }])
404+
405+
// Simulate messages with extra legacy fields that survive JSON deserialization
406+
const messagesWithExtraFields = [
407+
{
408+
role: "user",
409+
content: [{ type: "text" as const, text: "Hello" }],
410+
},
411+
{
412+
role: "assistant",
413+
content: [{ type: "text" as const, text: "Hi" }],
414+
reasoning_details: [{ type: "thinking", thinking: "some reasoning" }],
415+
reasoning_content: "some reasoning content",
416+
},
417+
{
418+
role: "user",
419+
content: [{ type: "text" as const, text: "Follow up" }],
420+
},
421+
] as any
422+
423+
const stream = handler.createMessage(systemPrompt, messagesWithExtraFields)
424+
425+
for await (const _chunk of stream) {
426+
// Consume stream
427+
}
428+
429+
// Verify streamText was called exactly once
430+
expect(mockStreamText).toHaveBeenCalledTimes(1)
431+
const callArgs = mockStreamText.mock.calls[0]![0]
432+
for (const msg of callArgs.messages) {
433+
expect(msg).not.toHaveProperty("reasoning_details")
434+
expect(msg).not.toHaveProperty("reasoning_content")
435+
}
436+
// Verify the rest of the message is preserved
437+
expect(callArgs.messages[1].role).toBe("assistant")
438+
expect(callArgs.messages[1].content).toEqual([{ type: "text", text: "Hi" }])
439+
})
440+
402441
it("should pass system prompt via system param when no systemProviderOptions", async () => {
403442
setupStreamTextMock([{ type: "text-delta", text: "test" }])
404443

src/api/providers/anthropic-vertex.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import type { Anthropic } from "@anthropic-ai/sdk"
22
import { createVertexAnthropic } from "@ai-sdk/google-vertex/anthropic"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import {
66
type ModelInfo,
@@ -28,6 +28,7 @@ import {
2828
} from "../transform/ai-sdk"
2929
import { applyToolCacheOptions, applySystemPromptCaching } from "../transform/cache-breakpoints"
3030
import { calculateApiCostAnthropic } from "../../shared/cost"
31+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
3132

3233
import { DEFAULT_HEADERS } from "./constants"
3334
import { BaseProvider } from "./base-provider"
@@ -91,8 +92,8 @@ export class AnthropicVertexHandler extends BaseProvider implements SingleComple
9192
): ApiStream {
9293
const modelConfig = this.getModel()
9394

94-
// Convert messages to AI SDK format
95-
const aiSdkMessages = messages as ModelMessage[]
95+
// Sanitize messages for the provider API (allowlist: role, content, providerOptions).
96+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
9697

9798
// Convert tools to AI SDK format
9899
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)

src/api/providers/anthropic.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { createAnthropic } from "@ai-sdk/anthropic"
2-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
2+
import { streamText, generateText, ToolSet } from "ai"
33

44
import {
55
type ModelInfo,
@@ -26,6 +26,7 @@ import {
2626
} from "../transform/ai-sdk"
2727
import { applyToolCacheOptions, applySystemPromptCaching } from "../transform/cache-breakpoints"
2828
import { calculateApiCostAnthropic } from "../../shared/cost"
29+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2930

3031
import { DEFAULT_HEADERS } from "./constants"
3132
import { BaseProvider } from "./base-provider"
@@ -77,8 +78,8 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
7778
): ApiStream {
7879
const modelConfig = this.getModel()
7980

80-
// Convert messages to AI SDK format
81-
const aiSdkMessages = messages as ModelMessage[]
81+
// Sanitize messages for the provider API (allowlist: role, content, providerOptions).
82+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
8283

8384
// Convert tools to AI SDK format
8485
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)

src/api/providers/azure.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import { createAzure } from "@ai-sdk/azure"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import { azureModels, azureDefaultModelInfo, type ModelInfo } from "@roo-code/types"
66

@@ -21,6 +21,7 @@ import { DEFAULT_HEADERS } from "./constants"
2121
import { BaseProvider } from "./base-provider"
2222
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2323
import type { RooMessage } from "../../core/task-persistence/rooMessage"
24+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2425

2526
const AZURE_DEFAULT_TEMPERATURE = 0
2627

@@ -139,8 +140,8 @@ export class AzureHandler extends BaseProvider implements SingleCompletionHandle
139140
const { temperature } = this.getModel()
140141
const languageModel = this.getLanguageModel()
141142

142-
// Convert messages to AI SDK format
143-
const aiSdkMessages = messages as ModelMessage[]
143+
// Sanitize messages for the provider API (allowlist: role, content, providerOptions).
144+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
144145

145146
// Convert tools to OpenAI format first, then to AI SDK format
146147
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)

src/api/providers/baseten.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import { createBaseten } from "@ai-sdk/baseten"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import { basetenModels, basetenDefaultModelId, type ModelInfo } from "@roo-code/types"
66

@@ -21,6 +21,7 @@ import { DEFAULT_HEADERS } from "./constants"
2121
import { BaseProvider } from "./base-provider"
2222
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2323
import type { RooMessage } from "../../core/task-persistence/rooMessage"
24+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2425

2526
const BASETEN_DEFAULT_TEMPERATURE = 0.5
2627

@@ -102,7 +103,7 @@ export class BasetenHandler extends BaseProvider implements SingleCompletionHand
102103
const { temperature } = this.getModel()
103104
const languageModel = this.getLanguageModel()
104105

105-
const aiSdkMessages = messages as ModelMessage[]
106+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
106107

107108
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)
108109
const aiSdkTools = convertToolsForAiSdk(openAiTools) as ToolSet | undefined

src/api/providers/deepseek.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import { createDeepSeek } from "@ai-sdk/deepseek"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import { deepSeekModels, deepSeekDefaultModelId, DEEP_SEEK_DEFAULT_TEMPERATURE, type ModelInfo } from "@roo-code/types"
66

@@ -21,6 +21,7 @@ import { DEFAULT_HEADERS } from "./constants"
2121
import { BaseProvider } from "./base-provider"
2222
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2323
import type { RooMessage } from "../../core/task-persistence/rooMessage"
24+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2425

2526
/**
2627
* DeepSeek provider using the dedicated @ai-sdk/deepseek package.
@@ -117,8 +118,8 @@ export class DeepSeekHandler extends BaseProvider implements SingleCompletionHan
117118
const { temperature } = this.getModel()
118119
const languageModel = this.getLanguageModel()
119120

120-
// Convert messages to AI SDK format
121-
const aiSdkMessages = messages as ModelMessage[]
121+
// Sanitize messages for the provider API (allowlist: role, content, providerOptions).
122+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
122123

123124
// Convert tools to OpenAI format first, then to AI SDK format
124125
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)

src/api/providers/fireworks.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import { createFireworks } from "@ai-sdk/fireworks"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import { fireworksModels, fireworksDefaultModelId, type ModelInfo } from "@roo-code/types"
66

@@ -21,6 +21,7 @@ import { DEFAULT_HEADERS } from "./constants"
2121
import { BaseProvider } from "./base-provider"
2222
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2323
import type { RooMessage } from "../../core/task-persistence/rooMessage"
24+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2425

2526
const FIREWORKS_DEFAULT_TEMPERATURE = 0.5
2627

@@ -117,8 +118,8 @@ export class FireworksHandler extends BaseProvider implements SingleCompletionHa
117118
const { temperature } = this.getModel()
118119
const languageModel = this.getLanguageModel()
119120

120-
// Convert messages to AI SDK format
121-
const aiSdkMessages = messages as ModelMessage[]
121+
// Sanitize messages for the provider API (allowlist: role, content, providerOptions).
122+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
122123

123124
// Convert tools to OpenAI format first, then to AI SDK format
124125
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)

src/api/providers/lm-studio.ts

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,5 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
2-
import {
3-
streamText,
4-
generateText,
5-
ToolSet,
6-
wrapLanguageModel,
7-
extractReasoningMiddleware,
8-
LanguageModel,
9-
ModelMessage,
10-
} from "ai"
2+
import { streamText, generateText, ToolSet, wrapLanguageModel, extractReasoningMiddleware, LanguageModel } from "ai"
113

124
import { type ModelInfo, openAiModelInfoSaneDefaults, LMSTUDIO_DEFAULT_TEMPERATURE } from "@roo-code/types"
135

@@ -27,6 +19,7 @@ import { OpenAICompatibleHandler, OpenAICompatibleConfig } from "./openai-compat
2719
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2820
import { getModelsFromCache } from "./fetchers/modelCache"
2921
import type { RooMessage } from "../../core/task-persistence/rooMessage"
22+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
3023

3124
export class LmStudioHandler extends OpenAICompatibleHandler implements SingleCompletionHandler {
3225
constructor(options: ApiHandlerOptions) {
@@ -65,7 +58,7 @@ export class LmStudioHandler extends OpenAICompatibleHandler implements SingleCo
6558
const model = this.getModel()
6659
const languageModel = this.getLanguageModel()
6760

68-
const aiSdkMessages = messages as ModelMessage[]
61+
const aiSdkMessages = sanitizeMessagesForProvider(messages)
6962

7063
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)
7164
const aiSdkTools = convertToolsForAiSdk(openAiTools) as ToolSet | undefined

src/api/providers/minimax.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import type { Anthropic } from "@anthropic-ai/sdk"
22
import { createAnthropic } from "@ai-sdk/anthropic"
3-
import { streamText, generateText, ToolSet, ModelMessage } from "ai"
3+
import { streamText, generateText, ToolSet } from "ai"
44

55
import { type ModelInfo, minimaxDefaultModelId, minimaxModels } from "@roo-code/types"
66

@@ -23,6 +23,7 @@ import { DEFAULT_HEADERS } from "./constants"
2323
import { BaseProvider } from "./base-provider"
2424
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
2525
import type { RooMessage } from "../../core/task-persistence/rooMessage"
26+
import { sanitizeMessagesForProvider } from "../transform/sanitize-messages"
2627

2728
export class MiniMaxHandler extends BaseProvider implements SingleCompletionHandler {
2829
private client: ReturnType<typeof createAnthropic>
@@ -73,7 +74,7 @@ export class MiniMaxHandler extends BaseProvider implements SingleCompletionHand
7374
})
7475

7576
const mergedMessages = mergeEnvironmentDetailsForMiniMax(messages as any)
76-
const aiSdkMessages = mergedMessages as ModelMessage[]
77+
const aiSdkMessages = sanitizeMessagesForProvider(mergedMessages as RooMessage[])
7778
const openAiTools = this.convertToolsForOpenAI(metadata?.tools)
7879
const aiSdkTools = convertToolsForAiSdk(openAiTools) as ToolSet | undefined
7980
applyToolCacheOptions(aiSdkTools as Parameters<typeof applyToolCacheOptions>[0], metadata?.toolProviderOptions)

0 commit comments

Comments (0)