Skip to content

Commit 0e5407a

Browse files
roomote-v0[bot] (roomote) and daniel-lxs
authored
fix: make defaultTemperature required in getModelParams to prevent silent temperature overrides (#11218)
* fix: DeepSeek temperature defaulting to 0 instead of 0.3

  Pass defaultTemperature: DEEP_SEEK_DEFAULT_TEMPERATURE to getModelParams() in DeepSeekHandler.getModel() to ensure the correct default temperature (0.3) is used when no user configuration is provided. Closes #11194

* refactor: make defaultTemperature required in getModelParams

  Make the defaultTemperature parameter required in getModelParams() instead of defaulting to 0. This prevents providers with their own non-zero default temperature (like DeepSeek's 0.3) from being silently overridden by the implicit 0 default. Every provider now explicitly declares its temperature default, making the temperature resolution chain clear: user setting → model default → provider default

---------

Co-authored-by: Roo Code <roomote@roocode.com>
Co-authored-by: daniel-lxs <ricciodaniel98@gmail.com>
1 parent 00b1a7e commit 0e5407a

14 files changed

Lines changed: 79 additions & 11 deletions

File tree

src/api/providers/__tests__/deepseek.spec.ts

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ vi.mock("@ai-sdk/deepseek", () => ({
2525

2626
import type { Anthropic } from "@anthropic-ai/sdk"
2727

28-
import { deepSeekDefaultModelId, type ModelInfo } from "@roo-code/types"
28+
import { deepSeekDefaultModelId, DEEP_SEEK_DEFAULT_TEMPERATURE, type ModelInfo } from "@roo-code/types"
2929

3030
import type { ApiHandlerOptions } from "../../../shared/api"
3131

@@ -155,6 +155,20 @@ describe("DeepSeekHandler", () => {
155155
expect(model).toHaveProperty("temperature")
156156
expect(model).toHaveProperty("maxTokens")
157157
})
158+
159+
it("should use DEEP_SEEK_DEFAULT_TEMPERATURE as the default temperature", () => {
160+
const model = handler.getModel()
161+
expect(model.temperature).toBe(DEEP_SEEK_DEFAULT_TEMPERATURE)
162+
})
163+
164+
it("should respect user-provided temperature over DEEP_SEEK_DEFAULT_TEMPERATURE", () => {
165+
const handlerWithTemp = new DeepSeekHandler({
166+
...mockOptions,
167+
modelTemperature: 0.9,
168+
})
169+
const model = handlerWithTemp.getModel()
170+
expect(model.temperature).toBe(0.9)
171+
})
158172
})
159173

160174
describe("createMessage", () => {

src/api/providers/anthropic-vertex.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -231,7 +231,13 @@ export class AnthropicVertexHandler extends BaseProvider implements SingleComple
231231
}
232232
}
233233

234-
const params = getModelParams({ format: "anthropic", modelId: id, model: info, settings: this.options })
234+
const params = getModelParams({
235+
format: "anthropic",
236+
modelId: id,
237+
model: info,
238+
settings: this.options,
239+
defaultTemperature: 0,
240+
})
235241

236242
// Build betas array for request headers
237243
const betas: string[] = []

src/api/providers/anthropic.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -358,6 +358,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
358358
modelId: id,
359359
model: info,
360360
settings: this.options,
361+
defaultTemperature: 0,
361362
})
362363

363364
// The `:thinking` suffix indicates that the model is a "Hybrid"

src/api/providers/cerebras.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,13 @@ export class CerebrasHandler extends BaseProvider implements SingleCompletionHan
4949
override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
5050
const id = (this.options.apiModelId ?? cerebrasDefaultModelId) as CerebrasModelId
5151
const info = cerebrasModels[id as keyof typeof cerebrasModels] || cerebrasModels[cerebrasDefaultModelId]
52-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
52+
const params = getModelParams({
53+
format: "openai",
54+
modelId: id,
55+
model: info,
56+
settings: this.options,
57+
defaultTemperature: CEREBRAS_DEFAULT_TEMPERATURE,
58+
})
5359
return { id, info, ...params }
5460
}
5561

src/api/providers/deepinfra.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ export class DeepInfraHandler extends RouterProvider implements SingleCompletion
4747
modelId: id,
4848
model: info,
4949
settings: this.options,
50+
defaultTemperature: 0,
5051
})
5152

5253
return { id, info, ...params }

src/api/providers/deepseek.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,13 @@ export class DeepSeekHandler extends BaseProvider implements SingleCompletionHan
4343
override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
4444
const id = this.options.apiModelId ?? deepSeekDefaultModelId
4545
const info = deepSeekModels[id as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId]
46-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
46+
const params = getModelParams({
47+
format: "openai",
48+
modelId: id,
49+
model: info,
50+
settings: this.options,
51+
defaultTemperature: DEEP_SEEK_DEFAULT_TEMPERATURE,
52+
})
4753
return { id, info, ...params }
4854
}
4955

src/api/providers/doubao.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,13 @@ export class DoubaoHandler extends OpenAiHandler {
6464
override getModel() {
6565
const id = this.options.apiModelId ?? doubaoDefaultModelId
6666
const info = doubaoModels[id as keyof typeof doubaoModels] || doubaoModels[doubaoDefaultModelId]
67-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
67+
const params = getModelParams({
68+
format: "openai",
69+
modelId: id,
70+
model: info,
71+
settings: this.options,
72+
defaultTemperature: 0,
73+
})
6874
return { id, info, ...params }
6975
}
7076

src/api/providers/mistral.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,13 @@ export class MistralHandler extends BaseProvider implements SingleCompletionHand
5555
override getModel(): { id: string; info: ModelInfo; maxTokens?: number; temperature?: number } {
5656
const id = (this.options.apiModelId ?? mistralDefaultModelId) as MistralModelId
5757
const info = mistralModels[id as keyof typeof mistralModels] || mistralModels[mistralDefaultModelId]
58-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
58+
const params = getModelParams({
59+
format: "openai",
60+
modelId: id,
61+
model: info,
62+
settings: this.options,
63+
defaultTemperature: 0,
64+
})
5965
return { id, info, ...params }
6066
}
6167

src/api/providers/moonshot.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,13 @@ export class MoonshotHandler extends OpenAICompatibleHandler {
2929
override getModel() {
3030
const id = this.options.apiModelId ?? moonshotDefaultModelId
3131
const info = moonshotModels[id as keyof typeof moonshotModels] || moonshotModels[moonshotDefaultModelId]
32-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
32+
const params = getModelParams({
33+
format: "openai",
34+
modelId: id,
35+
model: info,
36+
settings: this.options,
37+
defaultTemperature: 0,
38+
})
3339
return { id, info, ...params }
3440
}
3541

src/api/providers/openai.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -282,7 +282,13 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
282282
override getModel() {
283283
const id = this.options.openAiModelId ?? ""
284284
const info: ModelInfo = this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults
285-
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
285+
const params = getModelParams({
286+
format: "openai",
287+
modelId: id,
288+
model: info,
289+
settings: this.options,
290+
defaultTemperature: 0,
291+
})
286292
return { id, info, ...params }
287293
}
288294

0 commit comments

Comments (0)