Skip to content

Commit adf6814

Browse files
fix: param settings openai
1 parent 3c662fa commit adf6814

5 files changed

Lines changed: 29 additions & 5 deletions

File tree

api/server/controllers/agents/client.js

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,9 @@ const {
99
} = require('@librechat/api');
1010
const {
1111
Callback,
12+
Providers,
1213
GraphEvents,
14+
TitleMethod,
1315
formatMessage,
1416
formatAgentMessages,
1517
formatContentStrings,
@@ -59,6 +61,18 @@ const payloadParser = ({ req, agent, endpoint }) => {
5961

6062
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
6163

64+
const omitTitleOptions = new Set([
65+
'stream',
66+
'thinking',
67+
'streaming',
68+
'clientOptions',
69+
'thinkingConfig',
70+
'thinkingBudget',
71+
'includeThoughts',
72+
'maxOutputTokens',
73+
'additionalModelRequestFields',
74+
]);
75+
6276
const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
6377

6478
function createTokenCounter(encoding) {
@@ -934,6 +948,7 @@ class AgentClient extends BaseClient {
934948
}
935949
const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
936950
const endpoint = this.options.agent.endpoint;
951+
const provider = this.options.agent.provider || endpoint;
937952
const { req, res } = this.options;
938953
/** @type {import('@librechat/agents').ClientOptions} */
939954
let clientOptions = {

client/src/locales/en/translation.json

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -190,6 +190,8 @@
190190
"com_endpoint_deprecated": "Deprecated",
191191
"com_endpoint_deprecated_info": "This endpoint is deprecated and may be removed in future versions, please use the agent endpoint instead",
192192
"com_endpoint_deprecated_info_a11y": "The plugin endpoint is deprecated and may be removed in future versions, please use the agent endpoint instead",
193+
"com_endpoint_disable_streaming": "Disable streaming responses and receive the complete response at once. Useful for models like o3 that require organization verification for streaming",
194+
"com_endpoint_disable_streaming_label": "Disable Streaming",
193195
"com_endpoint_examples": " Presets",
194196
"com_endpoint_export": "Export",
195197
"com_endpoint_export_share": "Export/Share",
@@ -271,6 +273,7 @@
271273
"com_endpoint_top_k": "Top K",
272274
"com_endpoint_top_p": "Top P",
273275
"com_endpoint_use_active_assistant": "Use Active Assistant",
276+
"com_endpoint_use_responses_api": "Use Responses API",
274277
"com_error_expired_user_key": "Provided key for {{0}} expired at {{1}}. Please provide a new key and try again.",
275278
"com_error_files_dupe": "Duplicate file detected.",
276279
"com_error_files_empty": "Empty files are not allowed.",

packages/api/src/endpoints/openai/llm.spec.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,12 +85,13 @@ describe('getOpenAIConfig', () => {
8585
modelOptions: { ...modelOptions, useResponsesApi: true },
8686
});
8787

88-
expect(result.llmConfig.reasoning).toEqual({
88+
expect(result.llmConfig.modelKwargs?.reasoning).toEqual({
8989
effort: ReasoningEffort.high,
9090
summary: ReasoningSummary.detailed,
9191
});
9292
expect((result.llmConfig as Record<string, unknown>).reasoning_effort).toBeUndefined();
9393
expect((result.llmConfig as Record<string, unknown>).reasoning_summary).toBeUndefined();
94+
expect(result.llmConfig.reasoning).toBeUndefined();
9495
});
9596

9697
it('should handle reasoning params without useResponsesApi', () => {

packages/api/src/endpoints/openai/llm.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -176,12 +176,17 @@ export function getOpenAIConfig(
176176

177177
// Handle reasoning parameters for Responses API
178178
if (hasReasoningParams({ reasoning_effort, reasoning_summary })) {
179+
const reasoning: Record<string, any> = {};
180+
179181
if (reasoning_effort != null && reasoning_effort !== ReasoningEffort.none) {
180-
modelKwargs.reasoning_effort = reasoning_effort;
181-
hasModelKwargs = true;
182+
reasoning.effort = reasoning_effort;
182183
}
183184
if (reasoning_summary != null && reasoning_summary !== ReasoningSummary.none) {
184-
modelKwargs.reasoning_summary = reasoning_summary;
185+
reasoning.summary = reasoning_summary;
186+
}
187+
188+
if (Object.keys(reasoning).length > 0) {
189+
modelKwargs.reasoning = reasoning;
185190
hasModelKwargs = true;
186191
}
187192
}

packages/data-provider/src/parameterSettings.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -213,7 +213,7 @@ const openAIParams: Record<string, SettingDefinition> = {
213213
description: 'com_endpoint_openai_reasoning_effort',
214214
descriptionCode: true,
215215
type: 'enum',
216-
default: ReasoningEffort.medium,
216+
default: ReasoningEffort.none,
217217
component: 'slider',
218218
options: [
219219
ReasoningEffort.none,

0 commit comments

Comments (0)