Skip to content

Commit 255c757

Browse files
author
Roman Snapko
committed
Extend getLLMConfig usage to include aiConfig and apply model settings in chat name generation
1 parent f8a8fc5 commit 255c757

1 file changed

Lines changed: 2 additions & 1 deletion

File tree

packages/server/api/src/app/ai/chat/ai-chat.service.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ export async function generateChatName(
     messages: ModelMessage[],
     projectId: string,
 ): Promise<GeneratedChatName> {
-    const { languageModel } = await getLLMConfig(projectId);
+    const { languageModel, aiConfig } = await getLLMConfig(projectId);
     const systemPrompt = await loadPrompt('chat-name.txt');
     if (!systemPrompt.trim()) {
         throw new Error('Failed to load prompt to generate the chat name.');
@@ -117,6 +117,7 @@ export async function generateChatName(
         system: systemPrompt,
         messages: sanitizedMessages,
         schema: generatedChatNameSchema,
+        ...aiConfig.modelSettings,
         experimental_telemetry: { isEnabled: isLLMTelemetryEnabled() },
         maxRetries: 2,
     });

0 commit comments

Comments (0)