Skip to content

Commit c249efa

Browse files
added tool disabling for ephemeral endpoints + automatic linter
1 parent ff6d888 commit c249efa

318 files changed

Lines changed: 9218 additions & 6838 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

api/app/clients/GoogleClient.js

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -236,11 +236,11 @@ class GoogleClient extends BaseClient {
236236
msg.content = (
237237
!Array.isArray(msg.content)
238238
? [
239-
{
240-
type: ContentTypes.TEXT,
241-
[ContentTypes.TEXT]: msg.content,
242-
},
243-
]
239+
{
240+
type: ContentTypes.TEXT,
241+
[ContentTypes.TEXT]: msg.content,
242+
},
243+
]
244244
: msg.content
245245
).concat(message.image_urls);
246246

api/app/clients/OllamaClient.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ class OllamaClient {
6767
return models;
6868
} catch (error) {
6969
const logMessage =
70-
'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
70+
"Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn't start with `ollama` (case-insensitive).";
7171
logAxiosError({ message: logMessage, error });
7272
return [];
7373
}

api/app/clients/PluginsClient.js

Lines changed: 24 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -92,21 +92,30 @@ class PluginsClient extends OpenAIClient {
9292
);
9393

9494
// Initialize user-specific MCP connections early for plugins execution
95-
if (this.options.req && user) {
95+
// Check if tools are enabled for this endpoint
96+
const endpoint = this.options.endpoint;
97+
const endpointConfig = this.options.req?.app.locals[endpoint];
98+
const toolsEnabled = endpointConfig?.tools !== false;
99+
100+
if (this.options.req && user && toolsEnabled) {
96101
const MCPInitializer = require('~/server/services/MCPInitializer');
97102
const mcpInitializer = MCPInitializer.getInstance();
98103
const mcpResult = await mcpInitializer.ensureUserMCPReady(
99-
user,
100-
'PluginsClient',
101-
this.options.req.app.locals.availableTools
104+
user,
105+
'PluginsClient',
106+
this.options.req.app.locals.availableTools,
102107
);
103-
108+
104109
if (mcpResult.success) {
105-
logger.info(`[PluginsClient] MCP initialization successful: ${mcpResult.serverCount} servers, ${mcpResult.toolCount} tools in ${mcpResult.duration}ms`);
110+
logger.info(
111+
`[PluginsClient] MCP initialization successful: ${mcpResult.serverCount} servers, ${mcpResult.toolCount} tools in ${mcpResult.duration}ms`,
112+
);
106113
} else {
107114
logger.warn(`[PluginsClient] MCP initialization failed: ${mcpResult.error}`);
108115
// Continue without MCP tools - plugins can still work with other tools
109116
}
117+
} else if (!toolsEnabled) {
118+
logger.info(`[PluginsClient] Tools are disabled for endpoint: ${endpoint}`);
110119
}
111120

112121
// Map Messages to Langchain format
@@ -121,6 +130,15 @@ class PluginsClient extends OpenAIClient {
121130
chatHistory: new ChatMessageHistory(pastMessages),
122131
});
123132

133+
// Skip loading tools if they're disabled for this endpoint
134+
if (!toolsEnabled) {
135+
logger.info(
136+
`[PluginsClient] Skipping tool loading - tools disabled for endpoint: ${endpoint}`,
137+
);
138+
this.tools = [];
139+
return;
140+
}
141+
124142
const { loadedTools } = await loadTools({
125143
user,
126144
model,

api/app/clients/memory/example.js

Lines changed: 17 additions & 17 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

api/app/clients/memory/summaryBuffer.demo.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ const chatPromptMemory = new ConversationSummaryBufferMemory({
99
});
1010

1111
(async () => {
12-
await chatPromptMemory.saveContext({ input: 'hi my name\'s Danny' }, { output: 'whats up' });
12+
await chatPromptMemory.saveContext({ input: "hi my name's Danny" }, { output: 'whats up' });
1313
await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
1414
await chatPromptMemory.saveContext(
1515
{ input: 'are you excited for the olympics?' },

api/app/clients/output_parsers/addImages.spec.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ describe('addImages', () => {
7474

7575
it('should append correctly from a real scenario', () => {
7676
responseMessage.text =
77-
'Here is the generated image based on your request. It depicts a surreal landscape filled with floating musical notes. The style is impressionistic, with vibrant sunset hues dominating the scene. At the center, there\'s a silhouette of a grand piano, adding a dreamy emotion to the overall image. This could serve as a unique and creative music album cover. Would you like to make any changes or generate another image?';
77+
"Here is the generated image based on your request. It depicts a surreal landscape filled with floating musical notes. The style is impressionistic, with vibrant sunset hues dominating the scene. At the center, there's a silhouette of a grand piano, adding a dreamy emotion to the overall image. This could serve as a unique and creative music album cover. Would you like to make any changes or generate another image?";
7878
const originalText = responseMessage.text;
7979
const imageMarkdown = '![generated image](/images/img-RnVWaYo2Yg4x3e0isICiMuf5.png)';
8080
intermediateSteps.push({ observation: imageMarkdown });

api/app/clients/output_parsers/handleOutputs.js

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -65,14 +65,14 @@ function buildPromptPrefix({ result, message, functionsAgent }) {
6565
const preliminaryAnswer =
6666
result.output?.length > 0 ? `Preliminary Answer: "${result.output.trim()}"` : '';
6767
const prefix = preliminaryAnswer
68-
? 'review and improve the answer you generated using plugins in response to the User Message below. The user hasn\'t seen your answer or thoughts yet.'
68+
? "review and improve the answer you generated using plugins in response to the User Message below. The user hasn't seen your answer or thoughts yet."
6969
: 'respond to the User Message below based on your preliminary thoughts & actions.';
7070

7171
return `As a helpful AI Assistant, ${prefix}${errorMessage}\n${internalActions}
7272
${preliminaryAnswer}
7373
Reply conversationally to the User based on your ${
74-
preliminaryAnswer ? 'preliminary answer, ' : ''
75-
}internal actions, thoughts, and observations, making improvements wherever possible, but do not modify URLs.
74+
preliminaryAnswer ? 'preliminary answer, ' : ''
75+
}internal actions, thoughts, and observations, making improvements wherever possible, but do not modify URLs.
7676
${
7777
preliminaryAnswer
7878
? ''

api/app/clients/prompts/addCacheControl.spec.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ describe('addCacheControl', () => {
66
{ role: 'user', content: [{ type: 'text', text: 'Hello' }] },
77
{ role: 'assistant', content: [{ type: 'text', text: 'Hi there' }] },
88
{ role: 'user', content: [{ type: 'text', text: 'How are you?' }] },
9-
{ role: 'assistant', content: [{ type: 'text', text: 'I\'m doing well, thanks!' }] },
9+
{ role: 'assistant', content: [{ type: 'text', text: "I'm doing well, thanks!" }] },
1010
{ role: 'user', content: [{ type: 'text', text: 'Great!' }] },
1111
];
1212

@@ -22,7 +22,7 @@ describe('addCacheControl', () => {
2222
{ role: 'user', content: 'Hello' },
2323
{ role: 'assistant', content: 'Hi there' },
2424
{ role: 'user', content: 'How are you?' },
25-
{ role: 'assistant', content: 'I\'m doing well, thanks!' },
25+
{ role: 'assistant', content: "I'm doing well, thanks!" },
2626
{ role: 'user', content: 'Great!' },
2727
];
2828

@@ -140,7 +140,7 @@ describe('addCacheControl', () => {
140140
{ role: 'user', content: 'Hello' },
141141
{ role: 'assistant', content: 'Hi there' },
142142
{ role: 'user', content: [{ type: 'text', text: 'How are you?' }] },
143-
{ role: 'assistant', content: 'I\'m doing well, thanks!' },
143+
{ role: 'assistant', content: "I'm doing well, thanks!" },
144144
{ role: 'user', content: 'Great!' },
145145
];
146146

@@ -160,7 +160,7 @@ describe('addCacheControl', () => {
160160
},
161161
]);
162162
expect(result[1].content).toBe('Hi there');
163-
expect(result[3].content).toBe('I\'m doing well, thanks!');
163+
expect(result[3].content).toBe("I'm doing well, thanks!");
164164
});
165165

166166
test('should handle edge case with multiple content types', () => {

api/app/clients/prompts/createContextHandlers.js

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -96,35 +96,35 @@ function createContextHandlers(req, userMessageContent) {
9696
resolvedQueries.length === 0
9797
? '\n\tThe semantic search did not return any results.'
9898
: resolvedQueries
99-
.map((queryResult, index) => {
100-
const file = processedFiles[index];
101-
let contextItems = queryResult.data;
99+
.map((queryResult, index) => {
100+
const file = processedFiles[index];
101+
let contextItems = queryResult.data;
102102

103-
const generateContext = (currentContext) =>
104-
`
103+
const generateContext = (currentContext) =>
104+
`
105105
<file>
106106
<filename>${file.filename}</filename>
107107
<context>${currentContext}
108108
</context>
109109
</file>`;
110110

111-
if (useFullContext) {
112-
return generateContext(`\n${contextItems}`);
113-
}
111+
if (useFullContext) {
112+
return generateContext(`\n${contextItems}`);
113+
}
114114

115-
contextItems = queryResult.data
116-
.map((item) => {
117-
const pageContent = item[0].page_content;
118-
return `
115+
contextItems = queryResult.data
116+
.map((item) => {
117+
const pageContent = item[0].page_content;
118+
return `
119119
<contextItem>
120120
<![CDATA[${pageContent?.trim()}]]>
121121
</contextItem>`;
122-
})
123-
.join('');
122+
})
123+
.join('');
124124

125-
return generateContext(contextItems);
126-
})
127-
.join('');
125+
return generateContext(contextItems);
126+
})
127+
.join('');
128128

129129
if (useFullContext) {
130130
const prompt = `${header}

api/app/clients/prompts/formatAgentMessages.spec.js

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,7 @@ describe('formatAgentMessages', () => {
130130
content: [
131131
{
132132
type: ContentTypes.TEXT,
133-
[ContentTypes.TEXT]: 'I\'ll search for that information.',
133+
[ContentTypes.TEXT]: "I'll search for that information.",
134134
tool_call_ids: ['search_1'],
135135
},
136136
{
@@ -144,7 +144,7 @@ describe('formatAgentMessages', () => {
144144
},
145145
{
146146
type: ContentTypes.TEXT,
147-
[ContentTypes.TEXT]: 'Now, I\'ll convert the temperature.',
147+
[ContentTypes.TEXT]: "Now, I'll convert the temperature.",
148148
tool_call_ids: ['convert_1'],
149149
},
150150
{
@@ -156,7 +156,7 @@ describe('formatAgentMessages', () => {
156156
output: '23.89°C',
157157
},
158158
},
159-
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Here\'s your answer.' },
159+
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: "Here's your answer." },
160160
],
161161
},
162162
];
@@ -171,7 +171,7 @@ describe('formatAgentMessages', () => {
171171
expect(result[4]).toBeInstanceOf(AIMessage);
172172

173173
// Check first AIMessage
174-
expect(result[0].content).toBe('I\'ll search for that information.');
174+
expect(result[0].content).toBe("I'll search for that information.");
175175
expect(result[0].tool_calls).toHaveLength(1);
176176
expect(result[0].tool_calls[0]).toEqual({
177177
id: 'search_1',
@@ -187,7 +187,7 @@ describe('formatAgentMessages', () => {
187187
);
188188

189189
// Check second AIMessage
190-
expect(result[2].content).toBe('Now, I\'ll convert the temperature.');
190+
expect(result[2].content).toBe("Now, I'll convert the temperature.");
191191
expect(result[2].tool_calls).toHaveLength(1);
192192
expect(result[2].tool_calls[0]).toEqual({
193193
id: 'convert_1',
@@ -202,7 +202,7 @@ describe('formatAgentMessages', () => {
202202

203203
// Check final AIMessage
204204
expect(result[4].content).toStrictEqual([
205-
{ [ContentTypes.TEXT]: 'Here\'s your answer.', type: ContentTypes.TEXT },
205+
{ [ContentTypes.TEXT]: "Here's your answer.", type: ContentTypes.TEXT },
206206
]);
207207
});
208208

@@ -217,7 +217,7 @@ describe('formatAgentMessages', () => {
217217
role: 'assistant',
218218
content: [{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'How can I help you?' }],
219219
},
220-
{ role: 'user', content: 'What\'s the weather?' },
220+
{ role: 'user', content: "What's the weather?" },
221221
{
222222
role: 'assistant',
223223
content: [
@@ -240,7 +240,7 @@ describe('formatAgentMessages', () => {
240240
{
241241
role: 'assistant',
242242
content: [
243-
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: 'Here\'s the weather information.' },
243+
{ type: ContentTypes.TEXT, [ContentTypes.TEXT]: "Here's the weather information." },
244244
],
245245
},
246246
];
@@ -265,12 +265,12 @@ describe('formatAgentMessages', () => {
265265
{ [ContentTypes.TEXT]: 'How can I help you?', type: ContentTypes.TEXT },
266266
]);
267267
expect(result[2].content).toStrictEqual([
268-
{ [ContentTypes.TEXT]: 'What\'s the weather?', type: ContentTypes.TEXT },
268+
{ [ContentTypes.TEXT]: "What's the weather?", type: ContentTypes.TEXT },
269269
]);
270270
expect(result[3].content).toBe('Let me check that for you.');
271271
expect(result[4].content).toBe('Sunny, 75°F');
272272
expect(result[5].content).toStrictEqual([
273-
{ [ContentTypes.TEXT]: 'Here\'s the weather information.', type: ContentTypes.TEXT },
273+
{ [ContentTypes.TEXT]: "Here's the weather information.", type: ContentTypes.TEXT },
274274
]);
275275

276276
// Check that there are no consecutive AIMessages

0 commit comments

Comments (0)