Skip to content

Commit 21ea2ab

Browse files
committed
feat: add LMStudio & Mise support
1 parent 7176dcb commit 21ea2ab

9 files changed

Lines changed: 453 additions & 0 deletions

File tree

mise.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
# Tool versions managed by mise (https://mise.jdx.dev) — pins Node.js for this repo
[tools]
node = "20"
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import { INodeParams, INodeCredential } from '../src/Interface'
2+
3+
class LMStudioApi implements INodeCredential {
4+
label: string
5+
name: string
6+
version: number
7+
inputs: INodeParams[]
8+
9+
constructor() {
10+
this.label = 'LM Studio API'
11+
this.name = 'lmStudioApi'
12+
this.version = 1.0
13+
this.inputs = [
14+
{
15+
label: 'LM Studio Api Key',
16+
name: 'lmStudioApiKey',
17+
type: 'password',
18+
placeholder: '<LM_STUDIO_ACCESS_TOKEN>'
19+
}
20+
]
21+
}
22+
}
23+
24+
module.exports = { credClass: LMStudioApi }
Lines changed: 149 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,149 @@
1+
import { ChatOpenAI as LangchainChatLmStudio, ChatOpenAIFields as ChatLmStudioFields } from '@langchain/openai'
2+
import { BaseCache } from '@langchain/core/caches'
3+
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
4+
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
5+
import { FlowiseChatLmStudio } from './FlowiseChatLmStudio'
6+
7+
class ChatLmStudio_ChatModels implements INode {
8+
label: string
9+
name: string
10+
version: number
11+
type: string
12+
icon: string
13+
category: string
14+
description: string
15+
baseClasses: string[]
16+
credential: INodeParams
17+
inputs: INodeParams[]
18+
19+
constructor() {
20+
this.label = 'Chat LMStudio'
21+
this.name = 'chatLmStudio'
22+
this.version = 3.0
23+
this.type = 'ChatLmStudio'
24+
this.icon = 'lmstudio.png'
25+
this.category = 'Chat Models'
26+
this.description = 'Use local LLMs using LmStudio'
27+
this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(LangchainChatLmStudio)]
28+
this.credential = {
29+
label: 'Connect Credential',
30+
name: 'credential',
31+
type: 'credential',
32+
credentialNames: ['lmStudioApi'],
33+
optional: true
34+
}
35+
this.inputs = [
36+
{
37+
label: 'Cache',
38+
name: 'cache',
39+
type: 'BaseCache',
40+
optional: true
41+
},
42+
{
43+
label: 'Base URL',
44+
name: 'baseURL',
45+
type: 'string',
46+
placeholder: 'http://localhost:1234/v1'
47+
},
48+
{
49+
label: 'Model Name',
50+
name: 'modelName',
51+
type: 'string',
52+
placeholder: 'gpt4all-lora-quantized.bin'
53+
},
54+
{
55+
label: 'Temperature',
56+
name: 'temperature',
57+
type: 'number',
58+
step: 0.1,
59+
default: 0.9,
60+
optional: true
61+
},
62+
{
63+
label: 'Allow Image Uploads',
64+
name: 'allowImageUploads',
65+
type: 'boolean',
66+
description:
67+
'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
68+
default: false,
69+
optional: true
70+
},
71+
{
72+
label: 'Streaming',
73+
name: 'streaming',
74+
type: 'boolean',
75+
default: true,
76+
optional: true,
77+
additionalParams: true
78+
},
79+
{
80+
label: 'Max Tokens',
81+
name: 'maxTokens',
82+
type: 'number',
83+
step: 1,
84+
optional: true,
85+
additionalParams: true
86+
},
87+
{
88+
label: 'Top Probability',
89+
name: 'topP',
90+
type: 'number',
91+
step: 0.1,
92+
optional: true,
93+
additionalParams: true
94+
},
95+
{
96+
label: 'Timeout',
97+
name: 'timeout',
98+
type: 'number',
99+
step: 1,
100+
optional: true,
101+
additionalParams: true
102+
}
103+
]
104+
}
105+
106+
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
107+
const temperature = nodeData.inputs?.temperature as string
108+
const modelName = nodeData.inputs?.modelName as string
109+
const maxTokens = nodeData.inputs?.maxTokens as string
110+
const topP = nodeData.inputs?.topP as string
111+
const timeout = nodeData.inputs?.timeout as string
112+
const baseURL = nodeData.inputs?.baseURL as string
113+
const streaming = nodeData.inputs?.streaming as boolean
114+
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
115+
116+
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
117+
const lmStudioApiKey = getCredentialParam('lmStudioApiKey', credentialData, nodeData)
118+
119+
const cache = nodeData.inputs?.cache as BaseCache
120+
121+
const obj: ChatLmStudioFields = {
122+
temperature: parseFloat(temperature),
123+
modelName,
124+
streaming: streaming ?? true,
125+
configuration: {
126+
baseURL,
127+
apiKey: lmStudioApiKey
128+
}
129+
}
130+
131+
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
132+
if (topP) obj.topP = parseFloat(topP)
133+
if (timeout) obj.timeout = parseInt(timeout, 10)
134+
if (cache) obj.cache = cache
135+
136+
const multiModalOption: IMultiModalOption = {
137+
image: {
138+
allowImageUploads: allowImageUploads ?? false
139+
}
140+
}
141+
142+
const model = new FlowiseChatLmStudio(nodeData.id, obj)
143+
model.setMultiModalOption(multiModalOption)
144+
145+
return model
146+
}
147+
}
148+
149+
module.exports = { nodeClass: ChatLmStudio_ChatModels }
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
import { ChatOpenAI as LangchainChatLmStudio, ChatOpenAIFields as ChatLmStudioFields } from '@langchain/openai'
2+
import { IMultiModalOption, IVisionChatModal } from '../../../src'
3+
4+
export class FlowiseChatLmStudio extends LangchainChatLmStudio implements IVisionChatModal {
5+
configuredModel: string
6+
configuredMaxToken?: number
7+
multiModalOption: IMultiModalOption
8+
builtInTools: Record<string, any>[] = []
9+
id: string
10+
11+
constructor(id: string, fields?: ChatLmStudioFields) {
12+
super(fields)
13+
this.id = id
14+
this.configuredModel = fields?.modelName ?? ''
15+
this.configuredMaxToken = fields?.maxTokens
16+
}
17+
18+
revertToOriginalModel(): void {
19+
this.model = this.configuredModel
20+
this.maxTokens = this.configuredMaxToken
21+
}
22+
23+
setMultiModalOption(multiModalOption: IMultiModalOption): void {
24+
this.multiModalOption = multiModalOption
25+
}
26+
27+
setVisionModel(): void {
28+
// pass
29+
}
30+
31+
addBuiltInTools(builtInTool: Record<string, any>): void {
32+
this.builtInTools.push(builtInTool)
33+
}
34+
}
58.6 KB
Loading
Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
import {
2+
ClientOptions,
3+
OpenAIEmbeddings as LmStudioEmbeddings,
4+
OpenAIEmbeddingsParams as LmStudioEmbeddingsParams
5+
} from '@langchain/openai'
6+
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
7+
import { getCredentialData, getCredentialParam } from '../../../src/utils'
8+
9+
class LmStudioEmbedding_Embeddings implements INode {
10+
label: string
11+
name: string
12+
version: number
13+
type: string
14+
icon: string
15+
category: string
16+
description: string
17+
baseClasses: string[]
18+
credential: INodeParams
19+
inputs: INodeParams[]
20+
21+
constructor() {
22+
this.label = 'LMStudio Embeddings'
23+
this.name = 'lmStudioEmbeddings'
24+
this.version = 1.0
25+
this.type = 'LmStudio Embeddings'
26+
this.icon = 'lmstudio.png'
27+
this.category = 'Embeddings'
28+
this.description = 'Use local embeddings from LMStudio'
29+
this.baseClasses = [this.type, 'Embeddings']
30+
this.credential = {
31+
label: 'Connect Credential',
32+
name: 'credential',
33+
type: 'credential',
34+
credentialNames: ['lmStudioApi'],
35+
optional: true
36+
}
37+
this.inputs = [
38+
{
39+
label: 'Base URL',
40+
name: 'baseURL',
41+
type: 'string',
42+
placeholder: 'http://localhost:1234/v1'
43+
},
44+
{
45+
label: 'Model Name',
46+
name: 'modelName',
47+
type: 'string',
48+
placeholder: 'text-embedding-ada-002'
49+
}
50+
]
51+
}
52+
53+
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
54+
const modelName = nodeData.inputs?.modelName as string
55+
const baseURL = nodeData.inputs?.baseURL as string
56+
57+
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
58+
const lmStudioApiKey = getCredentialParam('lmStudioApiKey', credentialData, nodeData)
59+
60+
const obj: Partial<LmStudioEmbeddingsParams> & { configuration?: ClientOptions } = {
61+
modelName,
62+
configuration: {
63+
apiKey: lmStudioApiKey,
64+
baseURL
65+
}
66+
}
67+
68+
const model = new LmStudioEmbeddings(obj)
69+
70+
return model
71+
}
72+
}
73+
74+
module.exports = { nodeClass: LmStudioEmbedding_Embeddings }
58.6 KB
Loading

0 commit comments

Comments (0)