Skip to content

Commit b858092

Browse files
committed
Update ChatStore, App, InputMessage, and Config components
1 parent 06beba1 commit b858092

6 files changed

Lines changed: 95 additions & 55 deletions

File tree

src/models/index.ts

Lines changed: 44 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1,80 +1,91 @@
11
const modelsTemplate = [
22
{
33
name: "gpt-3.5-turbo",
4-
provider: "openai",
4+
provider: "devchat",
5+
stream: true,
56
max_input_tokens: 13000,
6-
temperature: 0.3,
7-
max_tokens: 2048,
7+
// temperature: 0.3,
8+
// max_tokens: 2048,
89
},
910
{
1011
name: "gpt-4",
1112
provider: "openai",
13+
stream: true,
1214
max_input_tokens: 6000,
13-
temperature: 0.3,
14-
max_tokens: 2048,
15+
// temperature: 0.3,
16+
// max_tokens: 2048,
1517
},
1618
{
1719
name: "gpt-4-turbo-preview",
18-
provider: "openai",
19-
max_input_tokens: 32000,
20-
temperature: 0.3,
21-
max_tokens: 2048,
20+
provider: "devchat",
21+
stream: true,
22+
// max_input_tokens: 32000,
23+
// temperature: 0.3,
24+
// max_tokens: 2048,
2225
},
2326
{
2427
name: "claude-2.1",
2528
provider: "devchat",
26-
max_input_tokens: 32000,
27-
temperature: 0.3,
28-
max_tokens: 2048,
29+
stream: true,
30+
// max_input_tokens: 32000,
31+
// temperature: 0.3,
32+
// max_tokens: 2048,
2933
},
3034
{
3135
name: "xinghuo-3.5",
3236
provider: "devchat",
33-
max_input_tokens: 6000,
34-
temperature: 0.3,
35-
max_tokens: 2048,
37+
stream: true,
38+
// max_input_tokens: 6000,
39+
// temperature: 0.3,
40+
// max_tokens: 2048,
3641
},
3742
{
3843
name: "GLM-4",
3944
provider: "devchat",
40-
max_input_tokens: 8000,
41-
temperature: 0.3,
42-
max_tokens: 2048,
45+
stream: true,
46+
// max_input_tokens: 8000,
47+
// temperature: 0.3,
48+
// max_tokens: 2048,
4349
},
4450
{
4551
name: "ERNIE-Bot-4.0",
4652
provider: "devchat",
47-
max_input_tokens: 8000,
48-
temperature: 0.3,
49-
max_tokens: 2048,
53+
stream: true,
54+
// max_input_tokens: 8000,
55+
// temperature: 0.3,
56+
// max_tokens: 2048,
5057
},
5158
{
5259
name: "togetherai/codellama/CodeLlama-70b-Instruct-hf",
5360
provider: "devchat",
54-
max_input_tokens: 4000,
55-
temperature: 0.3,
56-
max_tokens: 2048,
61+
stream: true,
62+
// max_input_tokens: 4000,
63+
// temperature: 0.3,
64+
// max_tokens: 2048,
5765
},
5866
{
5967
name: "togetherai/mistralai/Mixtral-8x7B-Instruct-v0.1",
6068
provider: "devchat",
61-
max_input_tokens: 4000,
62-
temperature: 0.3,
63-
max_tokens: 2048,
69+
stream: true,
70+
// max_input_tokens: 4000,
71+
// temperature: 0.3,
72+
// max_tokens: 2048,
6473
},
6574
{
6675
name: "minimax/abab6-chat",
6776
provider: "devchat",
68-
max_input_tokens: 4000,
69-
temperature: 0.3,
70-
max_tokens: 2048,
77+
stream: true,
78+
// max_input_tokens: 4000,
79+
// temperature: 0.3,
80+
// max_tokens: 2048,
7181
},
7282
{
7383
name: "llama-2-70b-chat",
7484
provider: "devchat",
75-
max_input_tokens: 4000,
76-
temperature: 0.3,
77-
max_tokens: 2048,
85+
stream: true,
86+
// max_input_tokens: 4000,
87+
// temperature: 0.3,
88+
// max_tokens: 2048,
7889
},
7990
];
8091

src/views/App.tsx

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -25,11 +25,9 @@ export default function App() {
2525
};
2626

2727
const getConfig = () => {
28-
console.log("getConfig");
29-
MessageUtil.sendMessage({ command: "readConfig" });
30-
MessageUtil.registerHandler("readConfig", (data: any) => {
31-
console.log("readConfig: ", data);
32-
config.setConfig(data);
28+
MessageUtil.sendMessage({ command: "readConfig", key: "" });
29+
MessageUtil.registerHandler("readConfig", (data: { value: any }) => {
30+
config.setConfig(data.value);
3331
setReady(true);
3432
});
3533
};

src/views/components/InputMessage/index.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -337,6 +337,7 @@ const InputMessage = observer((props: any) => {
337337
},
338338
},
339339
};
340+
// [SECURITY] Redacted: this line contained a hardcoded DevChat access token (a "DC."-prefixed JWT) committed in a comment. Revoke the token immediately and purge it from git history; credentials must never be committed.
340341

341342
return (
342343
<Stack
@@ -367,7 +368,7 @@ const InputMessage = observer((props: any) => {
367368
leftIcon={<IconRobot size="1rem" />}
368369
styles={buttonStyles}
369370
>
370-
{getModelShowName(chat.chatModel)}
371+
{getModelShowName(config.defaultModel)}
371372
</Button>
372373
</Menu.Target>
373374
<Menu.Dropdown>

src/views/pages/Config.tsx

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ const commonInputStyle = {
3232
input: {
3333
fontSize: "var(--vscode-editor-font-size)",
3434
backgroundColor: "var(--vscode-sideBar-background)",
35-
borderColor: "var(--vscode-input-border)",
35+
borderColor: "var(--vscode-editor-foreground)",
3636
color: "var(--vscode-editor-foreground)",
3737
"&[data-disabled]": {
3838
color: "var(--vscode-disabledForeground)",
@@ -90,7 +90,6 @@ const Config = function () {
9090
useEffect(() => {
9191
MessageUtil.registerHandler("updateSetting", (data) => {
9292
// 保存后的回调
93-
9493
MessageUtil.sendMessage({ command: "readConfig" });
9594
});
9695
if (router.currentRoute !== "config") return;
@@ -106,6 +105,7 @@ const Config = function () {
106105

107106
useEffect(() => {
108107
if (router.currentRoute !== "config") return;
108+
form.setValues(config.config);
109109
if (config.settle && loading) {
110110
setTimeout(() => {
111111
router.updateRoute("chat");
@@ -115,11 +115,18 @@ const Config = function () {
115115
}, [config.settle]);
116116

117117
const onSave = (values) => {
118-
if (!isEqual(values, config.config)) {
119-
config.updateSettle(false);
120-
startLoading();
121-
MessageUtil.sendMessage({ command: "saveConfig", data: values });
122-
}
118+
config.updateSettle(false);
119+
startLoading();
120+
MessageUtil.sendMessage({
121+
command: "writeConfig",
122+
value: values,
123+
key: "",
124+
});
125+
config.setConfig(values);
126+
setTimeout(() => {
127+
router.updateRoute("chat");
128+
closeLoading();
129+
}, 1000);
123130
};
124131

125132
const changeModelDetail = (key: string, value: number | string) => {
@@ -345,7 +352,7 @@ const Config = function () {
345352
label="Python for commands"
346353
placeholder="/xxx/xxx"
347354
description="Please enter the path of your python"
348-
{...form.getInputProps("python_for_command")}
355+
{...form.getInputProps("python_for_commands")}
349356
/>
350357
</Stack>
351358
<Group

src/views/stores/ChatStore.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,6 @@ export const ChatStore = types
9494
isBottom: true,
9595
isTop: false,
9696
scrollBottom: 0,
97-
chatModel: "GPT-3.5",
9897
chatPanelWidth: 300,
9998
disabled: false,
10099
rechargeSite: "https://web.devchat.ai/pricing/",

src/views/stores/ConfigStore.ts

Lines changed: 31 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,14 @@ export const ConfigStore = types
66
.model("Config", {
77
config: types.optional(types.frozen(), {}),
88
settle: types.optional(types.boolean, false),
9+
defaultModel: types.optional(types.string, ""),
910
})
1011
.actions((self) => ({
1112
updateSettle: (value: boolean) => {
1213
self.settle = value;
1314
},
1415
getDefaultModel: () => {
15-
return self.config?.default_model;
16+
return self.defaultModel;
1617
},
1718
getLanguage: () => {
1819
return self.config?.language;
@@ -39,26 +40,40 @@ export const ConfigStore = types
3940
/**
 * Replace the stored config, back-filling template defaults for every
 * known model and guaranteeing the `providers.openai` stub exists.
 * Also mirrors `default_model` into the denormalized `defaultModel` field.
 */
setConfig: (data) => {
  self.settle = false;
  // Copy `models` too: spreading only the top level would mutate the
  // caller's `data.models` object in the merge loop below.
  const newConfig = { ...data, models: { ...(data.models ?? {}) } };
  modelsTemplate.forEach((item) => {
    // Template defaults go under the model's name; the name itself is
    // the key, not a field of the stored entry.
    const { name, ...defaults } = item;
    newConfig.models[name] = {
      ...defaults,
      ...(newConfig.models[name] ?? {}),
    };
  });
  // BUG FIX: the previous guard read `newConfig.providers?.openai` but then
  // assigned to `newConfig.providers.openai`, throwing a TypeError whenever
  // `providers` was missing entirely. Create the container first.
  if (!newConfig.providers) {
    newConfig.providers = {};
  }
  if (!newConfig.providers.openai) {
    newConfig.providers.openai = {
      api_key: "",
      api_base: "",
    };
  }
  self.config = newConfig;
  self.settle = true;
  // `defaultModel` is an MST types.string; guard against an absent key.
  self.defaultModel = newConfig.default_model ?? "";
},
6378
getModelList: () => {
6479
const modelsArray = modelsTemplate.map((item) => {
@@ -67,8 +82,17 @@ export const ConfigStore = types
6782
return modelsArray;
6883
},
6984
/**
 * Update a single top-level config key, mirror it locally, and push the
 * whole config object back to the extension host.
 */
setConfigValue: (key: string, value: any) => {
  // Keep the denormalized default-model field in sync with the config.
  if (key === "default_model") {
    self.defaultModel = value;
  }
  const updated = { ...self.config, [key]: value };
  self.config = updated;
  MessageUtil.sendMessage({ command: "writeConfig", value: updated, key: "" });
},
7397
}));
7498

0 commit comments

Comments
 (0)