@lobehub/chat 1.45.5 → 1.45.7
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/locales/ar/modelProvider.json +2 -2
- package/locales/bg-BG/modelProvider.json +2 -2
- package/locales/de-DE/modelProvider.json +2 -2
- package/locales/en-US/modelProvider.json +2 -2
- package/locales/es-ES/modelProvider.json +2 -2
- package/locales/fa-IR/modelProvider.json +2 -2
- package/locales/fr-FR/modelProvider.json +2 -2
- package/locales/it-IT/modelProvider.json +2 -2
- package/locales/ja-JP/modelProvider.json +2 -2
- package/locales/ko-KR/modelProvider.json +2 -2
- package/locales/nl-NL/modelProvider.json +2 -2
- package/locales/pl-PL/modelProvider.json +2 -2
- package/locales/pt-BR/modelProvider.json +2 -2
- package/locales/ru-RU/modelProvider.json +2 -2
- package/locales/tr-TR/modelProvider.json +2 -2
- package/locales/vi-VN/modelProvider.json +2 -2
- package/locales/zh-CN/modelProvider.json +2 -2
- package/locales/zh-TW/modelProvider.json +2 -2
- package/package.json +3 -3
- package/src/app/(main)/chat/(workspace)/features/TelemetryNotification.tsx +1 -1
- package/src/app/(main)/files/(content)/@menu/features/KnowledgeBase/EmptyStatus.tsx +1 -1
- package/src/app/(main)/files/[id]/Header.tsx +1 -1
- package/src/app/(main)/settings/provider/features/CreateNewProvider/index.tsx +1 -1
- package/src/app/(main)/settings/sync/features/WebRTC/SyncSwitch/index.tsx +7 -7
- package/src/components/BubblesLoading/index.tsx +3 -3
- package/src/config/aiModels/index.ts +38 -0
- package/src/config/modelProviders/index.ts +3 -0
- package/src/database/repositories/aiInfra/index.ts +3 -1
- package/src/features/Conversation/Messages/Assistant/FileChunks/index.tsx +1 -1
- package/src/features/Conversation/components/History/index.tsx +1 -1
- package/src/features/InitClientDB/PGliteIcon.tsx +1 -1
- package/src/libs/agent-runtime/openai/__snapshots__/index.test.ts.snap +6 -0
- package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.test.ts +94 -23
- package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts +21 -6
- package/src/locales/default/modelProvider.ts +2 -2
- package/src/migrations/FromV3ToV4/index.ts +1 -1
- package/src/server/globalConfig/{genServerLLMConfig.test.ts → _deprecated.test.ts} +2 -4
- package/src/server/globalConfig/{genServerLLMConfig.ts → _deprecated.ts} +1 -1
- package/src/server/globalConfig/genServerAiProviderConfig.ts +42 -0
- package/src/server/globalConfig/index.ts +23 -1
- package/src/server/routers/lambda/aiModel.ts +2 -2
- package/src/server/routers/lambda/aiProvider.ts +2 -2
- package/src/types/aiModel.ts +1 -0
- package/src/types/serverConfig.ts +1 -0
- package/src/types/user/settings/modelProvider.ts +2 -0
- package/src/utils/__snapshots__/parseModels.test.ts.snap +37 -5
- package/src/utils/_deprecated/__snapshots__/parseModels.test.ts.snap +112 -0
- package/src/utils/_deprecated/parseModels.test.ts +276 -0
- package/src/utils/_deprecated/parseModels.ts +161 -0
- package/src/utils/fetch/__tests__/fetchSSE.test.ts +1 -1
- package/src/utils/parseModels.test.ts +153 -46
- package/src/utils/parseModels.ts +34 -21
- package/tests/setup-db.ts +0 -3
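The heaviest changes sit in `src/utils/parseModels.ts` and its tests, where parsed models gain an `abilities` object and a `type` field. The following is a minimal sketch of the model-string syntax those tests exercise; it is assembled from the test expectations below, not copied from the package, and only the `add` property of the return value is shown in this diff.

```ts
// Sketch based on the test cases in this diff; not taken verbatim from the package.
import { parseModelString } from '@/utils/parseModels';

// "-" removes a model (or "-all" removes everything), "+" adds one,
// "=" sets a display name, and the angle-bracket suffix carries the
// context window size plus capability flags (fc, vision, file).
const parsed = parseModelString('-all,+gpt-4-0125-preview=ChatGPT-4<128000:fc:file>');

// As of 1.45.7 each added entry carries abilities and a type, e.g.:
// {
//   id: 'gpt-4-0125-preview',
//   displayName: 'ChatGPT-4',
//   contextWindowTokens: 128000,
//   abilities: { functionCall: true, files: true },
//   type: 'chat',
// }
console.log(parsed.add);
```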
package/src/utils/parseModels.test.ts
CHANGED
@@ -1,9 +1,10 @@
 import { describe, expect, it } from 'vitest';

-import { LOBE_DEFAULT_MODEL_LIST
-import {
+import { LOBE_DEFAULT_MODEL_LIST } from '@/config/aiModels';
+import { openaiChatModels } from '@/config/aiModels/openai';
+import { AiFullModelCard } from '@/types/aiModel';

-import { parseModelString,
+import { parseModelString, transformToAiChatModelList } from './parseModels';

 describe('parseModelString', () => {
   it('custom deletion, addition, and renaming of models', () => {
@@ -38,6 +39,8 @@ describe('parseModelString', () => {
       displayName: 'ChatGLM 6B',
       id: 'chatglm-6b',
       contextWindowTokens: 4096,
+      abilities: {},
+      type: 'chat',
     });
   });

@@ -46,9 +49,12 @@ describe('parseModelString', () => {

     expect(result.add[0]).toEqual({
       displayName: '讯飞星火 v3.5',
-
+      abilities: {
+        functionCall: true,
+      },
       id: 'spark-v3.5',
       contextWindowTokens: 8192,
+      type: 'chat',
     });
   });

@@ -60,15 +66,21 @@ describe('parseModelString', () => {
       expect(result.add).toEqual([
         {
           displayName: 'Gemini 1.5 Flash',
-
+          abilities: {
+            vision: true,
+          },
           id: 'gemini-1.5-flash-latest',
           contextWindowTokens: 16000,
+          type: 'chat',
         },
         {
           displayName: 'ChatGPT Plus',
-
-
-
+          abilities: {
+            vision: true,
+            functionCall: true,
+            files: true,
+          },
+          type: 'chat',
           id: 'gpt-4-all',
           contextWindowTokens: 128000,
         },
@@ -82,100 +94,170 @@ describe('parseModelString', () => {
       expect(result.add).toEqual([
         {
           displayName: 'ChatGPT-4',
-
-
+          abilities: {
+            functionCall: true,
+            files: true,
+          },
+          type: 'chat',
           id: 'gpt-4-0125-preview',
           contextWindowTokens: 128000,
         },
         {
           displayName: 'ChatGPT-4 Vision',
-
-
+          abilities: {
+            functionCall: true,
+            files: true,
+            vision: true,
+          },
+          type: 'chat',
           id: 'gpt-4-turbo-2024-04-09',
           contextWindowTokens: 128000,
-          vision: true,
         },
       ]);
     });

     it('should handle empty extension capability value', () => {
       const result = parseModelString('model1<1024:>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        abilities: {},
+        type: 'chat',
+        id: 'model1',
+        contextWindowTokens: 1024,
+      });
     });

     it('should handle empty extension capability name', () => {
       const result = parseModelString('model1<1024::file>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {
+          files: true,
+        },
+        type: 'chat',
+      });
     });

     it('should handle duplicate extension capabilities', () => {
       const result = parseModelString('model1<1024:vision:vision>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {
+          vision: true,
+        },
+        type: 'chat',
+      });
     });

     it('should handle case-sensitive extension capability names', () => {
       const result = parseModelString('model1<1024:VISION:FC:file>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {
+          files: true,
+        },
+        type: 'chat',
+      });
     });

     it('should handle case-sensitive extension capability values', () => {
       const result = parseModelString('model1<1024:vision:Fc:File>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {
+          vision: true,
+        },
+        type: 'chat',
+      });
     });

     it('should handle empty angle brackets', () => {
       const result = parseModelString('model1<>');
-      expect(result.add[0]).toEqual({ id: 'model1' });
+      expect(result.add[0]).toEqual({ id: 'model1', abilities: {}, type: 'chat' });
     });

     it('should handle not close angle brackets', () => {
       const result = parseModelString('model1<,model2');
-      expect(result.add).toEqual([
+      expect(result.add).toEqual([
+        { id: 'model1', abilities: {}, type: 'chat' },
+        { id: 'model2', abilities: {}, type: 'chat' },
+      ]);
     });

     it('should handle multi close angle brackets', () => {
       const result = parseModelString('model1<>>,model2');
-      expect(result.add).toEqual([
+      expect(result.add).toEqual([
+        { id: 'model1', abilities: {}, type: 'chat' },
+        { id: 'model2', abilities: {}, type: 'chat' },
+      ]);
     });

     it('should handle only colon inside angle brackets', () => {
       const result = parseModelString('model1<:>');
-      expect(result.add[0]).toEqual({ id: 'model1' });
+      expect(result.add[0]).toEqual({ id: 'model1', abilities: {}, type: 'chat' });
     });

     it('should handle only non-digit characters inside angle brackets', () => {
       const result = parseModelString('model1<abc>');
-      expect(result.add[0]).toEqual({ id: 'model1' });
+      expect(result.add[0]).toEqual({ id: 'model1', abilities: {}, type: 'chat' });
     });

     it('should handle non-digit characters followed by digits inside angle brackets', () => {
       const result = parseModelString('model1<abc123>');
-      expect(result.add[0]).toEqual({ id: 'model1' });
+      expect(result.add[0]).toEqual({ id: 'model1', abilities: {}, type: 'chat' });
     });

     it('should handle digits followed by non-colon characters inside angle brackets', () => {
       const result = parseModelString('model1<1024abc>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {},
+        type: 'chat',
+      });
     });

     it('should handle digits followed by multiple colons inside angle brackets', () => {
       const result = parseModelString('model1<1024::>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {},
+        type: 'chat',
+      });
     });

     it('should handle digits followed by a colon and non-letter characters inside angle brackets', () => {
       const result = parseModelString('model1<1024:123>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {},
+        type: 'chat',
+      });
     });

     it('should handle digits followed by a colon and spaces inside angle brackets', () => {
       const result = parseModelString('model1<1024: vision>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {},
+        type: 'chat',
+      });
     });

     it('should handle digits followed by multiple colons and spaces inside angle brackets', () => {
       const result = parseModelString('model1<1024: : vision>');
-      expect(result.add[0]).toEqual({
+      expect(result.add[0]).toEqual({
+        id: 'model1',
+        contextWindowTokens: 1024,
+        abilities: {},
+        type: 'chat',
+      });
     });
   });

@@ -185,7 +267,11 @@ describe('parseModelString', () => {
       expect(result.add[0]).toEqual({
         id: 'model1',
         displayName: 'Model 1',
-
+        abilities: {},
+        type: 'chat',
+        config: {
+          deploymentName: 'model1',
+        },
       });
     });

@@ -194,48 +280,59 @@ describe('parseModelString', () => {
       expect(result.add[0]).toEqual({
         id: 'gpt-35-turbo',
         displayName: 'GPT 3.5 Turbo',
-
+        abilities: {},
+        type: 'chat',
+        config: {
+          deploymentName: 'my-deploy',
+        },
       });
     });
   });
 });

 describe('transformToChatModelCards', () => {
-  const defaultChatModels:
-    { id: 'model1', displayName: 'Model 1', enabled: true },
-    { id: 'model2', displayName: 'Model 2', enabled: false },
+  const defaultChatModels: AiFullModelCard[] = [
+    { id: 'model1', displayName: 'Model 1', enabled: true, type: 'chat' },
+    { id: 'model2', displayName: 'Model 2', enabled: false, type: 'chat' },
   ];

   it('should return undefined when modelString is empty', () => {
-    const result =
+    const result = transformToAiChatModelList({
       modelString: '',
       defaultChatModels,
+      providerId: 'openai',
     });
     expect(result).toBeUndefined();
   });

   it('should remove all models when removeAll is true', () => {
-    const result =
+    const result = transformToAiChatModelList({
       modelString: '-all',
       defaultChatModels,
+      providerId: 'openai',
     });
     expect(result).toEqual([]);
   });

   it('should remove specified models', () => {
-    const result =
+    const result = transformToAiChatModelList({
       modelString: '-model1',
       defaultChatModels,
+      providerId: 'openai',
     });
-    expect(result).toEqual([
+    expect(result).toEqual([
+      { id: 'model2', displayName: 'Model 2', enabled: false, type: 'chat' },
+    ]);
   });

   it('should add a new known model', () => {
-    const knownModel = LOBE_DEFAULT_MODEL_LIST
-    const result =
+    const knownModel = LOBE_DEFAULT_MODEL_LIST.find((m) => m.providerId === 'ai21')!;
+    const result = transformToAiChatModelList({
       modelString: `${knownModel.id}`,
       defaultChatModels,
+      providerId: 'ai21',
     });
+
     expect(result).toContainEqual({
       ...knownModel,
       displayName: knownModel.displayName || knownModel.id,
@@ -244,31 +341,41 @@ describe('transformToChatModelCards', () => {
   });

   it('should update an existing known model', () => {
-    const knownModel = LOBE_DEFAULT_MODEL_LIST
-    const result =
+    const knownModel = LOBE_DEFAULT_MODEL_LIST.find((m) => m.providerId === 'openai')!;
+    const result = transformToAiChatModelList({
       modelString: `+${knownModel.id}=Updated Model`,
       defaultChatModels: [knownModel],
+      providerId: 'openai',
+    });
+
+    expect(result).toContainEqual({
+      ...knownModel,
+      displayName: 'Updated Model',
+      enabled: true,
     });
-    expect(result![0]).toEqual({ ...knownModel, displayName: 'Updated Model', enabled: true });
   });

   it('should add a new custom model', () => {
-    const result =
+    const result = transformToAiChatModelList({
       modelString: '+custom_model=Custom Model',
       defaultChatModels,
+      providerId: 'openai',
     });
     expect(result).toContainEqual({
       id: 'custom_model',
       displayName: 'Custom Model',
       enabled: true,
+      abilities: {},
+      type: 'chat',
     });
   });

   it('should have file with builtin models like gpt-4-0125-preview', () => {
-    const result =
+    const result = transformToAiChatModelList({
       modelString:
         '-all,+gpt-4-0125-preview=ChatGPT-4<128000:fc:file>,+gpt-4-turbo-2024-04-09=ChatGPT-4 Vision<128000:fc:vision:file>',
-      defaultChatModels:
+      defaultChatModels: openaiChatModels,
+      providerId: 'openai',
     });

     expect(result).toMatchSnapshot();
package/src/utils/parseModels.ts
CHANGED
@@ -1,13 +1,14 @@
 import { produce } from 'immer';

-import { LOBE_DEFAULT_MODEL_LIST } from '@/config/
-import {
+import { LOBE_DEFAULT_MODEL_LIST } from '@/config/aiModels';
+import { AiFullModelCard } from '@/types/aiModel';
+import { merge } from '@/utils/merge';

 /**
  * Parse model string to add or remove models.
  */
 export const parseModelString = (modelString: string = '', withDeploymentName = false) => {
-  let models:
+  let models: AiFullModelCard[] = [];
   let removeAll = false;
   const removedModels: string[] = [];
   const modelNames = modelString.split(/[,,]/).filter(Boolean);
@@ -45,13 +46,16 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
       models.splice(existingIndex, 1);
     }

-    const model:
+    const model: AiFullModelCard = {
+      abilities: {},
       displayName: displayName || undefined,
       id,
+      // TODO: temporarily hard-coded to 'chat'; derive the proper type from metadata in a later iteration
+      type: 'chat',
     };

     if (deploymentName) {
-      model.
+      model.config = { deploymentName };
     }

     if (capabilities.length > 0) {
@@ -61,15 +65,15 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
       for (const capability of capabilityList) {
         switch (capability) {
           case 'vision': {
-            model.vision = true;
+            model.abilities!.vision = true;
             break;
           }
           case 'fc': {
-            model.functionCall = true;
+            model.abilities!.functionCall = true;
             break;
           }
           case 'file': {
-            model.files = true;
+            model.abilities!.files = true;
             break;
           }
           default: {
@@ -92,15 +96,17 @@ export const parseModelString = (modelString: string = '', withDeploymentName =
 /**
  * Extract a special method to process chatModels
  */
-export const
+export const transformToAiChatModelList = ({
   modelString = '',
   defaultChatModels,
+  providerId,
   withDeploymentName = false,
 }: {
-  defaultChatModels:
+  defaultChatModels: AiFullModelCard[];
   modelString?: string;
+  providerId: string;
   withDeploymentName?: boolean;
-}):
+}): AiFullModelCard[] | undefined => {
   if (!modelString) return undefined;

   const modelConfig = parseModelString(modelString, withDeploymentName);
@@ -115,7 +121,14 @@ export const transformToChatModelCards = ({
     // handle the add / replace logic
     for (const toAddModel of modelConfig.add) {
       // first try to find the model in LOBE_DEFAULT_MODEL_LIST to confirm if it is a known model
-
+      let knownModel = LOBE_DEFAULT_MODEL_LIST.find(
+        (model) => model.id === toAddModel.id && model.providerId === providerId,
+      );
+
+      if (!knownModel) {
+        knownModel = LOBE_DEFAULT_MODEL_LIST.find((model) => model.id === toAddModel.id);
+        if (knownModel) knownModel.providerId = providerId;
+      }

       // if the model is known, update it based on the known model
       if (knownModel) {
@@ -124,20 +137,20 @@

         // if the model is already in chatModels, update it
         if (modelInList) {
-          draft[index] = {
-            ...modelInList,
+          draft[index] = merge(modelInList, {
             ...toAddModel,
             displayName: toAddModel.displayName || modelInList.displayName || modelInList.id,
             enabled: true,
-          };
+          });
         } else {
           // if the model is not in chatModels, add it
-          draft.push(
-
-
-
-
-
+          draft.push(
+            merge(knownModel, {
+              ...toAddModel,
+              displayName: toAddModel.displayName || knownModel.displayName || knownModel.id,
+              enabled: true,
+            }),
+          );
         }
       } else {
         // if the model is not in LOBE_DEFAULT_MODEL_LIST, add it as a new custom model