@lobehub/chat 1.90.1 → 1.90.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/package.json +1 -1
- package/src/config/aiModels/minimax.ts +8 -0
- package/src/config/aiModels/openai.ts +46 -6
- package/src/config/aiModels/qwen.ts +64 -42
- package/src/config/modelProviders/qwen.ts +2 -5
- package/src/config/modelProviders/xai.ts +1 -1
- package/src/libs/model-runtime/minimax/index.ts +11 -1
- package/src/libs/model-runtime/openai/index.ts +8 -1
- package/src/libs/model-runtime/utils/streams/openai.ts +79 -9
- package/src/libs/model-runtime/zhipu/index.ts +3 -0
- package/src/server/globalConfig/index.ts +3 -0
- package/src/services/chat.ts +1 -0
- package/src/types/aiProvider.ts +1 -0
package/CHANGELOG.md
CHANGED
```diff
@@ -2,6 +2,56 @@

 # Changelog

+### [Version 1.90.3](https://github.com/lobehub/lobe-chat/compare/v1.90.2...v1.90.3)
+
+<sup>Released on **2025-06-01**</sup>
+
+#### 💄 Styles
+
+- **misc**: Enable deploymentName for Aliyun Bailian.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **misc**: Enable deploymentName for Aliyun Bailian, closes [#7576](https://github.com/lobehub/lobe-chat/issues/7576) ([169e598](https://github.com/lobehub/lobe-chat/commit/169e598))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.90.2](https://github.com/lobehub/lobe-chat/compare/v1.90.1...v1.90.2)
+
+<sup>Released on **2025-06-01**</sup>
+
+#### 💄 Styles
+
+- **misc**: Support `web_search` tool for MiniMax & Zhipu.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **misc**: Support `web_search` tool for MiniMax & Zhipu, closes [#7980](https://github.com/lobehub/lobe-chat/issues/7980) ([28cdafb](https://github.com/lobehub/lobe-chat/commit/28cdafb))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.90.1](https://github.com/lobehub/lobe-chat/compare/v1.90.0...v1.90.1)

 <sup>Released on **2025-06-01**</sup>
```
package/changelog/v1.json
CHANGED
```diff
@@ -1,4 +1,22 @@
 [
+  {
+    "children": {
+      "improvements": [
+        "Enable deploymentName for Aliyun Bailian."
+      ]
+    },
+    "date": "2025-06-01",
+    "version": "1.90.3"
+  },
+  {
+    "children": {
+      "improvements": [
+        "Support web_search tool for MiniMax & Zhipu."
+      ]
+    },
+    "date": "2025-06-01",
+    "version": "1.90.2"
+  },
   {
     "children": {
       "fixes": [
```
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.90.1",
+  "version": "1.90.3",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
```
package/src/config/aiModels/minimax.ts
CHANGED
```diff
@@ -4,6 +4,7 @@ const minimaxChatModels: AIChatModelCard[] = [
   {
     abilities: {
       functionCall: true,
+      search: true,
       vision: true,
     },
     contextWindowTokens: 1_000_192,
@@ -19,11 +20,15 @@ const minimaxChatModels: AIChatModelCard[] = [
       output: 8,
     },
     releasedAt: '2025-01-15',
+    settings: {
+      searchImpl: 'params',
+    },
     type: 'chat',
   },
   {
     abilities: {
       functionCall: true,
+      search: true,
       vision: true,
     },
     contextWindowTokens: 245_760,
@@ -37,6 +42,9 @@ const minimaxChatModels: AIChatModelCard[] = [
       input: 1,
       output: 1,
     },
+    settings: {
+      searchImpl: 'params',
+    },
     type: 'chat',
   },
   {
```
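The two MiniMax chat cards above now advertise web search (`abilities.search: true`) and declare `settings.searchImpl: 'params'`, i.e. search is switched on through request parameters rather than by the model itself. Below is a minimal sketch of that card shape, using a simplified stand-in for the package's `AIChatModelCard` type; only fields visible in this diff are modeled, and the id `MiniMax-Text-01` is taken from the stream sample quoted later in this diff.

```ts
// Simplified stand-in for AIChatModelCard; only fields that appear in this diff.
interface ChatModelCardSketch {
  abilities?: { functionCall?: boolean; search?: boolean; vision?: boolean };
  contextWindowTokens?: number;
  id: string;
  releasedAt?: string;
  // 'params': the provider runtime enables search via payload parameters.
  // 'internal': the model performs the search itself (see the OpenAI cards below).
  settings?: { searchImpl?: 'params' | 'internal' };
  type: 'chat';
}

const miniMaxText01: ChatModelCardSketch = {
  abilities: { functionCall: true, search: true, vision: true },
  contextWindowTokens: 1_000_192,
  id: 'MiniMax-Text-01',
  releasedAt: '2025-01-15',
  settings: { searchImpl: 'params' },
  type: 'chat',
};

console.log(miniMaxText01.settings?.searchImpl); // 'params'
```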
package/src/config/aiModels/openai.ts
CHANGED
```diff
@@ -236,6 +236,26 @@ export const openaiChatModels: AIChatModelCard[] = [
     releasedAt: '2024-07-18',
     type: 'chat',
   },
+  {
+    abilities: {
+      search: true,
+    },
+    contextWindowTokens: 128_000,
+    description:
+      'GPT-4o mini 搜索预览版是一个专门训练用于理解和执行网页搜索查询的模型,使用的是 Chat Completions API。除了令牌费用之外,网页搜索查询还会按每次工具调用收取费用。',
+    displayName: 'GPT-4o mini Search Preview',
+    id: 'gpt-4o-mini-search-preview',
+    maxOutput: 16_384,
+    pricing: {
+      input: 0.15,
+      output: 0.6,
+    },
+    releasedAt: '2025-03-11',
+    settings: {
+      searchImpl: 'internal',
+    },
+    type: 'chat',
+  },
   {
     abilities: {
       functionCall: true,
@@ -244,14 +264,34 @@ export const openaiChatModels: AIChatModelCard[] = [
     contextWindowTokens: 128_000,
     description:
       'ChatGPT-4o 是一款动态模型,实时更新以保持当前最新版本。它结合了强大的语言理解与生成能力,适合于大规模应用场景,包括客户服务、教育和技术支持。',
-    displayName: 'GPT-4o
-    id: 'gpt-4o
+    displayName: 'GPT-4o',
+    id: 'gpt-4o',
     pricing: {
       cachedInput: 1.25,
       input: 2.5,
       output: 10,
     },
-    releasedAt: '2024-
+    releasedAt: '2024-05-13',
+    type: 'chat',
+  },
+  {
+    abilities: {
+      search: true,
+    },
+    contextWindowTokens: 128_000,
+    description:
+      'GPT-4o 搜索预览版是一个专门训练用于理解和执行网页搜索查询的模型,使用的是 Chat Completions API。除了令牌费用之外,网页搜索查询还会按每次工具调用收取费用。',
+    displayName: 'GPT-4o Search Preview',
+    id: 'gpt-4o-search-preview',
+    maxOutput: 16_384,
+    pricing: {
+      input: 2.5,
+      output: 10,
+    },
+    releasedAt: '2025-03-11',
+    settings: {
+      searchImpl: 'internal',
+    },
     type: 'chat',
   },
   {
@@ -262,14 +302,14 @@ export const openaiChatModels: AIChatModelCard[] = [
     contextWindowTokens: 128_000,
     description:
       'ChatGPT-4o 是一款动态模型,实时更新以保持当前最新版本。它结合了强大的语言理解与生成能力,适合于大规模应用场景,包括客户服务、教育和技术支持。',
-    displayName: 'GPT-4o',
-    id: 'gpt-4o',
+    displayName: 'GPT-4o 1120',
+    id: 'gpt-4o-2024-11-20',
     pricing: {
       cachedInput: 1.25,
       input: 2.5,
       output: 10,
     },
-    releasedAt: '2024-
+    releasedAt: '2024-11-20',
     type: 'chat',
   },
   {
```
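Both new OpenAI cards are marked `searchImpl: 'internal'`: the search-preview models run the web search on OpenAI's side through the regular Chat Completions API. A hedged sketch of calling one of them directly with the official `openai` Node SDK follows (assuming a recent SDK version that exposes `web_search_options` and message `annotations`; the prompt and option values are illustrative only).

```ts
import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function main() {
  // The search-preview models reject sampling parameters such as temperature,
  // which is why the runtime change later in this diff strips them for '-search-' models.
  const completion = await client.chat.completions.create({
    messages: [{ content: 'What is new in Aliyun Bailian pricing?', role: 'user' }],
    model: 'gpt-4o-mini-search-preview',
    web_search_options: { search_context_size: 'medium' }, // 'low' | 'medium' | 'high'
  });

  console.log(completion.choices[0].message.content);
  // Sources come back as url_citation annotations, which the stream transformer
  // later in this diff converts into 'grounding' chunks.
  console.log(completion.choices[0].message.annotations);
}

main();
```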
package/src/config/aiModels/qwen.ts
CHANGED
```diff
@@ -196,12 +196,15 @@ const qwenChatModels: AIChatModelCard[] = [
       reasoning: true,
       search: true,
     },
+    config: {
+      deploymentName: 'qwq-plus-latest', // expired on 2025-09-02
+    },
     contextWindowTokens: 131_072,
     description:
       '基于 Qwen2.5 模型训练的 QwQ 推理模型,通过强化学习大幅度提升了模型推理能力。模型数学代码等核心指标(AIME 24/25、LiveCodeBench)以及部分通用指标(IFEval、LiveBench等)达到DeepSeek-R1 满血版水平。',
     displayName: 'QwQ Plus',
     enabled: true,
-    id: 'qwq-plus
+    id: 'qwq-plus',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -221,11 +224,14 @@ const qwenChatModels: AIChatModelCard[] = [
       reasoning: true,
       search: true,
     },
+    config: {
+      deploymentName: 'qwen-turbo-2025-04-28', // expired on 2025-10-26
+    },
     contextWindowTokens: 1_000_000,
     description: '通义千问超大规模语言模型,支持中文、英文等不同语言输入。',
     displayName: 'Qwen Turbo',
     enabled: true,
-    id: 'qwen-turbo
+    id: 'qwen-turbo',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -246,11 +252,14 @@ const qwenChatModels: AIChatModelCard[] = [
       reasoning: true,
       search: true,
     },
+    config: {
+      deploymentName: 'qwen-plus-2025-04-28', // expired on 2025-10-26
+    },
     contextWindowTokens: 131_072,
     description: '通义千问超大规模语言模型增强版,支持中文、英文等不同语言输入。',
     displayName: 'Qwen Plus',
     enabled: true,
-    id: 'qwen-plus
+    id: 'qwen-plus',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -270,12 +279,15 @@ const qwenChatModels: AIChatModelCard[] = [
       functionCall: true,
       search: true,
     },
+    config: {
+      deploymentName: 'qwen-max-2025-01-25',
+    },
     contextWindowTokens: 131_072,
     description:
       '通义千问千亿级别超大规模语言模型,支持中文、英文等不同语言输入,当前通义千问2.5产品版本背后的API模型。',
     displayName: 'Qwen Max',
     enabled: true,
-    id: 'qwen-max
+    id: 'qwen-max',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -292,6 +304,9 @@ const qwenChatModels: AIChatModelCard[] = [
     abilities: {
       functionCall: true,
     },
+    config: {
+      deploymentName: 'qwen-long-latest',
+    },
     contextWindowTokens: 10_000_000,
     description:
       '通义千问超大规模语言模型,支持长文本上下文,以及基于长文档、多文档等多个场景的对话功能。',
@@ -311,12 +326,15 @@ const qwenChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
+    config: {
+      deploymentName: 'qwen-omni-turbo-latest',
+    },
     contextWindowTokens: 32_768,
     description:
       'Qwen-Omni 系列模型支持输入多种模态的数据,包括视频、音频、图片、文本,并输出音频与文本。',
     displayName: 'Qwen Omni Turbo',
     enabled: true,
-    id: 'qwen-omni-turbo
+    id: 'qwen-omni-turbo',
     maxOutput: 2048,
     organization: 'Qwen',
     pricing: {
@@ -348,11 +366,14 @@ const qwenChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
+    config: {
+      deploymentName: 'qwen-vl-plus-2025-01-25',
+    },
     contextWindowTokens: 131_072,
     description:
       '通义千问大规模视觉语言模型增强版。大幅提升细节识别能力和文字识别能力,支持超百万像素分辨率和任意长宽比规格的图像。',
     displayName: 'Qwen VL Plus',
-    id: 'qwen-vl-plus
+    id: 'qwen-vl-plus',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -366,12 +387,15 @@ const qwenChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
+    config: {
+      deploymentName: 'qwen-vl-max-2025-04-08',
+    },
     contextWindowTokens: 131_072,
     description:
       '通义千问超大规模视觉语言模型。相比增强版,再次提升视觉推理能力和指令遵循能力,提供更高的视觉感知和认知水平。',
     displayName: 'Qwen VL Max',
     enabled: true,
-    id: 'qwen-vl-max
+    id: 'qwen-vl-max',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -385,11 +409,14 @@ const qwenChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
+    config: {
+      deploymentName: 'qwen-vl-ocr-2025-04-13',
+    },
     contextWindowTokens: 34_096,
     description:
       '通义千问OCR是文字提取专有模型,专注于文档、表格、试题、手写体文字等类型图像的文字提取能力。它能够识别多种文字,目前支持的语言有:汉语、英语、法语、日语、韩语、德语、俄语、意大利语、越南语、阿拉伯语。',
     displayName: 'Qwen VL OCR',
-    id: 'qwen-vl-ocr
+    id: 'qwen-vl-ocr',
     maxOutput: 4096,
     organization: 'Qwen',
     pricing: {
@@ -400,10 +427,13 @@ const qwenChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    config: {
+      deploymentName: 'qwen-math-turbo-latest',
+    },
     contextWindowTokens: 4096,
     description: '通义千问数学模型是专门用于数学解题的语言模型。',
     displayName: 'Qwen Math Turbo',
-    id: 'qwen-math-turbo
+    id: 'qwen-math-turbo',
     maxOutput: 3072,
     organization: 'Qwen',
     pricing: {
@@ -414,10 +444,13 @@ const qwenChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    config: {
+      deploymentName: 'qwen-math-plus-latest',
+    },
     contextWindowTokens: 4096,
     description: '通义千问数学模型是专门用于数学解题的语言模型。',
     displayName: 'Qwen Math Plus',
-    id: 'qwen-math-plus
+    id: 'qwen-math-plus',
     maxOutput: 3072,
     organization: 'Qwen',
     pricing: {
@@ -428,10 +461,13 @@ const qwenChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    config: {
+      deploymentName: 'qwen-coder-turbo-latest',
+    },
     contextWindowTokens: 131_072,
     description: '通义千问代码模型。',
     displayName: 'Qwen Coder Turbo',
-    id: 'qwen-coder-turbo
+    id: 'qwen-coder-turbo',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -442,10 +478,13 @@ const qwenChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    config: {
+      deploymentName: 'qwen-coder-plus-latest',
+    },
     contextWindowTokens: 131_072,
     description: '通义千问代码模型。',
     displayName: 'Qwen Coder Plus',
-    id: 'qwen-coder-plus
+    id: 'qwen-coder-plus',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -501,11 +540,14 @@ const qwenChatModels: AIChatModelCard[] = [
       reasoning: true,
       vision: true,
     },
+    config: {
+      deploymentName: 'qvq-max-latest',
+    },
     contextWindowTokens: 122_880,
     description:
       '通义千问QVQ视觉推理模型,支持视觉输入及思维链输出,在数学、编程、视觉分析、创作以及通用任务上都表现了更强的能力。',
     displayName: 'QVQ Max',
-    id: 'qvq-max
+    id: 'qvq-max',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -667,8 +709,8 @@ const qwenChatModels: AIChatModelCard[] = [
   {
     contextWindowTokens: 131_072,
     description: '通义千问代码模型开源版。',
-    displayName: 'Qwen2.5 Coder
-    id: 'qwen2.5-coder-
+    displayName: 'Qwen2.5 Coder 14B',
+    id: 'qwen2.5-coder-14b-instruct',
     maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
@@ -679,36 +721,16 @@ const qwenChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-
-
-
-
-
-    displayName: 'Qwen VL',
-    id: 'qwen-vl-v1',
-    maxOutput: 1500,
-    organization: 'Qwen',
-    pricing: {
-      currency: 'CNY',
-      input: 0,
-      output: 0,
-    },
-    type: 'chat',
-  },
-  {
-    abilities: {
-      vision: true,
-    },
-    contextWindowTokens: 8000,
-    description: '通义千问VL支持灵活的交互方式,包括多图、多轮问答、创作等能力的模型。',
-    displayName: 'Qwen VL Chat',
-    id: 'qwen-vl-chat-v1',
-    maxOutput: 1500,
+    contextWindowTokens: 131_072,
+    description: '通义千问代码模型开源版。',
+    displayName: 'Qwen2.5 Coder 32B',
+    id: 'qwen2.5-coder-32b-instruct',
+    maxOutput: 8192,
     organization: 'Qwen',
     pricing: {
       currency: 'CNY',
-      input:
-      output:
+      input: 2,
+      output: 6,
     },
     type: 'chat',
   },
```
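Across the Qwen cards the pattern is the same: the public `id` loses its dated or `-latest` suffix and the concrete deployment identifier moves into `config.deploymentName`. The sketch below (hypothetical helper and type names, not the package's internals) shows the indirection this enables, i.e. resolving which model string is actually sent to the DashScope-compatible endpoint.

```ts
// Hypothetical illustration of the id -> deploymentName indirection; the real
// resolution happens inside lobe-chat's provider runtime.
interface QwenCardSketch {
  config?: { deploymentName?: string };
  id: string;
}

const qwenCards: QwenCardSketch[] = [
  { config: { deploymentName: 'qwq-plus-latest' }, id: 'qwq-plus' },
  { config: { deploymentName: 'qwen-turbo-2025-04-28' }, id: 'qwen-turbo' },
  { config: { deploymentName: 'qwen-vl-ocr-2025-04-13' }, id: 'qwen-vl-ocr' },
];

// Send the deployment name when one is configured, otherwise fall back to the id.
const resolveRequestModel = (id: string): string => {
  const card = qwenCards.find((c) => c.id === id);
  return card?.config?.deploymentName ?? id;
};

console.log(resolveRequestModel('qwen-turbo')); // 'qwen-turbo-2025-04-28'
console.log(resolveRequestModel('qwen2.5-coder-32b-instruct')); // no config: unchanged
```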
package/src/config/modelProviders/qwen.ts
CHANGED
```diff
@@ -419,7 +419,7 @@ const Qwen: ModelProviderCard = {
   id: 'qwen',
   modelList: { showModelFetcher: true },
   modelsUrl: 'https://help.aliyun.com/zh/dashscope/developer-reference/api-details',
-  name: '
+  name: 'Aliyun Bailian',
   proxyUrl: {
     placeholder: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
   },
@@ -429,16 +429,13 @@ const Qwen: ModelProviderCard = {
       placeholder: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
     },
     sdkType: 'openai',
+    showDeployName: true,
     showModelFetcher: true,
     smoothing: {
       speed: 2,
       text: true,
     },
   },
-  smoothing: {
-    speed: 2,
-    text: true,
-  },
   url: 'https://www.aliyun.com/product/bailian',
 };

```
package/src/libs/model-runtime/minimax/index.ts
CHANGED
```diff
@@ -12,7 +12,16 @@ export const LobeMinimaxAI = LobeOpenAICompatibleFactory({
   baseURL: 'https://api.minimax.chat/v1',
   chatCompletion: {
     handlePayload: (payload) => {
-      const { max_tokens, temperature, top_p, ...params } = payload;
+      const { enabledSearch, max_tokens, temperature, tools, top_p, ...params } = payload;
+
+      const minimaxTools = enabledSearch
+        ? [
+            ...(tools || []),
+            {
+              type: 'web_search',
+            },
+          ]
+        : tools;

       return {
         ...params,
@@ -20,6 +29,7 @@ export const LobeMinimaxAI = LobeOpenAICompatibleFactory({
         max_tokens: max_tokens !== undefined ? max_tokens : getMinimaxMaxOutputs(payload.model),
         presence_penalty: undefined,
         temperature: temperature === undefined || temperature <= 0 ? undefined : temperature / 2,
+        tools: minimaxTools,
         top_p: top_p !== undefined && top_p > 0 && top_p <= 1 ? top_p : undefined,
       } as any;
     },
```
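The reworked `handlePayload` pulls `enabledSearch` and `tools` out of the payload and, when search is on, appends MiniMax's built-in `web_search` tool. Below is a self-contained sketch of just that transformation; the real handler also normalizes `max_tokens`, `temperature` and `top_p`, which is omitted here.

```ts
type Tool = { type: string; [key: string]: unknown };

interface PayloadSketch {
  enabledSearch?: boolean;
  model: string;
  tools?: Tool[];
}

const withMinimaxSearch = (payload: PayloadSketch) => {
  const { enabledSearch, tools, ...params } = payload;

  // When search is enabled, MiniMax expects an extra { type: 'web_search' } tool entry.
  const minimaxTools = enabledSearch ? [...(tools || []), { type: 'web_search' }] : tools;

  return { ...params, tools: minimaxTools };
};

console.log(withMinimaxSearch({ enabledSearch: true, model: 'MiniMax-Text-01' }));
// -> { model: 'MiniMax-Text-01', tools: [ { type: 'web_search' } ] }
```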
package/src/libs/model-runtime/openai/index.ts
CHANGED
```diff
@@ -21,6 +21,8 @@ export const LobeOpenAI = LobeOpenAICompatibleFactory({
     }

     if (model.includes('-search-')) {
+      const oaiSearchContextSize = process.env.OPENAI_SEARCH_CONTEXT_SIZE; // low, medium, high
+
       return {
         ...payload,
         frequency_penalty: undefined,
@@ -28,7 +30,12 @@ export const LobeOpenAI = LobeOpenAICompatibleFactory({
         stream: payload.stream ?? true,
         temperature: undefined,
         top_p: undefined,
-
+        ...(oaiSearchContextSize && {
+          web_search_options: {
+            search_context_size: oaiSearchContextSize,
+          },
+        }),
+      } as any;
     }

     return { ...payload, stream: payload.stream ?? true };
```
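For model ids containing `-search-`, the handler now reads `OPENAI_SEARCH_CONTEXT_SIZE` and only attaches `web_search_options` when the variable is set; sampling parameters stay `undefined`, as in the diff above. A standalone sketch of the resulting payload (the helper name is illustrative):

```ts
const buildSearchPreviewPayload = (payload: { model: string; stream?: boolean }) => {
  const oaiSearchContextSize = process.env.OPENAI_SEARCH_CONTEXT_SIZE; // low, medium, high

  return {
    ...payload,
    frequency_penalty: undefined,
    stream: payload.stream ?? true,
    temperature: undefined,
    top_p: undefined,
    // The spread is a no-op when the env var is unset, so nothing extra is sent.
    ...(oaiSearchContextSize && {
      web_search_options: { search_context_size: oaiSearchContextSize },
    }),
  };
};

process.env.OPENAI_SEARCH_CONTEXT_SIZE = 'high';
console.log(buildSearchPreviewPayload({ model: 'gpt-4o-search-preview' }));
// -> includes web_search_options: { search_context_size: 'high' }
```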
package/src/libs/model-runtime/utils/streams/openai.ts
CHANGED
```diff
@@ -99,16 +99,87 @@ export const transformOpenAIStream = (
   if (item.finish_reason) {
     // one-api 的流式接口,会出现既有 finish_reason ,也有 content 的情况
     // {"id":"demo","model":"deepl-en","choices":[{"index":0,"delta":{"role":"assistant","content":"Introduce yourself."},"finish_reason":"stop"}]}
-
     if (typeof item.delta?.content === 'string' && !!item.delta.content) {
+      // MiniMax 内建搜索功能会在第一个 tools 流中 content 返回引用源,需要忽略
+      // {"id":"0483748a25071c611e2f48d2982fbe96","choices":[{"finish_reason":"stop","index":0,"delta":{"content":"[{\"no\":1,\"url\":\"https://www.xiaohongshu.com/discovery/item/66d8de3c000000001f01e752\",\"title\":\"郑钦文为国而战,没有理由不坚持🏅\",\"content\":\"·2024年08月03日\\n中国队选手郑钦文夺得巴黎奥运会网球女单比赛金牌(巴黎奥运第16金)\\n#巴黎奥运会[话题]# #郑钦文[话题]# #人物素材积累[话题]# #作文素材积累[话题]# #申论素材[话题]#\",\"web_icon\":\"https://www.xiaohongshu.com/favicon.ico\"}]","role":"tool","tool_call_id":"call_function_6696730535"}}],"created":1748255114,"model":"abab6.5s-chat","object":"chat.completion.chunk","usage":{"total_tokens":0,"total_characters":0},"input_sensitive":false,"output_sensitive":false,"input_sensitive_type":0,"output_sensitive_type":0,"output_sensitive_int":0}
+      if (typeof item.delta?.role === 'string' && item.delta.role === 'tool') {
+        return { data: null, id: chunk.id, type: 'text' };
+      }
+
       return { data: item.delta.content, id: chunk.id, type: 'text' };
     }

+    // OpenAI Search Preview 模型返回引用源
+    // {"id":"chatcmpl-18037d13-243c-4941-8b05-9530b352cf17","object":"chat.completion.chunk","created":1748351805,"model":"gpt-4o-mini-search-preview-2025-03-11","choices":[{"index":0,"delta":{"annotations":[{"type":"url_citation","url_citation":{"url":"https://zh.wikipedia.org/wiki/%E4%B8%8A%E6%B5%B7%E4%B9%90%E9%AB%98%E4%B9%90%E5%9B%AD?utm_source=openai","title":"上海乐高乐园","start_index":75,"end_index":199}}]},"finish_reason":"stop"}],"service_tier":"default"}
+    if ((item as any).delta?.annotations && (item as any).delta.annotations.length > 0) {
+      const citations = (item as any).delta.annotations;
+
+      return [
+        {
+          data: {
+            citations: citations.map(
+              (item: any) =>
+                ({
+                  title: item.url_citation.title,
+                  url: item.url_citation.url,
+                }) as CitationItem,
+            ),
+          },
+          id: chunk.id,
+          type: 'grounding',
+        },
+      ];
+    }
+
+    // MiniMax 内建搜索功能会在最后一个流中的 message 数组中返回 4 个 Object,其中最后一个为 annotations
+    // {"id":"0483bf14ba55225a66de2342a21b4003","choices":[{"finish_reason":"tool_calls","index":0,"messages":[{"content":"","role":"user","reasoning_content":""},{"content":"","role":"assistant","tool_calls":[{"id":"call_function_0872338692","type":"web_search","function":{"name":"get_search_result","arguments":"{\"query_tag\":[\"天气\"],\"query_list\":[\"上海 2025年5月26日 天气\"]}"}}],"reasoning_content":""},{"content":"","role":"tool","tool_call_id":"call_function_0872338692","reasoning_content":""},{"content":"","role":"assistant","name":"海螺AI","annotations":[{"text":"【5†source】","url":"https://mtianqi.eastday.com/tianqi/shanghai/20250526.html","quote":"上海天气预报提供上海2025年05月26日天气"}],"audio_content":"","reasoning_content":""}]}],"created":1748274196,"model":"MiniMax-Text-01","object":"chat.completion","usage":{"total_tokens":13110,"total_characters":0,"prompt_tokens":12938,"completion_tokens":172},"base_resp":{"status_code":0,"status_msg":"Invalid parameters detected, json: unknown field \"user\""}}
+    if ((item as any).messages && (item as any).messages.length > 0) {
+      const citations = (item as any).messages.at(-1).annotations;
+
+      return [
+        {
+          data: {
+            citations: citations.map(
+              (item: any) =>
+                ({
+                  title: item.url,
+                  url: item.url,
+                }) as CitationItem,
+            ),
+          },
+          id: chunk.id,
+          type: 'grounding',
+        },
+      ];
+    }
+
     if (chunk.usage) {
       const usage = chunk.usage;
       return { data: convertUsage(usage), id: chunk.id, type: 'usage' };
     }

+    // xAI Live Search 功能返回引用源
+    // {"id":"8721eebb-6465-4c47-ba2e-8e2ec0f97055","object":"chat.completion.chunk","created":1747809109,"model":"grok-3","choices":[{"index":0,"delta":{"role":"assistant"},"finish_reason":"stop"}],"system_fingerprint":"fp_1affcf9872","citations":["https://world.huanqiu.com/"]}
+    if ((chunk as any).citations) {
+      const citations = (chunk as any).citations;
+
+      return [
+        {
+          data: {
+            citations: citations.map(
+              (item: any) =>
+                ({
+                  title: item,
+                  url: item,
+                }) as CitationItem,
+            ),
+          },
+          id: chunk.id,
+          type: 'grounding',
+        },
+      ];
+    }
+
     return { data: item.finish_reason, id: chunk.id, type: 'stop' };
   }

@@ -146,7 +217,9 @@ export const transformOpenAIStream = (
       // in Hunyuan api, the citation is in every chunk
       ('search_info' in chunk && (chunk.search_info as any)?.search_results) ||
       // in Wenxin api, the citation is in the first and last chunk
-      ('search_results' in chunk && chunk.search_results)
+      ('search_results' in chunk && chunk.search_results) ||
+      // in Zhipu api, the citation is in the first chunk
+      ('web_search' in chunk && chunk.web_search);

     if (citations) {
       streamContext.returnedCitation = true;
@@ -154,13 +227,10 @@ export const transformOpenAIStream = (
     return [
       {
         data: {
-          citations: (citations as any[]).map(
-
-
-
-            url: typeof item === 'string' ? item : item.url,
-          }) as CitationItem,
-          ),
+          citations: (citations as any[]).map((item) => ({
+            title: typeof item === 'string' ? item : item.title,
+            url: typeof item === 'string' ? item : item.url || item.link,
+          })).filter(c => c.title && c.url), // Zhipu 内建搜索工具有时会返回空 link 引发程序崩溃
         },
         id: chunk.id,
         type: 'grounding',
```
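All of the new branches end in the same shape: a `grounding` chunk whose `data.citations` is a list of `CitationItem`s, whether the source was an OpenAI `url_citation` annotation, MiniMax's trailing `annotations` array, xAI's top-level `citations`, or the existing Zhipu/Wenxin/Hunyuan paths (now filtered for empty links). A minimal sketch of the annotation-to-citation mapping, with local stand-ins for `CitationItem` and the chunk shape and illustrative values:

```ts
interface CitationItem {
  title?: string;
  url: string;
}

interface UrlCitationAnnotation {
  type: 'url_citation';
  url_citation: { end_index: number; start_index: number; title: string; url: string };
}

// Mirrors the OpenAI Search Preview branch above: one grounding event per chunk.
const toGroundingChunk = (id: string, annotations: UrlCitationAnnotation[]) => ({
  data: {
    citations: annotations.map(
      (item): CitationItem => ({ title: item.url_citation.title, url: item.url_citation.url }),
    ),
  },
  id,
  type: 'grounding' as const,
});

console.log(
  toGroundingChunk('chatcmpl-example', [
    {
      type: 'url_citation',
      url_citation: { end_index: 199, start_index: 75, title: 'Example source', url: 'https://example.com' },
    },
  ]),
);
```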
package/src/libs/model-runtime/zhipu/index.ts
CHANGED
```diff
@@ -22,6 +22,9 @@ export const LobeZhipuAI = LobeOpenAICompatibleFactory({
             type: 'web_search',
             web_search: {
               enable: true,
+              result_sequence: 'before', // 将搜索结果返回顺序更改为 before 适配最小化 OpenAIStream 改动
+              search_engine: process.env.ZHIPU_SEARCH_ENGINE || 'search_std', // search_std, search_pro
+              search_result: true,
             },
           },
         ]
```
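The Zhipu runtime now pins the behavior of its built-in `web_search` tool: `result_sequence: 'before'` makes the citations arrive in the first chunk (which is what the stream transformer above expects), the engine is selectable via `ZHIPU_SEARCH_ENGINE`, and `search_result: true` asks for the sources themselves. A sketch of the tool entry that ends up in the request, limited to the fields visible in this diff:

```ts
const buildZhipuWebSearchTool = () => ({
  type: 'web_search',
  web_search: {
    enable: true,
    // Return results before the answer so the first chunk carries the citations.
    result_sequence: 'before',
    // search_std (default) or search_pro, selected through the environment.
    search_engine: process.env.ZHIPU_SEARCH_ENGINE || 'search_std',
    // Include the search results so they can be surfaced as citations.
    search_result: true,
  },
});

console.log(JSON.stringify(buildZhipuWebSearchTool(), null, 2));
```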
package/src/server/globalConfig/index.ts
CHANGED
```diff
@@ -40,6 +40,9 @@ export const getServerGlobalConfig = async () => {
       enabled: isDesktop ? true : undefined,
       fetchOnClient: isDesktop ? false : !process.env.OLLAMA_PROXY_URL,
     },
+    qwen: {
+      withDeploymentName: true,
+    },
     tencentcloud: {
       enabledKey: 'ENABLED_TENCENT_CLOUD',
       modelListKey: 'TENCENT_CLOUD_MODEL_LIST',
```
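`withDeploymentName: true` is how the server-side config marks the `qwen` provider as one that distinguishes a model's public id from its deployment name, pairing with `showDeployName` in the provider card earlier in this diff. A hypothetical consumer might branch on it like this (names other than `withDeploymentName` are illustrative, not the package's API):

```ts
interface ProviderServerConfigSketch {
  withDeploymentName?: boolean;
}

const serverConfig: Record<string, ProviderServerConfigSketch> = {
  qwen: { withDeploymentName: true },
};

// e.g. decide whether to render a "deployment name" field in provider settings.
const shouldShowDeploymentNameField = (provider: string): boolean =>
  serverConfig[provider]?.withDeploymentName ?? false;

console.log(shouldShowDeploymentNameField('qwen')); // true
console.log(shouldShowDeploymentNameField('openai')); // false
```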
package/src/services/chat.ts
CHANGED