@lobehub/chat 1.57.1 → 1.59.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/docker-compose/local/docker-compose.yml +1 -0
- package/locales/ar/modelProvider.json +24 -0
- package/locales/ar/models.json +60 -0
- package/locales/ar/providers.json +15 -0
- package/locales/bg-BG/modelProvider.json +24 -0
- package/locales/bg-BG/models.json +60 -0
- package/locales/bg-BG/providers.json +15 -0
- package/locales/de-DE/modelProvider.json +24 -0
- package/locales/de-DE/models.json +60 -0
- package/locales/de-DE/providers.json +15 -0
- package/locales/en-US/modelProvider.json +24 -0
- package/locales/en-US/models.json +60 -0
- package/locales/en-US/providers.json +15 -0
- package/locales/es-ES/modelProvider.json +24 -0
- package/locales/es-ES/models.json +60 -0
- package/locales/es-ES/providers.json +15 -0
- package/locales/fa-IR/modelProvider.json +24 -0
- package/locales/fa-IR/models.json +60 -0
- package/locales/fa-IR/providers.json +15 -0
- package/locales/fr-FR/modelProvider.json +24 -0
- package/locales/fr-FR/models.json +60 -0
- package/locales/fr-FR/providers.json +15 -0
- package/locales/it-IT/modelProvider.json +24 -0
- package/locales/it-IT/models.json +60 -0
- package/locales/it-IT/providers.json +15 -0
- package/locales/ja-JP/modelProvider.json +24 -0
- package/locales/ja-JP/models.json +60 -0
- package/locales/ja-JP/providers.json +15 -0
- package/locales/ko-KR/modelProvider.json +24 -0
- package/locales/ko-KR/models.json +60 -0
- package/locales/ko-KR/providers.json +15 -0
- package/locales/nl-NL/modelProvider.json +24 -0
- package/locales/nl-NL/models.json +60 -0
- package/locales/nl-NL/providers.json +15 -0
- package/locales/pl-PL/modelProvider.json +24 -0
- package/locales/pl-PL/models.json +60 -0
- package/locales/pl-PL/providers.json +15 -0
- package/locales/pt-BR/modelProvider.json +24 -0
- package/locales/pt-BR/models.json +60 -0
- package/locales/pt-BR/providers.json +15 -0
- package/locales/ru-RU/modelProvider.json +24 -0
- package/locales/ru-RU/models.json +60 -0
- package/locales/ru-RU/providers.json +15 -0
- package/locales/tr-TR/modelProvider.json +24 -0
- package/locales/tr-TR/models.json +60 -0
- package/locales/tr-TR/providers.json +15 -0
- package/locales/vi-VN/modelProvider.json +24 -0
- package/locales/vi-VN/models.json +60 -0
- package/locales/vi-VN/providers.json +15 -0
- package/locales/zh-CN/modelProvider.json +24 -0
- package/locales/zh-CN/models.json +1109 -1049
- package/locales/zh-CN/providers.json +82 -67
- package/locales/zh-TW/modelProvider.json +24 -0
- package/locales/zh-TW/models.json +60 -0
- package/locales/zh-TW/providers.json +15 -0
- package/next.config.ts +5 -0
- package/package.json +4 -2
- package/src/app/[variants]/(main)/settings/provider/(detail)/azureai/page.tsx +58 -0
- package/src/app/[variants]/(main)/settings/provider/(list)/ProviderGrid/index.tsx +6 -3
- package/src/app/[variants]/(main)/settings/provider/features/CreateNewProvider/index.tsx +13 -2
- package/src/app/[variants]/(main)/settings/provider/features/ModelList/CreateNewModelModal/Form.tsx +6 -8
- package/src/app/[variants]/(main)/settings/provider/features/ModelList/CreateNewModelModal/index.tsx +5 -6
- package/src/app/[variants]/(main)/settings/provider/features/ModelList/ModelConfigModal/index.tsx +4 -3
- package/src/app/[variants]/(main)/settings/provider/features/ModelList/ProviderSettingsContext.ts +2 -0
- package/src/app/[variants]/(main)/settings/provider/features/ModelList/index.tsx +6 -7
- package/src/app/[variants]/(main)/settings/provider/features/ProviderConfig/index.tsx +1 -1
- package/src/config/aiModels/azureai.ts +18 -0
- package/src/config/aiModels/index.ts +6 -0
- package/src/config/aiModels/volcengine.ts +83 -0
- package/src/config/llm.ts +6 -0
- package/src/config/modelProviders/azure.ts +2 -1
- package/src/config/modelProviders/azureai.ts +19 -0
- package/src/config/modelProviders/index.ts +6 -0
- package/src/config/modelProviders/volcengine.ts +23 -0
- package/src/database/server/models/aiProvider.ts +2 -0
- package/src/libs/agent-runtime/AgentRuntime.ts +17 -8
- package/src/libs/agent-runtime/azureai/index.ts +109 -0
- package/src/libs/agent-runtime/baichuan/index.test.ts +8 -250
- package/src/libs/agent-runtime/cloudflare/index.ts +22 -18
- package/src/libs/agent-runtime/index.ts +2 -1
- package/src/libs/agent-runtime/types/type.ts +5 -0
- package/src/libs/agent-runtime/utils/streams/__snapshots__/protocol.test.ts.snap +331 -0
- package/src/libs/agent-runtime/utils/streams/protocol.test.ts +137 -0
- package/src/libs/agent-runtime/utils/streams/protocol.ts +34 -0
- package/src/libs/agent-runtime/{doubao → volcengine}/index.ts +3 -3
- package/src/locales/default/modelProvider.ts +25 -0
- package/src/server/modules/AgentRuntime/index.ts +8 -1
- package/src/services/chat.ts +16 -4
- package/src/types/aiProvider.ts +5 -0
- package/src/types/user/settings/keyVaults.ts +2 -0
package/src/libs/agent-runtime/utils/streams/__snapshots__/protocol.test.ts.snap
ADDED
@@ -0,0 +1,331 @@
+// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
+
+exports[`createSSEDataExtractor > real world data > should convert azure ai data 1`] = `
+[
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "",
+          "reasoning_content": null,
+          "role": "assistant",
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714651,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "<think>",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714651,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "</think>",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714651,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "Hello",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714651,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "!",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " How",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " can",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " I",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " assist",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " you",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " today",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "?",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": " ",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "😊",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": null,
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": null,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [
+      {
+        "delta": {
+          "content": "",
+          "reasoning_content": null,
+          "role": null,
+          "tool_calls": null,
+        },
+        "finish_reason": "stop",
+        "index": 0,
+        "logprobs": null,
+        "matched_stop": 1,
+      },
+    ],
+    "created": 1739714652,
+    "id": "1392a93d52c3483ea872d0ab2aaff7d7",
+    "model": "DeepSeek-R1",
+    "object": "chat.completion.chunk",
+    "usage": null,
+  },
+  {
+    "choices": [],
+    "id": "79fca0de792a4ffb8ec836442a2a42c0",
+    "model": "DeepSeek-R1",
+    "usage": {
+      "completion_tokens": 16,
+      "prompt_tokens": 4,
+      "total_tokens": 20,
+    },
+  },
+]
+`;
package/src/libs/agent-runtime/utils/streams/protocol.test.ts
ADDED
@@ -0,0 +1,137 @@
+import { describe, expect, it } from 'vitest';
+
+import { createSSEDataExtractor } from './protocol';
+
+describe('createSSEDataExtractor', () => {
+  // Helper function to convert string to Uint8Array
+  const stringToUint8Array = (str: string): Uint8Array => {
+    return new TextEncoder().encode(str);
+  };
+
+  // Helper function to process chunks through transformer
+  const processChunk = async (transformer: TransformStream, chunk: Uint8Array) => {
+    const results: any[] = [];
+    const readable = new ReadableStream({
+      start(controller) {
+        controller.enqueue(chunk);
+        controller.close();
+      },
+    });
+
+    const writable = new WritableStream({
+      write(chunk) {
+        results.push(chunk);
+      },
+    });
+
+    await readable.pipeThrough(transformer).pipeTo(writable);
+
+    return results;
+  };
+
+  it('should correctly transform single SSE data line', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = 'data: {"message": "hello"}\n';
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }]);
+  });
+
+  it('should handle multiple SSE data lines', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = `data: {"message": "hello"}\ndata: {"message": "world"}\n`;
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }, { message: 'world' }]);
+  });
+
+  it('should ignore non-data lines', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = `id: 1\ndata: {"message": "hello"}\nevent: message\n`;
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }]);
+  });
+
+  it('should skip [DONE] heartbeat messages', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = `data: {"message": "hello"}\ndata: [DONE]\ndata: {"message": "world"}\n`;
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }, { message: 'world' }]);
+  });
+
+  it('should handle invalid JSON gracefully', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = `data: {"message": "hello"}\ndata: invalid-json\ndata: {"message": "world"}\n`;
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }, { message: 'world' }]);
+  });
+
+  it('should handle empty data lines', async () => {
+    const transformer = createSSEDataExtractor();
+    const input = `data: \ndata: {"message": "hello"}\ndata: \n`;
+    const chunk = stringToUint8Array(input);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toEqual([{ message: 'hello' }]);
+  });
+
+  it('should process large chunks of data correctly', async () => {
+    const transformer = createSSEDataExtractor();
+    const messages = Array(100)
+      .fill(null)
+      .map((_, i) => `data: {"message": "message${i}"}\n`)
+      .join('');
+    const chunk = stringToUint8Array(messages);
+
+    const results = await processChunk(transformer, chunk);
+
+    expect(results).toHaveLength(100);
+    expect(results[0]).toEqual({ message: 'message0' });
+    expect(results[99]).toEqual({ message: 'message99' });
+  });
+
+  describe('real world data', () => {
+    it('should convert azure ai data', async () => {
+      const chunks = [
+        `data: {"choices":[{"delta":{"content":"","reasoning_content":null,"role":"assistant","tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"\u003cthink\u003e","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"\n\n","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"\u003c/think\u003e","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"\n\n","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"Hello","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714651,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"!","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" How","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" can","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" I","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" assist","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" you","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" today","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"?","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":" ","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"😊","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":null,"index":0,"logprobs":null,"matched_stop":null}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[{"delta":{"content":"","reasoning_content":null,"role":null,"tool_calls":null},"finish_reason":"stop","index":0,"logprobs":null,"matched_stop":1}],"created":1739714652,"id":"1392a93d52c3483ea872d0ab2aaff7d7","model":"DeepSeek-R1","object":"chat.completion.chunk","usage":null}\n`,
+        `data: {"choices":[],"id":"79fca0de792a4ffb8ec836442a2a42c0","model":"DeepSeek-R1","usage":{"completion_tokens":16,"prompt_tokens":4,"total_tokens":20}}\n`,
+        `data: [DONE]`,
+      ];
+
+      const transformer = createSSEDataExtractor();
+
+      const results = await processChunk(transformer, stringToUint8Array(chunks.join('')));
+      expect(results).matchSnapshot();
+    });
+  });
+});
package/src/libs/agent-runtime/utils/streams/protocol.ts
CHANGED
@@ -170,3 +170,37 @@ export const createFirstErrorHandleTransformer = (
     },
   });
 };
+
+/**
+ * create a transformer that strips the SSE framing and extracts the JSON data
+ */
+export const createSSEDataExtractor = () =>
+  new TransformStream({
+    transform(chunk: Uint8Array, controller) {
+      // Convert the Uint8Array into a string
+      const text = new TextDecoder().decode(chunk, { stream: true });
+
+      // Handle the case where a chunk contains multiple lines
+      const lines = text.split('\n');
+
+      for (const line of lines) {
+        // Only process lines that start with "data: "
+        if (line.startsWith('data: ')) {
+          // Extract the actual payload after "data: "
+          const jsonText = line.slice(6);
+
+          // Skip heartbeat messages
+          if (jsonText === '[DONE]') continue;
+
+          try {
+            // Parse the JSON data
+            const data = JSON.parse(jsonText);
+            // Pass the parsed data to the next handler
+            controller.enqueue(data);
+          } catch {
+            console.warn('Failed to parse SSE data:', jsonText);
+          }
+        }
+      }
+    },
+  });
package/src/libs/agent-runtime/{doubao → volcengine}/index.ts
RENAMED
@@ -1,10 +1,10 @@
 import { ModelProvider } from '../types';
 import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
 
-export const LobeDoubaoAI = LobeOpenAICompatibleFactory({
+export const LobeVolcengineAI = LobeOpenAICompatibleFactory({
   baseURL: 'https://ark.cn-beijing.volces.com/api/v3',
   debug: {
     chatCompletion: () => process.env.DEBUG_DOUBAO_CHAT_COMPLETION === '1',
   },
-  provider: ModelProvider.Doubao,
-});
+  provider: ModelProvider.Volcengine,
+});
package/src/locales/default/modelProvider.ts
CHANGED
@@ -19,6 +19,25 @@ export default {
       title: 'API Key',
     },
   },
+  azureai: {
+    azureApiVersion: {
+      desc: 'Azure 的 API 版本，遵循 YYYY-MM-DD 格式，查阅[最新版本](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/reference#chat-completions)',
+      fetch: '获取列表',
+      title: 'Azure API Version',
+    },
+    endpoint: {
+      desc: '从 Azure AI 项目概述找到 Azure AI 模型推理终结点',
+      placeholder: 'https://ai-userxxxxxxxxxx.services.ai.azure.com/models',
+      title: 'Azure AI 终结点',
+    },
+    title: 'Azure OpenAI',
+    token: {
+      desc: '从 Azure AI 项目概述找到 API 密钥',
+      placeholder: 'Azure 密钥',
+      title: '密钥',
+    },
+  },
+
   bedrock: {
     accessKeyId: {
       desc: '填入 AWS Access Key Id',
@@ -98,6 +117,7 @@ export default {
       title: '代理地址',
     },
     sdkType: {
+      placeholder: 'openai/anthropic/azureai/ollama/...',
       required: '请选择 SDK 类型',
       title: '请求格式',
     },
@@ -211,6 +231,11 @@ export default {
      placeholder: '请输入 Azure 中的模型部署名称',
      title: '模型部署名称',
    },
+    deployName: {
+      extra: '发送请求时会将该字段作为模型 ID',
+      placeholder: '请输入模型实际部署的名称或 id',
+      title: '模型部署名称',
+    },
    displayName: {
      placeholder: '请输入模型的展示名称，例如 ChatGPT、GPT-4 等',
      title: '模型展示名称',
package/src/server/modules/AgentRuntime/index.ts
CHANGED
@@ -57,6 +57,13 @@ const getLlmOptionsFromPayload = (provider: string, payload: JWTPayload) => {
       return { apiKey, apiVersion, baseURL };
     }
 
+    case ModelProvider.AzureAI: {
+      const { AZUREAI_ENDPOINT, AZUREAI_ENDPOINT_KEY } = llmConfig;
+      const apiKey = payload?.apiKey || AZUREAI_ENDPOINT_KEY;
+      const baseURL = payload?.baseURL || AZUREAI_ENDPOINT;
+      return { apiKey, baseURL };
+    }
+
     case ModelProvider.Bedrock: {
       const { AWS_SECRET_ACCESS_KEY, AWS_ACCESS_KEY_ID, AWS_REGION, AWS_SESSION_TOKEN } = llmConfig;
       let accessKeyId: string | undefined = AWS_ACCESS_KEY_ID;
@@ -100,7 +107,7 @@ const getLlmOptionsFromPayload = (provider: string, payload: JWTPayload) => {
 
       return { apiKey };
     }
-
+
     case ModelProvider.TencentCloud: {
       const { TENCENT_CLOUD_API_KEY } = llmConfig;
 
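Put differently, the new Azure AI branch prefers credentials from the signed request payload and falls back to the server-side `AZUREAI_ENDPOINT` / `AZUREAI_ENDPOINT_KEY` configuration. A standalone sketch of that resolution order, with simplified types that are assumptions rather than the package's own:

```ts
// Simplified sketch of the fallback logic above; the types are illustrative assumptions.
interface AzureAIOptions {
  apiKey?: string;
  baseURL?: string;
}

const resolveAzureAIOptions = (
  payload: { apiKey?: string; baseURL?: string },
  env: { AZUREAI_ENDPOINT?: string; AZUREAI_ENDPOINT_KEY?: string },
): AzureAIOptions => ({
  // a key supplied with the request wins; otherwise use the server-configured key
  apiKey: payload.apiKey || env.AZUREAI_ENDPOINT_KEY,
  // same for the inference endpoint, e.g. https://<resource>.services.ai.azure.com/models
  baseURL: payload.baseURL || env.AZUREAI_ENDPOINT,
});

// resolveAzureAIOptions({}, process.env) -> both values come from environment variables
```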
package/src/services/chat.ts
CHANGED
@@ -50,7 +50,10 @@ const isCanUseFC = (model: string, provider: string) => {
   return aiModelSelectors.isModelSupportToolUse(model, provider)(useAiInfraStore.getState());
 };
 
-const findAzureDeploymentName = (model: string) => {
+/**
+ * TODO: we need to update this function to auto find deploymentName with provider setting config
+ */
+const findDeploymentName = (model: string, provider: string) => {
   let deploymentId = model;
 
   // TODO: remove isDeprecatedEdition condition in V2.0
@@ -63,7 +66,9 @@ const findAzureDeploymentName = (model: string) => {
     if (deploymentName) deploymentId = deploymentName;
   } else {
     // find the model by id
-    const modelItem = useAiInfraStore.getState().enabledAiModels?.find((i) => i.id === model);
+    const modelItem = useAiInfraStore
+      .getState()
+      .enabledAiModels?.find((i) => i.id === model && i.providerId === provider);
 
     if (modelItem && modelItem.config?.deploymentName) {
       deploymentId = modelItem.config?.deploymentName;
@@ -222,8 +227,15 @@ class ChatService {
     let model = res.model || DEFAULT_AGENT_CONFIG.model;
 
     // if the provider is Azure, get the deployment name as the request model
-    if (provider === ModelProvider.Azure) {
-      model = findAzureDeploymentName(model);
+    const providersWithDeploymentName = [
+      ModelProvider.Azure,
+      ModelProvider.Volcengine,
+      ModelProvider.Doubao,
+      ModelProvider.AzureAI,
+    ] as string[];
+
+    if (providersWithDeploymentName.includes(provider)) {
+      model = findDeploymentName(model, provider);
     }
 
     const payload = merge(
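In short, for Azure, Azure AI, Volcengine and the deprecated Doubao entry, the outgoing `model` field is swapped for the configured deployment name when one exists. A reduced sketch of that lookup, with a hand-rolled model list standing in for the store state (the `AiModelItem` shape here is an assumption):

```ts
// Reduced illustration of the deployment-name lookup; not the package's actual types.
interface AiModelItem {
  config?: { deploymentName?: string };
  id: string;
  providerId: string;
}

const resolveRequestModel = (model: string, provider: string, enabledAiModels: AiModelItem[]) => {
  const modelItem = enabledAiModels.find((i) => i.id === model && i.providerId === provider);
  // fall back to the model id itself when no deployment name is configured
  return modelItem?.config?.deploymentName ?? model;
};

// Example: 'gpt-4o' deployed on Azure under the name 'my-gpt4o-deployment'
const models: AiModelItem[] = [
  { config: { deploymentName: 'my-gpt4o-deployment' }, id: 'gpt-4o', providerId: 'azure' },
];

resolveRequestModel('gpt-4o', 'azure', models); // => 'my-gpt4o-deployment'
resolveRequestModel('gpt-4o-mini', 'azure', models); // => 'gpt-4o-mini'
```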
package/src/types/aiProvider.ts
CHANGED
@@ -16,13 +16,18 @@ export type AiProviderSourceType = (typeof AiProviderSourceEnum)[keyof typeof Ai
 export const AiProviderSDKEnum = {
   Anthropic: 'anthropic',
   Azure: 'azure',
+  AzureAI: 'azureai',
   Bedrock: 'bedrock',
   Cloudflare: 'cloudflare',
+  /**
+   * @deprecated
+   */
   Doubao: 'doubao',
   Google: 'google',
   Huggingface: 'huggingface',
   Ollama: 'ollama',
   Openai: 'openai',
+  Volcengine: 'volcengine',
 } as const;
 
 export type AiProviderSDKType = (typeof AiProviderSDKEnum)[keyof typeof AiProviderSDKEnum];
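The SDK type is what a custom provider picks in the settings form (`openai/anthropic/azureai/ollama/...`, per the locale placeholder above) and what the runtime layer dispatches on. A hedged sketch of such a dispatch, assuming the package's `@/` path alias; `describeSDK` and its return strings are illustrative, not part of the package API:

```ts
import { AiProviderSDKEnum, AiProviderSDKType } from '@/types/aiProvider';

// Illustrative only: describe what each SDK type implies for a custom provider.
const describeSDK = (sdkType: AiProviderSDKType): string => {
  switch (sdkType) {
    case AiProviderSDKEnum.AzureAI:
      return 'Azure AI model inference endpoint (endpoint URL + key)';
    case AiProviderSDKEnum.Volcengine:
      return 'Volcengine Ark, OpenAI-compatible (ark.cn-beijing.volces.com)';
    case AiProviderSDKEnum.Doubao:
      return 'deprecated alias kept for existing Doubao configurations';
    default:
      return `OpenAI-compatible or vendor-specific SDK: ${sdkType}`;
  }
};

describeSDK(AiProviderSDKEnum.Volcengine);
```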
package/src/types/user/settings/keyVaults.ts
CHANGED
@@ -30,6 +30,7 @@ export interface UserKeyVaults {
   ai360?: OpenAICompatibleKeyVault;
   anthropic?: OpenAICompatibleKeyVault;
   azure?: AzureOpenAIKeyVault;
+  azureai?: AzureOpenAIKeyVault;
   baichuan?: OpenAICompatibleKeyVault;
   bedrock?: AWSBedrockKeyVault;
   cloudflare?: CloudflareKeyVault;
@@ -67,6 +68,7 @@ export interface UserKeyVaults {
   togetherai?: OpenAICompatibleKeyVault;
   upstage?: OpenAICompatibleKeyVault;
   vllm?: OpenAICompatibleKeyVault;
+  volcengine?: OpenAICompatibleKeyVault;
   wenxin?: OpenAICompatibleKeyVault;
   xai?: OpenAICompatibleKeyVault;
   zeroone?: OpenAICompatibleKeyVault;