@lobehub/chat 1.61.6 → 1.62.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries, and is provided for informational purposes only.
Files changed (59)
  1. package/.github/ISSUE_TEMPLATE/1_bug_report_cn.yml +8 -0
  2. package/.github/ISSUE_TEMPLATE/config.yml +4 -1
  3. package/CHANGELOG.md +58 -0
  4. package/changelog/v1.json +21 -0
  5. package/locales/ar/components.json +1 -0
  6. package/locales/bg-BG/components.json +1 -0
  7. package/locales/de-DE/components.json +1 -0
  8. package/locales/en-US/components.json +4 -3
  9. package/locales/es-ES/components.json +1 -0
  10. package/locales/fa-IR/components.json +1 -0
  11. package/locales/fr-FR/components.json +1 -0
  12. package/locales/it-IT/components.json +1 -0
  13. package/locales/ja-JP/components.json +1 -0
  14. package/locales/ko-KR/components.json +1 -0
  15. package/locales/nl-NL/components.json +1 -0
  16. package/locales/pl-PL/components.json +1 -0
  17. package/locales/pt-BR/components.json +1 -0
  18. package/locales/ru-RU/components.json +1 -0
  19. package/locales/tr-TR/components.json +1 -0
  20. package/locales/vi-VN/components.json +1 -0
  21. package/locales/zh-CN/components.json +2 -1
  22. package/locales/zh-TW/components.json +1 -0
  23. package/package.json +2 -2
  24. package/src/components/ModelSelect/index.tsx +24 -2
  25. package/src/components/Thinking/index.tsx +7 -2
  26. package/src/config/aiModels/jina.ts +7 -5
  27. package/src/config/aiModels/perplexity.ts +8 -0
  28. package/src/config/llm.ts +8 -0
  29. package/src/config/modelProviders/sambanova.ts +4 -1
  30. package/src/database/client/migrations.json +12 -8
  31. package/src/database/migrations/0015_add_message_search_metadata.sql +2 -0
  32. package/src/database/migrations/meta/0015_snapshot.json +3616 -0
  33. package/src/database/migrations/meta/_journal.json +7 -0
  34. package/src/database/schemas/message.ts +3 -1
  35. package/src/database/server/models/message.ts +2 -0
  36. package/src/features/Conversation/components/ChatItem/index.tsx +10 -1
  37. package/src/features/Conversation/components/MarkdownElements/Thinking/Render.tsx +5 -1
  38. package/src/features/Conversation/components/MarkdownElements/remarkPlugins/createRemarkCustomTagPlugin.ts +1 -0
  39. package/src/features/Conversation/components/MarkdownElements/remarkPlugins/getNodeContent.test.ts +107 -0
  40. package/src/features/Conversation/components/MarkdownElements/remarkPlugins/getNodeContent.ts +6 -0
  41. package/src/libs/agent-runtime/perplexity/index.test.ts +156 -12
  42. package/src/libs/agent-runtime/utils/streams/anthropic.ts +3 -3
  43. package/src/libs/agent-runtime/utils/streams/bedrock/claude.ts +6 -2
  44. package/src/libs/agent-runtime/utils/streams/bedrock/llama.ts +3 -3
  45. package/src/libs/agent-runtime/utils/streams/google-ai.ts +3 -3
  46. package/src/libs/agent-runtime/utils/streams/ollama.ts +3 -3
  47. package/src/libs/agent-runtime/utils/streams/openai.ts +26 -8
  48. package/src/libs/agent-runtime/utils/streams/protocol.ts +33 -8
  49. package/src/libs/agent-runtime/utils/streams/vertex-ai.ts +3 -3
  50. package/src/locales/default/components.ts +1 -0
  51. package/src/server/services/nextAuthUser/index.test.ts +109 -0
  52. package/src/services/user/client.test.ts +10 -0
  53. package/src/services/user/server.test.ts +149 -0
  54. package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +17 -6
  55. package/src/store/chat/slices/message/action.ts +12 -7
  56. package/src/types/aiModel.ts +5 -0
  57. package/src/types/message/base.ts +13 -0
  58. package/src/types/message/chat.ts +3 -2
  59. package/src/utils/fetch/fetchSSE.ts +17 -1
package/src/database/migrations/meta/_journal.json

@@ -105,6 +105,13 @@
       "when": 1737609172353,
       "tag": "0014_add_message_reasoning",
       "breakpoints": true
+    },
+    {
+      "idx": 15,
+      "version": "7",
+      "when": 1739901891891,
+      "tag": "0015_add_message_search_metadata",
+      "breakpoints": true
     }
   ],
   "version": "6"
package/src/database/schemas/message.ts

@@ -13,7 +13,7 @@ import {
 import { createSelectSchema } from 'drizzle-zod';
 
 import { idGenerator } from '@/database/utils/idGenerator';
-import { ModelReasoning } from '@/types/message';
+import { GroundingSearch, ModelReasoning } from '@/types/message';
 
 import { timestamps } from './_helpers';
 import { agents } from './agent';

@@ -34,6 +34,8 @@ export const messages = pgTable(
     role: text('role', { enum: ['user', 'system', 'assistant', 'tool'] }).notNull(),
     content: text('content'),
     reasoning: jsonb('reasoning').$type<ModelReasoning>(),
+    search: jsonb('search').$type<GroundingSearch>(),
+    metadata: jsonb('metadata'),
 
     model: text('model'),
     provider: text('provider'),
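
The `GroundingSearch` type referenced here comes from `@/types/message` (file 57, `src/types/message/base.ts`, +13 lines, whose hunks are not shown). Judging from how `item?.search?.citations` and `CitationItem` are used elsewhere in this diff, a plausible reconstruction of the new types is the sketch below; every field beyond `citations`, `title`, and `url` would be an assumption.

```ts
// Hypothetical reconstruction of the new search-metadata types. Only
// `citations`, `title`, and `url` are confirmed by usage in this diff;
// the actual declarations live in src/types/message/base.ts.
export interface CitationItem {
  title?: string;
  url: string;
}

export interface GroundingSearch {
  citations?: CitationItem[];
}
```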
package/src/database/server/models/message.ts

@@ -74,6 +74,8 @@ export class MessageModel {
       role: messages.role,
       content: messages.content,
       reasoning: messages.reasoning,
+      search: messages.search,
+      metadata: messages.metadata,
       error: messages.error,
 
       model: messages.model,
package/src/features/Conversation/components/ChatItem/index.tsx

@@ -172,12 +172,21 @@ const Item = memo<ChatListItemProps>(
 
     const markdownProps = useMemo(
       () => ({
+        citations: item?.role === 'user' ? undefined : item?.search?.citations,
         components,
         customRender: markdownCustomRender,
         rehypePlugins: item?.role === 'user' ? undefined : rehypePlugins,
         remarkPlugins: item?.role === 'user' ? undefined : remarkPlugins,
+        showCitations:
+          item?.role === 'user'
+            ? undefined
+            : item?.search?.citations &&
+              // if the citations list is empty, we should not show it
+              item?.search?.citations.length > 0 &&
+              // if every citation's title is just its url, we should not show the list
+              item?.search?.citations.every((item) => item.title !== item.url),
       }),
-      [components, markdownCustomRender, item?.role],
+      [components, markdownCustomRender, item?.role, item?.search],
     );
 
     const onChange = useCallback((value: string) => updateMessageContent(id, value), [id]);
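
The `showCitations` expression added above bundles three checks. Extracted as a standalone predicate (a sketch, not an export of the package), it reads:

```ts
import type { CitationItem } from '@/types/message';

// Render citations only when the list is non-empty and carries real titles:
// if every item's title merely repeats its URL, suppress the display.
const shouldShowCitations = (citations?: CitationItem[]): boolean =>
  !!citations &&
  citations.length > 0 &&
  citations.every((item) => item.title !== item.url);
```

Note that Perplexity citations are mapped to `{ title: url, url }` in `streams/openai.ts` further down, so they fail the last check by construction.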
package/src/features/Conversation/components/MarkdownElements/Thinking/Render.tsx

@@ -18,10 +18,14 @@ const Render = memo<MarkdownElementProps>(({ children, id }) => {
     const message = chatSelectors.getMessageById(id)(s);
     return [!isThinkingClosed(message?.content)];
   });
+  const citations = useChatStore((s) => {
+    const message = chatSelectors.getMessageById(id)(s);
+    return message?.search?.citations;
+  });
 
   if (!isGenerating && !children) return;
 
-  return <Thinking content={children as string} thinking={isGenerating} />;
+  return <Thinking citations={citations} content={children as string} thinking={isGenerating} />;
 });
 
 export default Render;
package/src/features/Conversation/components/MarkdownElements/remarkPlugins/createRemarkCustomTagPlugin.ts

@@ -32,6 +32,7 @@ export const createRemarkCustomTagPlugin = (tag: string) => () => {
   );
 
   // Convert the content nodes to a Markdown string
+
   const content = treeNodeToString(contentNodes);
 
   // Create the custom node
package/src/features/Conversation/components/MarkdownElements/remarkPlugins/getNodeContent.test.ts

@@ -393,4 +393,111 @@ describe('treeNodeToString', () => {
 1. 求 $a_2$ 和 $a_3$,根据前三项的规律猜想该数列的通项公式
 2. 用数学归纳法证明你的猜想。`);
   });
+
+  describe('link node', () => {
+    it('with url', () => {
+      const nodes = [
+        {
+          type: 'paragraph',
+          children: [
+            {
+              type: 'link',
+              title: null,
+              url: 'citation-1',
+              children: [
+                {
+                  type: 'text',
+                  value: '#citation-1',
+                  position: {
+                    start: {
+                      line: 5,
+                      column: 26,
+                      offset: 78,
+                    },
+                    end: {
+                      line: 5,
+                      column: 37,
+                      offset: 89,
+                    },
+                  },
+                },
+              ],
+              position: {
+                start: {
+                  line: 5,
+                  column: 25,
+                  offset: 77,
+                },
+                end: {
+                  line: 5,
+                  column: 50,
+                  offset: 102,
+                },
+              },
+            },
+          ],
+          position: {
+            start: {
+              line: 5,
+              column: 1,
+              offset: 53,
+            },
+            end: {
+              line: 5,
+              column: 220,
+              offset: 272,
+            },
+          },
+        },
+      ];
+
+      const result = treeNodeToString(nodes as Parent[]);
+
+      expect(result).toEqual(`[#citation-1](citation-1)`);
+    });
+
+    it('handle error case', () => {
+      const nodes = [
+        {
+          type: 'paragraph',
+          children: [
+            {
+              type: 'link',
+              title: null,
+              url: 'citation-1',
+              children: [],
+              position: {
+                start: {
+                  line: 5,
+                  column: 25,
+                  offset: 77,
+                },
+                end: {
+                  line: 5,
+                  column: 50,
+                  offset: 102,
+                },
+              },
+            },
+          ],
+          position: {
+            start: {
+              line: 5,
+              column: 1,
+              offset: 53,
+            },
+            end: {
+              line: 5,
+              column: 220,
+              offset: 272,
+            },
+          },
+        },
+      ];
+
+      const result = treeNodeToString(nodes as Parent[]);
+
+      expect(result).toEqual(`[](citation-1)`);
+    });
+  });
 });
package/src/features/Conversation/components/MarkdownElements/remarkPlugins/getNodeContent.ts

@@ -7,6 +7,12 @@ const processNode = (node: any): string => {
     return `$${node.value}$`;
   }
 
+  if (node.type === 'link') {
+    const text = node.children?.[0] ? processNode(node.children?.[0]) : '';
+
+    return `[${text}](${node.url})`;
+  }
+
   // Handle containers with child nodes
   if (node.children) {
     const content = node.children.map((element: Parent) => processNode(element)).join('');
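
Only `node.children?.[0]` contributes to the link label, so a label with several child nodes is truncated to its first child, and an empty `children` array degrades to an empty label instead of throwing. A self-contained sketch of that behavior (with a simplified mdast node shape):

```ts
type MdastNode = { type: string; value?: string; url?: string; children?: MdastNode[] };

// Simplified processNode covering only the text and link branches.
const processNode = (node: MdastNode): string => {
  if (node.type === 'text') return node.value ?? '';
  if (node.type === 'link') {
    // only the first child is serialized into the link label
    const text = node.children?.[0] ? processNode(node.children[0]) : '';
    return `[${text}](${node.url})`;
  }
  // containers: concatenate the serialized children
  return (node.children ?? []).map((child) => processNode(child)).join('');
};

processNode({
  type: 'link',
  url: 'citation-1',
  children: [{ type: 'text', value: '#citation-1' }],
}); // => '[#citation-1](citation-1)'; `children: []` yields '[](citation-1)'
```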
package/src/libs/agent-runtime/perplexity/index.test.ts

@@ -28,17 +28,9 @@ beforeEach(() => {
   );
 });
 
-afterEach(() => {
-  vi.clearAllMocks();
-});
-
 describe('LobePerplexityAI', () => {
   describe('chat', () => {
     it('should call chat method with temperature', async () => {
-      vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
-        new ReadableStream() as any,
-      );
-
       await instance.chat({
         messages: [{ content: 'Hello', role: 'user' }],
         model: 'text-davinci-003',

@@ -56,10 +48,6 @@ describe('LobePerplexityAI', () => {
     });
 
     it('should be undefined when temperature >= 2', async () => {
-      vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(
-        new ReadableStream() as any,
-      );
-
       await instance.chat({
         messages: [{ content: 'Hello', role: 'user' }],
         model: 'text-davinci-003',

@@ -75,5 +63,161 @@ describe('LobePerplexityAI', () => {
         expect.any(Object),
       );
     });
+
+    it('should with search citations', async () => {
+      const data = [
+        {
+          id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
+          model: 'sonar-pro',
+          created: 1739896615,
+          usage: {
+            prompt_tokens: 4,
+            completion_tokens: 3,
+            total_tokens: 7,
+            citation_tokens: 2217,
+            num_search_queries: 1,
+          },
+          citations: [
+            'https://www.weather.com.cn/weather/101210101.shtml',
+            'https://tianqi.moji.com/weather/china/zhejiang/hangzhou',
+            'https://weather.cma.cn/web/weather/58457.html',
+            'https://tianqi.so.com/weather/101210101',
+            'https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832',
+            'https://www.hzqx.com',
+            'https://www.hzqx.com/pc/hztq/',
+          ],
+          object: 'chat.completion',
+          choices: [
+            {
+              index: 0,
+              finish_reason: null,
+              message: {
+                role: 'assistant',
+                content: '杭州今',
+              },
+              delta: {
+                role: 'assist',
+                content: '杭州今',
+              },
+            },
+          ],
+        },
+        {
+          id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
+          model: 'sonar-pro',
+          created: 1739896615,
+          usage: {
+            prompt_tokens: 4,
+            completion_tokens: 9,
+            total_tokens: 13,
+            citation_tokens: 2217,
+            num_search_queries: 1,
+          },
+          citations: [
+            'https://www.weather.com.cn/weather/101210101.shtml',
+            'https://tianqi.moji.com/weather/china/zhejiang/hangzhou',
+            'https://weather.cma.cn/web/weather/58457.html',
+            'https://tianqi.so.com/weather/101210101',
+            'https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832',
+            'https://www.hzqx.com',
+            'https://www.hzqx.com/pc/hztq/',
+          ],
+          object: 'chat.completion',
+          choices: [
+            {
+              index: 0,
+              finish_reason: null,
+              message: {
+                role: 'assistant',
+                content: '杭州今天和未来几天的',
+              },
+              delta: {
+                role: 'assistant',
+                content: '天和未来几天的',
+              },
+            },
+          ],
+        },
+        {
+          id: '506d64fb-e7f2-4d94-b80f-158369e9446d',
+          model: 'sonar-pro',
+          created: 1739896615,
+          usage: {
+            prompt_tokens: 4,
+            completion_tokens: 14,
+            total_tokens: 18,
+            citation_tokens: 2217,
+            num_search_queries: 1,
+          },
+          citations: [
+            'https://www.weather.com.cn/weather/101210101.shtml',
+            'https://tianqi.moji.com/weather/china/zhejiang/hangzhou',
+            'https://weather.cma.cn/web/weather/58457.html',
+            'https://tianqi.so.com/weather/101210101',
+            'https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832',
+            'https://www.hzqx.com',
+            'https://www.hzqx.com/pc/hztq/',
+          ],
+          object: 'chat.completion',
+          choices: [
+            {
+              index: 0,
+              finish_reason: null,
+              message: {
+                role: 'assistant',
+                content: '杭州今天和未来几天的天气预报如',
+              },
+            },
+          ],
+        },
+      ];
+
+      const mockStream = new ReadableStream({
+        start(controller) {
+          data.forEach((chunk) => {
+            controller.enqueue(chunk);
+          });
+
+          controller.close();
+        },
+      });
+
+      vi.spyOn(instance['client'].chat.completions, 'create').mockResolvedValue(mockStream as any);
+
+      const result = await instance.chat({
+        messages: [{ content: 'Hello', role: 'user' }],
+        model: 'mistralai/mistral-7b-instruct:free',
+        temperature: 0,
+      });
+
+      const decoder = new TextDecoder();
+      const reader = result.body!.getReader();
+      const stream: string[] = [];
+
+      while (true) {
+        const { value, done } = await reader.read();
+        if (done) break;
+        stream.push(decoder.decode(value));
+      }
+
+      expect(stream).toEqual(
+        [
+          'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
+          'event: citations',
+          'data: [{"title":"https://www.weather.com.cn/weather/101210101.shtml","url":"https://www.weather.com.cn/weather/101210101.shtml"},{"title":"https://tianqi.moji.com/weather/china/zhejiang/hangzhou","url":"https://tianqi.moji.com/weather/china/zhejiang/hangzhou"},{"title":"https://weather.cma.cn/web/weather/58457.html","url":"https://weather.cma.cn/web/weather/58457.html"},{"title":"https://tianqi.so.com/weather/101210101","url":"https://tianqi.so.com/weather/101210101"},{"title":"https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832","url":"https://www.accuweather.com/zh/cn/hangzhou/106832/weather-forecast/106832"},{"title":"https://www.hzqx.com","url":"https://www.hzqx.com"},{"title":"https://www.hzqx.com/pc/hztq/","url":"https://www.hzqx.com/pc/hztq/"}]\n',
+          'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
+          'event: text',
+          'data: "杭州今"\n',
+          'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
+          'event: text',
+          'data: "天和未来几天的"\n',
+          'id: 506d64fb-e7f2-4d94-b80f-158369e9446d',
+          'event: data',
+          'data: {"id":"506d64fb-e7f2-4d94-b80f-158369e9446d","index":0}\n',
+        ].map((line) => `${line}\n`),
+      );
+
+      expect((await reader.read()).done).toBe(true);
+    });
   });
 });
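
The expected output above documents the wire format: each protocol chunk is serialized as an `id:` / `event:` / `data:` SSE frame, with the `citations` event emitted once before the first `text` delta. A minimal parser for frames of this shape might look as follows (a sketch only; the package's real consumer is `src/utils/fetch/fetchSSE.ts`, extended in this release by +17 −1):

```ts
interface SSEFrame {
  id: string;
  event: string;
  data: unknown;
}

// Split the concatenated stream into blank-line-separated frames, then each
// frame into its `id`, `event`, and JSON-encoded `data` fields.
const parseSSE = (raw: string): SSEFrame[] =>
  raw
    .split('\n\n')
    .filter(Boolean)
    .map((frame) => {
      const fields: Record<string, string> = {};
      for (const line of frame.split('\n').filter(Boolean)) {
        const idx = line.indexOf(': ');
        fields[line.slice(0, idx)] = line.slice(idx + 2);
      }
      return { id: fields.id, event: fields.event, data: JSON.parse(fields.data) };
    });
```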
package/src/libs/agent-runtime/utils/streams/anthropic.ts

@@ -3,9 +3,9 @@ import type { Stream } from '@anthropic-ai/sdk/streaming';
 
 import { ChatStreamCallbacks } from '../../types';
 import {
+  StreamContext,
   StreamProtocolChunk,
   StreamProtocolToolCallChunk,
-  StreamStack,
   StreamToolCallChunkData,
   convertIterableToStream,
   createCallbacksTransformer,

@@ -14,7 +14,7 @@ import {
 
 export const transformAnthropicStream = (
   chunk: Anthropic.MessageStreamEvent,
-  stack: StreamStack,
+  stack: StreamContext,
 ): StreamProtocolChunk => {
   // maybe need another structure to add support for multiple choices
   switch (chunk.type) {

@@ -100,7 +100,7 @@ export const AnthropicStream = (
   stream: Stream<Anthropic.MessageStreamEvent> | ReadableStream,
   callbacks?: ChatStreamCallbacks,
 ) => {
-  const streamStack: StreamStack = { id: '' };
+  const streamStack: StreamContext = { id: '' };
 
   const readableStream =
     stream instanceof ReadableStream ? stream : convertIterableToStream(stream);
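
This release renames `StreamStack` to `StreamContext` across every stream transformer; the defining change sits in `streams/protocol.ts` (+33 −8), whose hunks are not included here. From the usages visible in this diff (`id` everywhere, `tool` in the tool-call paths, `returnedPplxCitation` in the Perplexity branch of `openai.ts` below), the renamed type plausibly carries at least these fields; treat this as a reconstruction, not the package's actual declaration:

```ts
// Reconstructed from usage in this diff, not copied from protocol.ts.
export interface StreamContext {
  id: string;
  // populated on the first tool-call chunk so later chunks can reuse the id
  tool?: {
    id: string;
    index: number;
    name: string;
  };
  // set once the Perplexity citations event has been emitted
  returnedPplxCitation?: boolean;
}
```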
package/src/libs/agent-runtime/utils/streams/bedrock/claude.ts

@@ -4,14 +4,18 @@ import { nanoid } from '@/utils/uuid';
 
 import { ChatStreamCallbacks } from '../../../types';
 import { transformAnthropicStream } from '../anthropic';
-import { StreamStack, createCallbacksTransformer, createSSEProtocolTransformer } from '../protocol';
+import {
+  StreamContext,
+  createCallbacksTransformer,
+  createSSEProtocolTransformer,
+} from '../protocol';
 import { createBedrockStream } from './common';
 
 export const AWSBedrockClaudeStream = (
   res: InvokeModelWithResponseStreamResponse | ReadableStream,
   cb?: ChatStreamCallbacks,
 ): ReadableStream<string> => {
-  const streamStack: StreamStack = { id: 'chat_' + nanoid() };
+  const streamStack: StreamContext = { id: 'chat_' + nanoid() };
 
   const stream = res instanceof ReadableStream ? res : createBedrockStream(res);
 
package/src/libs/agent-runtime/utils/streams/bedrock/llama.ts

@@ -4,8 +4,8 @@ import { nanoid } from '@/utils/uuid';
 
 import { ChatStreamCallbacks } from '../../../types';
 import {
+  StreamContext,
   StreamProtocolChunk,
-  StreamStack,
   createCallbacksTransformer,
   createSSEProtocolTransformer,
 } from '../protocol';

@@ -27,7 +27,7 @@ interface BedrockLlamaStreamChunk {
 
 export const transformLlamaStream = (
   chunk: BedrockLlamaStreamChunk,
-  stack: StreamStack,
+  stack: StreamContext,
 ): StreamProtocolChunk => {
   // maybe need another structure to add support for multiple choices
   if (chunk.stop_reason) {

@@ -41,7 +41,7 @@ export const AWSBedrockLlamaStream = (
   res: InvokeModelWithResponseStreamResponse | ReadableStream,
   cb?: ChatStreamCallbacks,
 ): ReadableStream<string> => {
-  const streamStack: StreamStack = { id: 'chat_' + nanoid() };
+  const streamStack: StreamContext = { id: 'chat_' + nanoid() };
 
   const stream = res instanceof ReadableStream ? res : createBedrockStream(res);
 
package/src/libs/agent-runtime/utils/streams/google-ai.ts

@@ -4,8 +4,8 @@ import { nanoid } from '@/utils/uuid';
 
 import { ChatStreamCallbacks } from '../../types';
 import {
+  StreamContext,
   StreamProtocolChunk,
-  StreamStack,
   StreamToolCallChunkData,
   createCallbacksTransformer,
   createSSEProtocolTransformer,

@@ -14,7 +14,7 @@ import {
 
 const transformGoogleGenerativeAIStream = (
   chunk: EnhancedGenerateContentResponse,
-  stack: StreamStack,
+  stack: StreamContext,
 ): StreamProtocolChunk => {
   // maybe need another structure to add support for multiple choices
   const functionCalls = chunk.functionCalls();

@@ -49,7 +49,7 @@ export const GoogleGenerativeAIStream = (
   rawStream: ReadableStream<EnhancedGenerateContentResponse>,
   callbacks?: ChatStreamCallbacks,
 ) => {
-  const streamStack: StreamStack = { id: 'chat_' + nanoid() };
+  const streamStack: StreamContext = { id: 'chat_' + nanoid() };
 
   return rawStream
     .pipeThrough(createSSEProtocolTransformer(transformGoogleGenerativeAIStream, streamStack))
package/src/libs/agent-runtime/utils/streams/ollama.ts

@@ -4,14 +4,14 @@ import { ChatStreamCallbacks } from '@/libs/agent-runtime';
 import { nanoid } from '@/utils/uuid';
 
 import {
+  StreamContext,
   StreamProtocolChunk,
-  StreamStack,
   createCallbacksTransformer,
   createSSEProtocolTransformer,
   generateToolCallId,
 } from './protocol';
 
-const transformOllamaStream = (chunk: ChatResponse, stack: StreamStack): StreamProtocolChunk => {
+const transformOllamaStream = (chunk: ChatResponse, stack: StreamContext): StreamProtocolChunk => {
   // maybe need another structure to add support for multiple choices
   if (chunk.done && !chunk.message.content) {
     return { data: 'finished', id: stack.id, type: 'stop' };

@@ -39,7 +39,7 @@ export const OllamaStream = (
   res: ReadableStream<ChatResponse>,
   cb?: ChatStreamCallbacks,
 ): ReadableStream<string> => {
-  const streamStack: StreamStack = { id: 'chat_' + nanoid() };
+  const streamStack: StreamContext = { id: 'chat_' + nanoid() };
 
   return res
     .pipeThrough(createSSEProtocolTransformer(transformOllamaStream, streamStack))
package/src/libs/agent-runtime/utils/streams/openai.ts

@@ -1,15 +1,15 @@
 import OpenAI from 'openai';
 import type { Stream } from 'openai/streaming';
 
-import { ChatMessageError } from '@/types/message';
+import { ChatMessageError, CitationItem } from '@/types/message';
 
 import { AgentRuntimeErrorType, ILobeAgentRuntimeErrorType } from '../../error';
 import { ChatStreamCallbacks } from '../../types';
 import {
   FIRST_CHUNK_ERROR_KEY,
+  StreamContext,
   StreamProtocolChunk,
   StreamProtocolToolCallChunk,
-  StreamStack,
   StreamToolCallChunkData,
   convertIterableToStream,
   createCallbacksTransformer,

@@ -20,8 +20,8 @@ import {
 
 export const transformOpenAIStream = (
   chunk: OpenAI.ChatCompletionChunk,
-  stack?: StreamStack,
-): StreamProtocolChunk => {
+  streamContext: StreamContext,
+): StreamProtocolChunk | StreamProtocolChunk[] => {
   // handle the first chunk error
   if (FIRST_CHUNK_ERROR_KEY in chunk) {
     delete chunk[FIRST_CHUNK_ERROR_KEY];

@@ -48,8 +48,8 @@ export const transformOpenAIStream = (
   if (typeof item.delta?.tool_calls === 'object' && item.delta.tool_calls?.length > 0) {
     return {
       data: item.delta.tool_calls.map((value, index): StreamToolCallChunkData => {
-        if (stack && !stack.tool) {
-          stack.tool = { id: value.id!, index: value.index, name: value.function!.name! };
+        if (streamContext && !streamContext.tool) {
+          streamContext.tool = { id: value.id!, index: value.index, name: value.function!.name! };
         }
 

@@ -57,7 +57,10 @@ export const transformOpenAIStream = (
           arguments: value.function?.arguments ?? '{}',
           name: value.function?.name ?? null,
         },
-        id: value.id || stack?.tool?.id || generateToolCallId(index, value.function?.name),
+        id:
+          value.id ||
+          streamContext?.tool?.id ||
+          generateToolCallId(index, value.function?.name),
 
         // mistral's tool calling don't have index and function field, it's data like:
         // [{"id":"xbhnmTtY7","function":{"name":"lobe-image-designer____text2image____builtin","arguments":"{\"prompts\": [\"A photo of a small, fluffy dog with a playful expression and wagging tail.\", \"A watercolor painting of a small, energetic dog with a glossy coat and bright eyes.\", \"A vector illustration of a small, adorable dog with a short snout and perky ears.\", \"A drawing of a small, scruffy dog with a mischievous grin and a wagging tail.\"], \"quality\": \"standard\", \"seeds\": [123456, 654321, 111222, 333444], \"size\": \"1024x1024\", \"style\": \"vivid\"}"}}]
@@ -114,6 +117,21 @@ export const transformOpenAIStream = (
   }
 
   if (typeof content === 'string') {
+    // in the Perplexity API the citations arrive with every chunk, but we only need to return them once
+
+    if ('citations' in chunk && !streamContext?.returnedPplxCitation) {
+      streamContext.returnedPplxCitation = true;
+
+      const citations = (chunk.citations as any[]).map((item) =>
+        typeof item === 'string' ? ({ title: item, url: item } as CitationItem) : item,
+      );
+
+      return [
+        { data: citations, id: chunk.id, type: 'citations' },
+        { data: content, id: chunk.id, type: 'text' },
+      ];
+    }
+
     return { data: content, id: chunk.id, type: 'text' };
   }
 }
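
This branch is why `transformOpenAIStream` now returns `StreamProtocolChunk | StreamProtocolChunk[]`: the first content chunk carrying citations fans out into a `citations` chunk plus a `text` chunk, and the `returnedPplxCitation` flag deduplicates the payload that Perplexity repeats on every chunk. Condensed into a standalone sketch (types narrowed for brevity):

```ts
type Chunk = { data: unknown; id: string; type: 'citations' | 'text' };

// Emit the citations list exactly once, alongside the first text delta;
// later chunks fall through to a plain text chunk.
const emitTextWithCitations = (
  chunk: { id: string; citations?: string[] },
  content: string,
  ctx: { returnedPplxCitation?: boolean },
): Chunk | Chunk[] => {
  if (chunk.citations && !ctx.returnedPplxCitation) {
    ctx.returnedPplxCitation = true;
    const citations = chunk.citations.map((url) => ({ title: url, url }));
    return [
      { data: citations, id: chunk.id, type: 'citations' },
      { data: content, id: chunk.id, type: 'text' },
    ];
  }
  return { data: content, id: chunk.id, type: 'text' };
};
```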
@@ -164,7 +182,7 @@ export const OpenAIStream = (
   stream: Stream<OpenAI.ChatCompletionChunk> | ReadableStream,
   { callbacks, provider, bizErrorTypeTransformer }: OpenAIStreamOptions = {},
 ) => {
-  const streamStack: StreamStack = { id: '' };
+  const streamStack: StreamContext = { id: '' };
 
   const readableStream =
     stream instanceof ReadableStream ? stream : convertIterableToStream(stream);