@lobehub/chat 1.15.31 → 1.15.33
This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of @lobehub/chat might be problematic.
- package/CHANGELOG.md +50 -0
- package/Dockerfile +3 -1
- package/Dockerfile.database +3 -1
- package/README.md +8 -14
- package/README.zh-CN.md +8 -14
- package/package.json +1 -1
- package/src/libs/agent-runtime/utils/streams/openai.test.ts +201 -99
- package/src/libs/agent-runtime/utils/streams/openai.ts +18 -8
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,56 @@

# Changelog

+### [Version 1.15.33](https://github.com/lobehub/lobe-chat/compare/v1.15.32...v1.15.33)
+
+<sup>Released on **2024-09-10**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix `/etc/resolv.conf` edit permission in docker image.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix `/etc/resolv.conf` edit permission in docker image, closes [#3880](https://github.com/lobehub/lobe-chat/issues/3880) ([fdaa190](https://github.com/lobehub/lobe-chat/commit/fdaa190))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.15.32](https://github.com/lobehub/lobe-chat/compare/v1.15.31...v1.15.32)
+
+<sup>Released on **2024-09-10**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix tools calling in some edge cases.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix tools calling in some edge cases, closes [#3872](https://github.com/lobehub/lobe-chat/issues/3872) ([2ed759d](https://github.com/lobehub/lobe-chat/commit/2ed759d))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
### [Version 1.15.31](https://github.com/lobehub/lobe-chat/compare/v1.15.30...v1.15.31)

<sup>Released on **2024-09-10**</sup>
package/Dockerfile
CHANGED
@@ -10,12 +10,13 @@ RUN \
  fi \
  # Add required package & update base package
  && apk update \
-  && apk add --no-cache bind-tools proxychains-ng \
+  && apk add --no-cache bind-tools proxychains-ng sudo \
  && apk upgrade --no-cache \
  # Add user nextjs to run the app
  && addgroup --system --gid 1001 nodejs \
  && adduser --system --uid 1001 nextjs \
  && chown -R nextjs:nodejs "/etc/proxychains" \
+  && echo "nextjs ALL=(ALL) NOPASSWD: /bin/chmod * /etc/resolv.conf" >> /etc/sudoers \
  && rm -rf /tmp/* /var/cache/apk/*

## Builder image, install all the dependencies and build the app
@@ -190,6 +191,7 @@ CMD \
  fi; \
  # Fix DNS resolving issue in Docker Compose, ref https://github.com/lobehub/lobe-chat/pull/3837
  if [ -f "/etc/resolv.conf" ]; then \
+    sudo chmod 666 "/etc/resolv.conf"; \
    resolv_conf=$(grep '^nameserver' "/etc/resolv.conf" | awk '{print "nameserver " $2}'); \
    printf "%s\n" \
      "$resolv_conf" \
package/Dockerfile.database
CHANGED
@@ -10,12 +10,13 @@ RUN \
  fi \
  # Add required package & update base package
  && apk update \
-  && apk add --no-cache bind-tools proxychains-ng \
+  && apk add --no-cache bind-tools proxychains-ng sudo \
  && apk upgrade --no-cache \
  # Add user nextjs to run the app
  && addgroup --system --gid 1001 nodejs \
  && adduser --system --uid 1001 nextjs \
  && chown -R nextjs:nodejs "/etc/proxychains" \
+  && echo "nextjs ALL=(ALL) NOPASSWD: /bin/chmod * /etc/resolv.conf" >> /etc/sudoers \
  && rm -rf /tmp/* /var/cache/apk/*

## Builder image, install all the dependencies and build the app
@@ -222,6 +223,7 @@ CMD \
  fi; \
  # Fix DNS resolving issue in Docker Compose, ref https://github.com/lobehub/lobe-chat/pull/3837
  if [ -f "/etc/resolv.conf" ]; then \
+    sudo chmod 666 "/etc/resolv.conf"; \
    resolv_conf=$(grep '^nameserver' "/etc/resolv.conf" | awk '{print "nameserver " $2}'); \
    printf "%s\n" \
      "$resolv_conf" \
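Taken together, the two Dockerfile changes implement the `/etc/resolv.conf` permission fix noted in the changelog: the build stage now installs `sudo` and appends a sudoers rule that lets the non-root `nextjs` user run `/bin/chmod` on `/etc/resolv.conf` without a password, and the container startup command runs `sudo chmod 666 "/etc/resolv.conf"` before rewriting the nameserver entries, so the Docker Compose DNS workaround referenced in PR #3837 can still modify the file when the container runs as `nextjs`.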
package/README.md
CHANGED
@@ -285,20 +285,14 @@ Our marketplace is not just a showcase platform but also a collaborative space.

<!-- AGENT LIST -->

-| Recent Submits
-
-| [
-| [
-| [
-
-
-
-3. Use the above answers as evidence to logically and coherently provide a final answer to your question, assisted by visual charts.
-
-Please tell me what issue you would like to explore?<br/>`backtracking-questions` `thinking-strategies` `problem-analysis` |
-\| [Unreal Engine Master](https://chat-preview.lobehub.com/market?agent=unreal-engine-master)<br/><sup>By **[thedivergentai](https://github.com/thedivergentai)** on **2024-08-27**</sup> | Unreal Game Development Companion<br/>`game-development` `unreal-engine` `software-engineering` |
-
-> 📊 Total agents: [<kbd>**322**</kbd> ](https://github.com/lobehub/lobe-chat-agents)
+| Recent Submits | Description |
+| -------------- | ----------- |
+| [AI Agent Generator](https://chat-preview.lobehub.com/market?agent=ai-agent-generator)<br/><sup>By **[xyftw](https://github.com/xyftw)** on **2024-09-10**</sup> | Skilled at creating AI Agent character descriptions that meet the needs.<br/>`ai-agent` `character-creation` |
+| [HTML to React](https://chat-preview.lobehub.com/market?agent=web-react)<br/><sup>By **[xingwang02](https://github.com/xingwang02)** on **2024-09-10**</sup> | Input HTML snippets and convert them into React components<br/>`react` `html` |
+| [FiveM & QBCore Framework Expert](https://chat-preview.lobehub.com/market?agent=lua-development)<br/><sup>By **[heartsiddharth1](https://github.com/heartsiddharth1)** on **2024-09-08**</sup> | Expertise in FiveM development, QBCore framework, Lua programming, JavaScript, database management, server administration, version control, full-stack web development, DevOps, and community engagement with a focus on performance, security, and best practices.<br/>`five-m` `qb-core` `lua` `java-script` `my-sql` `server-management` `git` `full-stack-web-development` `dev-ops` `community-engagement` |
+| [Nuxt 3/Vue.js Master Developer](https://chat-preview.lobehub.com/market?agent=nuxt-vue-developer)<br/><sup>By **[Kadreev](https://github.com/Kadreev)** on **2024-09-03**</sup> | Specialized in full-stack development with Nuxt 3 expertise.<br/>`nuxt-3` `vue-js` `full-stack-development` `java-script` `web-applications` |
+
+> 📊 Total agents: [<kbd>**325**</kbd> ](https://github.com/lobehub/lobe-chat-agents)

<!-- AGENT LIST -->

package/README.zh-CN.md
CHANGED
@@ -273,20 +273,14 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地

<!-- AGENT LIST -->

-| 最近新增
-
-| [
-| [
-| [
-
-
-
-3. 将上述回答作为论据,有逻辑、条理地,使用可视化图表辅助对你的问题进行最终作答。
-
-请告诉我你想要探讨的问题是什么?<br/>`后退提问` `思考策略` `问题分析` |
-\| [虚幻引擎大师](https://chat-preview.lobehub.com/market?agent=unreal-engine-master)<br/><sup>By **[thedivergentai](https://github.com/thedivergentai)** on **2024-08-27**</sup> | 虚幻游戏开发助手<br/>`游戏开发` `虚幻引擎` `软件工程` |
-
-> 📊 Total agents: [<kbd>**322**</kbd> ](https://github.com/lobehub/lobe-chat-agents)
+| 最近新增 | 助手说明 |
+| -------- | -------- |
+| [AI 代理生成器](https://chat-preview.lobehub.com/market?agent=ai-agent-generator)<br/><sup>By **[xyftw](https://github.com/xyftw)** on **2024-09-10**</sup> | 擅长创建满足需求的 AI 代理角色描述。<br/>`ai-agent` `角色创建` |
+| [HTML to React](https://chat-preview.lobehub.com/market?agent=web-react)<br/><sup>By **[xingwang02](https://github.com/xingwang02)** on **2024-09-10**</sup> | 输入 HTML 片段,转化为 React 组件<br/>`react、-html` |
+| [FiveM 和 QBCore 框架专家](https://chat-preview.lobehub.com/market?agent=lua-development)<br/><sup>By **[heartsiddharth1](https://github.com/heartsiddharth1)** on **2024-09-08**</sup> | 在 FiveM 开发、QBCore 框架、Lua 编程、JavaScript、数据库管理、服务器管理、版本控制、全栈 Web 开发、DevOps 和社区参与方面具有专业知识,重点关注性能、安全性和最佳实践。<br/>`five-m` `qb-core` `lua` `java-script` `my-sql` `server-management` `git` `full-stack-web-development` `dev-ops` `community-engagement` |
+| [Nuxt 3/Vue.js 大师开发者](https://chat-preview.lobehub.com/market?agent=nuxt-vue-developer)<br/><sup>By **[Kadreev](https://github.com/Kadreev)** on **2024-09-03**</sup> | 专注于全栈开发,拥有 Nuxt 3 专业知识。<br/>`nuxt-3` `vue-js` `全栈开发` `java-script` `网络应用` |
+
+> 📊 Total agents: [<kbd>**325**</kbd> ](https://github.com/lobehub/lobe-chat-agents)

<!-- AGENT LIST -->

package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
  "name": "@lobehub/chat",
-  "version": "1.15.31",
+  "version": "1.15.33",
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
  "keywords": [
    "framework",
package/src/libs/agent-runtime/utils/streams/openai.test.ts
CHANGED
@@ -78,60 +78,6 @@ describe('OpenAIStream', () => {
    expect(onCompletionMock).toHaveBeenCalledTimes(1);
  });

-  it('should handle tool calls', async () => {
-    const mockOpenAIStream = new ReadableStream({
-      start(controller) {
-        controller.enqueue({
-          choices: [
-            {
-              delta: {
-                tool_calls: [
-                  {
-                    function: { name: 'tool1', arguments: '{}' },
-                    id: 'call_1',
-                    index: 0,
-                    type: 'function',
-                  },
-                  {
-                    function: { name: 'tool2', arguments: '{}' },
-                    id: 'call_2',
-                    index: 1,
-                  },
-                ],
-              },
-              index: 0,
-            },
-          ],
-          id: '2',
-        });
-
-        controller.close();
-      },
-    });
-
-    const onToolCallMock = vi.fn();
-
-    const protocolStream = OpenAIStream(mockOpenAIStream, {
-      onToolCall: onToolCallMock,
-    });
-
-    const decoder = new TextDecoder();
-    const chunks = [];
-
-    // @ts-ignore
-    for await (const chunk of protocolStream) {
-      chunks.push(decoder.decode(chunk, { stream: true }));
-    }
-
-    expect(chunks).toEqual([
-      'id: 2\n',
-      'event: tool_calls\n',
-      `data: [{"function":{"name":"tool1","arguments":"{}"},"id":"call_1","index":0,"type":"function"},{"function":{"name":"tool2","arguments":"{}"},"id":"call_2","index":1,"type":"function"}]\n\n`,
-    ]);
-
-    expect(onToolCallMock).toHaveBeenCalledTimes(1);
-  });
-
  it('should handle empty stream', async () => {
    const mockStream = new ReadableStream({
      start(controller) {
@@ -216,51 +162,6 @@ describe('OpenAIStream', () => {
    ]);
  });

-  it('should handle tool calls without index and type', async () => {
-    const mockOpenAIStream = new ReadableStream({
-      start(controller) {
-        controller.enqueue({
-          choices: [
-            {
-              delta: {
-                tool_calls: [
-                  {
-                    function: { name: 'tool1', arguments: '{}' },
-                    id: 'call_1',
-                  },
-                  {
-                    function: { name: 'tool2', arguments: '{}' },
-                    id: 'call_2',
-                  },
-                ],
-              },
-              index: 0,
-            },
-          ],
-          id: '5',
-        });
-
-        controller.close();
-      },
-    });
-
-    const protocolStream = OpenAIStream(mockOpenAIStream);
-
-    const decoder = new TextDecoder();
-    const chunks = [];
-
-    // @ts-ignore
-    for await (const chunk of protocolStream) {
-      chunks.push(decoder.decode(chunk, { stream: true }));
-    }
-
-    expect(chunks).toEqual([
-      'id: 5\n',
-      'event: tool_calls\n',
-      `data: [{"function":{"name":"tool1","arguments":"{}"},"id":"call_1","index":0,"type":"function"},{"function":{"name":"tool2","arguments":"{}"},"id":"call_2","index":1,"type":"function"}]\n\n`,
-    ]);
-  });
-
  it('should handle error when there is not correct error', async () => {
    const mockOpenAIStream = new ReadableStream({
      start(controller) {
@@ -302,4 +203,205 @@ describe('OpenAIStream', () => {
      ].map((i) => `${i}\n`),
    );
  });
+
+  describe('Tools Calling', () => {
+    it('should handle OpenAI official tool calls', async () => {
+      const mockOpenAIStream = new ReadableStream({
+        start(controller) {
+          controller.enqueue({
+            choices: [
+              {
+                delta: {
+                  tool_calls: [
+                    {
+                      function: { name: 'tool1', arguments: '{}' },
+                      id: 'call_1',
+                      index: 0,
+                      type: 'function',
+                    },
+                    {
+                      function: { name: 'tool2', arguments: '{}' },
+                      id: 'call_2',
+                      index: 1,
+                    },
+                  ],
+                },
+                index: 0,
+              },
+            ],
+            id: '2',
+          });
+
+          controller.close();
+        },
+      });
+
+      const onToolCallMock = vi.fn();
+
+      const protocolStream = OpenAIStream(mockOpenAIStream, {
+        onToolCall: onToolCallMock,
+      });
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual([
+        'id: 2\n',
+        'event: tool_calls\n',
+        `data: [{"function":{"name":"tool1","arguments":"{}"},"id":"call_1","index":0,"type":"function"},{"function":{"name":"tool2","arguments":"{}"},"id":"call_2","index":1,"type":"function"}]\n\n`,
+      ]);
+
+      expect(onToolCallMock).toHaveBeenCalledTimes(1);
+    });
+
+    it('should handle tool calls without index and type like mistral and minimax', async () => {
+      const mockOpenAIStream = new ReadableStream({
+        start(controller) {
+          controller.enqueue({
+            choices: [
+              {
+                delta: {
+                  tool_calls: [
+                    {
+                      function: { name: 'tool1', arguments: '{}' },
+                      id: 'call_1',
+                    },
+                    {
+                      function: { name: 'tool2', arguments: '{}' },
+                      id: 'call_2',
+                    },
+                  ],
+                },
+                index: 0,
+              },
+            ],
+            id: '5',
+          });
+
+          controller.close();
+        },
+      });
+
+      const protocolStream = OpenAIStream(mockOpenAIStream);
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual([
+        'id: 5\n',
+        'event: tool_calls\n',
+        `data: [{"function":{"name":"tool1","arguments":"{}"},"id":"call_1","index":0,"type":"function"},{"function":{"name":"tool2","arguments":"{}"},"id":"call_2","index":1,"type":"function"}]\n\n`,
+      ]);
+    });
+
+    it('should handle LiteLLM tools Calling', async () => {
+      const streamData = [
+        {
+          id: '1',
+          choices: [{ index: 0, delta: { content: '为了获取杭州的天气情况', role: 'assistant' } }],
+        },
+        {
+          id: '1',
+          choices: [{ index: 0, delta: { content: '让我为您查询一下。' } }],
+        },
+        {
+          id: '1',
+          choices: [
+            {
+              index: 0,
+              delta: {
+                content: '',
+                tool_calls: [
+                  {
+                    id: 'toolu_01VQtK4W9kqxGGLHgsPPxiBj',
+                    function: { arguments: '', name: 'realtime-weather____fetchCurrentWeather' },
+                    type: 'function',
+                    index: 0,
+                  },
+                ],
+              },
+            },
+          ],
+        },
+        {
+          id: '1',
+          choices: [
+            {
+              index: 0,
+              delta: {
+                content: '',
+                tool_calls: [
+                  {
+                    function: { arguments: '{"city": "\u676d\u5dde"}' },
+                    type: 'function',
+                    index: 0,
+                  },
+                ],
+              },
+            },
+          ],
+        },
+        {
+          id: '1',
+          choices: [{ finish_reason: 'tool_calls', index: 0, delta: {} }],
+        },
+      ];
+
+      const mockOpenAIStream = new ReadableStream({
+        start(controller) {
+          streamData.forEach((data) => {
+            controller.enqueue(data);
+          });
+
+          controller.close();
+        },
+      });
+
+      const onToolCallMock = vi.fn();
+
+      const protocolStream = OpenAIStream(mockOpenAIStream, {
+        onToolCall: onToolCallMock,
+      });
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual(
+        [
+          'id: 1',
+          'event: text',
+          `data: "为了获取杭州的天气情况"\n`,
+          'id: 1',
+          'event: text',
+          `data: "让我为您查询一下。"\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"","name":"realtime-weather____fetchCurrentWeather"},"id":"toolu_01VQtK4W9kqxGGLHgsPPxiBj","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"{\\"city\\": \\"杭州\\"}"},"id":"toolu_01VQtK4W9kqxGGLHgsPPxiBj","index":0,"type":"function"}]\n`,
+          'id: 1',
+          'event: stop',
+          `data: "tool_calls"\n`,
+        ].map((i) => `${i}\n`),
+      );
+
+      expect(onToolCallMock).toHaveBeenCalledTimes(2);
+    });
+  });
});
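The expectations in these tests spell out the wire format that `OpenAIStream` emits: every protocol chunk becomes an `id:` line, an `event:` line, and a JSON-encoded `data:` line followed by a blank line. Below is a minimal sketch of that framing; the `ProtocolChunk` shape and the `toProtocolText` helper are illustrative stand-ins for this note only, not the package's actual `protocol.ts` exports.

```ts
// Illustrative only: frames a protocol chunk the way the tests above expect it,
// i.e. an "id: …" line, an "event: …" line, then a JSON-encoded "data: …" line
// followed by a blank line.
interface ProtocolChunk {
  id: string;
  type: 'text' | 'tool_calls' | 'stop' | 'data';
  data: unknown;
}

const toProtocolText = (chunk: ProtocolChunk): string =>
  `id: ${chunk.id}\n` + `event: ${chunk.type}\n` + `data: ${JSON.stringify(chunk.data)}\n\n`;

// A text delta from the LiteLLM fixture would serialize to:
//   id: 1
//   event: text
//   data: "为了获取杭州的天气情况"
console.log(toProtocolText({ id: '1', type: 'text', data: '为了获取杭州的天气情况' }));
```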
package/src/libs/agent-runtime/utils/streams/openai.ts
CHANGED
@@ -8,13 +8,17 @@ import { ChatStreamCallbacks } from '../../types';
import {
  StreamProtocolChunk,
  StreamProtocolToolCallChunk,
+  StreamStack,
  StreamToolCallChunkData,
  createCallbacksTransformer,
  createSSEProtocolTransformer,
  generateToolCallId,
} from './protocol';

-export const transformOpenAIStream = (chunk: OpenAI.ChatCompletionChunk): StreamProtocolChunk => {
+export const transformOpenAIStream = (
+  chunk: OpenAI.ChatCompletionChunk,
+  stack?: StreamStack,
+): StreamProtocolChunk => {
  // maybe need another structure to add support for multiple choices

  try {
@@ -23,16 +27,20 @@ export const transformOpenAIStream = (chunk: OpenAI.ChatCompletionChunk): Stream
      return { data: chunk, id: chunk.id, type: 'data' };
    }

-    if (typeof item.delta?.content === 'string') {
+    if (typeof item.delta?.content === 'string' && !item.finish_reason && !item.delta?.tool_calls) {
      return { data: item.delta.content, id: chunk.id, type: 'text' };
    }

    if (item.delta?.tool_calls) {
      return {
-        data: item.delta.tool_calls.map(
-          (
+        data: item.delta.tool_calls.map((value, index): StreamToolCallChunkData => {
+          if (stack && !stack.tool) {
+            stack.tool = { id: value.id!, index: value.index, name: value.function!.name! };
+          }
+
+          return {
            function: value.function,
-            id: value.id || generateToolCallId(index, value.function?.name),
+            id: value.id || stack?.tool?.id || generateToolCallId(index, value.function?.name),

            // mistral's tool calling don't have index and function field, it's data like:
            // [{"id":"xbhnmTtY7","function":{"name":"lobe-image-designer____text2image____builtin","arguments":"{\"prompts\": [\"A photo of a small, fluffy dog with a playful expression and wagging tail.\", \"A watercolor painting of a small, energetic dog with a glossy coat and bright eyes.\", \"A vector illustration of a small, adorable dog with a short snout and perky ears.\", \"A drawing of a small, scruffy dog with a mischievous grin and a wagging tail.\"], \"quality\": \"standard\", \"seeds\": [123456, 654321, 111222, 333444], \"size\": \"1024x1024\", \"style\": \"vivid\"}"}}]
@@ -43,8 +51,8 @@ export const transformOpenAIStream = (chunk: OpenAI.ChatCompletionChunk): Stream
            // so we need to add these default values
            index: typeof value.index !== 'undefined' ? value.index : index,
            type: value.type || 'function',
-          }
-        ),
+          };
+        }),
        id: chunk.id,
        type: 'tool_calls',
      } as StreamProtocolToolCallChunk;
@@ -97,10 +105,12 @@ export const OpenAIStream = (
  stream: Stream<OpenAI.ChatCompletionChunk> | ReadableStream,
  callbacks?: ChatStreamCallbacks,
) => {
+  const streamStack: StreamStack = { id: '' };
+
  const readableStream =
    stream instanceof ReadableStream ? stream : readableFromAsyncIterable(chatStreamable(stream));

  return readableStream
-    .pipeThrough(createSSEProtocolTransformer(transformOpenAIStream))
+    .pipeThrough(createSSEProtocolTransformer(transformOpenAIStream, streamStack))
    .pipeThrough(createCallbacksTransformer(callbacks));
};