@lobehub/chat 1.1.11 → 1.1.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/README.md +1 -1
- package/README.zh-CN.md +1 -1
- package/docs/self-hosting/advanced/server-database.mdx +1 -1
- package/docs/self-hosting/advanced/server-database.zh-CN.mdx +1 -1
- package/docs/self-hosting/advanced/sso-providers/auth0.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/auth0.zh-CN.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/authentik.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/authentik.zh-CN.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/github.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/github.zh-CN.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.zh-CN.mdx +2 -3
- package/docs/self-hosting/advanced/sso-providers/zitadel.mdx +3 -4
- package/docs/self-hosting/advanced/sso-providers/zitadel.zh-CN.mdx +3 -4
- package/package.json +1 -1
- package/src/app/(main)/settings/about/features/Version.tsx +3 -7
- package/src/libs/agent-runtime/azureOpenai/index.test.ts +2 -2
- package/src/libs/agent-runtime/azureOpenai/index.ts +14 -3
- package/src/libs/agent-runtime/utils/streams/azureOpenai.test.ts +537 -0
- package/src/libs/agent-runtime/utils/streams/azureOpenai.ts +89 -0
- package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,56 @@

 # Changelog

+### [Version 1.1.13](https://github.com/lobehub/lobe-chat/compare/v1.1.12...v1.1.13)
+
+<sup>Released on **2024-06-27**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: -check_updates cannot be set by FEATURE_FLAGS.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: -check_updates cannot be set by FEATURE_FLAGS, closes [#3038](https://github.com/lobehub/lobe-chat/issues/3038) ([bdadfee](https://github.com/lobehub/lobe-chat/commit/bdadfee))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.1.12](https://github.com/lobehub/lobe-chat/compare/v1.1.11...v1.1.12)
+
+<sup>Released on **2024-06-26**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix azure tools calling.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix azure tools calling, closes [#3046](https://github.com/lobehub/lobe-chat/issues/3046) ([b929985](https://github.com/lobehub/lobe-chat/commit/b929985))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.1.11](https://github.com/lobehub/lobe-chat/compare/v1.1.10...v1.1.11)

 <sup>Released on **2024-06-25**</sup>
package/README.md
CHANGED
@@ -233,7 +233,7 @@ In addition, these plugins are not limited to news aggregation, but can also ext
 | [Search1API](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **fatwang2** on **2024-05-06**</sup> | Search aggregation service, specifically designed for LLMs<br/>`web` `search` |
 | [Search Google via Serper](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **Barry** on **2024-04-30**</sup> | Google search engine via Serper.dev free API (2500x🆓/month)<br/>`web` `search` |

-> 📊 Total plugins: [<kbd>**
+> 📊 Total plugins: [<kbd>**51**</kbd>](https://github.com/lobehub/lobe-chat-plugins)

 <!-- PLUGIN LIST -->

package/README.zh-CN.md
CHANGED
@@ -226,7 +226,7 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地
 | [Search1API](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **fatwang2** on **2024-05-06**</sup> | 搜索聚合服务,专为 LLMs 设计<br/>`web` `search` |
 | [通过 Serper 搜索 Google](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **Barry** on **2024-04-30**</sup> | 通过 Serper.dev 免费 API 进行 Google 搜索引擎(每月 2500 次🆓)<br/>`网络` `搜索` |

-> 📊 Total plugins: [<kbd>**
+> 📊 Total plugins: [<kbd>**51**</kbd>](https://github.com/lobehub/lobe-chat-plugins)

 <!-- PLUGIN LIST -->

package/docs/self-hosting/advanced/server-database.mdx
CHANGED

@@ -237,7 +237,7 @@ The corresponding environment variables are:
 ```shell
 # Bucket name
 S3_BUCKET=lobechat
-# Bucket request endpoint
+# Bucket request endpoint(Be sure to delete /lobechat and do not copy directly.)
 S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
 # Public domain for accessing the bucket
 NEXT_PUBLIC_S3_DOMAIN=https://s3-for-lobechat.your-domain.com
package/docs/self-hosting/advanced/server-database.zh-CN.mdx
CHANGED

@@ -223,7 +223,7 @@ LobeChat 在很早以前就支持了多模态 的 AI 会话,其中涉及到图
 ```shell
 # 存储桶的名称
 S3_BUCKET=lobechat
-# 存储桶的请求端点
+# 存储桶的请求端点(注意要把/lobechat删掉,不要直接复制)
 S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
 # 存储桶对外的访问域名
 NEXT_PUBLIC_S3_DOMAIN=https://s3-for-lobechat.your-domain.com
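Both server-database hunks add the same warning to the R2 example: `S3_ENDPOINT` must be the bucket origin only, so the trailing `/lobechat` path that Cloudflare R2 appends when you copy the S3 API URL has to be removed. A minimal sketch of that clean-up step (the helper name is ours, not part of the package):

```ts
// Hypothetical helper, not shipped by LobeChat: turn the URL copied from the
// R2 dashboard (which ends with the bucket name) into a valid S3_ENDPOINT.
const normalizeS3Endpoint = (copiedUrl: string, bucket: string): string => {
  const url = new URL(copiedUrl);
  // Keep only the origin; the bucket itself is configured via S3_BUCKET.
  return url.pathname === `/${bucket}` || url.pathname === `/${bucket}/` ? url.origin : copiedUrl;
};

// normalizeS3Endpoint('https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com/lobechat', 'lobechat')
// => 'https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com'
```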
package/docs/self-hosting/advanced/sso-providers/auth0.mdx
CHANGED

@@ -76,9 +76,8 @@ When deploying LobeChat, you need to configure the following environment variabl

 | Environment Variable | Type | Description |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | Optional | Select the single sign-on provider for LoboChat. Use `auth0` for Auth0. |
+| `NEXT_AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate a key using the following command: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | Optional | Select the single sign-on provider for LoboChat. Use `auth0` for Auth0. |
 | `AUTH0_CLIENT_ID` | Required | Client ID of the Auth0 application |
 | `AUTH0_CLIENT_SECRET` | Required | Client Secret of the Auth0 application |
 | `AUTH0_ISSUER` | Required | Domain of the Auth0 application, `https://example.auth0.com` |
package/docs/self-hosting/advanced/sso-providers/auth0.zh-CN.mdx
CHANGED

@@ -71,9 +71,8 @@ http(s)://your-domain/api/auth/callback/auth0

 | 环境变量 | 类型 | 描述 |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Auth0 请填写 `auth0`。 |
+| `NEXT_AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成秘钥: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Auth0 请填写 `auth0`。 |
 | `AUTH0_CLIENT_ID` | 必选 | Auth0 应用程序的 Client ID |
 | `AUTH0_CLIENT_SECRET` | 必选 | Auth0 应用程序的 Client Secret |
 | `AUTH0_ISSUER` | 必选 | Auth0 应用程序的 Domain,`https://example.auth0.com` |
package/docs/self-hosting/advanced/sso-providers/authentik.mdx
CHANGED

@@ -54,9 +54,8 @@ When deploying LobeChat, you need to configure the following environment variabl

 | Environment Variable | Type | Description |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | Required | Select the SSO provider for LoboChat. Use `authentik` for Authentik. |
+| `NEXT_AUTH_SECRET` | Required | The secret used to encrypt Auth.js session tokens. You can generate a secret using the following command: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the SSO provider for LoboChat. Use `authentik` for Authentik. |
 | `AUTHENTIK_CLIENT_ID` | Required | The Client ID from the Authentik application provider details page |
 | `AUTHENTIK_CLIENT_SECRET` | Required | The Client Secret from the Authentik application provider details page |
 | `AUTHENTIK_ISSUER` | Required | The OpenID Configuration Issuer from the Authentik application provider details page |
package/docs/self-hosting/advanced/sso-providers/authentik.zh-CN.mdx
CHANGED

@@ -49,9 +49,8 @@ https://your-domain/api/auth/callback/authentik

 | 环境变量 | 类型 | 描述 |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Authentik 请填写 `authentik`。 |
+| `NEXT_AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成秘钥: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Authentik 请填写 `authentik`。 |
 | `AUTHENTIK_CLIENT_ID` | 必选 | Authentik 提供程序详情页的 客户端 ID |
 | `AUTHENTIK_CLIENT_SECRET` | 必选 | Authentik 提供程序详情页的 客户端 Secret |
 | `AUTHENTIK_ISSUER` | 必选 | Authentik 提供程序详情页的 OpenID 配置颁发者 |
package/docs/self-hosting/advanced/sso-providers/github.mdx
CHANGED

@@ -82,9 +82,8 @@ When deploying LobeChat, you need to configure the following environment variabl

 | Environment Variable | Type | Description |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeChat. Use `github` for Github. |
+| `NEXT_AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate the key using the command: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the Single Sign-On provider for LobeChat. Use `github` for Github. |
 | `GITHUB_CLIENT_ID` | Required | Client ID in the Github App details page. |
 | `GITHUB_CLIENT_SECRET` | Required | Client Secret in the Github App details page. |
 | `ACCESS_CODE` | Required | Add a password for accessing this service. You can set a long random password to "disable" access code authorization. |
package/docs/self-hosting/advanced/sso-providers/github.zh-CN.mdx
CHANGED

@@ -78,9 +78,8 @@ tags:

 | 环境变量 | 类型 | 描述 |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Github 请填写 `github`。 |
+| `NEXT_AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成秘钥: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Github 请填写 `github`。 |
 | `GITHUB_CLIENT_ID` | 必选 | Github App详情页的 客户端 ID |
 | `GITHUB_CLIENT_SECRET` | 必选 | Github App详情页的 客户端 Secret |
 | `ACCESS_CODE` | 必选 | 添加访问此服务的密码,你可以设置一个足够长的随机密码以 “禁用” 访问码授权 |
package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.mdx
CHANGED

@@ -71,9 +71,8 @@ When deploying LobeChat, you need to configure the following environment variabl

 | Environment Variable | Type | Description |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | Required | Select the single sign-on provider for LoboChat. Use `azure-ad` for Microsoft Entra ID. |
+| `NEXT_AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate the key using the following command: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | Required | Select the single sign-on provider for LoboChat. Use `azure-ad` for Microsoft Entra ID. |
 | `AZURE_AD_CLIENT_ID` | Required | Client ID of the Microsoft Entra ID application. |
 | `AZURE_AD_CLIENT_SECRET` | Required | Client Secret of the Microsoft Entra ID application. |
 | `AZURE_AD_TENANT_ID` | Required | Tenant ID of the Microsoft Entra ID application. |
package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.zh-CN.mdx
CHANGED

@@ -68,9 +68,8 @@ https://your-domain/api/auth/callback/azure-ad

 | 环境变量 | 类型 | 描述 |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Microsoft Entra ID 请填写 `azure-ad`。 |
+| `NEXT_AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成秘钥: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | 必选 | 选择 LoboChat 的单点登录提供商。使用 Microsoft Entra ID 请填写 `azure-ad`。 |
 | `AZURE_AD_CLIENT_ID` | 必选 | Microsoft Entra ID 应用程序的 Client ID |
 | `AZURE_AD_CLIENT_SECRET` | 必选 | Microsoft Entra ID 应用程序的 Client Secret |
 | `AZURE_AD_TENANT_ID` | 必选 | Microsoft Entra ID 应用程序的 Tenant ID |
package/docs/self-hosting/advanced/sso-providers/zitadel.mdx
CHANGED

@@ -58,7 +58,7 @@ http(s)://your-domain/api/auth/callback/zitadel
 URL is consistent with the deployed URL.

 - Replace `http(s)://your-domain` with the actual URL that LobeChat is deployed to.
-
+
 </Callout>

 Confirm the configuration and click **Create**.

@@ -99,9 +99,8 @@ When deploying LobeChat, you need to configure the following environment variabl

 | Environment Variable | Type | Description |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | Optional | Select the single sign-on provider for LoboChat. Use `zitadel` for ZITADEL. |
+| `NEXT_AUTH_SECRET` | Required | Key used to encrypt Auth.js session tokens. You can generate a key using the following command: `openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | Optional | Select the single sign-on provider for LoboChat. Use `zitadel` for ZITADEL. |
 | `ZITADEL_CLIENT_ID` | Required | Client ID (`ClientId` as shown in ZITADEL) of the ZITADEL application |
 | `ZITADEL_CLIENT_SECRET` | Required | Client Secret (`ClientSecret` as shown in ZITADEL) of the ZITADEL application |
 | `ZITADEL_ISSUER` | Required | Issuer URL of the ZITADEL application |
package/docs/self-hosting/advanced/sso-providers/zitadel.zh-CN.mdx
CHANGED

@@ -54,7 +54,7 @@ http(s)://your-domain/api/auth/callback/zitadel
 - 可以创建应用后再填写或修改重定向 URL,但请确保填写的 URL 与部署的 URL 一致。

 - 请将 `http(s)://your-domain` 替换为 LobeChat 部署的实际 URL。
-
+
 </Callout>

 确认配置并点击「创建」。

@@ -95,9 +95,8 @@ http(s)://your-domain/api/auth/callback/zitadel

 | 环境变量 | 类型 | 描述 |
 | --- | --- | --- |
-| `
-| `
-| `SSO_PROVIDERS` | 可选 | 为 LobeChat 选择单点登录提供程序。对于 ZITADEL,请填写 `zitadel`。 |
+| `NEXT_AUTH_SECRET` | 必选 | 用于加密 Auth.js 会话令牌的密钥。您可以使用以下命令生成密钥:`openssl rand -base64 32` |
+| `NEXT_AUTH_SSO_PROVIDERS` | 可选 | 为 LobeChat 选择单点登录提供程序。对于 ZITADEL,请填写 `zitadel`。 |
 | `ZITADEL_CLIENT_ID` | 必选 | ZITADEL 应用的 Client ID(`ClientId`)。 |
 | `ZITADEL_CLIENT_SECRET` | 必选 | ZITADEL 应用的 Client Secret(`ClientSecret`)。 |
 | `ZITADEL_ISSUER` | 必选 | ZITADEL 应用的 OpenID Connect 颁发者(issuer)URL。 |
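Every SSO provider page above receives the same two-row fix: the secret is now documented as `NEXT_AUTH_SECRET` and the provider selector as `NEXT_AUTH_SSO_PROVIDERS`, replacing the old truncated `SSO_PROVIDERS` rows. The docs generate the secret with `openssl rand -base64 32`; if openssl is not available, an equivalent Node one-liner (our example, not from the docs) is:

```ts
// Equivalent of `openssl rand -base64 32` using Node's built-in crypto module.
import { randomBytes } from 'node:crypto';

console.log(`NEXT_AUTH_SECRET=${randomBytes(32).toString('base64')}`);
```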
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.1.
+  "version": "1.1.13",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/app/(main)/settings/about/features/Version.tsx
CHANGED

@@ -8,6 +8,7 @@ import { Center, Flexbox } from 'react-layout-kit';

 import { MANUAL_UPGRADE_URL, OFFICIAL_SITE, RELEASES_URL } from '@/const/url';
 import { CURRENT_VERSION } from '@/const/version';
+import { useNewVersion } from '@/features/User/UserPanel/useNewVersion';
 import { useGlobalStore } from '@/store/global';

 const useStyles = createStyles(({ css, token }) => ({

@@ -20,16 +21,11 @@
 }));

 const Version = memo<{ mobile?: boolean }>(({ mobile }) => {
-  const
-
-    s.latestVersion,
-    s.useCheckLatestVersion,
-  ]);
+  const hasNewVersion = useNewVersion();
+  const [latestVersion] = useGlobalStore((s) => [s.latestVersion]);
   const { t } = useTranslation('common');
   const { styles, theme } = useStyles();

-  useCheckLatestVersion();
-
   return (
     <Flexbox
       align={mobile ? 'stretch' : 'center'}
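The Version component no longer selects `useCheckLatestVersion` from the global store and calls it inline; that logic now lives behind the imported `useNewVersion` hook. The hook's source is not part of this diff, but a rough sketch of what such a hook could look like, built only from the store selectors the old component used, is below (hypothetical, not the package's implementation):

```ts
// Hypothetical reconstruction of a useNewVersion-style hook; the real
// implementation lives in '@/features/User/UserPanel/useNewVersion' and is
// not included in this diff.
import { CURRENT_VERSION } from '@/const/version';
import { useGlobalStore } from '@/store/global';

export const useNewVersionSketch = (): boolean => {
  const [latestVersion, useCheckLatestVersion] = useGlobalStore((s) => [
    s.latestVersion,
    s.useCheckLatestVersion,
  ]);

  // Kick off the version check that the old component previously triggered inline.
  useCheckLatestVersion();

  // Naive comparison; the real hook may use semver-aware logic.
  return Boolean(latestVersion) && latestVersion !== CURRENT_VERSION;
};
```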
package/src/libs/agent-runtime/azureOpenai/index.test.ts
CHANGED

@@ -226,7 +226,7 @@ describe('LobeAzureOpenAI', () => {
       } catch (e) {
         // Assert
         expect(e).toEqual({
-          endpoint: 'https
+          endpoint: 'https://***.openai.azure.com/',
           error: {
             code: 'DeploymentNotFound',
             message: 'Deployment not found',

@@ -254,7 +254,7 @@ describe('LobeAzureOpenAI', () => {
       } catch (e) {
         // Assert
         expect(e).toEqual({
-          endpoint: 'https
+          endpoint: 'https://***.openai.azure.com/',
           errorType: 'AgentRuntimeError',
           provider: 'azure',
           error: {
package/src/libs/agent-runtime/azureOpenai/index.ts
CHANGED

@@ -11,7 +11,7 @@ import { ChatCompetitionOptions, ChatStreamPayload, ModelProvider } from '../typ
 import { AgentRuntimeError } from '../utils/createError';
 import { debugStream } from '../utils/debugStream';
 import { StreamingResponse } from '../utils/response';
-import {
+import { AzureOpenAIStream } from '../utils/streams';

 export class LobeAzureOpenAI implements LobeRuntimeAI {
   client: OpenAIClient;

@@ -47,7 +47,7 @@ export class LobeAzureOpenAI implements LobeRuntimeAI {
        debugStream(debug).catch(console.error);
      }

-      return StreamingResponse(
+      return StreamingResponse(AzureOpenAIStream(prod, options?.callback), {
        headers: options?.headers,
      });
    } catch (e) {

@@ -72,7 +72,7 @@ export class LobeAzureOpenAI implements LobeRuntimeAI {
        : AgentRuntimeErrorType.AgentRuntimeError;

      throw AgentRuntimeError.chat({
-       endpoint: this.baseURL,
+       endpoint: this.maskSensitiveUrl(this.baseURL),
        error,
        errorType,
        provider: ModelProvider.Azure,

@@ -103,4 +103,15 @@
      .toLowerCase()
      .replaceAll(/(_[a-z])/g, (group) => group.toUpperCase().replace('_', ''));
  };
+
+  private maskSensitiveUrl = (url: string) => {
+    // 使用正则表达式匹配 'https://' 后面和 '.openai.azure.com/' 前面的内容
+    const regex = /^(https:\/\/)([^.]+)(\.openai\.azure\.com\/.*)$/;
+
+    // 使用替换函数
+    return url.replace(regex, (match, protocol, subdomain, rest) => {
+      // 将子域名替换为 '***'
+      return `${protocol}***${rest}`;
+    });
+  };
 }
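The new `maskSensitiveUrl` helper is what the updated test expectations above rely on: the Azure resource subdomain in `baseURL` is replaced with `***` before the endpoint is attached to a thrown `AgentRuntimeError`, so error payloads no longer leak the resource name. The same regex is shown standalone below so the behaviour is easy to verify (a sketch for illustration, not an export of the package):

```ts
// Same masking pattern as LobeAzureOpenAI.maskSensitiveUrl, extracted for demonstration.
const maskAzureEndpoint = (url: string): string =>
  url.replace(
    /^(https:\/\/)([^.]+)(\.openai\.azure\.com\/.*)$/,
    (_match, protocol, _subdomain, rest) => `${protocol}***${rest}`,
  );

console.log(maskAzureEndpoint('https://my-resource.openai.azure.com/'));
// => 'https://***.openai.azure.com/'
console.log(maskAzureEndpoint('https://example.com/'));
// => 'https://example.com/' (non-Azure URLs pass through unchanged)
```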
package/src/libs/agent-runtime/utils/streams/azureOpenai.test.ts
ADDED

@@ -0,0 +1,537 @@
+import { desc } from 'drizzle-orm/expressions';
+import { describe, expect, it, vi } from 'vitest';
+
+import { AzureOpenAIStream } from './azureOpenai';
+
+describe('AzureOpenAIStream', () => {
+  it('should transform AzureOpenAI stream to protocol stream', async () => {
+    const mockOpenAIStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue({
+          choices: [
+            {
+              delta: { content: 'Hello' },
+              index: 0,
+            },
+          ],
+          id: '1',
+        });
+        controller.enqueue({
+          choices: [
+            {
+              delta: { content: ' world!' },
+              index: 1,
+            },
+          ],
+          id: '1',
+        });
+        controller.enqueue({
+          choices: [
+            {
+              delta: null,
+              finishReason: 'stop',
+              index: 2,
+            },
+          ],
+          id: '1',
+        });
+
+        controller.close();
+      },
+    });
+
+    const onStartMock = vi.fn();
+    const onTextMock = vi.fn();
+    const onTokenMock = vi.fn();
+    const onCompletionMock = vi.fn();
+
+    const protocolStream = AzureOpenAIStream(mockOpenAIStream, {
+      onStart: onStartMock,
+      onText: onTextMock,
+      onToken: onTokenMock,
+      onCompletion: onCompletionMock,
+    });
+
+    const decoder = new TextDecoder();
+    const chunks = [];
+
+    // @ts-ignore
+    for await (const chunk of protocolStream) {
+      chunks.push(decoder.decode(chunk, { stream: true }));
+    }
+
+    expect(chunks).toEqual([
+      'id: 1\n',
+      'event: text\n',
+      `data: "Hello"\n\n`,
+      'id: 1\n',
+      'event: text\n',
+      `data: " world!"\n\n`,
+      'id: 1\n',
+      'event: stop\n',
+      `data: "stop"\n\n`,
+    ]);
+
+    expect(onStartMock).toHaveBeenCalledTimes(1);
+    expect(onTextMock).toHaveBeenNthCalledWith(1, '"Hello"');
+    expect(onTextMock).toHaveBeenNthCalledWith(2, '" world!"');
+    expect(onTokenMock).toHaveBeenCalledTimes(2);
+    expect(onCompletionMock).toHaveBeenCalledTimes(1);
+  });
+
+  it('should handle empty stream', async () => {
+    const mockStream = new ReadableStream({
+      start(controller) {
+        controller.close();
+      },
+    });
+
+    const protocolStream = AzureOpenAIStream(mockStream);
+
+    const decoder = new TextDecoder();
+    const chunks = [];
+
+    // @ts-ignore
+    for await (const chunk of protocolStream) {
+      chunks.push(decoder.decode(chunk, { stream: true }));
+    }
+
+    expect(chunks).toEqual([]);
+  });
+
+  it('should handle delta content null', async () => {
+    const mockOpenAIStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue({
+          choices: [
+            {
+              delta: { content: null },
+              index: 0,
+            },
+          ],
+          id: '3',
+        });
+
+        controller.close();
+      },
+    });
+
+    const protocolStream = AzureOpenAIStream(mockOpenAIStream);
+
+    const decoder = new TextDecoder();
+    const chunks = [];
+
+    // @ts-ignore
+    for await (const chunk of protocolStream) {
+      chunks.push(decoder.decode(chunk, { stream: true }));
+    }
+
+    expect(chunks).toEqual(['id: 3\n', 'event: data\n', `data: {"content":null}\n\n`]);
+  });
+
+  it('should handle other delta data', async () => {
+    const mockOpenAIStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue({
+          choices: [
+            {
+              delta: { custom_field: 'custom_value' },
+              index: 0,
+            },
+          ],
+          id: '4',
+        });
+
+        controller.close();
+      },
+    });
+
+    const protocolStream = AzureOpenAIStream(mockOpenAIStream);
+
+    const decoder = new TextDecoder();
+    const chunks = [];
+
+    // @ts-ignore
+    for await (const chunk of protocolStream) {
+      chunks.push(decoder.decode(chunk, { stream: true }));
+    }
+
+    expect(chunks).toEqual([
+      'id: 4\n',
+      'event: data\n',
+      `data: {"delta":{"custom_field":"custom_value"},"id":"4","index":0}\n\n`,
+    ]);
+  });
+
+  describe('tool Calling', () => {
+    it('should handle tool calls', async () => {
+      const streams = [
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: {
+                content: null,
+                role: 'assistant',
+                toolCalls: [
+                  {
+                    function: { arguments: '', name: 'realtime-weather____fetchCurrentWeather' },
+                    id: 'call_1GT6no85IuAal06XHH2CZe8Q',
+                    index: 0,
+                    type: 'function',
+                  },
+                ],
+              },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '{"' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: 'city' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '":"' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '杭州' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '"}' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:14.698Z',
+          choices: [
+            {
+              delta: {},
+              index: 0,
+              logprobs: null,
+              finishReason: 'tool_calls',
+              contentFilterResults: {},
+            },
+          ],
+        },
+      ];
+
+      const mockReadableStream = new ReadableStream({
+        start(controller) {
+          streams.forEach((chunk) => {
+            controller.enqueue(chunk);
+          });
+          controller.close();
+        },
+      });
+
+      const onToolCallMock = vi.fn();
+
+      const protocolStream = AzureOpenAIStream(mockReadableStream, {
+        onToolCall: onToolCallMock,
+      });
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual(
+        [
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"","name":"realtime-weather____fetchCurrentWeather"},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"{\\""},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"city"},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"\\":\\""},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"杭州"},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"\\"}"},"id":"call_1GT6no85IuAal06XHH2CZe8Q","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEBuv3ra8l4KKQhGj6ldhqfwV4Iy',
+          'event: stop',
+          `data: "tool_calls"\n`,
+        ].map((item) => `${item}\n`),
+      );
+
+      expect(onToolCallMock).toHaveBeenCalledTimes(6);
+    });
+    it('should handle parallel tools calling', async () => {
+      const streams = [
+        {
+          id: 'chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:16.635Z',
+          choices: [
+            {
+              delta: {
+                toolCalls: [
+                  {
+                    function: { arguments: '', name: 'realtime-weather____fetchCurrentWeather' },
+                    id: 'call_cnQ80VjcWCS69wWKp4jz0nJd',
+                    index: 0,
+                    type: 'function',
+                  },
+                ],
+              },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:16.635Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '{"city": "杭州"}' }, index: 0 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:16.635Z',
+          choices: [
+            {
+              delta: {
+                toolCalls: [
+                  {
+                    function: { arguments: '', name: 'realtime-weather____fetchCurrentWeather' },
+                    id: 'call_LHrpPTrT563QkP9chVddzXQk',
+                    index: 1,
+                    type: 'function',
+                  },
+                ],
+              },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:16.635Z',
+          choices: [
+            {
+              delta: { toolCalls: [{ function: { arguments: '{"city": "北京"}' }, index: 1 }] },
+              index: 0,
+              logprobs: null,
+              finishReason: null,
+              contentFilterResults: {},
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          model: 'gpt-4o-2024-05-13',
+          object: 'chat.completion.chunk',
+          systemFingerprint: 'fp_abc28019ad',
+          created: '1970-01-20T21:36:16.635Z',
+          choices: [
+            {
+              delta: {},
+              index: 0,
+              logprobs: null,
+              finishReason: 'tool_calls',
+              contentFilterResults: {},
+            },
+          ],
+        },
+      ];
+
+      const mockReadableStream = new ReadableStream({
+        start(controller) {
+          streams.forEach((chunk) => {
+            controller.enqueue(chunk);
+          });
+          controller.close();
+        },
+      });
+
+      const onToolCallMock = vi.fn();
+
+      const protocolStream = AzureOpenAIStream(mockReadableStream, {
+        onToolCall: onToolCallMock,
+      });
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual(
+        [
+          'id: chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"","name":"realtime-weather____fetchCurrentWeather"},"id":"call_cnQ80VjcWCS69wWKp4jz0nJd","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"{\\"city\\": \\"杭州\\"}"},"id":"call_cnQ80VjcWCS69wWKp4jz0nJd","index":0,"type":"function"}]\n`,
+          'id: chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"","name":"realtime-weather____fetchCurrentWeather"},"id":"call_LHrpPTrT563QkP9chVddzXQk","index":1,"type":"function"}]\n`,
+          'id: chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          'event: tool_calls',
+          `data: [{"function":{"arguments":"{\\"city\\": \\"北京\\"}"},"id":"call_LHrpPTrT563QkP9chVddzXQk","index":1,"type":"function"}]\n`,
+          'id: chatcmpl-9eEh9DtpidX5CyE4GcyIeyhU3pLir',
+          'event: stop',
+          `data: "tool_calls"\n`,
+        ].map((item) => `${item}\n`),
+      );
+
+      expect(onToolCallMock).toHaveBeenCalledTimes(4);
+    });
+    it('should handle tool calls without index and type', async () => {
+      const mockOpenAIStream = new ReadableStream({
+        start(controller) {
+          controller.enqueue({
+            choices: [
+              {
+                delta: {
+                  toolCalls: [
+                    {
+                      function: { name: 'tool1', arguments: '{}' },
+                      id: 'call_1',
+                    },
+                    {
+                      function: { name: 'tool2', arguments: '{}' },
+                      id: 'call_2',
+                    },
+                  ],
+                },
+                index: 0,
+              },
+            ],
+            id: '5',
+          });
+
+          controller.close();
+        },
+      });
+
+      const protocolStream = AzureOpenAIStream(mockOpenAIStream);
+
+      const decoder = new TextDecoder();
+      const chunks = [];
+
+      // @ts-ignore
+      for await (const chunk of protocolStream) {
+        chunks.push(decoder.decode(chunk, { stream: true }));
+      }
+
+      expect(chunks).toEqual([
+        'id: 5\n',
+        'event: tool_calls\n',
+        `data: [{"function":{"name":"tool1","arguments":"{}"},"id":"call_1","index":0,"type":"function"},{"function":{"name":"tool2","arguments":"{}"},"id":"call_2","index":1,"type":"function"}]\n\n`,
+      ]);
+    });
+  });
+});
package/src/libs/agent-runtime/utils/streams/azureOpenai.ts
ADDED

@@ -0,0 +1,89 @@
+import { ChatCompletions, ChatCompletionsFunctionToolCall } from '@azure/openai';
+import { readableFromAsyncIterable } from 'ai';
+import OpenAI from 'openai';
+import type { Stream } from 'openai/streaming';
+
+import { ChatStreamCallbacks } from '../../types';
+import {
+  StreamProtocolChunk,
+  StreamProtocolToolCallChunk,
+  StreamStack,
+  StreamToolCallChunkData,
+  createCallbacksTransformer,
+  createSSEProtocolTransformer,
+} from './protocol';
+
+const transformOpenAIStream = (chunk: ChatCompletions, stack: StreamStack): StreamProtocolChunk => {
+  // maybe need another structure to add support for multiple choices
+
+  const item = chunk.choices[0];
+  if (!item) {
+    return { data: chunk, id: chunk.id, type: 'data' };
+  }
+
+  if (typeof item.delta?.content === 'string') {
+    return { data: item.delta.content, id: chunk.id, type: 'text' };
+  }
+
+  if (item.delta?.toolCalls) {
+    return {
+      data: item.delta.toolCalls.map((value, index): StreamToolCallChunkData => {
+        const func = (value as ChatCompletionsFunctionToolCall).function;
+
+        // at first time, set tool id
+        if (!stack.tool) {
+          stack.tool = { id: value.id, index, name: func.name };
+        } else {
+          // in the parallel tool calling, set the new tool id
+          if (value.id && stack.tool.id !== value.id) {
+            stack.tool = { id: value.id, index, name: func.name };
+          }
+        }
+
+        return {
+          function: func,
+          id: value.id || stack.tool?.id,
+          index: value.index || index,
+          type: value.type || 'function',
+        };
+      }),
+      id: chunk.id,
+      type: 'tool_calls',
+    } as StreamProtocolToolCallChunk;
+  }
+
+  // 给定结束原因
+  if (item.finishReason) {
+    return { data: item.finishReason, id: chunk.id, type: 'stop' };
+  }
+
+  if (item.delta?.content === null) {
+    return { data: item.delta, id: chunk.id, type: 'data' };
+  }
+
+  // 其余情况下,返回 delta 和 index
+  return {
+    data: { delta: item.delta, id: chunk.id, index: item.index },
+    id: chunk.id,
+    type: 'data',
+  };
+};
+
+const chatStreamable = async function* (stream: AsyncIterable<OpenAI.ChatCompletionChunk>) {
+  for await (const response of stream) {
+    yield response;
+  }
+};
+
+export const AzureOpenAIStream = (
+  stream: Stream<OpenAI.ChatCompletionChunk> | ReadableStream,
+  callbacks?: ChatStreamCallbacks,
+) => {
+  const stack: StreamStack = { id: '' };
+  const readableStream =
+    stream instanceof ReadableStream ? stream : readableFromAsyncIterable(chatStreamable(stream));
+
+  return readableStream
+    .pipeThrough(createSSEProtocolTransformer(transformOpenAIStream, stack))
+    .pipeThrough(createCallbacksTransformer(callbacks));
+};
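`AzureOpenAIStream` accepts either an async-iterable SDK stream or a plain `ReadableStream` of `ChatCompletions`-shaped chunks and pipes it through the shared SSE protocol and callback transformers. A minimal consumption sketch in the spirit of the tests above (the chunk payloads are made up for illustration, not real Azure output):

```ts
// Assumes it runs next to the new module; mirrors how the tests consume the stream.
import { AzureOpenAIStream } from './azureOpenai';

const source = new ReadableStream({
  start(controller) {
    controller.enqueue({ choices: [{ delta: { content: 'Hi' }, index: 0 }], id: 'demo' });
    controller.enqueue({ choices: [{ delta: null, finishReason: 'stop', index: 0 }], id: 'demo' });
    controller.close();
  },
});

const sse = AzureOpenAIStream(source, { onText: (text) => console.log('onText:', text) });

const decoder = new TextDecoder();
// ReadableStream is async-iterable at runtime; the cast sidesteps lib.dom typings,
// the same issue the tests silence with `// @ts-ignore`.
for await (const chunk of sse as unknown as AsyncIterable<Uint8Array>) {
  process.stdout.write(decoder.decode(chunk, { stream: true }));
}
// Expected protocol output:
//   id: demo
//   event: text
//   data: "Hi"
//
//   id: demo
//   event: stop
//   data: "stop"
```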