@lobehub/chat 1.88.23 → 1.89.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/Dockerfile +2 -0
- package/Dockerfile.database +2 -0
- package/Dockerfile.pglite +2 -0
- package/changelog/v1.json +9 -0
- package/docs/usage/providers/modelscope.mdx +113 -0
- package/docs/usage/providers/modelscope.zh-CN.mdx +133 -0
- package/package.json +1 -1
- package/src/config/aiModels/index.ts +3 -0
- package/src/config/aiModels/modelscope.ts +63 -0
- package/src/config/llm.ts +6 -0
- package/src/config/modelProviders/index.ts +4 -0
- package/src/config/modelProviders/modelscope.ts +62 -0
- package/src/libs/model-runtime/modelscope/index.ts +69 -0
- package/src/libs/model-runtime/runtimeMap.ts +6 -4
- package/src/libs/model-runtime/types/type.ts +1 -0
- package/src/types/user/settings/keyVaults.ts +1 -0
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
|
|
2
2
|
|
3
3
|
# Changelog
|
4
4
|
|
5
|
+
## [Version 1.89.0](https://github.com/lobehub/lobe-chat/compare/v1.88.23...v1.89.0)
|
6
|
+
|
7
|
+
<sup>Released on **2025-06-01**</sup>
|
8
|
+
|
9
|
+
#### ✨ Features
|
10
|
+
|
11
|
+
- **misc**: Support ModelScope Provider.
|
12
|
+
|
13
|
+
<br/>
|
14
|
+
|
15
|
+
<details>
|
16
|
+
<summary><kbd>Improvements and Fixes</kbd></summary>
|
17
|
+
|
18
|
+
#### What's improved
|
19
|
+
|
20
|
+
- **misc**: Support ModelScope Provider, closes [#8026](https://github.com/lobehub/lobe-chat/issues/8026) ([7b91dfd](https://github.com/lobehub/lobe-chat/commit/7b91dfd))
|
21
|
+
|
22
|
+
</details>
|
23
|
+
|
24
|
+
<div align="right">
|
25
|
+
|
26
|
+
[](#readme-top)
|
27
|
+
|
28
|
+
</div>
|
29
|
+
|
5
30
|
### [Version 1.88.23](https://github.com/lobehub/lobe-chat/compare/v1.88.22...v1.88.23)
|
6
31
|
|
7
32
|
<sup>Released on **2025-05-31**</sup>
|
package/Dockerfile
CHANGED
@@ -186,6 +186,8 @@ ENV \
|
|
186
186
|
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
|
187
187
|
# Mistral
|
188
188
|
MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
|
189
|
+
# ModelScope
|
190
|
+
MODELSCOPE_API_KEY="" MODELSCOPE_MODEL_LIST="" MODELSCOPE_PROXY_URL="" \
|
189
191
|
# Moonshot
|
190
192
|
MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
|
191
193
|
# Novita
|
package/Dockerfile.database
CHANGED
@@ -230,6 +230,8 @@ ENV \
|
|
230
230
|
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
|
231
231
|
# Mistral
|
232
232
|
MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
|
233
|
+
# ModelScope
|
234
|
+
MODELSCOPE_API_KEY="" MODELSCOPE_MODEL_LIST="" MODELSCOPE_PROXY_URL="" \
|
233
235
|
# Moonshot
|
234
236
|
MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
|
235
237
|
# Novita
|
package/Dockerfile.pglite
CHANGED
@@ -188,6 +188,8 @@ ENV \
|
|
188
188
|
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
|
189
189
|
# Mistral
|
190
190
|
MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
|
191
|
+
# ModelScope
|
192
|
+
MODELSCOPE_API_KEY="" MODELSCOPE_MODEL_LIST="" MODELSCOPE_PROXY_URL="" \
|
191
193
|
# Moonshot
|
192
194
|
MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
|
193
195
|
# Novita
|
package/docs/usage/providers/modelscope.mdx
CHANGED
@@ -0,0 +1,113 @@
|
|
1
|
+
---
|
2
|
+
title: ModelScope Provider Setup
|
3
|
+
description: Learn how to configure and use ModelScope provider in LobeChat
|
4
|
+
tags:
|
5
|
+
- ModelScope
|
6
|
+
---
|
7
|
+
|
8
|
+
# ModelScope Provider Setup
|
9
|
+
|
10
|
+
ModelScope (魔搭社区) is Alibaba's open-source model community that provides access to various AI models. This guide will help you set up the ModelScope provider in LobeChat.
|
11
|
+
|
12
|
+
## Prerequisites
|
13
|
+
|
14
|
+
Before using ModelScope API, you need to:
|
15
|
+
|
16
|
+
1. **Create a ModelScope Account**
|
17
|
+
- Visit [ModelScope](https://www.modelscope.cn/)
|
18
|
+
- Register for an account
|
19
|
+
|
20
|
+
2. **Bind Alibaba Cloud Account**
|
21
|
+
- **Important**: ModelScope API requires binding with an Alibaba Cloud account
|
22
|
+
- Visit your [ModelScope Access Token page](https://www.modelscope.cn/my/myaccesstoken)
|
23
|
+
- Follow the instructions to bind your Alibaba Cloud account
|
24
|
+
- This step is mandatory for API access
|
25
|
+
|
26
|
+
3. **Get API Token**
|
27
|
+
- After binding your Alibaba Cloud account, generate an API token
|
28
|
+
- Copy the token for use in LobeChat
|
29
|
+
|
30
|
+
## Configuration
|
31
|
+
|
32
|
+
### Environment Variables
|
33
|
+
|
34
|
+
Add the following environment variables to your `.env` file:
|
35
|
+
|
36
|
+
```bash
|
37
|
+
# Enable ModelScope provider
|
38
|
+
ENABLED_MODELSCOPE=1
|
39
|
+
|
40
|
+
# ModelScope API key (required)
|
41
|
+
MODELSCOPE_API_KEY=your_modelscope_api_token
|
42
|
+
|
43
|
+
# Optional: Custom model list (comma-separated)
|
44
|
+
MODELSCOPE_MODEL_LIST=deepseek-ai/DeepSeek-V3-0324,Qwen/Qwen3-235B-A22B
|
45
|
+
|
46
|
+
# Optional: Proxy URL if needed
|
47
|
+
MODELSCOPE_PROXY_URL=https://your-proxy-url
|
48
|
+
```
|
49
|
+
|
50
|
+
### Docker Configuration
|
51
|
+
|
52
|
+
If using Docker, add the ModelScope environment variables to your `docker-compose.yml`:
|
53
|
+
|
54
|
+
```yaml
|
55
|
+
environment:
|
56
|
+
- ENABLED_MODELSCOPE=1
|
57
|
+
- MODELSCOPE_API_KEY=your_modelscope_api_token
|
58
|
+
- MODELSCOPE_MODEL_LIST=deepseek-ai/DeepSeek-V3-0324,Qwen/Qwen3-235B-A22B
|
59
|
+
```
|
60
|
+
|
61
|
+
## Available Models
|
62
|
+
|
63
|
+
ModelScope provides access to various models including:
|
64
|
+
|
65
|
+
- **DeepSeek Models**: DeepSeek-V3, DeepSeek-R1 series
|
66
|
+
- **Qwen Models**: Qwen3 series, Qwen2.5 series
|
67
|
+
- **Llama Models**: Meta-Llama-3 series
|
68
|
+
- **Other Models**: Various open-source models
|
69
|
+
|
70
|
+
## Troubleshooting
|
71
|
+
|
72
|
+
### Common Issues
|
73
|
+
|
74
|
+
1. **"Please bind your Alibaba Cloud account before use" Error**
|
75
|
+
- This means you haven't bound your Alibaba Cloud account to ModelScope
|
76
|
+
- Visit [ModelScope Access Token page](https://www.modelscope.cn/my/myaccesstoken)
|
77
|
+
- Complete the Alibaba Cloud account binding process
|
78
|
+
|
79
|
+
2. **401 Authentication Error**
|
80
|
+
- Check if your API token is correct
|
81
|
+
- Ensure the token hasn't expired
|
82
|
+
- Verify that your Alibaba Cloud account is properly bound
|
83
|
+
|
84
|
+
3. **Model Not Available**
|
85
|
+
- Some models may require additional permissions
|
86
|
+
- Check the model's page on ModelScope for access requirements
|
87
|
+
|
88
|
+
### Debug Mode
|
89
|
+
|
90
|
+
Enable debug mode to see detailed logs:
|
91
|
+
|
92
|
+
```bash
|
93
|
+
DEBUG_MODELSCOPE_CHAT_COMPLETION=1
|
94
|
+
```
|
95
|
+
|
96
|
+
## Notes
|
97
|
+
|
98
|
+
- ModelScope API is compatible with OpenAI API format
|
99
|
+
- The service is primarily designed for users in China
|
100
|
+
- Some models may have usage limitations or require additional verification
|
101
|
+
- API responses are in Chinese by default for some models
|
102
|
+
|
103
|
+
## Support
|
104
|
+
|
105
|
+
For ModelScope-specific issues:
|
106
|
+
|
107
|
+
- Visit [ModelScope Documentation](https://www.modelscope.cn/docs)
|
108
|
+
- Check [ModelScope Community](https://www.modelscope.cn/community)
|
109
|
+
|
110
|
+
For LobeChat integration issues:
|
111
|
+
|
112
|
+
- Check our [GitHub Issues](https://github.com/lobehub/lobe-chat/issues)
|
113
|
+
- Join our community discussions
|
package/docs/usage/providers/modelscope.zh-CN.mdx
CHANGED
@@ -0,0 +1,133 @@
|
|
1
|
+
---
|
2
|
+
title: ModelScope 提供商配置
|
3
|
+
description: 学习如何在 LobeChat 中配置和使用 ModelScope 提供商
|
4
|
+
tags:
|
5
|
+
- ModelScope
|
6
|
+
---
|
7
|
+
|
8
|
+
# ModelScope 提供商配置
|
9
|
+
|
10
|
+
ModelScope(魔搭社区)是阿里巴巴的开源模型社区,提供各种 AI 模型的访问服务。本指南将帮助您在 LobeChat 中设置 ModelScope 提供商。
|
11
|
+
|
12
|
+
## 前置条件
|
13
|
+
|
14
|
+
在使用 ModelScope API 之前,您需要:
|
15
|
+
|
16
|
+
1. **创建 ModelScope 账户**
|
17
|
+
- 访问 [ModelScope](https://www.modelscope.cn/)
|
18
|
+
- 注册账户
|
19
|
+
|
20
|
+
2. **绑定阿里云账户**
|
21
|
+
- **重要**:ModelScope API 需要绑定阿里云账户
|
22
|
+
- 访问您的 [ModelScope 访问令牌页面](https://www.modelscope.cn/my/myaccesstoken)
|
23
|
+
- 按照说明绑定您的阿里云账户
|
24
|
+
- 此步骤是 API 访问的必要条件
|
25
|
+
|
26
|
+
3. **获取 API 令牌**
|
27
|
+
- 绑定阿里云账户后,生成 API 令牌
|
28
|
+
- 复制令牌以在 LobeChat 中使用
|
29
|
+
|
30
|
+
## 配置
|
31
|
+
|
32
|
+
### 环境变量
|
33
|
+
|
34
|
+
在您的 `.env` 文件中添加以下环境变量:
|
35
|
+
|
36
|
+
```bash
|
37
|
+
# 启用 ModelScope 提供商
|
38
|
+
ENABLED_MODELSCOPE=1
|
39
|
+
|
40
|
+
# ModelScope API 密钥(必需)
|
41
|
+
MODELSCOPE_API_KEY=your_modelscope_api_token
|
42
|
+
|
43
|
+
# 可选:自定义模型列表(逗号分隔)
|
44
|
+
MODELSCOPE_MODEL_LIST=deepseek-ai/DeepSeek-V3-0324,Qwen/Qwen3-235B-A22B
|
45
|
+
|
46
|
+
# 可选:代理 URL(如需要)
|
47
|
+
MODELSCOPE_PROXY_URL=https://your-proxy-url
|
48
|
+
```
|
49
|
+
|
50
|
+
### Docker 配置
|
51
|
+
|
52
|
+
如果使用 Docker,请在您的 `docker-compose.yml` 中添加 ModelScope 环境变量:
|
53
|
+
|
54
|
+
```yaml
|
55
|
+
environment:
|
56
|
+
- ENABLED_MODELSCOPE=1
|
57
|
+
- MODELSCOPE_API_KEY=your_modelscope_api_token
|
58
|
+
- MODELSCOPE_MODEL_LIST=deepseek-ai/DeepSeek-V3-0324,Qwen/Qwen3-235B-A22B
|
59
|
+
```
|
60
|
+
|
61
|
+
## 可用模型
|
62
|
+
|
63
|
+
ModelScope 提供各种模型的访问,包括:
|
64
|
+
|
65
|
+
- **DeepSeek 模型**:DeepSeek-V3、DeepSeek-R1 系列
|
66
|
+
- **Qwen 模型**:Qwen3 系列、Qwen2.5 系列
|
67
|
+
- **Llama 模型**:Meta-Llama-3 系列
|
68
|
+
- **其他模型**:各种开源模型
|
69
|
+
|
70
|
+
## 故障排除
|
71
|
+
|
72
|
+
### 常见问题
|
73
|
+
|
74
|
+
1. **"请先绑定阿里云账户后使用" 错误**
|
75
|
+
- 这意味着您还没有将阿里云账户绑定到 ModelScope
|
76
|
+
- 访问 [ModelScope 访问令牌页面](https://www.modelscope.cn/my/myaccesstoken)
|
77
|
+
- 完成阿里云账户绑定流程
|
78
|
+
|
79
|
+
2. **401 认证错误**
|
80
|
+
- 检查您的 API 令牌是否正确
|
81
|
+
- 确保令牌没有过期
|
82
|
+
- 验证您的阿里云账户是否正确绑定
|
83
|
+
|
84
|
+
3. **模型不可用**
|
85
|
+
- 某些模型可能需要额外权限
|
86
|
+
- 查看 ModelScope 上模型页面的访问要求
|
87
|
+
|
88
|
+
### 调试模式
|
89
|
+
|
90
|
+
启用调试模式以查看详细日志:
|
91
|
+
|
92
|
+
```bash
|
93
|
+
DEBUG_MODELSCOPE_CHAT_COMPLETION=1
|
94
|
+
```
|
95
|
+
|
96
|
+
## 注意事项
|
97
|
+
|
98
|
+
- ModelScope API 与 OpenAI API 格式兼容
|
99
|
+
- 该服务主要面向中国用户设计
|
100
|
+
- 某些模型可能有使用限制或需要额外验证
|
101
|
+
- 某些模型的 API 响应默认为中文
|
102
|
+
|
103
|
+
## 支持
|
104
|
+
|
105
|
+
对于 ModelScope 特定问题:
|
106
|
+
|
107
|
+
- 访问 [ModelScope 文档](https://www.modelscope.cn/docs)
|
108
|
+
- 查看 [ModelScope 社区](https://www.modelscope.cn/community)
|
109
|
+
|
110
|
+
对于 LobeChat 集成问题:
|
111
|
+
|
112
|
+
- 查看我们的 [GitHub Issues](https://github.com/lobehub/lobe-chat/issues)
|
113
|
+
- 加入我们的社区讨论
|
114
|
+
|
115
|
+
## 模型 ID 格式
|
116
|
+
|
117
|
+
ModelScope 使用命名空间前缀格式的模型 ID,例如:
|
118
|
+
|
119
|
+
```
|
120
|
+
deepseek-ai/DeepSeek-V3-0324
|
121
|
+
deepseek-ai/DeepSeek-R1-0528
|
122
|
+
Qwen/Qwen3-235B-A22B
|
123
|
+
Qwen/Qwen3-32B
|
124
|
+
```
|
125
|
+
|
126
|
+
在配置模型列表时,请使用完整的模型 ID 格式。
|
127
|
+
|
128
|
+
## API 限制
|
129
|
+
|
130
|
+
- ModelScope API 有速率限制
|
131
|
+
- 某些模型可能需要特殊权限
|
132
|
+
- 建议在生产环境中监控 API 使用情况
|
133
|
+
- 部分高级模型可能需要付费使用
|
package/package.json
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
{
|
2
2
|
"name": "@lobehub/chat",
|
3
|
-
"version": "1.88.23",
|
3
|
+
"version": "1.89.0",
|
4
4
|
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
|
5
5
|
"keywords": [
|
6
6
|
"framework",
|
package/src/config/aiModels/index.ts
CHANGED
@@ -24,6 +24,7 @@ import { default as jina } from './jina';
|
|
24
24
|
import { default as lmstudio } from './lmstudio';
|
25
25
|
import { default as minimax } from './minimax';
|
26
26
|
import { default as mistral } from './mistral';
|
27
|
+
import { default as modelscope } from './modelscope';
|
27
28
|
import { default as moonshot } from './moonshot';
|
28
29
|
import { default as novita } from './novita';
|
29
30
|
import { default as nvidia } from './nvidia';
|
@@ -97,6 +98,7 @@ export const LOBE_DEFAULT_MODEL_LIST = buildDefaultModelList({
|
|
97
98
|
lmstudio,
|
98
99
|
minimax,
|
99
100
|
mistral,
|
101
|
+
modelscope,
|
100
102
|
moonshot,
|
101
103
|
novita,
|
102
104
|
nvidia,
|
@@ -151,6 +153,7 @@ export { default as jina } from './jina';
|
|
151
153
|
export { default as lmstudio } from './lmstudio';
|
152
154
|
export { default as minimax } from './minimax';
|
153
155
|
export { default as mistral } from './mistral';
|
156
|
+
export { default as modelscope } from './modelscope';
|
154
157
|
export { default as moonshot } from './moonshot';
|
155
158
|
export { default as novita } from './novita';
|
156
159
|
export { default as nvidia } from './nvidia';
|
package/src/config/aiModels/modelscope.ts
CHANGED
@@ -0,0 +1,63 @@
|
|
1
|
+
import { AIChatModelCard } from '@/types/aiModel';
|
2
|
+
|
3
|
+
const modelscopeChatModels: AIChatModelCard[] = [
|
4
|
+
{
|
5
|
+
abilities: {
|
6
|
+
functionCall: true,
|
7
|
+
},
|
8
|
+
contextWindowTokens: 131_072,
|
9
|
+
description: 'DeepSeek-V3是DeepSeek第三代模型,在多项基准测试中表现优异。',
|
10
|
+
displayName: 'DeepSeek-V3-0324',
|
11
|
+
enabled: true,
|
12
|
+
id: 'deepseek-ai/DeepSeek-V3-0324',
|
13
|
+
type: 'chat',
|
14
|
+
},
|
15
|
+
{
|
16
|
+
abilities: {
|
17
|
+
functionCall: true,
|
18
|
+
},
|
19
|
+
contextWindowTokens: 131_072,
|
20
|
+
description: 'DeepSeek-V3是DeepSeek第三代模型的最新版本,具有强大的推理和对话能力。',
|
21
|
+
displayName: 'DeepSeek-V3',
|
22
|
+
enabled: true,
|
23
|
+
id: 'deepseek-ai/DeepSeek-V3',
|
24
|
+
type: 'chat',
|
25
|
+
},
|
26
|
+
{
|
27
|
+
abilities: {
|
28
|
+
functionCall: true,
|
29
|
+
},
|
30
|
+
contextWindowTokens: 131_072,
|
31
|
+
description: 'DeepSeek-R1是DeepSeek最新的推理模型,专注于复杂推理任务。',
|
32
|
+
displayName: 'DeepSeek-R1',
|
33
|
+
enabled: true,
|
34
|
+
id: 'deepseek-ai/DeepSeek-R1',
|
35
|
+
type: 'chat',
|
36
|
+
},
|
37
|
+
{
|
38
|
+
abilities: {
|
39
|
+
functionCall: true,
|
40
|
+
},
|
41
|
+
contextWindowTokens: 131_072,
|
42
|
+
description: 'Qwen3-235B-A22B是通义千问3代超大规模模型,提供顶级的AI能力。',
|
43
|
+
displayName: 'Qwen3-235B-A22B',
|
44
|
+
enabled: true,
|
45
|
+
id: 'Qwen/Qwen3-235B-A22B',
|
46
|
+
type: 'chat',
|
47
|
+
},
|
48
|
+
{
|
49
|
+
abilities: {
|
50
|
+
functionCall: true,
|
51
|
+
},
|
52
|
+
contextWindowTokens: 131_072,
|
53
|
+
description: 'Qwen3-32B是通义千问3代模型,具有强大的推理和对话能力。',
|
54
|
+
displayName: 'Qwen3-32B',
|
55
|
+
enabled: true,
|
56
|
+
id: 'Qwen/Qwen3-32B',
|
57
|
+
type: 'chat',
|
58
|
+
},
|
59
|
+
];
|
60
|
+
|
61
|
+
export const allModels = [...modelscopeChatModels];
|
62
|
+
|
63
|
+
export default allModels;
|
package/src/config/llm.ts
CHANGED
@@ -162,6 +162,9 @@ export const getLLMConfig = () => {
|
|
162
162
|
|
163
163
|
ENABLED_INFINIAI: z.boolean(),
|
164
164
|
INFINIAI_API_KEY: z.string().optional(),
|
165
|
+
|
166
|
+
ENABLED_MODELSCOPE: z.boolean(),
|
167
|
+
MODELSCOPE_API_KEY: z.string().optional(),
|
165
168
|
},
|
166
169
|
runtimeEnv: {
|
167
170
|
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
|
@@ -322,6 +325,9 @@ export const getLLMConfig = () => {
|
|
322
325
|
|
323
326
|
ENABLED_INFINIAI: !!process.env.INFINIAI_API_KEY,
|
324
327
|
INFINIAI_API_KEY: process.env.INFINIAI_API_KEY,
|
328
|
+
|
329
|
+
ENABLED_MODELSCOPE: !!process.env.MODELSCOPE_API_KEY,
|
330
|
+
MODELSCOPE_API_KEY: process.env.MODELSCOPE_API_KEY,
|
325
331
|
},
|
326
332
|
});
|
327
333
|
};
|
package/src/config/modelProviders/index.ts
CHANGED
@@ -24,6 +24,7 @@ import JinaProvider from './jina';
|
|
24
24
|
import LMStudioProvider from './lmstudio';
|
25
25
|
import MinimaxProvider from './minimax';
|
26
26
|
import MistralProvider from './mistral';
|
27
|
+
import ModelScopeProvider from './modelscope';
|
27
28
|
import MoonshotProvider from './moonshot';
|
28
29
|
import NovitaProvider from './novita';
|
29
30
|
import NvidiaProvider from './nvidia';
|
@@ -67,6 +68,7 @@ export const LOBE_DEFAULT_MODEL_LIST: ChatModelCard[] = [
|
|
67
68
|
GithubProvider.chatModels,
|
68
69
|
MinimaxProvider.chatModels,
|
69
70
|
MistralProvider.chatModels,
|
71
|
+
ModelScopeProvider.chatModels,
|
70
72
|
MoonshotProvider.chatModels,
|
71
73
|
OllamaProvider.chatModels,
|
72
74
|
VLLMProvider.chatModels,
|
@@ -130,6 +132,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
|
|
130
132
|
GroqProvider,
|
131
133
|
PerplexityProvider,
|
132
134
|
MistralProvider,
|
135
|
+
ModelScopeProvider,
|
133
136
|
Ai21Provider,
|
134
137
|
UpstageProvider,
|
135
138
|
XAIProvider,
|
@@ -194,6 +197,7 @@ export { default as JinaProviderCard } from './jina';
|
|
194
197
|
export { default as LMStudioProviderCard } from './lmstudio';
|
195
198
|
export { default as MinimaxProviderCard } from './minimax';
|
196
199
|
export { default as MistralProviderCard } from './mistral';
|
200
|
+
export { default as ModelScopeProviderCard } from './modelscope';
|
197
201
|
export { default as MoonshotProviderCard } from './moonshot';
|
198
202
|
export { default as NovitaProviderCard } from './novita';
|
199
203
|
export { default as NvidiaProviderCard } from './nvidia';
|
package/src/config/modelProviders/modelscope.ts
CHANGED
@@ -0,0 +1,62 @@
|
|
1
|
+
import { ModelProviderCard } from '@/types/llm';
|
2
|
+
|
3
|
+
// ref: https://modelscope.cn/docs/model-service/API-Inference/intro
|
4
|
+
const ModelScope: ModelProviderCard = {
|
5
|
+
chatModels: [
|
6
|
+
{
|
7
|
+
contextWindowTokens: 131_072,
|
8
|
+
description: 'DeepSeek-V3是DeepSeek第三代模型,在多项基准测试中表现优异。',
|
9
|
+
displayName: 'DeepSeek-V3-0324',
|
10
|
+
enabled: true,
|
11
|
+
functionCall: true,
|
12
|
+
id: 'deepseek-ai/DeepSeek-V3-0324',
|
13
|
+
},
|
14
|
+
{
|
15
|
+
contextWindowTokens: 131_072,
|
16
|
+
description: 'DeepSeek-V3是DeepSeek第三代模型的最新版本,具有强大的推理和对话能力。',
|
17
|
+
displayName: 'DeepSeek-V3',
|
18
|
+
enabled: true,
|
19
|
+
functionCall: true,
|
20
|
+
id: 'deepseek-ai/DeepSeek-V3',
|
21
|
+
},
|
22
|
+
{
|
23
|
+
contextWindowTokens: 131_072,
|
24
|
+
description: 'DeepSeek-R1是DeepSeek最新的推理模型,专注于复杂推理任务。',
|
25
|
+
displayName: 'DeepSeek-R1',
|
26
|
+
enabled: true,
|
27
|
+
functionCall: true,
|
28
|
+
id: 'deepseek-ai/DeepSeek-R1',
|
29
|
+
},
|
30
|
+
{
|
31
|
+
contextWindowTokens: 131_072,
|
32
|
+
description: 'Qwen3-235B-A22B是通义千问3代超大规模模型,提供顶级的AI能力。',
|
33
|
+
displayName: 'Qwen3-235B-A22B',
|
34
|
+
enabled: true,
|
35
|
+
functionCall: true,
|
36
|
+
id: 'Qwen/Qwen3-235B-A22B',
|
37
|
+
},
|
38
|
+
{
|
39
|
+
contextWindowTokens: 131_072,
|
40
|
+
description: 'Qwen3-32B是通义千问3代模型,具有强大的推理和对话能力。',
|
41
|
+
displayName: 'Qwen3-32B',
|
42
|
+
enabled: true,
|
43
|
+
functionCall: true,
|
44
|
+
id: 'Qwen/Qwen3-32B',
|
45
|
+
},
|
46
|
+
],
|
47
|
+
checkModel: 'Qwen/Qwen3-32B',
|
48
|
+
description: 'ModelScope是阿里云推出的模型即服务平台,提供丰富的AI模型和推理服务。',
|
49
|
+
id: 'modelscope',
|
50
|
+
modelList: { showModelFetcher: true },
|
51
|
+
name: 'ModelScope',
|
52
|
+
settings: {
|
53
|
+
proxyUrl: {
|
54
|
+
placeholder: 'https://api-inference.modelscope.cn/v1',
|
55
|
+
},
|
56
|
+
sdkType: 'openai',
|
57
|
+
showModelFetcher: true,
|
58
|
+
},
|
59
|
+
url: 'https://modelscope.cn',
|
60
|
+
};
|
61
|
+
|
62
|
+
export default ModelScope;
|
package/src/libs/model-runtime/modelscope/index.ts
CHANGED
@@ -0,0 +1,69 @@
|
|
1
|
+
|
2
|
+
import type { ChatModelCard } from '@/types/llm';
|
3
|
+
|
4
|
+
import { ModelProvider } from '../types';
|
5
|
+
import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
|
6
|
+
|
7
|
+
export interface ModelScopeModelCard {
|
8
|
+
created: number;
|
9
|
+
id: string;
|
10
|
+
object: string;
|
11
|
+
owned_by: string;
|
12
|
+
}
|
13
|
+
|
14
|
+
export const LobeModelScopeAI = LobeOpenAICompatibleFactory({
|
15
|
+
baseURL: 'https://api-inference.modelscope.cn/v1',
|
16
|
+
debug: {
|
17
|
+
chatCompletion: () => process.env.DEBUG_MODELSCOPE_CHAT_COMPLETION === '1',
|
18
|
+
},
|
19
|
+
models: async ({ client }) => {
|
20
|
+
const { LOBE_DEFAULT_MODEL_LIST } = await import('@/config/aiModels');
|
21
|
+
|
22
|
+
const functionCallKeywords = ['qwen', 'deepseek', 'llama'];
|
23
|
+
|
24
|
+
const visionKeywords = ['qwen-vl', 'qwen2-vl', 'llava'];
|
25
|
+
|
26
|
+
const reasoningKeywords = ['qwq', 'deepseek-r1'];
|
27
|
+
|
28
|
+
try {
|
29
|
+
const modelsPage = (await client.models.list()) as any;
|
30
|
+
const modelList: ModelScopeModelCard[] = modelsPage.data || [];
|
31
|
+
|
32
|
+
return modelList
|
33
|
+
.map((model) => {
|
34
|
+
const knownModel = LOBE_DEFAULT_MODEL_LIST.find(
|
35
|
+
(m) => model.id.toLowerCase() === m.id.toLowerCase(),
|
36
|
+
);
|
37
|
+
|
38
|
+
const modelId = model.id.toLowerCase();
|
39
|
+
|
40
|
+
return {
|
41
|
+
contextWindowTokens: knownModel?.contextWindowTokens ?? undefined,
|
42
|
+
displayName: knownModel?.displayName ?? model.id,
|
43
|
+
enabled: knownModel?.enabled || false,
|
44
|
+
functionCall:
|
45
|
+
functionCallKeywords.some((keyword) => modelId.includes(keyword)) ||
|
46
|
+
knownModel?.abilities?.functionCall ||
|
47
|
+
false,
|
48
|
+
id: model.id,
|
49
|
+
reasoning:
|
50
|
+
reasoningKeywords.some((keyword) => modelId.includes(keyword)) ||
|
51
|
+
knownModel?.abilities?.reasoning ||
|
52
|
+
false,
|
53
|
+
vision:
|
54
|
+
visionKeywords.some((keyword) => modelId.includes(keyword)) ||
|
55
|
+
knownModel?.abilities?.vision ||
|
56
|
+
false,
|
57
|
+
};
|
58
|
+
})
|
59
|
+
.filter(Boolean) as ChatModelCard[];
|
60
|
+
} catch (error) {
|
61
|
+
console.warn(
|
62
|
+
'Failed to fetch ModelScope models. Please ensure your ModelScope API key is valid and your Alibaba Cloud account is properly bound:',
|
63
|
+
error,
|
64
|
+
);
|
65
|
+
return [];
|
66
|
+
}
|
67
|
+
},
|
68
|
+
provider: ModelProvider.ModelScope,
|
69
|
+
});
|
package/src/libs/model-runtime/runtimeMap.ts
CHANGED
@@ -1,17 +1,17 @@
|
|
1
1
|
import { LobeAi21AI } from './ai21';
|
2
2
|
import { LobeAi360AI } from './ai360';
|
3
|
-
import LobeAnthropicAI from './anthropic';
|
3
|
+
import { LobeAnthropicAI } from './anthropic';
|
4
4
|
import { LobeAzureOpenAI } from './azureOpenai';
|
5
5
|
import { LobeAzureAI } from './azureai';
|
6
6
|
import { LobeBaichuanAI } from './baichuan';
|
7
|
-
import LobeBedrockAI from './bedrock';
|
7
|
+
import { LobeBedrockAI } from './bedrock';
|
8
8
|
import { LobeCloudflareAI } from './cloudflare';
|
9
9
|
import { LobeCohereAI } from './cohere';
|
10
10
|
import { LobeDeepSeekAI } from './deepseek';
|
11
11
|
import { LobeFireworksAI } from './fireworksai';
|
12
12
|
import { LobeGiteeAI } from './giteeai';
|
13
13
|
import { LobeGithubAI } from './github';
|
14
|
-
import LobeGoogleAI from './google';
|
14
|
+
import { LobeGoogleAI } from './google';
|
15
15
|
import { LobeGroq } from './groq';
|
16
16
|
import { LobeHigressAI } from './higress';
|
17
17
|
import { LobeHuggingFaceAI } from './huggingface';
|
@@ -22,10 +22,11 @@ import { LobeJinaAI } from './jina';
|
|
22
22
|
import { LobeLMStudioAI } from './lmstudio';
|
23
23
|
import { LobeMinimaxAI } from './minimax';
|
24
24
|
import { LobeMistralAI } from './mistral';
|
25
|
+
import { LobeModelScopeAI } from './modelscope';
|
25
26
|
import { LobeMoonshotAI } from './moonshot';
|
26
27
|
import { LobeNovitaAI } from './novita';
|
27
28
|
import { LobeNvidiaAI } from './nvidia';
|
28
|
-
import LobeOllamaAI from './ollama';
|
29
|
+
import { LobeOllamaAI } from './ollama';
|
29
30
|
import { LobeOpenAI } from './openai';
|
30
31
|
import { LobeOpenRouterAI } from './openrouter';
|
31
32
|
import { LobePerplexityAI } from './perplexity';
|
@@ -75,6 +76,7 @@ export const providerRuntimeMap = {
|
|
75
76
|
lmstudio: LobeLMStudioAI,
|
76
77
|
minimax: LobeMinimaxAI,
|
77
78
|
mistral: LobeMistralAI,
|
79
|
+
modelscope: LobeModelScopeAI,
|
78
80
|
moonshot: LobeMoonshotAI,
|
79
81
|
novita: LobeNovitaAI,
|
80
82
|
nvidia: LobeNvidiaAI,
|
package/src/types/user/settings/keyVaults.ts
CHANGED
@@ -58,6 +58,7 @@ export interface UserKeyVaults extends SearchEngineKeyVaults {
|
|
58
58
|
lobehub?: any;
|
59
59
|
minimax?: OpenAICompatibleKeyVault;
|
60
60
|
mistral?: OpenAICompatibleKeyVault;
|
61
|
+
modelscope?: OpenAICompatibleKeyVault;
|
61
62
|
moonshot?: OpenAICompatibleKeyVault;
|
62
63
|
novita?: OpenAICompatibleKeyVault;
|
63
64
|
nvidia?: OpenAICompatibleKeyVault;
|