@lobehub/chat 1.91.3 → 1.92.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +69 -0
- package/changelog/v1.json +24 -0
- package/locales/ar/setting.json +1 -1
- package/locales/bg-BG/setting.json +1 -1
- package/locales/de-DE/setting.json +1 -1
- package/locales/en-US/setting.json +1 -1
- package/locales/es-ES/setting.json +1 -1
- package/locales/fa-IR/setting.json +1 -1
- package/locales/fr-FR/setting.json +1 -1
- package/locales/it-IT/setting.json +1 -1
- package/locales/ja-JP/setting.json +1 -1
- package/locales/ko-KR/setting.json +1 -1
- package/locales/nl-NL/setting.json +1 -1
- package/locales/pl-PL/setting.json +1 -1
- package/locales/pt-BR/setting.json +1 -1
- package/locales/ru-RU/setting.json +1 -1
- package/locales/tr-TR/setting.json +1 -1
- package/locales/vi-VN/setting.json +1 -1
- package/locales/zh-CN/setting.json +1 -1
- package/locales/zh-TW/setting.json +1 -1
- package/package.json +1 -1
- package/src/components/ModelSelect/index.tsx +6 -3
- package/src/config/aiModels/google.ts +25 -0
- package/src/config/aiModels/hunyuan.ts +44 -0
- package/src/config/aiModels/novita.ts +39 -3
- package/src/config/aiModels/openrouter.ts +0 -1
- package/src/config/aiModels/qwen.ts +48 -6
- package/src/config/aiModels/siliconcloud.ts +0 -106
- package/src/features/AgentSetting/AgentModal/index.tsx +3 -2
- package/src/features/ChatInput/ActionBar/Search/Controls.tsx +6 -2
- package/src/libs/model-runtime/utils/streams/vertex-ai.ts +12 -0
- package/src/locales/default/setting.ts +1 -1
- package/src/services/chat.ts +17 -9
- package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +23 -31
- package/src/store/user/slices/auth/selectors.test.ts +94 -0
- package/src/store/user/slices/auth/selectors.ts +3 -0
- package/src/utils/client/parserPlaceholder.test.ts +315 -0
- package/src/utils/client/parserPlaceholder.ts +192 -0
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,75 @@
 
 # Changelog
 
+### [Version 1.92.1](https://github.com/lobehub/lobe-chat/compare/v1.92.0...v1.92.1)
+
+<sup>Released on **2025-06-07**</sup>
+
+#### 💄 Styles
+
+- **ModelSelect**: Add responsive layout for mobile devices.
+- **misc**: Improve `{{username}}` placeholder variable, Update Gemini & Qwen models.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **ModelSelect**: Add responsive layout for mobile devices, closes [#7960](https://github.com/lobehub/lobe-chat/issues/7960) ([cb84c3e](https://github.com/lobehub/lobe-chat/commit/cb84c3e))
+- **misc**: Improve `{{username}}` placeholder variable, closes [#8100](https://github.com/lobehub/lobe-chat/issues/8100) ([95fd588](https://github.com/lobehub/lobe-chat/commit/95fd588))
+- **misc**: Update Gemini & Qwen models, closes [#8083](https://github.com/lobehub/lobe-chat/issues/8083) ([6308237](https://github.com/lobehub/lobe-chat/commit/6308237))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+## [Version 1.92.0](https://github.com/lobehub/lobe-chat/compare/v1.91.3...v1.92.0)
+
+<sup>Released on **2025-06-06**</sup>
+
+#### ✨ Features
+
+- **misc**: Support placeholder variables in prompts and input.
+
+#### 🐛 Bug Fixes
+
+- **misc**: Some web search bugs.
+
+#### 💄 Styles
+
+- **misc**: Support Vertex AI thought summaries.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's improved
+
+- **misc**: Support placeholder variables in prompts and input, closes [#8060](https://github.com/lobehub/lobe-chat/issues/8060) ([3752739](https://github.com/lobehub/lobe-chat/commit/3752739))
+
+#### What's fixed
+
+- **misc**: Some web search bugs, closes [#8068](https://github.com/lobehub/lobe-chat/issues/8068) ([bebe7a3](https://github.com/lobehub/lobe-chat/commit/bebe7a3))
+
+#### Styles
+
+- **misc**: Support Vertex AI thought summaries, closes [#8090](https://github.com/lobehub/lobe-chat/issues/8090) ([1355a2e](https://github.com/lobehub/lobe-chat/commit/1355a2e))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.91.3](https://github.com/lobehub/lobe-chat/compare/v1.91.2...v1.91.3)
 
 <sup>Released on **2025-06-05**</sup>
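The headline change across 1.92.0 and 1.92.1 is placeholder-variable support, implemented in the new package/src/utils/client/parserPlaceholder.ts (not shown in this diff). As orientation only, here is a minimal sketch of the idea, assuming a plain `{{variable}}` syntax and a caller-supplied value map; the `fillPlaceholders` name is hypothetical and not part of the package.

```ts
// Illustrative sketch only — NOT the parserPlaceholder.ts implementation shipped in 1.92.x.
// Assumes {{variable}} syntax and a caller-supplied map of values (e.g. { username: 'Ada' }).
const PLACEHOLDER_PATTERN = /\{\{\s*(\w+)\s*\}\}/g;

export const fillPlaceholders = (
  template: string,
  variables: Record<string, string | undefined>,
): string =>
  template.replace(PLACEHOLDER_PATTERN, (match, name: string) => {
    const value = variables[name];
    // Unknown placeholders are left untouched so templates stay inspectable.
    return value === undefined ? match : value;
  });

// fillPlaceholders('Hello {{username}}, today is {{date}}', { username: 'Ada' })
// => 'Hello Ada, today is {{date}}'
```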
package/changelog/v1.json
CHANGED
@@ -1,4 +1,28 @@
 [
+  {
+    "children": {
+      "improvements": [
+        "Improve {{username}} placeholder variable, Update Gemini & Qwen models."
+      ]
+    },
+    "date": "2025-06-07",
+    "version": "1.92.1"
+  },
+  {
+    "children": {
+      "features": [
+        "Support placeholder variables in prompts and input."
+      ],
+      "fixes": [
+        "Some web search bugs."
+      ],
+      "improvements": [
+        "Support Vertex AI thought summaries."
+      ]
+    },
+    "date": "2025-06-06",
+    "version": "1.92.0"
+  },
   {
     "children": {
       "fixes": [
package/locales/ar/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "سيتم ملء أحدث رسالة من المستخدم في هذا القالب",
-      "placeholder": "القالب المُعالج مسبقًا {{
+      "placeholder": "القالب المُعالج مسبقًا {{input_template}} سيتم استبداله بالمعلومات المُدخلة في الوقت الحقيقي",
       "title": "معالجة مُدخلات المستخدم"
     },
     "submit": "تحديث تفضيلات الدردشة",
package/locales/bg-BG/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Последното съобщение на потребителя ще бъде попълнено в този шаблон",
-      "placeholder": "Шаблонът за предварителна обработка {{
+      "placeholder": "Шаблонът за предварителна обработка {{input_template}} ще бъде заменен с информация за въвеждане в реално време",
       "title": "Предварителна обработка на потребителския вход"
     },
     "submit": "Актуализиране на предпочитанията за чат",
package/locales/de-DE/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Die neueste Benutzernachricht wird in dieses Template eingefügt",
-      "placeholder": "Vorlagen-{{
+      "placeholder": "Vorlagen-{{input_template}} werden durch Echtzeit-Eingabeinformationen ersetzt",
       "title": "Benutzereingabe-Vorverarbeitung"
     },
     "submit": "Chat-Präferenzen aktualisieren",
package/locales/en-US/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "The user's latest message will be filled into this template",
-      "placeholder": "Preprocessing template {{
+      "placeholder": "Preprocessing template {{input_template}} will be replaced with real-time input information",
       "title": "User Input Preprocessing"
     },
     "submit": "Update Chat Preferences",
package/locales/es-ES/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "El último mensaje del usuario se completará en esta plantilla",
-      "placeholder": "La plantilla de preprocesamiento {{
+      "placeholder": "La plantilla de preprocesamiento {{input_template}} se reemplazará por la información de entrada en tiempo real",
       "title": "Preprocesamiento de entrada del usuario"
     },
     "submit": "Actualizar preferencias de chat",
package/locales/fa-IR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "آخرین پیام کاربر در این قالب پر میشود",
-      "placeholder": "قالب پیشپردازش {{
+      "placeholder": "قالب پیشپردازش {{input_template}} با اطلاعات ورودی لحظهای جایگزین میشود",
       "title": "پیشپردازش ورودی کاربر"
     },
     "submit": "بهروزرسانی ترجیحات چت",
package/locales/fr-FR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Le dernier message de l'utilisateur sera rempli dans ce modèle",
-      "placeholder": "Le modèle de prétraitement {{
+      "placeholder": "Le modèle de prétraitement {{input_template}} sera remplacé par les informations d'entrée en temps réel",
       "title": "Modèle de prétraitement de l'entrée utilisateur"
     },
     "submit": "Mettre à jour les préférences de chat",
package/locales/it-IT/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Il template verrà popolato con l'ultimo messaggio dell'utente",
-      "placeholder": "Il modello di input {{
+      "placeholder": "Il modello di input {{input_template}} verrà sostituito con le informazioni in tempo reale",
       "title": "Pre-elaborazione dell'input dell'utente"
     },
     "submit": "Aggiorna preferenze chat",
package/locales/ja-JP/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "ユーザーの最新メッセージがこのテンプレートに埋め込まれます",
-      "placeholder": "入力テンプレート {{
+      "placeholder": "入力テンプレート {{input_template}} はリアルタイムの入力情報に置き換えられます",
       "title": "ユーザー入力のプリプロセス"
     },
     "submit": "チャットの好みを更新",
package/locales/ko-KR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "사용자의 최신 메시지가이 템플릿에 채워집니다",
-      "placeholder": "입력 템플릿 {{
+      "placeholder": "입력 템플릿 {{input_template}}은 실시간 입력 정보로 대체됩니다",
       "title": "사용자 입력 전처리"
     },
     "submit": "채팅 선호도 업데이트",
package/locales/nl-NL/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "De meest recente gebruikersboodschap wordt ingevuld in dit sjabloon",
-      "placeholder": "Voorbewerkingssjabloon {{
+      "placeholder": "Voorbewerkingssjabloon {{input_template}} wordt vervangen door realtime invoer",
       "title": "Voorbewerking van gebruikersinvoer"
     },
     "submit": "Chatvoorkeuren bijwerken",
package/locales/pl-PL/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Ostatnia wiadomość użytkownika zostanie wypełniona w tym szablonie",
-      "placeholder": "Szablon wejściowy {{
+      "placeholder": "Szablon wejściowy {{input_template}} zostanie zastąpiony rzeczywistą wiadomością",
       "title": "Szablon wejściowy"
     },
     "submit": "Zaktualizuj preferencje czatu",
package/locales/pt-BR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "A última mensagem do usuário será preenchida neste modelo",
-      "placeholder": "O modelo de pré-processamento {{
+      "placeholder": "O modelo de pré-processamento {{input_template}} será substituído pela entrada em tempo real",
       "title": "Pré-processamento de entrada do usuário"
     },
     "submit": "Atualizar preferências de chat",
package/locales/ru-RU/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Последнее сообщение пользователя будет использовано в этом шаблоне",
-      "placeholder": "Шаблон ввода {{
+      "placeholder": "Шаблон ввода {{input_template}} будет заменен на реальные данные",
       "title": "Шаблон ввода пользователя"
     },
     "submit": "Обновить предпочтения чата",
package/locales/tr-TR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Kullanıcının son mesajı bu şablona doldurulur",
-      "placeholder": "Ön işleme şablonu {{
+      "placeholder": "Ön işleme şablonu {{input_template}}, gerçek zamanlı giriş bilgileri ile değiştirilir",
       "title": "Kullanıcı Girişi Ön İşleme"
     },
     "submit": "Sohbet tercihlerini güncelle",
package/locales/vi-VN/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Tin nhắn mới nhất của người dùng sẽ được điền vào mẫu này",
-      "placeholder": "Mẫu xử lý trước {{
+      "placeholder": "Mẫu xử lý trước {{input_template}} sẽ được thay thế bằng thông tin nhập thời gian thực",
       "title": "Mẫu xử lý đầu vào của người dùng"
     },
     "submit": "Cập nhật sở thích trò chuyện",
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.91.3",
+  "version": "1.92.1",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/components/ModelSelect/index.tsx
CHANGED
@@ -1,7 +1,7 @@
 import { IconAvatarProps, ModelIcon, ProviderIcon } from '@lobehub/icons';
 import { Avatar, Icon, Tag, Tooltip } from '@lobehub/ui';
 import { Typography } from 'antd';
-import { createStyles } from 'antd-style';
+import { createStyles, useResponsive } from 'antd-style';
 import {
   Infinity,
   AtomIcon,
@@ -167,17 +167,20 @@ interface ModelItemRenderProps extends ChatModelCard {
 }
 
 export const ModelItemRender = memo<ModelItemRenderProps>(({ showInfoTag = true, ...model }) => {
+  const { mobile } = useResponsive();
   return (
     <Flexbox
       align={'center'}
       gap={32}
       horizontal
       justify={'space-between'}
-      style={{ overflow: 'hidden', position: 'relative' }}
+      style={{ overflow: 'hidden', position: 'relative', width: mobile ? '80vw' : 'auto' }}
     >
       <Flexbox align={'center'} gap={8} horizontal style={{ overflow: 'hidden' }}>
         <ModelIcon model={model.id} size={20} />
-        <Typography.Text
+        <Typography.Text style={mobile ? { overflowX: 'auto', whiteSpace: 'nowrap' } : {}}>
+          {model.displayName || model.id}
+        </Typography.Text>
       </Flexbox>
       {showInfoTag && <ModelInfoTags {...model} />}
     </Flexbox>
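The ModelSelect change above relies on antd-style's `useResponsive` hook, which exposes a `mobile` flag that the component reads at render time to switch styles. A minimal standalone sketch of that same pattern follows; the `ResponsiveLabel` component and its props are hypothetical and not part of the package.

```tsx
// Minimal sketch of the responsive-style pattern — a hypothetical component, not ModelSelect itself.
import { useResponsive } from 'antd-style';
import { memo } from 'react';

const ResponsiveLabel = memo<{ text: string }>(({ text }) => {
  // `mobile` is true below antd-style's mobile breakpoint; the style switches per render.
  const { mobile } = useResponsive();

  return (
    <span
      style={
        mobile
          ? { display: 'block', overflowX: 'auto', whiteSpace: 'nowrap', width: '80vw' }
          : undefined
      }
    >
      {text}
    </span>
  );
});

export default ResponsiveLabel;
```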
package/src/config/aiModels/google.ts
CHANGED
@@ -1,6 +1,31 @@
 import { AIChatModelCard } from '@/types/aiModel';
 
 const googleChatModels: AIChatModelCard[] = [
+  {
+    abilities: {
+      functionCall: true,
+      reasoning: true,
+      search: true,
+      vision: true,
+    },
+    contextWindowTokens: 1_048_576 + 65_536,
+    description:
+      'Gemini 2.5 Pro Preview 是 Google 最先进的思维模型,能够对代码、数学和STEM领域的复杂问题进行推理,以及使用长上下文分析大型数据集、代码库和文档。',
+    displayName: 'Gemini 2.5 Pro Preview 06-05 (Paid)',
+    id: 'gemini-2.5-pro-preview-06-05',
+    maxOutput: 65_536,
+    pricing: {
+      input: 1.25, // prompts <= 200k tokens
+      output: 10, // prompts <= 200k tokens
+    },
+    releasedAt: '2025-06-05',
+    settings: {
+      extendParams: ['enableReasoning', 'reasoningBudgetToken'],
+      searchImpl: 'params',
+      searchProvider: 'google',
+    },
+    type: 'chat',
+  },
   {
     abilities: {
       functionCall: true,
package/src/config/aiModels/hunyuan.ts
CHANGED
@@ -25,6 +25,28 @@ const hunyuanChatModels: AIChatModelCard[] = [
     },
     type: 'chat',
   },
+  {
+    abilities: {
+      reasoning: true,
+      search: true,
+    },
+    contextWindowTokens: 92_000,
+    description:
+      '优化文本创作、作文写作,优化代码前端、数学、逻辑推理等理科能力,提升指令遵循能力。',
+    displayName: 'Hunyuan T1 20250529',
+    id: 'hunyuan-t1-20250529',
+    maxOutput: 64_000,
+    pricing: {
+      currency: 'CNY',
+      input: 1,
+      output: 4,
+    },
+    releasedAt: '2025-05-29',
+    settings: {
+      searchImpl: 'params',
+    },
+    type: 'chat',
+  },
   {
     abilities: {
       reasoning: true,
@@ -258,6 +280,28 @@ const hunyuanChatModels: AIChatModelCard[] = [
     },
     type: 'chat',
   },
+  {
+    abilities: {
+      functionCall: true,
+      search: true,
+    },
+    contextWindowTokens: 44_000,
+    description:
+      '预训练底座升级,写作、阅读理解能力提升,较大幅度提升代码和理科能力,复杂指令遵循等持续提升。',
+    displayName: 'Hunyuan TurboS 20250604',
+    id: 'hunyuan-turbos-20250604',
+    maxOutput: 16_000,
+    pricing: {
+      currency: 'CNY',
+      input: 0.8,
+      output: 2,
+    },
+    releasedAt: '2025-06-04',
+    settings: {
+      searchImpl: 'params',
+    },
+    type: 'chat',
+  },
   {
     abilities: {
       functionCall: true,
package/src/config/aiModels/novita.ts
CHANGED
@@ -43,6 +43,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 131_072,
     displayName: 'Llama 3.3 70B Instruct',
     id: 'meta-llama/llama-3.3-70b-instruct',
@@ -63,6 +66,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 131_072,
     displayName: 'Llama 4 Scout 17B Instruct',
     enabled: true,
@@ -74,6 +80,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 1_048_576,
     displayName: 'Llama 4 Maverick 17B Instruct',
     enabled: true,
@@ -129,7 +138,7 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
-    contextWindowTokens:
+    contextWindowTokens: 60_288,
     description: 'Mistral Nemo 是多语言支持和高性能编程的7.3B参数模型。',
     displayName: 'Mistral Nemo',
     id: 'mistralai/mistral-nemo',
@@ -205,6 +214,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 64_000,
     displayName: 'Deepseek V3 Turbo',
     id: 'deepseek/deepseek-v3-turbo',
@@ -215,6 +227,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 128_000,
     displayName: 'Deepseek V3 0324',
     enabled: true,
@@ -242,7 +257,6 @@ const novitaChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
-      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 128_000,
@@ -256,6 +270,7 @@ const novitaChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
+      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 64_000,
@@ -330,6 +345,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'Qwen2.5 72B Instruct',
     id: 'qwen/qwen-2.5-72b-instruct',
@@ -373,7 +391,7 @@ const novitaChatModels: AIChatModelCard[] = [
     abilities: {
       vision: true,
     },
-    contextWindowTokens:
+    contextWindowTokens: 32_768,
     displayName: 'Qwen2.5 VL 72B Instruct',
     enabled: true,
     id: 'qwen/qwen2.5-vl-72b-instruct',
@@ -394,6 +412,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_768,
     displayName: 'Llama 3.2 3B Instruct',
     id: 'meta-llama/llama-3.2-3b-instruct',
@@ -434,6 +455,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'GLM 4 9B 0414',
     id: 'thudm/glm-4-9b-0414',
@@ -444,6 +468,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'GLM Z1 9B 0414',
     id: 'thudm/glm-z1-9b-0414',
@@ -454,6 +481,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'GLM Z1 32B 0414',
     id: 'thudm/glm-z1-32b-0414',
@@ -464,6 +494,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'GLM 4 32B 0414',
     id: 'thudm/glm-4-32b-0414',
@@ -474,6 +507,9 @@ const novitaChatModels: AIChatModelCard[] = [
     type: 'chat',
   },
   {
+    abilities: {
+      functionCall: true,
+    },
     contextWindowTokens: 32_000,
     displayName: 'GLM Z1 Rumination 32B 0414',
     id: 'thudm/glm-z1-rumination-32b-0414',