@lobehub/chat 1.91.2 → 1.92.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +2 -0
- package/CHANGELOG.md +74 -0
- package/changelog/v1.json +27 -0
- package/locales/ar/setting.json +1 -1
- package/locales/bg-BG/setting.json +1 -1
- package/locales/de-DE/setting.json +1 -1
- package/locales/en-US/setting.json +1 -1
- package/locales/es-ES/setting.json +1 -1
- package/locales/fa-IR/setting.json +1 -1
- package/locales/fr-FR/setting.json +1 -1
- package/locales/it-IT/setting.json +1 -1
- package/locales/ja-JP/setting.json +1 -1
- package/locales/ko-KR/setting.json +1 -1
- package/locales/nl-NL/setting.json +1 -1
- package/locales/pl-PL/setting.json +1 -1
- package/locales/pt-BR/setting.json +1 -1
- package/locales/ru-RU/setting.json +1 -1
- package/locales/tr-TR/setting.json +1 -1
- package/locales/vi-VN/setting.json +1 -1
- package/locales/zh-CN/setting.json +1 -1
- package/locales/zh-TW/setting.json +1 -1
- package/package.json +1 -1
- package/src/app/[variants]/(main)/profile/features/ClerkProfile.tsx +1 -4
- package/src/config/aiModels/modelscope.ts +4 -1
- package/src/config/aiModels/novita.ts +2 -0
- package/src/config/aiModels/openrouter.ts +2 -0
- package/src/config/aiModels/siliconcloud.ts +1 -0
- package/src/config/modelProviders/anthropic.ts +30 -11
- package/src/config/modelProviders/openai.ts +14 -0
- package/src/features/AgentSetting/AgentModal/index.tsx +3 -2
- package/src/features/ChatInput/ActionBar/Search/Controls.tsx +6 -2
- package/src/layout/AuthProvider/Clerk/useAppearance.ts +1 -4
- package/src/libs/model-runtime/utils/streams/vertex-ai.ts +12 -0
- package/src/locales/default/setting.ts +1 -1
- package/src/services/chat.ts +17 -9
- package/src/store/chat/slices/aiChat/actions/generateAIChat.ts +23 -31
- package/src/store/user/slices/auth/selectors.test.ts +18 -0
- package/src/store/user/slices/auth/selectors.ts +1 -0
- package/src/utils/client/parserPlaceholder.test.ts +326 -0
- package/src/utils/client/parserPlaceholder.ts +190 -0
- package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -0
package/.eslintrc.js
CHANGED
@@ -19,6 +19,8 @@ config.rules['unicorn/no-array-for-each'] = 0;
 config.rules['unicorn/prefer-number-properties'] = 0;
 config.rules['unicorn/prefer-query-selector'] = 0;
 config.rules['unicorn/no-array-callback-reference'] = 0;
+// FIXME: Linting error in src/app/[variants]/(main)/chat/features/Migration/DBReader.ts, the fundamental solution should be upgrading typescript-eslint
+config.rules['@typescript-eslint/no-useless-constructor'] = 0;
 
 config.overrides = [
   {
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,80 @@
 
 # Changelog
 
+## [Version 1.92.0](https://github.com/lobehub/lobe-chat/compare/v1.91.3...v1.92.0)
+
+<sup>Released on **2025-06-06**</sup>
+
+#### ✨ Features
+
+- **misc**: Support placeholder variables in prompts and input.
+
+#### 🐛 Bug Fixes
+
+- **misc**: Some web search bugs.
+
+#### 💄 Styles
+
+- **misc**: Support Vertex AI thought summaries.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's improved
+
+- **misc**: Support placeholder variables in prompts and input, closes [#8060](https://github.com/lobehub/lobe-chat/issues/8060) ([3752739](https://github.com/lobehub/lobe-chat/commit/3752739))
+
+#### What's fixed
+
+- **misc**: Some web search bugs, closes [#8068](https://github.com/lobehub/lobe-chat/issues/8068) ([bebe7a3](https://github.com/lobehub/lobe-chat/commit/bebe7a3))
+
+#### Styles
+
+- **misc**: Support Vertex AI thought summaries, closes [#8090](https://github.com/lobehub/lobe-chat/issues/8090) ([1355a2e](https://github.com/lobehub/lobe-chat/commit/1355a2e))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.91.3](https://github.com/lobehub/lobe-chat/compare/v1.91.2...v1.91.3)
+
+<sup>Released on **2025-06-05**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Correct deepseek R1 fc support display.
+
+#### 💄 Styles
+
+- **misc**: Add openAI websearch and claude 4 to modelproviders.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Correct deepseek R1 fc support display, closes [#8069](https://github.com/lobehub/lobe-chat/issues/8069) ([ed5bb5f](https://github.com/lobehub/lobe-chat/commit/ed5bb5f))
+
+#### Styles
+
+- **misc**: Add openAI websearch and claude 4 to modelproviders, closes [#7988](https://github.com/lobehub/lobe-chat/issues/7988) ([95994f4](https://github.com/lobehub/lobe-chat/commit/95994f4))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.91.2](https://github.com/lobehub/lobe-chat/compare/v1.91.1...v1.91.2)
 
 <sup>Released on **2025-06-05**</sup>
package/changelog/v1.json
CHANGED
@@ -1,4 +1,31 @@
 [
+  {
+    "children": {
+      "features": [
+        "Support placeholder variables in prompts and input."
+      ],
+      "fixes": [
+        "Some web search bugs."
+      ],
+      "improvements": [
+        "Support Vertex AI thought summaries."
+      ]
+    },
+    "date": "2025-06-06",
+    "version": "1.92.0"
+  },
+  {
+    "children": {
+      "fixes": [
+        "Correct deepseek R1 fc support display."
+      ],
+      "improvements": [
+        "Add openAI websearch and claude 4 to modelproviders."
+      ]
+    },
+    "date": "2025-06-05",
+    "version": "1.91.3"
+  },
   {
     "children": {
       "improvements": [
package/locales/ar/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "سيتم ملء أحدث رسالة من المستخدم في هذا القالب",
-      "placeholder": "القالب المُعالج مسبقًا {{
+      "placeholder": "القالب المُعالج مسبقًا {{input_template}} سيتم استبداله بالمعلومات المُدخلة في الوقت الحقيقي",
       "title": "معالجة مُدخلات المستخدم"
     },
     "submit": "تحديث تفضيلات الدردشة",
package/locales/bg-BG/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Последното съобщение на потребителя ще бъде попълнено в този шаблон",
-      "placeholder": "Шаблонът за предварителна обработка {{
+      "placeholder": "Шаблонът за предварителна обработка {{input_template}} ще бъде заменен с информация за въвеждане в реално време",
       "title": "Предварителна обработка на потребителския вход"
     },
     "submit": "Актуализиране на предпочитанията за чат",
package/locales/de-DE/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Die neueste Benutzernachricht wird in dieses Template eingefügt",
-      "placeholder": "Vorlagen-{{
+      "placeholder": "Vorlagen-{{input_template}} werden durch Echtzeit-Eingabeinformationen ersetzt",
       "title": "Benutzereingabe-Vorverarbeitung"
     },
     "submit": "Chat-Präferenzen aktualisieren",
package/locales/en-US/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "The user's latest message will be filled into this template",
-      "placeholder": "Preprocessing template {{
+      "placeholder": "Preprocessing template {{input_template}} will be replaced with real-time input information",
       "title": "User Input Preprocessing"
     },
     "submit": "Update Chat Preferences",
package/locales/es-ES/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "El último mensaje del usuario se completará en esta plantilla",
-      "placeholder": "La plantilla de preprocesamiento {{
+      "placeholder": "La plantilla de preprocesamiento {{input_template}} se reemplazará por la información de entrada en tiempo real",
       "title": "Preprocesamiento de entrada del usuario"
     },
     "submit": "Actualizar preferencias de chat",
package/locales/fa-IR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "آخرین پیام کاربر در این قالب پر میشود",
-      "placeholder": "قالب پیشپردازش {{
+      "placeholder": "قالب پیشپردازش {{input_template}} با اطلاعات ورودی لحظهای جایگزین میشود",
       "title": "پیشپردازش ورودی کاربر"
     },
     "submit": "بهروزرسانی ترجیحات چت",
package/locales/fr-FR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Le dernier message de l'utilisateur sera rempli dans ce modèle",
-      "placeholder": "Le modèle de prétraitement {{
+      "placeholder": "Le modèle de prétraitement {{input_template}} sera remplacé par les informations d'entrée en temps réel",
       "title": "Modèle de prétraitement de l'entrée utilisateur"
     },
     "submit": "Mettre à jour les préférences de chat",
package/locales/it-IT/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Il template verrà popolato con l'ultimo messaggio dell'utente",
-      "placeholder": "Il modello di input {{
+      "placeholder": "Il modello di input {{input_template}} verrà sostituito con le informazioni in tempo reale",
       "title": "Pre-elaborazione dell'input dell'utente"
     },
     "submit": "Aggiorna preferenze chat",
package/locales/ja-JP/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "ユーザーの最新メッセージがこのテンプレートに埋め込まれます",
-      "placeholder": "入力テンプレート {{
+      "placeholder": "入力テンプレート {{input_template}} はリアルタイムの入力情報に置き換えられます",
       "title": "ユーザー入力のプリプロセス"
     },
     "submit": "チャットの好みを更新",
package/locales/ko-KR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "사용자의 최신 메시지가이 템플릿에 채워집니다",
-      "placeholder": "입력 템플릿 {{
+      "placeholder": "입력 템플릿 {{input_template}}은 실시간 입력 정보로 대체됩니다",
       "title": "사용자 입력 전처리"
     },
     "submit": "채팅 선호도 업데이트",
package/locales/nl-NL/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "De meest recente gebruikersboodschap wordt ingevuld in dit sjabloon",
-      "placeholder": "Voorbewerkingssjabloon {{
+      "placeholder": "Voorbewerkingssjabloon {{input_template}} wordt vervangen door realtime invoer",
       "title": "Voorbewerking van gebruikersinvoer"
     },
     "submit": "Chatvoorkeuren bijwerken",
package/locales/pl-PL/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Ostatnia wiadomość użytkownika zostanie wypełniona w tym szablonie",
-      "placeholder": "Szablon wejściowy {{
+      "placeholder": "Szablon wejściowy {{input_template}} zostanie zastąpiony rzeczywistą wiadomością",
       "title": "Szablon wejściowy"
     },
     "submit": "Zaktualizuj preferencje czatu",
package/locales/pt-BR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "A última mensagem do usuário será preenchida neste modelo",
-      "placeholder": "O modelo de pré-processamento {{
+      "placeholder": "O modelo de pré-processamento {{input_template}} será substituído pela entrada em tempo real",
       "title": "Pré-processamento de entrada do usuário"
     },
     "submit": "Atualizar preferências de chat",
package/locales/ru-RU/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Последнее сообщение пользователя будет использовано в этом шаблоне",
-      "placeholder": "Шаблон ввода {{
+      "placeholder": "Шаблон ввода {{input_template}} будет заменен на реальные данные",
       "title": "Шаблон ввода пользователя"
     },
     "submit": "Обновить предпочтения чата",
package/locales/tr-TR/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Kullanıcının son mesajı bu şablona doldurulur",
-      "placeholder": "Ön işleme şablonu {{
+      "placeholder": "Ön işleme şablonu {{input_template}}, gerçek zamanlı giriş bilgileri ile değiştirilir",
       "title": "Kullanıcı Girişi Ön İşleme"
     },
     "submit": "Sohbet tercihlerini güncelle",
package/locales/vi-VN/setting.json
CHANGED
@@ -220,7 +220,7 @@
     },
     "inputTemplate": {
       "desc": "Tin nhắn mới nhất của người dùng sẽ được điền vào mẫu này",
-      "placeholder": "Mẫu xử lý trước {{
+      "placeholder": "Mẫu xử lý trước {{input_template}} sẽ được thay thế bằng thông tin nhập thời gian thực",
       "title": "Mẫu xử lý đầu vào của người dùng"
     },
     "submit": "Cập nhật sở thích trò chuyện",
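The `{{input_template}}` token in every locale string above comes from the 1.92.0 feature "Support placeholder variables in prompts and input"; the actual parsing lives in `package/src/utils/client/parserPlaceholder.ts` (+190 lines), which this excerpt does not show. As a rough, hypothetical sketch only (the helper name and behavior below are assumptions, not the package's implementation), substitution of such placeholders can look like this:

```ts
// Hypothetical sketch of {{variable}} substitution; the real logic is in
// src/utils/client/parserPlaceholder.ts, which this diff excerpt does not include.
const PLACEHOLDER = /{{\s*(\w+)\s*}}/g;

// Replace each known {{name}} with its value and leave unknown placeholders untouched.
export const fillPlaceholders = (template: string, values: Record<string, string>): string =>
  template.replaceAll(PLACEHOLDER, (match, name: string) => values[name] ?? match);

// Example with the input preprocessing template described by the locale strings above:
fillPlaceholders('Answer concisely: {{input_template}}', { input_template: 'What is new in 1.92.0?' });
// => 'Answer concisely: What is new in 1.92.0?'
```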
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.91.2",
+  "version": "1.92.0",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/app/[variants]/(main)/profile/features/ClerkProfile.tsx
CHANGED
@@ -50,10 +50,7 @@ export const useStyles = createStyles(
     scrollBox: css`
       background: transparent;
     `,
-  }) as Partial<{
-    // eslint-disable-next-line unused-imports/no-unused-vars
-    [k in keyof ElementsConfig]: any;
-  }>,
+  }) as Partial<Record<keyof ElementsConfig, any>>,
 );
 
 const Client = memo<{ mobile?: boolean }>(({ mobile }) => {
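This hunk (and the identical one in `package/src/layout/AuthProvider/Clerk/useAppearance.ts` further down) replaces an inline mapped type with the built-in `Record` utility, which removes the need for the `eslint-disable` comment on the unused key variable. A small illustration of why the two forms are interchangeable, using a simplified stand-in for `ElementsConfig`:

```ts
// `ElementsConfig` is simplified here purely for illustration.
type ElementsConfig = { card: unknown; scrollBox: unknown };

// Before: an inline mapped type whose key variable `k` trips unused-imports/no-unused-vars.
type StylesBefore = Partial<{ [k in keyof ElementsConfig]: any }>;

// After: Record<K, V> expands to the same mapped type, with no key variable to lint.
type StylesAfter = Partial<Record<keyof ElementsConfig, any>>;

// Both accept exactly the same values.
const styles: StylesBefore & StylesAfter = { card: {} };
```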
package/src/config/aiModels/modelscope.ts
CHANGED
@@ -4,9 +4,11 @@ const modelscopeChatModels: AIChatModelCard[] = [
   {
     abilities: {
       functionCall: true,
+      reasoning: true,
     },
     contextWindowTokens: 131_072,
-    description:
+    description:
+      'DeepSeek R1 通过利用增加的计算资源和在后训练过程中引入算法优化机制,显著提高了其推理和推断能力的深度。该模型在各种基准评估中表现出色,包括数学、编程和一般逻辑方面。其整体性能现已接近领先模型,如 O3 和 Gemini 2.5 Pro。',
     displayName: 'DeepSeek-R1-0528',
     enabled: true,
     id: 'deepseek-ai/DeepSeek-R1-0528',
@@ -26,6 +28,7 @@ const modelscopeChatModels: AIChatModelCard[] = [
   {
     abilities: {
       functionCall: true,
+      reasoning: true,
     },
     contextWindowTokens: 131_072,
     description: 'DeepSeek-R1是DeepSeek最新的推理模型,专注于复杂推理任务。',
package/src/config/aiModels/novita.ts
CHANGED
@@ -227,6 +227,7 @@ const novitaChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
+      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 128_000,
@@ -241,6 +242,7 @@ const novitaChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
+      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 128_000,
package/src/config/aiModels/openrouter.ts
CHANGED
@@ -501,6 +501,7 @@ const openrouterChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
+      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 163_840,
@@ -517,6 +518,7 @@ const openrouterChatModels: AIChatModelCard[] = [
   },
   {
     abilities: {
+      functionCall: true,
       reasoning: true,
     },
     contextWindowTokens: 163_840,
package/src/config/modelProviders/anthropic.ts
CHANGED
@@ -6,25 +6,44 @@ const Anthropic: ModelProviderCard = {
   {
     contextWindowTokens: 200_000,
     description:
-      'Claude
-    displayName: 'Claude
+      'Claude 4 Opus 是 Anthropic 最强大的下一代模型,具有卓越的推理能力和创造力,适用于最复杂的任务和高级分析。',
+    displayName: 'Claude 4 Opus',
     enabled: true,
     functionCall: true,
-    id: 'claude-
-    maxOutput:
+    id: 'claude-opus-4-20250514',
+    maxOutput: 32_000,
     pricing: {
-      cachedInput:
-      input:
-      output:
-      writeCacheInput:
+      cachedInput: 7.5,
+      input: 30,
+      output: 150,
+      writeCacheInput: 37.5,
     },
-    releasedAt: '2025-
+    releasedAt: '2025-05-14',
+    vision: true,
+  },
+  {
+    contextWindowTokens: 200_000,
+    description:
+      'Claude 4 Sonnet 提供了优异的性能和速度平衡,是新一代模型中的理想选择,适用于广泛的企业和创意任务。',
+    displayName: 'Claude 4 Sonnet',
+    enabled: true,
+    functionCall: true,
+    id: 'claude-sonnet-4-20250514',
+    maxOutput: 64_000,
+    pricing: {
+      cachedInput: 1.5,
+      input: 6,
+      output: 30,
+      writeCacheInput: 7.5,
+    },
+    releasedAt: '2025-05-14',
+    vision: true,
   },
   {
     contextWindowTokens: 200_000,
     description:
-      'Claude 3.7 sonnet
-    displayName: 'Claude 3.7 Sonnet
+      'Claude 3.7 sonnet 是 Anthropic 最快的下一代模型。与 Claude 3 Haiku 相比,Claude 3.7 Sonnet 在各项技能上都有所提升,并在许多智力基准测试中超越了上一代最大的模型 Claude 3 Opus。',
+    displayName: 'Claude 3.7 Sonnet',
     enabled: true,
     functionCall: true,
     id: 'claude-3-7-sonnet-20250219',
package/src/config/modelProviders/openai.ts
CHANGED
@@ -78,6 +78,20 @@ const OpenAI: ModelProviderCard = {
     },
     vision: true,
   },
+  {
+    contextWindowTokens: 128_000,
+    description: 'GPT-4o mini search preview 是一个专门为搜索功能优化的预览版本,具有增强的网络搜索能力和实时信息检索功能。',
+    displayName: 'GPT-4o mini Search Preview',
+    functionCall: true,
+    id: 'gpt-4o-mini-search-preview',
+    maxOutput: 16_384,
+    pricing: {
+      input: 0.15,
+      output: 0.6,
+    },
+    releasedAt: '2024-12-01',
+    vision: true,
+  },
   {
     contextWindowTokens: 128_000,
     description:
package/src/features/AgentSetting/AgentModal/index.tsx
CHANGED
@@ -1,7 +1,7 @@
 'use client';
 
 import { Form, type FormGroupItemType, SliderWithInput } from '@lobehub/ui';
-import { Switch } from 'antd';
+import { Form as AntdForm, Switch } from 'antd';
 import isEqual from 'fast-deep-equal';
 import { memo } from 'react';
 import { useTranslation } from 'react-i18next';
@@ -15,6 +15,7 @@ import { selectors, useStore } from '../store';
 const AgentModal = memo(() => {
   const { t } = useTranslation('setting');
   const [form] = Form.useForm();
+  const enableMaxTokens = AntdForm.useWatch(['chatConfig', 'enableMaxTokens'], form);
   const config = useStore(selectors.currentAgentConfig, isEqual);
 
   const updateConfig = useStore((s) => s.setAgentConfig);
@@ -69,7 +70,7 @@ const AgentModal = memo(() => {
       children: <SliderWithInput max={32_000} min={0} step={100} unlimitedInput={true} />,
       desc: t('settingModel.maxTokens.desc'),
       divider: false,
-      hidden: !
+      hidden: !enableMaxTokens,
       label: t('settingModel.maxTokens.title'),
       name: ['params', 'max_tokens'],
       tag: 'max_tokens',
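The hunk above wires antd's `Form.useWatch` into the modal so the max_tokens row hides and shows reactively as the `enableMaxTokens` switch is toggled in the form itself. The component below is a hypothetical, stripped-down illustration of that pattern using plain antd components (the real AgentModal builds its items through @lobehub/ui's Form), not the package's actual code:

```tsx
// Illustrative only: the Form.useWatch pattern from the hunk above, rebuilt with
// plain antd v5 components. Field names mirror the diff; everything else is assumed.
import { Form, InputNumber, Switch } from 'antd';

const MaxTokensFields = () => {
  const [form] = Form.useForm();
  // Re-renders whenever ['chatConfig', 'enableMaxTokens'] changes inside the form store.
  const enableMaxTokens = Form.useWatch(['chatConfig', 'enableMaxTokens'], form);

  return (
    <Form form={form}>
      <Form.Item label="Enable max tokens" name={['chatConfig', 'enableMaxTokens']} valuePropName="checked">
        <Switch />
      </Form.Item>
      {/* Mirrors `hidden: !enableMaxTokens`: the number field only appears once the switch is on. */}
      <Form.Item hidden={!enableMaxTokens} label="max_tokens" name={['params', 'max_tokens']}>
        <InputNumber max={32_000} min={0} step={100} />
      </Form.Item>
    </Form>
  );
};

export default MaxTokensFields;
```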
package/src/features/ChatInput/ActionBar/Search/Controls.tsx
CHANGED
@@ -9,7 +9,7 @@ import { Center, Flexbox } from 'react-layout-kit';
 
 import { useAgentStore } from '@/store/agent';
 import { agentChatConfigSelectors, agentSelectors } from '@/store/agent/slices/chat';
-import { aiModelSelectors, useAiInfraStore } from '@/store/aiInfra';
+import { aiModelSelectors, aiProviderSelectors, useAiInfraStore } from '@/store/aiInfra';
 import { SearchMode } from '@/types/search';
 
 import FCSearchModel from './FCSearchModel';
@@ -99,6 +99,9 @@ const Controls = memo(() => {
   ]);
 
   const supportFC = useAiInfraStore(aiModelSelectors.isModelSupportToolUse(model, provider));
+  const isProviderHasBuiltinSearchConfig = useAiInfraStore(
+    aiProviderSelectors.isProviderHasBuiltinSearchConfig(provider),
+  );
   const isModelHasBuiltinSearchConfig = useAiInfraStore(
     aiModelSelectors.isModelHasBuiltinSearchConfig(model, provider),
   );
@@ -119,6 +122,7 @@ const Controls = memo(() => {
   ];
 
   const showDivider = isModelHasBuiltinSearchConfig || !supportFC;
+  const showModelBuiltinSearch = isModelHasBuiltinSearchConfig || isProviderHasBuiltinSearchConfig;
 
   return (
     <Flexbox gap={4}>
@@ -126,7 +130,7 @@ const Controls = memo(() => {
         <Item {...option} key={option.value} />
       ))}
       {showDivider && <Divider style={{ margin: 0 }} />}
-      {
+      {showModelBuiltinSearch && <ModelBuiltinSearch />}
       {!supportFC && <FCSearchModel />}
     </Flexbox>
   );
package/src/layout/AuthProvider/Clerk/useAppearance.ts
CHANGED
@@ -89,10 +89,7 @@ export const useStyles = createStyles(
     socialButtonsBlockButton__google: css`
       order: -1;
     `,
-  }) as Partial<{
-    // eslint-disable-next-line unused-imports/no-unused-vars
-    [k in keyof ElementsConfig]: any;
-  }>,
+  }) as Partial<Record<keyof ElementsConfig, any>>,
 );
 
 export const useAppearance = () => {
package/src/libs/model-runtime/utils/streams/vertex-ai.ts
CHANGED
@@ -48,6 +48,18 @@ const transformVertexAIStream = (
     );
   }
 
+  if (
+    candidate && // 首先检查是否为 reasoning 内容 (thought: true)
+    Array.isArray(candidate.content.parts) &&
+    candidate.content.parts.length > 0
+  ) {
+    for (const part of candidate.content.parts) {
+      if (part && part.text && (part as any).thought === true) {
+        return { data: part.text, id: context.id, type: 'reasoning' };
+      }
+    }
+  }
+
   const candidates = chunk.candidates;
   if (!candidates)
     return {
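The block added above implements the "Support Vertex AI thought summaries" change from the changelog: any part flagged with `thought: true` is surfaced as a `reasoning` stream event instead of ordinary text. The snippet below is an illustrative sketch of the kind of chunk that branch targets; the exact chunk shape is an assumption based on the Gemini/Vertex AI streaming response format, not something shown in this diff:

```ts
// Assumed shape of a Vertex AI streaming chunk that carries a thought summary.
const chunk = {
  candidates: [
    {
      content: {
        parts: [
          { text: 'Comparing both endpoints before answering…', thought: true }, // reasoning part
          { text: 'Use the streaming endpoint.' }, // ordinary answer text
        ],
        role: 'model',
      },
      index: 0,
    },
  ],
};

// For the thought part, the new branch in transformVertexAIStream returns:
//   { data: 'Comparing both endpoints before answering…', id: context.id, type: 'reasoning' }
// Parts without `thought: true` keep flowing through the existing text handling.
```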