@lobehub/chat 1.45.11 → 1.45.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/locales/ar/modelProvider.json +6 -5
- package/locales/bg-BG/modelProvider.json +6 -5
- package/locales/de-DE/modelProvider.json +6 -5
- package/locales/en-US/modelProvider.json +6 -5
- package/locales/es-ES/modelProvider.json +6 -5
- package/locales/fa-IR/modelProvider.json +6 -5
- package/locales/fr-FR/modelProvider.json +6 -5
- package/locales/it-IT/modelProvider.json +6 -5
- package/locales/ja-JP/modelProvider.json +6 -5
- package/locales/ko-KR/modelProvider.json +6 -5
- package/locales/nl-NL/modelProvider.json +6 -5
- package/locales/pl-PL/modelProvider.json +6 -5
- package/locales/pt-BR/modelProvider.json +6 -5
- package/locales/ru-RU/modelProvider.json +6 -5
- package/locales/tr-TR/modelProvider.json +6 -5
- package/locales/vi-VN/modelProvider.json +6 -5
- package/locales/zh-CN/modelProvider.json +6 -5
- package/locales/zh-TW/modelProvider.json +6 -5
- package/package.json +1 -1
- package/scripts/serverLauncher/startServer.js +33 -16
- package/src/app/(main)/settings/provider/features/ModelList/CreateNewModelModal/Form.tsx +23 -17
- package/src/app/(main)/settings/provider/features/ModelList/CreateNewModelModal/index.tsx +9 -3
- package/src/app/(main)/settings/provider/features/ModelList/DisabledModels.tsx +25 -22
- package/src/app/(main)/settings/provider/features/ModelList/ModelConfigModal/index.tsx +2 -3
- package/src/config/aiModels/bedrock.ts +0 -68
- package/src/libs/agent-runtime/qwen/index.ts +16 -1
- package/src/locales/default/modelProvider.ts +6 -5
- package/src/app/(main)/settings/provider/features/ModelList/ModelConfigModal/Form.tsx +0 -109
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,56 @@
 
 # Changelog
 
+### [Version 1.45.13](https://github.com/lobehub/lobe-chat/compare/v1.45.12...v1.45.13)
+
+<sup>Released on **2025-01-14**</sup>
+
+#### 💄 Styles
+
+- **misc**: Improve model config form modal.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **misc**: Improve model config form modal, closes [#5438](https://github.com/lobehub/lobe-chat/issues/5438) ([9b303e0](https://github.com/lobehub/lobe-chat/commit/9b303e0))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
+### [Version 1.45.12](https://github.com/lobehub/lobe-chat/compare/v1.45.11...v1.45.12)
+
+<sup>Released on **2025-01-14**</sup>
+
+#### 🐛 Bug Fixes
+
+- **misc**: Fix `enable_search` parameter intro condition in Qwen.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### What's fixed
+
+- **misc**: Fix `enable_search` parameter intro condition in Qwen, closes [#5297](https://github.com/lobehub/lobe-chat/issues/5297) ([0b19c20](https://github.com/lobehub/lobe-chat/commit/0b19c20))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.45.11](https://github.com/lobehub/lobe-chat/compare/v1.45.10...v1.45.11)
 
 <sup>Released on **2025-01-14**</sup>
package/changelog/v1.json
CHANGED
@@ -1,4 +1,22 @@
 [
+  {
+    "children": {
+      "improvements": [
+        "Improve model config form modal."
+      ]
+    },
+    "date": "2025-01-14",
+    "version": "1.45.13"
+  },
+  {
+    "children": {
+      "fixes": [
+        "Fix enable_search parameter intro condition in Qwen."
+      ]
+    },
+    "date": "2025-01-14",
+    "version": "1.45.12"
+  },
   {
     "children": {
       "fixes": [
package/locales/ar/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "دعم تحميل الملفات"
     },
     "functionCall": {
-      "extra": "…
-      "title": "دعم…
+      "extra": "هذا الإعداد سيفتح فقط قدرة النموذج على استخدام الأدوات، مما يسمح بإضافة مكونات إضافية من نوع الأدوات للنموذج. لكن ما إذا كان يمكن استخدام الأدوات فعليًا يعتمد تمامًا على النموذج نفسه، يرجى اختبار مدى قابليته للاستخدام",
+      "title": "دعم استخدام الأدوات"
     },
     "id": {
-      "extra": "سيتم…
-      "placeholder": "يرجى إدخال معرف النموذج، مثل gpt-…
+      "extra": "لا يمكن تعديله بعد الإنشاء، سيتم استخدامه كمعرف نموذج عند استدعاء الذكاء الاصطناعي",
+      "placeholder": "يرجى إدخال معرف النموذج، مثل gpt-4o أو claude-3.5-sonnet",
       "title": "معرف النموذج"
     },
     "modalTitle": "تكوين النموذج المخصص",
     "tokens": {
-      "…
+      "extra": "تعيين الحد الأقصى لعدد الرموز المدعومة من قبل النموذج",
+      "title": "أقصى نافذة سياق",
       "unlimited": "غير محدود"
     },
     "vision": {
package/locales/bg-BG/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Поддръжка на качване на файлове"
     },
     "functionCall": {
-      "extra": "Тази конфигурация ще активира само…
-      "title": "Поддръжка на…
+      "extra": "Тази конфигурация ще активира само способността на модела да използва инструменти, което позволява добавянето на плъгини от клас инструменти. Но дали наистина ще се поддържа използването на инструменти зависи изцяло от самия модел, моля, тествайте неговата наличност",
+      "title": "Поддръжка на използването на инструменти"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Моля, въведете…
+      "extra": "След създаването не може да бъде променян, ще се използва като идентификатор на модела при извикване на AI",
+      "placeholder": "Моля, въведете идентификатор на модела, например gpt-4o или claude-3.5-sonnet",
       "title": "ID на модела"
     },
     "modalTitle": "Конфигурация на персонализиран модел",
     "tokens": {
-      "…
+      "extra": "Настройте максималния брой токени, поддържани от модела",
+      "title": "Максимален контекстуален прозорец",
       "unlimited": "Без ограничения"
     },
     "vision": {
package/locales/de-DE/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Datei-Upload unterstützen"
     },
     "functionCall": {
-      "extra": "Diese Konfiguration aktiviert nur die…
-      "title": "…
+      "extra": "Diese Konfiguration aktiviert nur die Fähigkeit des Modells, Werkzeuge zu verwenden, und ermöglicht es, pluginartige Werkzeuge hinzuzufügen. Ob das Modell tatsächlich in der Lage ist, Werkzeuge zu verwenden, hängt jedoch vollständig vom Modell selbst ab. Bitte testen Sie die Verwendbarkeit selbst.",
+      "title": "Unterstützung der Werkzeugnutzung"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Bitte…
+      "extra": "Nach der Erstellung nicht mehr änderbar, wird als Modell-ID verwendet, wenn AI aufgerufen wird",
+      "placeholder": "Bitte Modell-ID eingeben, z. B. gpt-4o oder claude-3.5-sonnet",
       "title": "Modell-ID"
     },
     "modalTitle": "Benutzerdefinierte Modellkonfiguration",
     "tokens": {
-      "…
+      "extra": "Maximale Token-Anzahl für das Modell festlegen",
+      "title": "Maximales Kontextfenster",
       "unlimited": "Unbegrenzt"
     },
     "vision": {
package/locales/en-US/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "File Upload Support"
     },
     "functionCall": {
-      "extra": "This configuration will only enable…
-      "title": "Support…
+      "extra": "This configuration will only enable the model's ability to use tools, allowing for the addition of tool-type plugins. However, whether the model can truly use the tools depends entirely on the model itself; please test for usability on your own.",
+      "title": "Support for Tool Usage"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Please enter model…
+      "extra": "This cannot be modified after creation and will be used as the model ID when calling AI",
+      "placeholder": "Please enter the model ID, e.g., gpt-4o or claude-3.5-sonnet",
       "title": "Model ID"
     },
     "modalTitle": "Custom Model Configuration",
     "tokens": {
-      "…
+      "extra": "Set the maximum number of tokens supported by the model",
+      "title": "Maximum Context Window",
       "unlimited": "Unlimited"
     },
     "vision": {
package/locales/es-ES/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Soporte para carga de archivos"
     },
     "functionCall": {
-      "extra": "Esta configuración solo habilitará la capacidad de…
-      "title": "Soporte para…
+      "extra": "Esta configuración solo habilitará la capacidad del modelo para usar herramientas, lo que permite agregar complementos de tipo herramienta al modelo. Sin embargo, si realmente se admiten las herramientas depende completamente del modelo en sí, por favor pruebe su disponibilidad",
+      "title": "Soporte para el uso de herramientas"
     },
     "id": {
-      "extra": "…
-      "placeholder": "…
+      "extra": "No se puede modificar después de la creación, se utilizará como id del modelo al llamar a la IA",
+      "placeholder": "Introduce el id del modelo, por ejemplo gpt-4o o claude-3.5-sonnet",
       "title": "ID del modelo"
     },
     "modalTitle": "Configuración del modelo personalizado",
     "tokens": {
-      "…
+      "extra": "Establecer el número máximo de tokens que el modelo puede soportar",
+      "title": "Máximo de ventana de contexto",
       "unlimited": "Sin límite"
     },
     "vision": {
package/locales/fa-IR/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "پشتیبانی از بارگذاری فایل"
     },
     "functionCall": {
-      "extra": "این پیکربندی تنها قابلیت…
-      "title": "پشتیبانی از…
+      "extra": "این پیکربندی تنها قابلیت استفاده از ابزارها را برای مدل فعال میکند و به این ترتیب میتوان افزونههای نوع ابزار را به مدل اضافه کرد. اما اینکه آیا واقعاً از ابزارها استفاده میشود به خود مدل بستگی دارد، لطفاً قابلیت استفاده را خودتان آزمایش کنید",
+      "title": "پشتیبانی از استفاده از ابزار"
     },
     "id": {
-      "extra": "به عنوان…
-      "placeholder": "لطفاً شناسه مدل را وارد کنید، مانند gpt-…
+      "extra": "پس از ایجاد قابل ویرایش نیست و در هنگام فراخوانی AI به عنوان شناسه مدل استفاده خواهد شد",
+      "placeholder": "لطفاً شناسه مدل را وارد کنید، مانند gpt-4o یا claude-3.5-sonnet",
       "title": "شناسه مدل"
     },
     "modalTitle": "پیکربندی مدل سفارشی",
     "tokens": {
-      "…
+      "extra": "حداکثر تعداد توکنهای پشتیبانی شده توسط مدل را تنظیم کنید",
+      "title": "حداکثر پنجره زمینه",
       "unlimited": "بدون محدودیت"
     },
     "vision": {
package/locales/fr-FR/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Téléchargement de fichiers pris en charge"
     },
     "functionCall": {
-      "extra": "Cette configuration…
-      "title": "…
+      "extra": "Cette configuration activera uniquement la capacité du modèle à utiliser des outils, permettant ainsi d'ajouter des plugins de type outil au modèle. Cependant, la prise en charge de l'utilisation réelle des outils dépend entièrement du modèle lui-même, veuillez tester la disponibilité par vous-même.",
+      "title": "Support de l'utilisation des outils"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Veuillez entrer l'…
+      "extra": "Une fois créé, il ne peut pas être modifié et sera utilisé comme identifiant du modèle lors de l'appel à l'IA",
+      "placeholder": "Veuillez entrer l'identifiant du modèle, par exemple gpt-4o ou claude-3.5-sonnet",
       "title": "ID du modèle"
     },
     "modalTitle": "Configuration du modèle personnalisé",
     "tokens": {
-      "…
+      "extra": "Définir le nombre maximal de tokens pris en charge par le modèle",
+      "title": "Fenêtre de contexte maximale",
       "unlimited": "Illimité"
     },
     "vision": {
package/locales/it-IT/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Supporto per il caricamento file"
     },
     "functionCall": {
-      "extra": "Questa configurazione abiliterà solo la capacità…
-      "title": "Supporto…
+      "extra": "Questa configurazione abiliterà solo la capacità del modello di utilizzare strumenti, consentendo così di aggiungere plugin di tipo strumento al modello. Tuttavia, se il modello supporta realmente l'uso degli strumenti dipende interamente dal modello stesso; si prega di testarne l'usabilità",
+      "title": "Supporto all'uso degli strumenti"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Inserisci l'…
+      "extra": "Non modificabile dopo la creazione, verrà utilizzato come ID del modello durante la chiamata all'AI",
+      "placeholder": "Inserisci l'ID del modello, ad esempio gpt-4o o claude-3.5-sonnet",
       "title": "ID del modello"
     },
     "modalTitle": "Configurazione modello personalizzato",
     "tokens": {
-      "…
+      "extra": "Imposta il numero massimo di token supportati dal modello",
+      "title": "Finestra di contesto massima",
       "unlimited": "Illimitato"
     },
     "vision": {
package/locales/ja-JP/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "ファイルアップロードをサポート"
     },
     "functionCall": {
-      "extra": "…
-      "title": "…
+      "extra": "この設定は、モデルがツールを使用する機能を有効にし、モデルにツールタイプのプラグインを追加できるようにします。ただし、実際にツールを使用できるかどうかはモデル自体に依存するため、使用可能性を自分でテストしてください",
+      "title": "ツール使用のサポート"
     },
     "id": {
-      "extra": "…
-      "placeholder": "モデル…
+      "extra": "作成後は変更できません。AIを呼び出す際にモデルIDとして使用されます。",
+      "placeholder": "モデルIDを入力してください。例:gpt-4o または claude-3.5-sonnet",
       "title": "モデル ID"
     },
     "modalTitle": "カスタムモデル設定",
     "tokens": {
-      "…
+      "extra": "モデルがサポートする最大トークン数を設定する",
+      "title": "最大コンテキストウィンドウ",
       "unlimited": "無制限"
     },
     "vision": {
package/locales/ko-KR/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "파일 업로드 지원"
     },
     "functionCall": {
-      "extra": "이 설정은…
-      "title": "…
+      "extra": "이 설정은 모델이 도구를 사용할 수 있는 기능을 활성화하며, 이를 통해 모델에 도구형 플러그인을 추가할 수 있습니다. 그러나 실제 도구 사용 지원 여부는 모델 자체에 따라 다르므로 사용 가능성을 직접 테스트해 보시기 바랍니다.",
+      "title": "도구 사용 지원"
     },
     "id": {
-      "extra": "모델…
-      "placeholder": "모델 ID를 입력하세요, 예: gpt-…
+      "extra": "생성 후 수정할 수 없으며, AI 호출 시 모델 ID로 사용됩니다.",
+      "placeholder": "모델 ID를 입력하세요, 예: gpt-4o 또는 claude-3.5-sonnet",
       "title": "모델 ID"
     },
     "modalTitle": "사용자 정의 모델 구성",
     "tokens": {
-      "…
+      "extra": "모델이 지원하는 최대 토큰 수 설정",
+      "title": "최대 컨텍스트 창",
       "unlimited": "제한 없음"
     },
     "vision": {
package/locales/nl-NL/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Ondersteuning voor bestandsupload"
     },
     "functionCall": {
-      "extra": "Deze configuratie…
-      "title": "Ondersteuning voor…
+      "extra": "Deze configuratie schakelt alleen de mogelijkheid in voor het model om tools te gebruiken, waardoor het mogelijk is om plug-ins voor tools aan het model toe te voegen. Of het model daadwerkelijk tools kan gebruiken, hangt echter volledig af van het model zelf; test de bruikbaarheid zelf.",
+      "title": "Ondersteuning voor het gebruik van tools"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Voer model-id in, bijvoorbeeld gpt-…
+      "extra": "Kan niet worden gewijzigd na creatie, wordt gebruikt als model-id bij het aanroepen van AI",
+      "placeholder": "Voer model-id in, bijvoorbeeld gpt-4o of claude-3.5-sonnet",
       "title": "Model ID"
     },
     "modalTitle": "Configuratie van aangepast model",
     "tokens": {
-      "…
+      "extra": "Stel het maximale aantal tokens in dat door het model wordt ondersteund",
+      "title": "Maximale contextvenster",
       "unlimited": "Onbeperkt"
     },
     "vision": {
package/locales/pl-PL/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Wsparcie dla przesyłania plików"
     },
     "functionCall": {
-      "extra": "Ta konfiguracja włączy…
-      "title": "Wsparcie dla…
+      "extra": "Ta konfiguracja włączy jedynie możliwość korzystania z narzędzi przez model, co pozwoli na dodanie wtyczek narzędziowych. Jednakże, czy model rzeczywiście obsługuje korzystanie z narzędzi, zależy całkowicie od samego modelu, proszę samodzielnie przetestować jego użyteczność",
+      "title": "Wsparcie dla korzystania z narzędzi"
     },
     "id": {
-      "extra": "…
-      "placeholder": "…
+      "extra": "Nie można zmieniać po utworzeniu, będzie używane jako identyfikator modelu podczas wywoływania AI",
+      "placeholder": "Wprowadź identyfikator modelu, na przykład gpt-4o lub claude-3.5-sonnet",
       "title": "ID modelu"
     },
     "modalTitle": "Konfiguracja niestandardowego modelu",
     "tokens": {
-      "…
+      "extra": "Ustaw maksymalną liczbę tokenów wspieranych przez model",
+      "title": "Maksymalne okno kontekstu",
       "unlimited": "Bez ograniczeń"
     },
     "vision": {
package/locales/pt-BR/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Suporte a Upload de Arquivos"
     },
     "functionCall": {
-      "extra": "Esta configuração apenas…
-      "title": "Suporte…
+      "extra": "Esta configuração ativará apenas a capacidade do modelo de usar ferramentas, permitindo assim a adição de plugins do tipo ferramenta. No entanto, se o uso real das ferramentas é suportado depende inteiramente do modelo em si, teste a usabilidade por conta própria.",
+      "title": "Suporte ao uso de ferramentas"
     },
     "id": {
-      "extra": "…
-      "placeholder": "…
+      "extra": "Não pode ser modificado após a criação, será usado como ID do modelo ao chamar a IA",
+      "placeholder": "Insira o ID do modelo, por exemplo, gpt-4o ou claude-3.5-sonnet",
       "title": "ID do Modelo"
     },
     "modalTitle": "Configuração do Modelo Personalizado",
     "tokens": {
-      "…
+      "extra": "Configurar o número máximo de tokens suportados pelo modelo",
+      "title": "Janela de contexto máxima",
       "unlimited": "Ilimitado"
     },
     "vision": {
package/locales/ru-RU/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Поддержка загрузки файлов"
     },
     "functionCall": {
-      "extra": "Эта настройка…
-      "title": "Поддержка…
+      "extra": "Эта настройка позволит модели использовать инструменты, что даст возможность добавлять плагины инструментов. Однако возможность фактического использования инструментов полностью зависит от самой модели, пожалуйста, протестируйте их работоспособность самостоятельно",
+      "title": "Поддержка использования инструментов"
     },
     "id": {
-      "extra": "…
-      "placeholder": "…
+      "extra": "После создания изменить нельзя, будет использоваться как идентификатор модели при вызове AI",
+      "placeholder": "Введите идентификатор модели, например, gpt-4o или claude-3.5-sonnet",
       "title": "ID модели"
     },
     "modalTitle": "Настройка пользовательской модели",
     "tokens": {
-      "…
+      "extra": "Установите максимальное количество токенов, поддерживаемое моделью",
+      "title": "Максимальное окно контекста",
       "unlimited": "Без ограничений"
     },
     "vision": {
package/locales/tr-TR/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Dosya Yüklemeyi Destekle"
     },
     "functionCall": {
-      "extra": "Bu yapılandırma…
-      "title": "…
+      "extra": "Bu yapılandırma, modelin araçları kullanma yeteneğini açacak ve böylece modele araç sınıfı eklentileri eklenebilecektir. Ancak, gerçek araç kullanımı tamamen modele bağlıdır, kullanılabilirliğini kendiniz test etmelisiniz.",
+      "title": "Araç kullanımını destekle"
     },
     "id": {
-      "extra": "…
-      "placeholder": "…
+      "extra": "Oluşturulduktan sonra değiştirilemez, AI çağrıldığında model kimliği olarak kullanılacaktır",
+      "placeholder": "Model kimliğini girin, örneğin gpt-4o veya claude-3.5-sonnet",
       "title": "Model ID"
     },
     "modalTitle": "Özel Model Yapılandırması",
     "tokens": {
-      "…
+      "extra": "Modelin desteklediği maksimum Token sayısını ayarlayın",
+      "title": "Maksimum bağlam penceresi",
       "unlimited": "Sınırsız"
     },
     "vision": {
package/locales/vi-VN/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "Hỗ trợ tải lên tệp"
     },
     "functionCall": {
-      "extra": "Cấu hình này chỉ…
-      "title": "Hỗ trợ…
+      "extra": "Cấu hình này chỉ kích hoạt khả năng sử dụng công cụ của mô hình, từ đó có thể thêm các plugin loại công cụ cho mô hình. Tuy nhiên, việc hỗ trợ sử dụng công cụ thực sự hoàn toàn phụ thuộc vào chính mô hình, vui lòng tự kiểm tra tính khả dụng",
+      "title": "Hỗ trợ sử dụng công cụ"
     },
     "id": {
-      "extra": "…
-      "placeholder": "Vui lòng nhập id mô hình, ví dụ gpt-…
+      "extra": "Không thể sửa đổi sau khi tạo, sẽ được sử dụng làm id mô hình khi gọi AI",
+      "placeholder": "Vui lòng nhập id mô hình, ví dụ gpt-4o hoặc claude-3.5-sonnet",
       "title": "ID mô hình"
     },
     "modalTitle": "Cấu hình mô hình tùy chỉnh",
     "tokens": {
-      "…
+      "extra": "Cài đặt số Token tối đa mà mô hình hỗ trợ",
+      "title": "Cửa sổ ngữ cảnh tối đa",
       "unlimited": "Không giới hạn"
     },
     "vision": {
package/locales/zh-CN/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "支持文件上传"
     },
     "functionCall": {
-      "extra": "…
-      "title": "…
+      "extra": "此配置将仅开启模型使用工具的能力,进而可以为模型添加工具类的插件。但是否支持真正使用工具完全取决于模型本身,请自行测试的可用性",
+      "title": "支持工具使用"
     },
     "id": {
-      "extra": "…
-      "placeholder": "请输入模型id,例如 gpt-…
+      "extra": "创建后不可修改,调用 AI 时将作为模型 id 使用",
+      "placeholder": "请输入模型 id,例如 gpt-4o 或 claude-3.5-sonnet",
       "title": "模型 ID"
     },
     "modalTitle": "自定义模型配置",
     "tokens": {
-      "…
+      "extra": "设置模型支持的最大 Token 数",
+      "title": "最大上下文窗口",
       "unlimited": "无限制"
     },
     "vision": {
package/locales/zh-TW/modelProvider.json
CHANGED
@@ -243,17 +243,18 @@
       "title": "支持文件上傳"
     },
     "functionCall": {
-      "extra": "…
-      "title": "…
+      "extra": "此配置將僅開啟模型使用工具的能力,進而可以為模型添加工具類的插件。但是否支持真正使用工具完全取決於模型本身,請自行測試其可用性",
+      "title": "支持工具使用"
     },
     "id": {
-      "extra": "…
-      "placeholder": "請輸入模型id,例如 gpt-…
+      "extra": "創建後不可修改,調用 AI 時將作為模型 id 使用",
+      "placeholder": "請輸入模型 id,例如 gpt-4o 或 claude-3.5-sonnet",
       "title": "模型 ID"
     },
     "modalTitle": "自定義模型配置",
     "tokens": {
-      "…
+      "extra": "設定模型支持的最大 Token 數",
+      "title": "最大上下文窗口",
       "unlimited": "無限制"
     },
     "vision": {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.45.11",
+  "version": "1.45.13",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/scripts/serverLauncher/startServer.js
CHANGED
@@ -1,6 +1,6 @@
-const dns = require('dns').promises;
-const fs = require('fs').promises;
-const { spawn } = require('child_process');
+const dns = require('node:dns').promises;
+const fs = require('node:fs').promises;
+const { spawn } = require('node:child_process');
 
 // Set file paths
 const DB_MIGRATION_SCRIPT_PATH = '/app/docker.cjs';
@@ -9,23 +9,28 @@ const PROXYCHAINS_CONF_PATH = '/etc/proxychains4.conf';
 
 // Function to check if a string is a valid IP address
 const isValidIP = (ip, version = 4) => {
-  const ipv4Regex = …
-  …
+  const ipv4Regex =
+    /^(25[0-5]|2[0-4]\d|[01]?\d{1,2})(\.(25[0-5]|2[0-4]\d|[01]?\d{1,2})){3}$/;
+  const ipv6Regex =
+    /^(([\da-f]{1,4}:){7}[\da-f]{1,4}|([\da-f]{1,4}:){1,7}:|([\da-f]{1,4}:){1,6}:[\da-f]{1,4}|([\da-f]{1,4}:){1,5}(:[\da-f]{1,4}){1,2}|([\da-f]{1,4}:){1,4}(:[\da-f]{1,4}){1,3}|([\da-f]{1,4}:){1,3}(:[\da-f]{1,4}){1,4}|([\da-f]{1,4}:){1,2}(:[\da-f]{1,4}){1,5}|[\da-f]{1,4}:((:[\da-f]{1,4}){1,6})|:((:[\da-f]{1,4}){1,7}|:)|fe80:(:[\da-f]{0,4}){0,4}%[\da-z]+|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}\d){0,1}\d)\.){3}(25[0-5]|(2[0-4]|1{0,1}\d){0,1}\d)|([\da-f]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}\d){0,1}\d)\.){3}(25[0-5]|(2[0-4]|1{0,1}\d){0,1}\d))$/;
 
   switch (version) {
-    case 4:
+    case 4: {
       return ipv4Regex.test(ip);
-    case 6:
+    }
+    case 6: {
       return ipv6Regex.test(ip);
-    default:
+    }
+    default: {
       return ipv4Regex.test(ip) || ipv6Regex.test(ip);
+    }
   }
 };
 
 // Function to parse protocol, host and port from a URL
 const parseUrl = (url) => {
   const { protocol, hostname: host, port } = new URL(url);
-  return { protocol: protocol.replace(':', '')…
+  return { host, port: port || 443, protocol: protocol.replace(':', '') };
 };
 
 // Function to resolve host IP via DNS
@@ -34,7 +39,9 @@ const resolveHostIP = async (host, version = 4) => {
   const { address } = await dns.lookup(host, { family: version });
 
   if (!isValidIP(address, version)) {
-    console.error(…
+    console.error(
+      `❌ DNS Error: Invalid resolved IP: ${address}. IP address must be IPv${version}.`,
+    );
     process.exit(1);
   }
 
@@ -51,13 +58,17 @@ const runProxyChainsConfGenerator = async (url) => {
   const { protocol, host, port } = parseUrl(url);
 
   if (!['http', 'socks4', 'socks5'].includes(protocol)) {
-    console.error(…
+    console.error(
+      `❌ ProxyChains: Invalid protocol (${protocol}). Protocol must be 'http', 'socks4' and 'socks5'.`,
+    );
     process.exit(1);
   }
 
   const validPort = parseInt(port, 10);
-  if (isNaN(validPort) || validPort <= 0 || validPort > …
-    console.error(…
+  if (isNaN(validPort) || validPort <= 0 || validPort > 65_535) {
+    console.error(
+      `❌ ProxyChains: Invalid port (${port}). Port must be a number between 1 and 65535.`,
+    );
     process.exit(1);
   }
 
@@ -82,10 +93,14 @@ ${protocol} ${ip} ${port}
 
 // Function to execute a script with child process spawn
 const runScript = (scriptPath, useProxy = false) => {
-  const command = useProxy…
+  const command = useProxy
+    ? ['/bin/proxychains', '-q', '/bin/node', scriptPath]
+    : ['/bin/node', scriptPath];
   return new Promise((resolve, reject) => {
     const process = spawn(command.shift(), command, { stdio: 'inherit' });
-    process.on('close', (code) =>…
+    process.on('close', (code) =>
+      code === 0 ? resolve() : reject(new Error(`🔴 Process exited with code ${code}`)),
+    );
   });
 };
 
@@ -112,7 +127,9 @@ const runServer = async () => {
     await runScript(DB_MIGRATION_SCRIPT_PATH);
   } catch (err) {
     if (err.code === 'ENOENT') {
-      console.log(…
+      console.log(
+        `⚠️ DB Migration: Not found ${DB_MIGRATION_SCRIPT_PATH}. Skipping DB migration. Ensure to migrate database manually.`,
+      );
       console.log('-------------------------------------');
     } else {
       console.error('❌ Error during DB migration:');
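A quick illustration of the reworked `parseUrl` helper in the launcher diff above (a minimal TypeScript sketch for this report, not code shipped in the package; the proxy URLs are illustrative): the helper now returns `host` alongside the protocol and falls back to port 443 when the proxy URL omits an explicit port.

```ts
// Sketch of the parseUrl behavior from startServer.js above, using Node's WHATWG URL.
const parseUrl = (url: string) => {
  const { protocol, hostname: host, port } = new URL(url);
  return { host, port: port || 443, protocol: protocol.replace(':', '') };
};

console.log(parseUrl('socks5://127.0.0.1:1080'));
// -> { host: '127.0.0.1', port: '1080', protocol: 'socks5' }
console.log(parseUrl('https://proxy.example.com'));
// -> { host: 'proxy.example.com', port: 443, protocol: 'https' } (default port applied)
```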
package/src/app/(main)/settings/provider/features/ModelList/CreateNewModelModal/Form.tsx
CHANGED
@@ -8,6 +8,7 @@ import { AiModelType } from '@/types/aiModel';
 import { ChatModelCard } from '@/types/llm';
 
 interface ModelConfigFormProps {
+  idEditable?: boolean;
   initialValues?: ChatModelCard;
   onFormInstanceReady: (instance: FormInstance) => void;
   showAzureDeployName?: boolean;
@@ -15,8 +16,8 @@ interface ModelConfigFormProps {
 }
 
 const ModelConfigForm = memo<ModelConfigFormProps>(
-  ({ showAzureDeployName, onFormInstanceReady, initialValues }) => {
-    const { t } = useTranslation('…
+  ({ showAzureDeployName, idEditable = true, onFormInstanceReady, initialValues }) => {
+    const { t } = useTranslation('modelProvider');
 
     const [formInstance] = Form.useForm();
 
@@ -44,54 +45,59 @@ const ModelConfigForm = memo<ModelConfigFormProps>(
         wrapperCol={isMobile ? { span: 18 } : { offset: 1, span: 18 }}
       >
         <Form.Item
-          extra={t('…
-          label={t('…
+          extra={t('providerModels.item.modelConfig.id.extra')}
+          label={t('providerModels.item.modelConfig.id.title')}
           name={'id'}
+          rules={[{ required: true }]}
         >
-          <Input…
+          <Input
+            disabled={!idEditable}
+            placeholder={t('providerModels.item.modelConfig.id.placeholder')}
+          />
         </Form.Item>
         {showAzureDeployName && (
           <Form.Item
-            extra={t('…
-            label={t('…
+            extra={t('providerModels.item.modelConfig.azureDeployName.extra')}
+            label={t('providerModels.item.modelConfig.azureDeployName.title')}
             name={['config', 'deploymentName']}
           >
             <Input
-              placeholder={t('…
+              placeholder={t('providerModels.item.modelConfig.azureDeployName.placeholder')}
             />
           </Form.Item>
         )}
         <Form.Item
-          label={t('…
+          label={t('providerModels.item.modelConfig.displayName.title')}
           name={'displayName'}
         >
-          <Input placeholder={t('…
+          <Input placeholder={t('providerModels.item.modelConfig.displayName.placeholder')} />
         </Form.Item>
         <Form.Item
-          …
+          extra={t('providerModels.item.modelConfig.tokens.extra')}
+          label={t('providerModels.item.modelConfig.tokens.title')}
           name={'contextWindowTokens'}
         >
           <MaxTokenSlider />
         </Form.Item>
         <Form.Item
-          extra={t('…
-          label={t('…
+          extra={t('providerModels.item.modelConfig.functionCall.extra')}
+          label={t('providerModels.item.modelConfig.functionCall.title')}
           name={['abilities', 'functionCall']}
           valuePropName={'checked'}
         >
           <Checkbox />
         </Form.Item>
         <Form.Item
-          extra={t('…
-          label={t('…
+          extra={t('providerModels.item.modelConfig.vision.extra')}
+          label={t('providerModels.item.modelConfig.vision.title')}
           name={['abilities', 'vision']}
           valuePropName={'checked'}
         >
           <Checkbox />
         </Form.Item>
         {/*<Form.Item*/}
-        {/*  extra={t('…
-        {/*  label={t('…
+        {/*  extra={t('providerModels.item.modelConfig.files.extra')}*/}
+        {/*  label={t('providerModels.item.modelConfig.files.title')}*/}
         {/*  name={['abilities', 'files']}*/}
         {/*  valuePropName={'checked'}*/}
         {/*>*/}
package/src/app/(main)/settings/provider/features/ModelList/CreateNewModelModal/index.tsx
CHANGED
@@ -42,10 +42,16 @@ const ModelConfigModal = memo<ModelConfigModalProps>(({ open, setOpen }) => {
           const data = formInstance.getFieldsValue();
 
           setLoading(true);
-          await createNewAiModel({ ...data, providerId: editingProvider });
-          setLoading(false);
 
-          …
+          try {
+            await formInstance.validateFields();
+            await createNewAiModel({ ...data, providerId: editingProvider });
+            setLoading(false);
+            closeModal();
+          } catch {
+            /* */
+            setLoading(false);
+          }
         }}
         style={{ marginInlineStart: '16px' }}
         type="primary"
package/src/app/(main)/settings/provider/features/ModelList/DisabledModels.tsx
CHANGED
@@ -18,29 +18,32 @@ const DisabledModels = memo(() => {
   const disabledModels = useAiInfraStore(aiModelSelectors.disabledAiProviderModelList, isEqual);
 
   const displayModels = showMore ? disabledModels : disabledModels.slice(0, 10);
+
   return (
-    <…
-    {…
+    disabledModels.length > 0 && (
+      <Flexbox>
+        <Typography.Text style={{ fontSize: 12, marginTop: 8 }} type={'secondary'}>
+          {t('providerModels.list.disabled')}
+        </Typography.Text>
+        {displayModels.map((item) => (
+          <ModelItem {...item} key={item.id} />
+        ))}
+        {!showMore && disabledModels.length > 10 && (
+          <Button
+            block
+            color={'default'}
+            icon={<Icon icon={ChevronDown} />}
+            onClick={() => {
+              setShowMore(true);
+            }}
+            size={'small'}
+            variant={'filled'}
+          >
+            {t('providerModels.list.disabledActions.showMore')}
+          </Button>
+        )}
+      </Flexbox>
+    )
   );
 });
 
package/src/app/(main)/settings/provider/features/ModelList/ModelConfigModal/index.tsx
CHANGED
@@ -6,9 +6,8 @@ import { useTranslation } from 'react-i18next';
 
 import { ModelProvider } from '@/libs/agent-runtime';
 import { aiModelSelectors, useAiInfraStore } from '@/store/aiInfra';
-import { AiModelSourceEnum } from '@/types/aiModel';
 
-import ModelConfigForm from '…
+import ModelConfigForm from '../CreateNewModelModal/Form';
 
 interface ModelConfigModalProps {
   id: string;
@@ -64,7 +63,7 @@ const ModelConfigModal = memo<ModelConfigModalProps>(({ id, open, setOpen }) =>
       zIndex={1251} // Select is 1150
     >
       <ModelConfigForm
-        idEditable={…
+        idEditable={false}
         initialValues={model}
         onFormInstanceReady={setFormInstance}
         showAzureDeployName={editingProvider === ModelProvider.Azure}
package/src/config/aiModels/bedrock.ts
CHANGED
@@ -97,74 +97,6 @@ const bedrockChatModels: AIChatModelCard[] = [
     releasedAt: '2024-03-07',
     type: 'chat',
   },
-  {
-    abilities: {
-      functionCall: true,
-      vision: true,
-    },
-    contextWindowTokens: 200_000,
-    description:
-      'Claude 3.5 Sonnet 提升了行业标准,性能超过竞争对手模型和 Claude 3 Opus,在广泛的评估中表现出色,同时具有我们中等层级模型的速度和成本。',
-    displayName: 'Claude 3.5 Sonnet',
-    enabled: true,
-    id: 'anthropic.claude-3-5-sonnet-20241022-v2:0',
-    pricing: {
-      input: 3,
-      output: 15,
-    },
-    type: 'chat',
-  },
-  {
-    abilities: {
-      functionCall: true,
-      vision: true,
-    },
-    contextWindowTokens: 200_000,
-    description:
-      'Claude 3.5 Sonnet 提升了行业标准,性能超过竞争对手模型和 Claude 3 Opus,在广泛的评估中表现出色,同时具有我们中等层级模型的速度和成本。',
-    displayName: 'Claude 3.5 Sonnet v2 (Inference profile)',
-    enabled: true,
-    id: 'us.anthropic.claude-3-5-sonnet-20241022-v2:0',
-    pricing: {
-      input: 3,
-      output: 15,
-    },
-    type: 'chat',
-  },
-  {
-    abilities: {
-      functionCall: true,
-      vision: true,
-    },
-    contextWindowTokens: 200_000,
-    description:
-      'Claude 3.5 Sonnet 提升了行业标准,性能超过竞争对手模型和 Claude 3 Opus,在广泛的评估中表现出色,同时具有我们中等层级模型的速度和成本。',
-    displayName: 'Claude 3.5 Sonnet 0620',
-    enabled: true,
-    id: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
-    pricing: {
-      input: 3,
-      output: 15,
-    },
-    type: 'chat',
-  },
-  {
-    abilities: {
-      functionCall: true,
-      vision: true,
-    },
-    contextWindowTokens: 200_000,
-    description:
-      'Claude 3 Haiku 是 Anthropic 最快、最紧凑的模型,提供近乎即时的响应速度。它可以快速回答简单的查询和请求。客户将能够构建模仿人类互动的无缝 AI 体验。Claude 3 Haiku 可以处理图像并返回文本输出,具有 200K 的上下文窗口。',
-    displayName: 'Claude 3 Haiku',
-    enabled: true,
-    id: 'anthropic.claude-3-haiku-20240307-v1:0',
-    pricing: {
-      input: 0.25,
-      output: 1.25,
-    },
-    type: 'chat',
-  },
   {
     abilities: {
       functionCall: true,
package/src/libs/agent-runtime/qwen/index.ts
CHANGED
@@ -3,6 +3,16 @@ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
 
 import { QwenAIStream } from '../utils/streams';
 
+/*
+  QwenEnableSearchModelSeries: An array of Qwen model series that support the enable_search parameter.
+  Currently, enable_search is only supported on Qwen commercial series, excluding Qwen-VL and Qwen-Long series.
+*/
+export const QwenEnableSearchModelSeries = [
+  'qwen-max',
+  'qwen-plus',
+  'qwen-turbo',
+];
+
 /*
   QwenLegacyModels: A set of legacy Qwen models that do not support presence_penalty.
   Currently, presence_penalty is only supported on Qwen commercial models and open-source models starting from Qwen 1.5 and later.
@@ -36,9 +46,14 @@ export const LobeQwenAI = LobeOpenAICompatibleFactory({
       ...(model.startsWith('qwen-vl') ? {
         top_p: (top_p !== undefined && top_p > 0 && top_p <= 1) ? top_p : undefined,
       } : {
-        enable_search: true,
         top_p: (top_p !== undefined && top_p > 0 && top_p < 1) ? top_p : undefined,
       }),
+      ...(process.env.QWEN_ENABLE_SEARCH === '1' && QwenEnableSearchModelSeries.some(prefix => model.startsWith(prefix)) && {
+        enable_search: true,
+        search_options: {
+          search_strategy: process.env.QWEN_SEARCH_STRATEGY || 'standard', // standard or pro
+        }
+      }),
     } as any;
   },
   handleStream: QwenAIStream,
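For readers tracing the `enable_search` fix above, here is a standalone TypeScript sketch of the new gating (the helper name `buildSearchParams` is illustrative and not part of the package): the parameter is only attached when the `QWEN_ENABLE_SEARCH` environment variable is `1` and the model id starts with one of the commercial series prefixes, and `QWEN_SEARCH_STRATEGY` selects the strategy, defaulting to `standard`.

```ts
// Illustrative sketch reproducing the enable_search gating from qwen/index.ts above.
const QwenEnableSearchModelSeries = ['qwen-max', 'qwen-plus', 'qwen-turbo'];

const buildSearchParams = (model: string) => {
  const enabled =
    process.env.QWEN_ENABLE_SEARCH === '1' &&
    QwenEnableSearchModelSeries.some((prefix) => model.startsWith(prefix));

  // When the env flag is off or the model is outside the commercial series, nothing is added.
  if (!enabled) return {};

  return {
    enable_search: true,
    search_options: {
      // 'standard' or 'pro', mirroring QWEN_SEARCH_STRATEGY in the diff.
      search_strategy: process.env.QWEN_SEARCH_STRATEGY || 'standard',
    },
  };
};

// With QWEN_ENABLE_SEARCH=1: commercial series get the flag, Qwen-VL models do not.
console.log(buildSearchParams('qwen-turbo-latest')); // { enable_search: true, search_options: { ... } }
console.log(buildSearchParams('qwen-vl-plus'));      // {}
```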
package/src/locales/default/modelProvider.ts
CHANGED
@@ -247,17 +247,18 @@ export default {
     },
     functionCall: {
       extra:
-        '…
-      title: '…
+        '此配置将仅开启模型使用工具的能力,进而可以为模型添加工具类的插件。但是否支持真正使用工具完全取决于模型本身,请自行测试的可用性',
+      title: '支持工具使用',
     },
     id: {
-      extra: '…
-      placeholder: '请输入模型id,例如 gpt-…
+      extra: '创建后不可修改,调用 AI 时将作为模型 id 使用',
+      placeholder: '请输入模型 id,例如 gpt-4o 或 claude-3.5-sonnet',
       title: '模型 ID',
     },
     modalTitle: '自定义模型配置',
     tokens: {
-      …
+      extra: '设置模型支持的最大 Token 数',
+      title: '最大上下文窗口',
       unlimited: '无限制',
     },
     vision: {
package/src/app/(main)/settings/provider/features/ModelList/ModelConfigModal/Form.tsx
DELETED
@@ -1,109 +0,0 @@
-import { Checkbox, Form, FormInstance, Input } from 'antd';
-import { memo, useEffect } from 'react';
-import { useTranslation } from 'react-i18next';
-
-import MaxTokenSlider from '@/components/MaxTokenSlider';
-import { useIsMobile } from '@/hooks/useIsMobile';
-import { AiModelType } from '@/types/aiModel';
-import { ChatModelCard } from '@/types/llm';
-
-interface ModelConfigFormProps {
-  idEditable?: boolean;
-  initialValues?: ChatModelCard;
-  onFormInstanceReady: (instance: FormInstance) => void;
-  showAzureDeployName?: boolean;
-  type?: AiModelType;
-}
-
-const ModelConfigForm = memo<ModelConfigFormProps>(
-  ({ showAzureDeployName, idEditable, onFormInstanceReady, initialValues }) => {
-    const { t } = useTranslation('setting');
-
-    const [formInstance] = Form.useForm();
-
-    const isMobile = useIsMobile();
-
-    useEffect(() => {
-      onFormInstanceReady(formInstance);
-    }, []);
-
-    return (
-      <div
-        onClick={(e) => {
-          e.stopPropagation();
-        }}
-        onKeyDown={(e) => {
-          e.stopPropagation();
-        }}
-      >
-        <Form
-          colon={false}
-          form={formInstance}
-          initialValues={initialValues}
-          labelCol={{ span: 4 }}
-          style={{ marginTop: 16 }}
-          wrapperCol={isMobile ? { span: 18 } : { offset: 1, span: 18 }}
-        >
-          <Form.Item
-            extra={t('llm.customModelCards.modelConfig.id.extra')}
-            label={t('llm.customModelCards.modelConfig.id.title')}
-            name={'id'}
-          >
-            <Input
-              disabled={!idEditable}
-              placeholder={t('llm.customModelCards.modelConfig.id.placeholder')}
-            />
-          </Form.Item>
-          {showAzureDeployName && (
-            <Form.Item
-              extra={t('llm.customModelCards.modelConfig.azureDeployName.extra')}
-              label={t('llm.customModelCards.modelConfig.azureDeployName.title')}
-              name={['config', 'deploymentName']}
-            >
-              <Input
-                placeholder={t('llm.customModelCards.modelConfig.azureDeployName.placeholder')}
-              />
-            </Form.Item>
-          )}
-          <Form.Item
-            label={t('llm.customModelCards.modelConfig.displayName.title')}
-            name={'displayName'}
-          >
-            <Input placeholder={t('llm.customModelCards.modelConfig.displayName.placeholder')} />
-          </Form.Item>
-          <Form.Item
-            label={t('llm.customModelCards.modelConfig.tokens.title')}
-            name={'contextWindowTokens'}
-          >
-            <MaxTokenSlider />
-          </Form.Item>
-          <Form.Item
-            extra={t('llm.customModelCards.modelConfig.functionCall.extra')}
-            label={t('llm.customModelCards.modelConfig.functionCall.title')}
-            name={['abilities', 'functionCall']}
-            valuePropName={'checked'}
-          >
-            <Checkbox />
-          </Form.Item>
-          <Form.Item
-            extra={t('llm.customModelCards.modelConfig.vision.extra')}
-            label={t('llm.customModelCards.modelConfig.vision.title')}
-            name={['abilities', 'vision']}
-            valuePropName={'checked'}
-          >
-            <Checkbox />
-          </Form.Item>
-          {/*<Form.Item*/}
-          {/*  extra={t('llm.customModelCards.modelConfig.files.extra')}*/}
-          {/*  label={t('llm.customModelCards.modelConfig.files.title')}*/}
-          {/*  name={['abilities', 'files']}*/}
-          {/*  valuePropName={'checked'}*/}
-          {/*>*/}
-          {/*  <Checkbox />*/}
-          {/*</Form.Item>*/}
-        </Form>
-      </div>
-    );
-  },
-);
-export default ModelConfigForm;