@lobehub/chat 1.77.16 → 1.77.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/CHANGELOG.md +25 -0
  2. package/changelog/v1.json +9 -0
  3. package/docker-compose/local/docker-compose.yml +2 -1
  4. package/locales/ar/components.json +4 -0
  5. package/locales/ar/modelProvider.json +1 -0
  6. package/locales/ar/models.json +8 -5
  7. package/locales/bg-BG/components.json +4 -0
  8. package/locales/bg-BG/modelProvider.json +1 -0
  9. package/locales/bg-BG/models.json +8 -5
  10. package/locales/de-DE/components.json +4 -0
  11. package/locales/de-DE/modelProvider.json +1 -0
  12. package/locales/de-DE/models.json +8 -5
  13. package/locales/en-US/components.json +4 -0
  14. package/locales/en-US/modelProvider.json +1 -0
  15. package/locales/en-US/models.json +8 -5
  16. package/locales/es-ES/components.json +4 -0
  17. package/locales/es-ES/modelProvider.json +1 -0
  18. package/locales/es-ES/models.json +7 -4
  19. package/locales/fa-IR/components.json +4 -0
  20. package/locales/fa-IR/modelProvider.json +1 -0
  21. package/locales/fa-IR/models.json +7 -4
  22. package/locales/fr-FR/components.json +4 -0
  23. package/locales/fr-FR/modelProvider.json +1 -0
  24. package/locales/fr-FR/models.json +8 -5
  25. package/locales/it-IT/components.json +4 -0
  26. package/locales/it-IT/modelProvider.json +1 -0
  27. package/locales/it-IT/models.json +7 -4
  28. package/locales/ja-JP/components.json +4 -0
  29. package/locales/ja-JP/modelProvider.json +1 -0
  30. package/locales/ja-JP/models.json +8 -5
  31. package/locales/ko-KR/components.json +4 -0
  32. package/locales/ko-KR/modelProvider.json +1 -0
  33. package/locales/ko-KR/models.json +8 -5
  34. package/locales/nl-NL/components.json +4 -0
  35. package/locales/nl-NL/modelProvider.json +1 -0
  36. package/locales/nl-NL/models.json +8 -5
  37. package/locales/pl-PL/components.json +4 -0
  38. package/locales/pl-PL/modelProvider.json +1 -0
  39. package/locales/pl-PL/models.json +8 -5
  40. package/locales/pt-BR/components.json +4 -0
  41. package/locales/pt-BR/modelProvider.json +1 -0
  42. package/locales/pt-BR/models.json +7 -4
  43. package/locales/ru-RU/components.json +4 -0
  44. package/locales/ru-RU/modelProvider.json +1 -0
  45. package/locales/ru-RU/models.json +7 -4
  46. package/locales/tr-TR/components.json +4 -0
  47. package/locales/tr-TR/modelProvider.json +1 -0
  48. package/locales/tr-TR/models.json +8 -5
  49. package/locales/vi-VN/components.json +4 -0
  50. package/locales/vi-VN/modelProvider.json +1 -0
  51. package/locales/vi-VN/models.json +8 -5
  52. package/locales/zh-CN/components.json +4 -0
  53. package/locales/zh-CN/modelProvider.json +1 -0
  54. package/locales/zh-CN/models.json +9 -6
  55. package/locales/zh-TW/components.json +4 -0
  56. package/locales/zh-TW/modelProvider.json +1 -0
  57. package/locales/zh-TW/models.json +7 -4
  58. package/package.json +1 -1
  59. package/src/app/(backend)/webapi/models/[provider]/pull/route.ts +34 -0
  60. package/src/app/(backend)/webapi/{chat/models → models}/[provider]/route.ts +1 -2
  61. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/index.tsx +0 -7
  62. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/CheckError.tsx +1 -1
  63. package/src/components/FormAction/index.tsx +1 -1
  64. package/src/database/models/__tests__/aiProvider.test.ts +100 -0
  65. package/src/database/models/aiProvider.ts +11 -1
  66. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel.tsx +43 -0
  67. package/src/features/Conversation/Error/OllamaDesktopSetupGuide/index.tsx +61 -0
  68. package/src/features/Conversation/Error/index.tsx +7 -0
  69. package/src/features/DevPanel/SystemInspector/ServerConfig.tsx +18 -2
  70. package/src/features/DevPanel/SystemInspector/index.tsx +25 -6
  71. package/src/features/OllamaModelDownloader/index.tsx +149 -0
  72. package/src/libs/agent-runtime/AgentRuntime.ts +6 -0
  73. package/src/libs/agent-runtime/BaseAI.ts +7 -0
  74. package/src/libs/agent-runtime/ollama/index.ts +84 -2
  75. package/src/libs/agent-runtime/openrouter/__snapshots__/index.test.ts.snap +24 -3263
  76. package/src/libs/agent-runtime/openrouter/fixtures/frontendModels.json +25 -0
  77. package/src/libs/agent-runtime/openrouter/fixtures/models.json +0 -3353
  78. package/src/libs/agent-runtime/openrouter/index.test.ts +56 -1
  79. package/src/libs/agent-runtime/openrouter/index.ts +9 -4
  80. package/src/libs/agent-runtime/types/index.ts +1 -0
  81. package/src/libs/agent-runtime/types/model.ts +44 -0
  82. package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
  83. package/src/libs/agent-runtime/utils/streams/model.ts +110 -0
  84. package/src/locales/default/components.ts +4 -0
  85. package/src/locales/default/modelProvider.ts +1 -0
  86. package/src/services/__tests__/models.test.ts +21 -0
  87. package/src/services/_url.ts +4 -1
  88. package/src/services/chat.ts +1 -1
  89. package/src/services/models.ts +153 -7
  90. package/src/store/aiInfra/slices/aiModel/action.ts +1 -1
  91. package/src/store/aiInfra/slices/aiProvider/action.ts +2 -1
  92. package/src/store/user/slices/modelList/action.test.ts +2 -2
  93. package/src/store/user/slices/modelList/action.ts +1 -1
  94. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/Checker.tsx +0 -73
  95. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -127
  96. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx +0 -154
  97. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts +0 -29
  98. package/src/services/__tests__/ollama.test.ts +0 -28
  99. package/src/services/ollama.ts +0 -83
  100. package/src/{app/[variants]/(main)/settings/provider/(detail)/ollama → features}/OllamaModelDownloader/useDownloadMonitor.ts +0 -0
@@ -1,8 +1,9 @@
1
1
  import { Ollama, Tool } from 'ollama/browser';
2
2
  import { ClientOptions } from 'openai';
3
3
 
4
- import { OpenAIChatMessage } from '@/libs/agent-runtime';
4
+ import { ModelRequestOptions, OpenAIChatMessage } from '@/libs/agent-runtime';
5
5
  import { ChatModelCard } from '@/types/llm';
6
+ import { createErrorResponse } from '@/utils/errorResponse';
6
7
 
7
8
  import { LobeRuntimeAI } from '../BaseAI';
8
9
  import { AgentRuntimeErrorType } from '../error';
@@ -12,11 +13,12 @@ import {
12
13
  Embeddings,
13
14
  EmbeddingsPayload,
14
15
  ModelProvider,
16
+ PullModelParams,
15
17
  } from '../types';
16
18
  import { AgentRuntimeError } from '../utils/createError';
17
19
  import { debugStream } from '../utils/debugStream';
18
20
  import { StreamingResponse } from '../utils/response';
19
- import { OllamaStream, convertIterableToStream } from '../utils/streams';
21
+ import { OllamaStream, convertIterableToStream, createModelPullStream } from '../utils/streams';
20
22
  import { parseDataUri } from '../utils/uriParser';
21
23
  import { OllamaMessage } from './type';
22
24
 
@@ -193,6 +195,86 @@ export class LobeOllamaAI implements LobeRuntimeAI {
193
195
 
194
196
  return ollamaMessage;
195
197
  };
198
+
199
+ async pullModel(params: PullModelParams, options?: ModelRequestOptions): Promise<Response> {
200
+ const { model, insecure } = params;
201
+ const signal = options?.signal; // 获取传入的 AbortSignal
202
+
203
+ // eslint-disable-next-line unicorn/consistent-function-scoping
204
+ const abortOllama = () => {
205
+ // 假设 this.client.abort() 是幂等的或者可以安全地多次调用
206
+ this.client.abort();
207
+ };
208
+
209
+ // 如果有 AbortSignal,监听 abort 事件
210
+ // 使用 { once: true } 确保监听器只触发一次
211
+ signal?.addEventListener('abort', abortOllama, { once: true });
212
+
213
+ try {
214
+ // 获取 Ollama pull 的迭代器
215
+ const iterable = await this.client.pull({
216
+ insecure: insecure ?? false,
217
+ model,
218
+ stream: true,
219
+ });
220
+
221
+ // 使用专门的模型下载流转换方法
222
+ const progressStream = createModelPullStream(iterable, model, {
223
+ onCancel: () => {
224
+ // 当流被取消时,调用 abortOllama
225
+ // 移除 signal 的监听器,避免重复调用(如果 abortOllama 不是幂等的)
226
+ signal?.removeEventListener('abort', abortOllama);
227
+ abortOllama(); // 执行中止逻辑
228
+ },
229
+ });
230
+
231
+ // 返回标准响应
232
+ return new Response(progressStream, {
233
+ headers: { 'Content-Type': 'application/json' },
234
+ });
235
+ } catch (error) {
236
+ // 如果在调用 client.pull 或创建流的初始阶段出错,需要移除监听器
237
+ signal?.removeEventListener('abort', abortOllama);
238
+
239
+ // 处理错误
240
+ if ((error as Error).message === 'fetch failed') {
241
+ return createErrorResponse(AgentRuntimeErrorType.OllamaServiceUnavailable, {
242
+ message: 'please check whether your ollama service is available',
243
+ provider: ModelProvider.Ollama,
244
+ });
245
+ }
246
+
247
+ console.error('model download error:', error);
248
+
249
+ // 检查是否是取消操作
250
+ if ((error as Error).name === 'AbortError') {
251
+ return new Response(
252
+ JSON.stringify({
253
+ model,
254
+ status: 'cancelled',
255
+ }),
256
+ {
257
+ headers: { 'Content-Type': 'application/json' },
258
+ status: 499,
259
+ },
260
+ );
261
+ }
262
+
263
+ // 返回错误响应
264
+ const errorMessage = error instanceof Error ? error.message : String(error);
265
+ return new Response(
266
+ JSON.stringify({
267
+ error: errorMessage,
268
+ model,
269
+ status: 'error',
270
+ }),
271
+ {
272
+ headers: { 'Content-Type': 'application/json' },
273
+ status: 500,
274
+ },
275
+ );
276
+ }
277
+ }
196
278
  }
197
279
 
198
280
  export default LobeOllamaAI;