@lobehub/chat 1.77.15 → 1.77.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/docker-compose/local/docker-compose.yml +2 -1
- package/locales/ar/components.json +4 -0
- package/locales/ar/modelProvider.json +1 -0
- package/locales/ar/models.json +8 -5
- package/locales/bg-BG/components.json +4 -0
- package/locales/bg-BG/modelProvider.json +1 -0
- package/locales/bg-BG/models.json +8 -5
- package/locales/de-DE/components.json +4 -0
- package/locales/de-DE/modelProvider.json +1 -0
- package/locales/de-DE/models.json +8 -5
- package/locales/en-US/components.json +4 -0
- package/locales/en-US/modelProvider.json +1 -0
- package/locales/en-US/models.json +8 -5
- package/locales/es-ES/components.json +4 -0
- package/locales/es-ES/modelProvider.json +1 -0
- package/locales/es-ES/models.json +7 -4
- package/locales/fa-IR/components.json +4 -0
- package/locales/fa-IR/modelProvider.json +1 -0
- package/locales/fa-IR/models.json +7 -4
- package/locales/fr-FR/components.json +4 -0
- package/locales/fr-FR/modelProvider.json +1 -0
- package/locales/fr-FR/models.json +8 -5
- package/locales/it-IT/components.json +4 -0
- package/locales/it-IT/modelProvider.json +1 -0
- package/locales/it-IT/models.json +7 -4
- package/locales/ja-JP/components.json +4 -0
- package/locales/ja-JP/modelProvider.json +1 -0
- package/locales/ja-JP/models.json +8 -5
- package/locales/ko-KR/components.json +4 -0
- package/locales/ko-KR/modelProvider.json +1 -0
- package/locales/ko-KR/models.json +8 -5
- package/locales/nl-NL/components.json +4 -0
- package/locales/nl-NL/modelProvider.json +1 -0
- package/locales/nl-NL/models.json +8 -5
- package/locales/pl-PL/components.json +4 -0
- package/locales/pl-PL/modelProvider.json +1 -0
- package/locales/pl-PL/models.json +8 -5
- package/locales/pt-BR/components.json +4 -0
- package/locales/pt-BR/modelProvider.json +1 -0
- package/locales/pt-BR/models.json +7 -4
- package/locales/ru-RU/components.json +4 -0
- package/locales/ru-RU/modelProvider.json +1 -0
- package/locales/ru-RU/models.json +7 -4
- package/locales/tr-TR/components.json +4 -0
- package/locales/tr-TR/modelProvider.json +1 -0
- package/locales/tr-TR/models.json +8 -5
- package/locales/vi-VN/components.json +4 -0
- package/locales/vi-VN/modelProvider.json +1 -0
- package/locales/vi-VN/models.json +8 -5
- package/locales/zh-CN/components.json +4 -0
- package/locales/zh-CN/modelProvider.json +1 -0
- package/locales/zh-CN/models.json +9 -6
- package/locales/zh-TW/components.json +4 -0
- package/locales/zh-TW/modelProvider.json +1 -0
- package/locales/zh-TW/models.json +7 -4
- package/package.json +1 -1
- package/src/app/(backend)/webapi/models/[provider]/pull/route.ts +34 -0
- package/src/app/(backend)/webapi/{chat/models → models}/[provider]/route.ts +1 -2
- package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/index.tsx +0 -7
- package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/CheckError.tsx +1 -1
- package/src/components/FormAction/index.tsx +1 -1
- package/src/database/models/__tests__/aiProvider.test.ts +100 -0
- package/src/database/models/aiProvider.ts +11 -1
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel.tsx +43 -0
- package/src/features/Conversation/Error/OllamaDesktopSetupGuide/index.tsx +61 -0
- package/src/features/Conversation/Error/index.tsx +7 -0
- package/src/features/DevPanel/SystemInspector/ServerConfig.tsx +18 -2
- package/src/features/DevPanel/SystemInspector/index.tsx +25 -6
- package/src/features/OllamaModelDownloader/index.tsx +149 -0
- package/src/libs/agent-runtime/AgentRuntime.ts +6 -0
- package/src/libs/agent-runtime/BaseAI.ts +7 -0
- package/src/libs/agent-runtime/ollama/index.ts +84 -2
- package/src/libs/agent-runtime/openrouter/__snapshots__/index.test.ts.snap +24 -3263
- package/src/libs/agent-runtime/openrouter/fixtures/frontendModels.json +25 -0
- package/src/libs/agent-runtime/openrouter/fixtures/models.json +0 -3353
- package/src/libs/agent-runtime/openrouter/index.test.ts +56 -1
- package/src/libs/agent-runtime/openrouter/index.ts +9 -4
- package/src/libs/agent-runtime/types/index.ts +1 -0
- package/src/libs/agent-runtime/types/model.ts +44 -0
- package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
- package/src/libs/agent-runtime/utils/streams/model.ts +110 -0
- package/src/locales/default/components.ts +4 -0
- package/src/locales/default/modelProvider.ts +1 -0
- package/src/server/routers/async/file.ts +3 -4
- package/src/server/routers/lambda/file.ts +8 -11
- package/src/server/routers/lambda/importer.ts +3 -4
- package/src/server/routers/lambda/message.ts +9 -3
- package/src/server/routers/lambda/ragEval.ts +5 -6
- package/src/server/services/file/impls/index.ts +12 -0
- package/src/server/services/file/impls/s3.test.ts +110 -0
- package/src/server/services/file/impls/s3.ts +60 -0
- package/src/server/services/file/impls/type.ts +44 -0
- package/src/server/services/file/index.ts +65 -0
- package/src/services/__tests__/models.test.ts +21 -0
- package/src/services/_url.ts +4 -1
- package/src/services/chat.ts +1 -1
- package/src/services/electron/__tests__/devtools.test.ts +34 -0
- package/src/services/models.ts +153 -7
- package/src/store/aiInfra/slices/aiModel/action.ts +1 -1
- package/src/store/aiInfra/slices/aiProvider/action.ts +2 -1
- package/src/store/user/slices/modelList/action.test.ts +2 -2
- package/src/store/user/slices/modelList/action.ts +1 -1
- package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/Checker.tsx +0 -73
- package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -127
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx +0 -154
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts +0 -29
- package/src/server/utils/files.test.ts +0 -37
- package/src/server/utils/files.ts +0 -20
- package/src/services/__tests__/ollama.test.ts +0 -28
- package/src/services/ollama.ts +0 -83
- /package/src/{app/[variants]/(main)/settings/provider/(detail)/ollama → features}/OllamaModelDownloader/useDownloadMonitor.ts +0 -0
package/src/libs/agent-runtime/ollama/index.ts

@@ -1,8 +1,9 @@
 import { Ollama, Tool } from 'ollama/browser';
 import { ClientOptions } from 'openai';
 
-import { OpenAIChatMessage } from '@/libs/agent-runtime';
+import { ModelRequestOptions, OpenAIChatMessage } from '@/libs/agent-runtime';
 import { ChatModelCard } from '@/types/llm';
+import { createErrorResponse } from '@/utils/errorResponse';
 
 import { LobeRuntimeAI } from '../BaseAI';
 import { AgentRuntimeErrorType } from '../error';
@@ -12,11 +13,12 @@ import {
   Embeddings,
   EmbeddingsPayload,
   ModelProvider,
+  PullModelParams,
 } from '../types';
 import { AgentRuntimeError } from '../utils/createError';
 import { debugStream } from '../utils/debugStream';
 import { StreamingResponse } from '../utils/response';
-import { OllamaStream, convertIterableToStream } from '../utils/streams';
+import { OllamaStream, convertIterableToStream, createModelPullStream } from '../utils/streams';
 import { parseDataUri } from '../utils/uriParser';
 import { OllamaMessage } from './type';
 
@@ -193,6 +195,86 @@ export class LobeOllamaAI implements LobeRuntimeAI {
 
     return ollamaMessage;
   };
+
+  async pullModel(params: PullModelParams, options?: ModelRequestOptions): Promise<Response> {
+    const { model, insecure } = params;
+    const signal = options?.signal; // the AbortSignal passed in by the caller
+
+    // eslint-disable-next-line unicorn/consistent-function-scoping
+    const abortOllama = () => {
+      // assumes this.client.abort() is idempotent or safe to call more than once
+      this.client.abort();
+    };
+
+    // if an AbortSignal is provided, listen for its abort event
+    // { once: true } ensures the listener fires only once
+    signal?.addEventListener('abort', abortOllama, { once: true });
+
+    try {
+      // get the Ollama pull iterator
+      const iterable = await this.client.pull({
+        insecure: insecure ?? false,
+        model,
+        stream: true,
+      });
+
+      // convert it with the dedicated model-download stream helper
+      const progressStream = createModelPullStream(iterable, model, {
+        onCancel: () => {
+          // when the stream is cancelled, invoke abortOllama;
+          // remove the signal listener first to avoid a duplicate call (in case abortOllama is not idempotent)
+          signal?.removeEventListener('abort', abortOllama);
+          abortOllama(); // run the abort logic
+        },
+      });
+
+      // return a standard response
+      return new Response(progressStream, {
+        headers: { 'Content-Type': 'application/json' },
+      });
+    } catch (error) {
+      // if client.pull or the initial stream creation fails, remove the listener
+      signal?.removeEventListener('abort', abortOllama);
+
+      // handle the error
+      if ((error as Error).message === 'fetch failed') {
+        return createErrorResponse(AgentRuntimeErrorType.OllamaServiceUnavailable, {
+          message: 'please check whether your ollama service is available',
+          provider: ModelProvider.Ollama,
+        });
+      }
+
+      console.error('model download error:', error);
+
+      // check whether this was a cancellation
+      if ((error as Error).name === 'AbortError') {
+        return new Response(
+          JSON.stringify({
+            model,
+            status: 'cancelled',
+          }),
+          {
+            headers: { 'Content-Type': 'application/json' },
+            status: 499,
+          },
+        );
+      }
+
+      // return an error response
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      return new Response(
+        JSON.stringify({
+          error: errorMessage,
+          model,
+          status: 'error',
+        }),
+        {
+          headers: { 'Content-Type': 'application/json' },
+          status: 500,
+        },
+      );
+    }
+  }
 }
 
 export default LobeOllamaAI;
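For orientation only, here is a minimal sketch of how a client might consume the streaming pull response exposed through the new `webapi/models/[provider]/pull` route. It assumes the route forwards the `Response` produced by `pullModel` above, that it accepts a POST with a JSON body, and that `createModelPullStream` emits newline-delimited JSON progress objects; none of these details are confirmed by this diff, and the function and field names below are hypothetical.

```ts
// Hypothetical consumer of the pull endpoint; route path, method, and chunk shape are assumptions.
const pullOllamaModel = async (model: string, signal?: AbortSignal) => {
  const res = await fetch('/webapi/models/ollama/pull', {
    body: JSON.stringify({ model }),
    headers: { 'Content-Type': 'application/json' },
    method: 'POST',
    signal,
  });

  if (!res.ok || !res.body) throw new Error(`pull failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // keep any trailing partial line for the next chunk

    for (const line of lines) {
      if (!line.trim()) continue;
      const progress = JSON.parse(line); // assumed shape, e.g. { model, status, completed?, total? }
      console.log(progress.status, progress.completed, progress.total);
    }
  }
};

// Cancelling the download: aborting the controller aborts the fetch, and the
// server side forwards the cancellation to this.client.abort() via onCancel.
const controller = new AbortController();
void pullOllamaModel('llama3', controller.signal);
```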