@lobehub/chat 1.77.16 → 1.77.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. package/CHANGELOG.md +50 -0
  2. package/changelog/v1.json +18 -0
  3. package/contributing/Basic/Architecture.md +1 -1
  4. package/contributing/Basic/Architecture.zh-CN.md +1 -1
  5. package/contributing/Basic/Chat-API.md +326 -108
  6. package/contributing/Basic/Chat-API.zh-CN.md +313 -133
  7. package/contributing/Basic/Contributing-Guidelines.md +7 -4
  8. package/contributing/Basic/Contributing-Guidelines.zh-CN.md +7 -6
  9. package/contributing/Home.md +5 -5
  10. package/contributing/State-Management/State-Management-Intro.md +1 -1
  11. package/contributing/State-Management/State-Management-Intro.zh-CN.md +1 -1
  12. package/docker-compose/local/docker-compose.yml +2 -1
  13. package/locales/ar/components.json +4 -0
  14. package/locales/ar/modelProvider.json +1 -0
  15. package/locales/ar/models.json +8 -5
  16. package/locales/ar/tool.json +21 -1
  17. package/locales/bg-BG/components.json +4 -0
  18. package/locales/bg-BG/modelProvider.json +1 -0
  19. package/locales/bg-BG/models.json +8 -5
  20. package/locales/bg-BG/tool.json +21 -1
  21. package/locales/de-DE/components.json +4 -0
  22. package/locales/de-DE/modelProvider.json +1 -0
  23. package/locales/de-DE/models.json +8 -5
  24. package/locales/de-DE/tool.json +21 -1
  25. package/locales/en-US/components.json +4 -0
  26. package/locales/en-US/modelProvider.json +1 -0
  27. package/locales/en-US/models.json +8 -5
  28. package/locales/en-US/tool.json +21 -1
  29. package/locales/es-ES/components.json +4 -0
  30. package/locales/es-ES/modelProvider.json +1 -0
  31. package/locales/es-ES/models.json +7 -4
  32. package/locales/es-ES/tool.json +21 -1
  33. package/locales/fa-IR/components.json +4 -0
  34. package/locales/fa-IR/modelProvider.json +1 -0
  35. package/locales/fa-IR/models.json +7 -4
  36. package/locales/fa-IR/tool.json +21 -1
  37. package/locales/fr-FR/components.json +4 -0
  38. package/locales/fr-FR/modelProvider.json +1 -0
  39. package/locales/fr-FR/models.json +8 -5
  40. package/locales/fr-FR/tool.json +21 -1
  41. package/locales/it-IT/components.json +4 -0
  42. package/locales/it-IT/modelProvider.json +1 -0
  43. package/locales/it-IT/models.json +7 -4
  44. package/locales/it-IT/tool.json +21 -1
  45. package/locales/ja-JP/components.json +4 -0
  46. package/locales/ja-JP/modelProvider.json +1 -0
  47. package/locales/ja-JP/models.json +8 -5
  48. package/locales/ja-JP/tool.json +21 -1
  49. package/locales/ko-KR/components.json +4 -0
  50. package/locales/ko-KR/modelProvider.json +1 -0
  51. package/locales/ko-KR/models.json +8 -5
  52. package/locales/ko-KR/tool.json +21 -1
  53. package/locales/nl-NL/components.json +4 -0
  54. package/locales/nl-NL/modelProvider.json +1 -0
  55. package/locales/nl-NL/models.json +8 -5
  56. package/locales/nl-NL/tool.json +21 -1
  57. package/locales/pl-PL/components.json +4 -0
  58. package/locales/pl-PL/modelProvider.json +1 -0
  59. package/locales/pl-PL/models.json +8 -5
  60. package/locales/pl-PL/tool.json +21 -1
  61. package/locales/pt-BR/components.json +4 -0
  62. package/locales/pt-BR/modelProvider.json +1 -0
  63. package/locales/pt-BR/models.json +7 -4
  64. package/locales/pt-BR/tool.json +21 -1
  65. package/locales/ru-RU/components.json +4 -0
  66. package/locales/ru-RU/modelProvider.json +1 -0
  67. package/locales/ru-RU/models.json +7 -4
  68. package/locales/ru-RU/tool.json +21 -1
  69. package/locales/tr-TR/components.json +4 -0
  70. package/locales/tr-TR/modelProvider.json +1 -0
  71. package/locales/tr-TR/models.json +8 -5
  72. package/locales/tr-TR/tool.json +21 -1
  73. package/locales/vi-VN/components.json +4 -0
  74. package/locales/vi-VN/modelProvider.json +1 -0
  75. package/locales/vi-VN/models.json +8 -5
  76. package/locales/vi-VN/tool.json +21 -1
  77. package/locales/zh-CN/components.json +4 -0
  78. package/locales/zh-CN/modelProvider.json +1 -0
  79. package/locales/zh-CN/models.json +9 -6
  80. package/locales/zh-CN/tool.json +30 -1
  81. package/locales/zh-TW/components.json +4 -0
  82. package/locales/zh-TW/modelProvider.json +1 -0
  83. package/locales/zh-TW/models.json +7 -4
  84. package/locales/zh-TW/tool.json +21 -1
  85. package/package.json +1 -1
  86. package/src/app/(backend)/webapi/models/[provider]/pull/route.ts +34 -0
  87. package/src/app/(backend)/webapi/{chat/models → models}/[provider]/route.ts +1 -2
  88. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/index.tsx +0 -7
  89. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/CheckError.tsx +1 -1
  90. package/src/components/FormAction/index.tsx +1 -1
  91. package/src/database/models/__tests__/aiProvider.test.ts +100 -0
  92. package/src/database/models/aiProvider.ts +11 -1
  93. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel.tsx +43 -0
  94. package/src/features/Conversation/Error/OllamaDesktopSetupGuide/index.tsx +61 -0
  95. package/src/features/Conversation/Error/index.tsx +7 -0
  96. package/src/features/DevPanel/SystemInspector/ServerConfig.tsx +18 -2
  97. package/src/features/DevPanel/SystemInspector/index.tsx +25 -6
  98. package/src/features/OllamaModelDownloader/index.tsx +149 -0
  99. package/src/libs/agent-runtime/AgentRuntime.ts +6 -0
  100. package/src/libs/agent-runtime/BaseAI.ts +7 -0
  101. package/src/libs/agent-runtime/ollama/index.ts +84 -2
  102. package/src/libs/agent-runtime/openrouter/__snapshots__/index.test.ts.snap +24 -3263
  103. package/src/libs/agent-runtime/openrouter/fixtures/frontendModels.json +25 -0
  104. package/src/libs/agent-runtime/openrouter/fixtures/models.json +0 -3353
  105. package/src/libs/agent-runtime/openrouter/index.test.ts +56 -1
  106. package/src/libs/agent-runtime/openrouter/index.ts +9 -4
  107. package/src/libs/agent-runtime/types/index.ts +1 -0
  108. package/src/libs/agent-runtime/types/model.ts +44 -0
  109. package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
  110. package/src/libs/agent-runtime/utils/streams/model.ts +110 -0
  111. package/src/locales/default/components.ts +4 -0
  112. package/src/locales/default/modelProvider.ts +1 -0
  113. package/src/locales/default/tool.ts +30 -1
  114. package/src/server/modules/SearXNG.ts +10 -2
  115. package/src/server/routers/tools/__test__/search.test.ts +3 -1
  116. package/src/server/routers/tools/search.ts +10 -2
  117. package/src/services/__tests__/models.test.ts +21 -0
  118. package/src/services/_url.ts +4 -1
  119. package/src/services/chat.ts +1 -1
  120. package/src/services/models.ts +153 -7
  121. package/src/services/search.ts +2 -2
  122. package/src/store/aiInfra/slices/aiModel/action.ts +1 -1
  123. package/src/store/aiInfra/slices/aiProvider/action.ts +2 -1
  124. package/src/store/chat/slices/builtinTool/actions/searXNG.test.ts +28 -8
  125. package/src/store/chat/slices/builtinTool/actions/searXNG.ts +22 -5
  126. package/src/store/user/slices/modelList/action.test.ts +2 -2
  127. package/src/store/user/slices/modelList/action.ts +1 -1
  128. package/src/tools/web-browsing/Portal/Search/index.tsx +1 -1
  129. package/src/tools/web-browsing/Render/Search/SearchQuery/SearchView.tsx +1 -1
  130. package/src/tools/web-browsing/Render/Search/SearchQuery/index.tsx +1 -1
  131. package/src/tools/web-browsing/Render/Search/SearchResult/index.tsx +1 -1
  132. package/src/tools/web-browsing/components/CategoryAvatar.tsx +27 -0
  133. package/src/tools/web-browsing/components/SearchBar.tsx +84 -4
  134. package/src/tools/web-browsing/const.ts +26 -0
  135. package/src/tools/web-browsing/index.ts +58 -28
  136. package/src/tools/web-browsing/systemRole.ts +62 -1
  137. package/src/types/tool/search.ts +10 -1
  138. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/Checker.tsx +0 -73
  139. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -127
  140. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx +0 -154
  141. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts +0 -29
  142. package/src/helpers/url.ts +0 -17
  143. package/src/services/__tests__/ollama.test.ts +0 -28
  144. package/src/services/ollama.ts +0 -83
  145. /package/src/{app/[variants]/(main)/settings/provider/(detail)/ollama → features}/OllamaModelDownloader/useDownloadMonitor.ts +0 -0
@@ -372,5 +372,105 @@ describe('AiProviderModel', () => {
372
372
  settings: {},
373
373
  });
374
374
  });
375
+
376
+ it('should handle decrypt error gracefully', async () => {
377
+ const failingDecryptor = vi.fn().mockImplementation(() => {
378
+ throw new Error('Decryption failed');
379
+ });
380
+
381
+ await serverDB.insert(aiProviders).values({
382
+ id: 'provider-with-bad-keys',
383
+ keyVaults: 'invalid-encrypted-data',
384
+ name: 'Bad Provider',
385
+ source: 'custom',
386
+ userId,
387
+ });
388
+
389
+ const config = await aiProviderModel.getAiProviderRuntimeConfig(failingDecryptor);
390
+
391
+ expect(config['provider-with-bad-keys'].keyVaults).toEqual({});
392
+ expect(failingDecryptor).toHaveBeenCalled();
393
+ });
394
+
395
+ it('should handle null keyVaults gracefully', async () => {
396
+ await serverDB.insert(aiProviders).values({
397
+ id: 'provider-no-keys',
398
+ keyVaults: null,
399
+ name: 'No Keys Provider',
400
+ source: 'custom',
401
+ userId,
402
+ });
403
+
404
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
405
+
406
+ expect(config['provider-no-keys'].keyVaults).toEqual({});
407
+ });
408
+
409
+ it('should respect fetchOnClient property', async () => {
410
+ await serverDB.insert(aiProviders).values([
411
+ {
412
+ fetchOnClient: true,
413
+ id: 'client-provider',
414
+ name: 'Client Provider',
415
+ source: 'custom',
416
+ userId,
417
+ },
418
+ {
419
+ fetchOnClient: false,
420
+ id: 'server-provider',
421
+ name: 'Server Provider',
422
+ source: 'custom',
423
+ userId,
424
+ },
425
+ {
426
+ id: 'undefined-provider',
427
+ name: 'Undefined Provider',
428
+ source: 'custom',
429
+ userId,
430
+ },
431
+ ]);
432
+
433
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
434
+
435
+ expect(config['client-provider'].fetchOnClient).toBe(true);
436
+ expect(config['server-provider'].fetchOnClient).toBe(false);
437
+ expect(config['undefined-provider'].fetchOnClient).toBeUndefined();
438
+ });
439
+
440
+ it('should use empty object as default for settings', async () => {
441
+ await serverDB.insert(aiProviders).values({
442
+ id: 'no-settings-provider',
443
+ name: 'No Settings Provider',
444
+ settings: null as any,
445
+ source: 'custom',
446
+ userId,
447
+ });
448
+
449
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
450
+
451
+ expect(config['no-settings-provider'].settings).toEqual({});
452
+ });
453
+
454
+ it('should only include providers for the current user', async () => {
455
+ await serverDB.insert(aiProviders).values([
456
+ {
457
+ id: 'user1-provider',
458
+ name: 'User 1 Provider',
459
+ source: 'custom',
460
+ userId,
461
+ },
462
+ {
463
+ id: 'user2-provider',
464
+ name: 'User 2 Provider',
465
+ source: 'custom',
466
+ userId: 'user2',
467
+ },
468
+ ]);
469
+
470
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
471
+
472
+ expect(config['user1-provider']).toBeDefined();
473
+ expect(config['user2-provider']).toBeUndefined();
474
+ });
375
475
  });
376
476
  });
@@ -251,9 +251,19 @@ export class AiProviderModel {
251
251
  const builtin = DEFAULT_MODEL_PROVIDER_LIST.find((provider) => provider.id === item.id);
252
252
 
253
253
  const userSettings = item.settings || {};
254
+
255
+ let keyVaults = {};
256
+ if (!!item.keyVaults) {
257
+ try {
258
+ keyVaults = await decrypt(item.keyVaults);
259
+ } catch {
260
+ /* empty */
261
+ }
262
+ }
263
+
254
264
  runtimeConfig[item.id] = {
255
265
  fetchOnClient: typeof item.fetchOnClient === 'boolean' ? item.fetchOnClient : undefined,
256
- keyVaults: !!item.keyVaults ? await decrypt(item.keyVaults) : {},
266
+ keyVaults,
257
267
  settings: !!builtin ? merge(builtin.settings, userSettings) : userSettings,
258
268
  };
259
269
  }
@@ -0,0 +1,43 @@
1
+ import { Button } from 'antd';
2
+ import { memo } from 'react';
3
+ import { useTranslation } from 'react-i18next';
4
+
5
+ import OllamaModelDownloader from '@/features/OllamaModelDownloader';
6
+ import { useChatStore } from '@/store/chat';
7
+
8
+ import { ErrorActionContainer } from '../style';
9
+
10
+ interface InvalidOllamaModelProps {
11
+ id: string;
12
+ model: string;
13
+ }
14
+
15
+ const InvalidOllamaModel = memo<InvalidOllamaModelProps>(({ id, model }) => {
16
+ const { t } = useTranslation('error');
17
+
18
+ const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
19
+ s.delAndRegenerateMessage,
20
+ s.deleteMessage,
21
+ ]);
22
+ return (
23
+ <ErrorActionContainer>
24
+ <OllamaModelDownloader
25
+ extraAction={
26
+ <Button
27
+ onClick={() => {
28
+ deleteMessage(id);
29
+ }}
30
+ >
31
+ {t('unlock.closeMessage')}
32
+ </Button>
33
+ }
34
+ model={model}
35
+ onSuccessDownload={() => {
36
+ delAndRegenerateMessage(id);
37
+ }}
38
+ />
39
+ </ErrorActionContainer>
40
+ );
41
+ });
42
+
43
+ export default InvalidOllamaModel;
@@ -0,0 +1,61 @@
1
+ import { Ollama } from '@lobehub/icons';
2
+ import { Button } from 'antd';
3
+ import { useTheme } from 'antd-style';
4
+ import Link from 'next/link';
5
+ import { memo } from 'react';
6
+ import { Trans, useTranslation } from 'react-i18next';
7
+ import { Center } from 'react-layout-kit';
8
+
9
+ import FormAction from '@/components/FormAction';
10
+ import { useChatStore } from '@/store/chat';
11
+
12
+ import { ErrorActionContainer } from '../style';
13
+
14
+ const OllamaDesktopSetupGuide = memo<{ id: string }>(({ id }) => {
15
+ const theme = useTheme();
16
+ const { t } = useTranslation('components');
17
+
18
+ const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
19
+ s.delAndRegenerateMessage,
20
+ s.deleteMessage,
21
+ ]);
22
+
23
+ return (
24
+ <ErrorActionContainer style={{ paddingBlock: 0 }}>
25
+ <Center gap={16} paddingBlock={32} style={{ maxWidth: 300, width: '100%' }}>
26
+ <FormAction
27
+ avatar={<Ollama color={theme.colorPrimary} size={64} />}
28
+ description={
29
+ <span>
30
+ <Trans i18nKey={'OllamaSetupGuide.install.description'} ns={'components'}>
31
+ 请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
32
+ <Link href={'https://ollama.com/download'}>下载</Link>
33
+ </Trans>
34
+ </span>
35
+ }
36
+ title={t('OllamaSetupGuide.install.title')}
37
+ />
38
+ <Button
39
+ block
40
+ onClick={() => {
41
+ delAndRegenerateMessage(id);
42
+ }}
43
+ style={{ marginTop: 8 }}
44
+ type={'primary'}
45
+ >
46
+ {t('OllamaSetupGuide.action.start')}
47
+ </Button>
48
+ <Button
49
+ block
50
+ onClick={() => {
51
+ deleteMessage(id);
52
+ }}
53
+ >
54
+ {t('OllamaSetupGuide.action.close')}
55
+ </Button>
56
+ </Center>
57
+ </ErrorActionContainer>
58
+ );
59
+ });
60
+
61
+ export default OllamaDesktopSetupGuide;
@@ -5,6 +5,7 @@ import dynamic from 'next/dynamic';
5
5
  import { Suspense, memo, useMemo } from 'react';
6
6
  import { useTranslation } from 'react-i18next';
7
7
 
8
+ import { isDesktop } from '@/const/version';
8
9
  import { useProviderName } from '@/hooks/useProviderName';
9
10
  import { AgentRuntimeErrorType, ILobeAgentRuntimeErrorType } from '@/libs/agent-runtime';
10
11
  import { ChatErrorType, ErrorType } from '@/types/fetch';
@@ -23,6 +24,10 @@ const OllamaSetupGuide = dynamic(() => import('./OllamaBizError/SetupGuide'), {
23
24
  loading,
24
25
  ssr: false,
25
26
  });
27
+ const OllamaDesktopSetupGuide = dynamic(() => import('./OllamaDesktopSetupGuide'), {
28
+ loading,
29
+ ssr: false,
30
+ });
26
31
 
27
32
  // Config for the errorMessage display
28
33
  const getErrorAlertConfig = (
@@ -92,6 +97,8 @@ const ErrorMessageExtra = memo<{ data: ChatMessage }>(({ data }) => {
92
97
  switch (error.type) {
93
98
  // TODO: 优化 Ollama setup 的流程,isDesktop 模式下可以直接做到端到端检测
94
99
  case AgentRuntimeErrorType.OllamaServiceUnavailable: {
100
+ if (isDesktop) return <OllamaDesktopSetupGuide id={data.id} />;
101
+
95
102
  return <OllamaSetupGuide />;
96
103
  }
97
104
 
@@ -2,10 +2,26 @@ import { useServerConfigStore } from '@/store/serverConfig';
2
2
 
3
3
  import JsonViewer from './JsonViewer';
4
4
 
5
- const ServerConfig = () => {
5
+ export const ServerConfig = () => {
6
6
  const serverConfig = useServerConfigStore((s) => s.serverConfig);
7
7
 
8
8
  return <JsonViewer data={serverConfig} />;
9
9
  };
10
10
 
11
- export default ServerConfig;
11
+ export const SystemAgent = () => {
12
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
13
+
14
+ return <JsonViewer data={serverConfig.systemAgent || {}} />;
15
+ };
16
+
17
+ export const DefaultAgentConfig = () => {
18
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
19
+
20
+ return <JsonViewer data={serverConfig.defaultAgent || {}} />;
21
+ };
22
+
23
+ export const AIProvider = () => {
24
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
25
+
26
+ return <JsonViewer data={serverConfig.aiProvider || {}} />;
27
+ };
@@ -5,11 +5,14 @@ import { useState } from 'react';
5
5
  import { Flexbox } from 'react-layout-kit';
6
6
 
7
7
  import AiProviderRuntimeConfig from './AiProviderRuntimeConfig';
8
- import ServerConfig from './ServerConfig';
8
+ import { AIProvider, DefaultAgentConfig, ServerConfig, SystemAgent } from './ServerConfig';
9
9
 
10
10
  enum TabKey {
11
+ AIProvider = 'aiProvider',
11
12
  AiProviderRuntimeConfig = 'aiProviderRuntimeConfig',
13
+ DefaultAgentConfig = 'defaultAgentConfig',
12
14
  ServerConfig = 'serverConfig',
15
+ SystemAgent = 'systemAgent',
13
16
  }
14
17
 
15
18
  const SystemInspector = () => {
@@ -20,21 +23,37 @@ const SystemInspector = () => {
20
23
  <TabsNav
21
24
  activeKey={activeTab}
22
25
  items={[
23
- {
24
- key: TabKey.ServerConfig,
25
- label: 'Server Config',
26
- },
27
26
  {
28
27
  key: TabKey.AiProviderRuntimeConfig,
29
28
  label: 'Ai Provider Runtime Config',
30
29
  },
30
+ {
31
+ key: TabKey.AIProvider,
32
+ label: 'AI Provider Config',
33
+ },
34
+
35
+ {
36
+ key: TabKey.DefaultAgentConfig,
37
+ label: 'Default Agent Config',
38
+ },
39
+ {
40
+ key: TabKey.SystemAgent,
41
+ label: 'System Agent',
42
+ },
43
+ {
44
+ key: TabKey.ServerConfig,
45
+ label: 'Server Config',
46
+ },
31
47
  ]}
32
48
  onChange={(activeTab) => setActiveTab(activeTab as TabKey)}
33
49
  variant={'compact'}
34
50
  />
35
51
 
36
- {activeTab === TabKey.ServerConfig && <ServerConfig />}
37
52
  {activeTab === TabKey.AiProviderRuntimeConfig && <AiProviderRuntimeConfig />}
53
+ {activeTab === TabKey.DefaultAgentConfig && <DefaultAgentConfig />}
54
+ {activeTab === TabKey.SystemAgent && <SystemAgent />}
55
+ {activeTab === TabKey.AIProvider && <AIProvider />}
56
+ {activeTab === TabKey.ServerConfig && <ServerConfig />}
38
57
  </Flexbox>
39
58
  );
40
59
  };
@@ -0,0 +1,149 @@
1
+ import { Ollama } from '@lobehub/icons';
2
+ import { Alert } from '@lobehub/ui';
3
+ import { Button, Input, Progress } from 'antd';
4
+ import { useTheme } from 'antd-style';
5
+ import { ReactNode, memo, useCallback, useMemo, useState } from 'react';
6
+ import { useTranslation } from 'react-i18next';
7
+ import { Center, Flexbox } from 'react-layout-kit';
8
+
9
+ import FormAction from '@/components/FormAction';
10
+ import { useActionSWR } from '@/libs/swr';
11
+ import { ModelProgressInfo, modelsService } from '@/services/models';
12
+ import { formatSize } from '@/utils/format';
13
+
14
+ import { useDownloadMonitor } from './useDownloadMonitor';
15
+
16
+ interface OllamaModelDownloaderProps {
17
+ extraAction?: ReactNode;
18
+ model: string;
19
+ onSuccessDownload?: () => void;
20
+ }
21
+
22
+ const OllamaModelDownloader = memo<OllamaModelDownloaderProps>(
23
+ ({ model, onSuccessDownload, extraAction }) => {
24
+ const { t } = useTranslation(['modelProvider', 'error']);
25
+
26
+ const [modelToPull, setModelToPull] = useState(model);
27
+ const [completed, setCompleted] = useState(0);
28
+ const [total, setTotal] = useState(0);
29
+ const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed);
30
+ const percent = useMemo(() => {
31
+ return total ? Number(((completed / total) * 100).toFixed(1)) : 0;
32
+ }, [completed, total]);
33
+
34
+ const theme = useTheme();
35
+
36
+ // 定义进度回调函数
37
+ const handleProgress = useCallback((progress: ModelProgressInfo) => {
38
+ if (progress.completed) setCompleted(progress.completed);
39
+ if (progress.total) setTotal(progress.total);
40
+ }, []);
41
+
42
+ const {
43
+ mutate,
44
+ isValidating: isDownloading,
45
+ error,
46
+ } = useActionSWR(
47
+ [modelToPull],
48
+ async ([model]) => {
49
+ await modelsService.downloadModel(
50
+ { model, provider: 'ollama' },
51
+ { onProgress: handleProgress },
52
+ );
53
+
54
+ return true;
55
+ },
56
+ {
57
+ onSuccess: onSuccessDownload,
58
+ },
59
+ );
60
+
61
+ return (
62
+ <Center gap={16} paddingBlock={32} style={{ width: '100%' }}>
63
+ <FormAction
64
+ avatar={<Ollama color={theme.colorPrimary} size={64} />}
65
+ description={isDownloading ? t('ollama.download.desc') : t('ollama.unlock.description')}
66
+ title={
67
+ isDownloading
68
+ ? t('ollama.download.title', { model: modelToPull })
69
+ : t('ollama.unlock.title')
70
+ }
71
+ >
72
+ {!isDownloading && (
73
+ <Input
74
+ onChange={(e) => {
75
+ setModelToPull(e.target.value);
76
+ }}
77
+ value={modelToPull}
78
+ />
79
+ )}
80
+ </FormAction>
81
+ {isDownloading && (
82
+ <Flexbox flex={1} gap={8} style={{ maxWidth: 300 }} width={'100%'}>
83
+ <Progress
84
+ percent={percent}
85
+ showInfo
86
+ strokeColor={theme.colorSuccess}
87
+ trailColor={theme.colorSuccessBg}
88
+ />
89
+ <Flexbox
90
+ distribution={'space-between'}
91
+ horizontal
92
+ style={{ color: theme.colorTextDescription, fontSize: 12 }}
93
+ >
94
+ <span>
95
+ {t('ollama.download.remainingTime')}: {remainingTime}
96
+ </span>
97
+ <span>
98
+ {t('ollama.download.speed')}: {downloadSpeed}
99
+ </span>
100
+ </Flexbox>
101
+ </Flexbox>
102
+ )}
103
+ <Flexbox gap={12} style={{ maxWidth: 300 }} width={'100%'}>
104
+ {error?.message && (
105
+ <Alert
106
+ closable
107
+ description={error.message}
108
+ message={t('ollama.download.failed')}
109
+ showIcon={false}
110
+ type={'error'}
111
+ />
112
+ )}
113
+ <Button
114
+ block
115
+ loading={isDownloading}
116
+ onClick={() => {
117
+ mutate();
118
+ }}
119
+ style={{ marginTop: 8 }}
120
+ type={'primary'}
121
+ >
122
+ {!isDownloading
123
+ ? t('ollama.unlock.confirm')
124
+ : // if total is 0, show starting, else show downloaded
125
+ !total
126
+ ? t('ollama.unlock.starting')
127
+ : t('ollama.unlock.downloaded', {
128
+ completed: formatSize(completed, 2),
129
+ total: formatSize(total, 2),
130
+ })}
131
+ </Button>
132
+ {isDownloading ? (
133
+ <Button
134
+ onClick={() => {
135
+ modelsService.abortPull();
136
+ }}
137
+ >
138
+ {t('ollama.unlock.cancel')}
139
+ </Button>
140
+ ) : (
141
+ extraAction
142
+ )}
143
+ </Flexbox>
144
+ </Center>
145
+ );
146
+ },
147
+ );
148
+
149
+ export default OllamaModelDownloader;
@@ -12,6 +12,8 @@ import {
12
12
  ChatStreamPayload,
13
13
  EmbeddingsOptions,
14
14
  EmbeddingsPayload,
15
+ ModelRequestOptions,
16
+ PullModelParams,
15
17
  TextToImagePayload,
16
18
  TextToSpeechPayload,
17
19
  } from './types';
@@ -77,6 +79,10 @@ class AgentRuntime {
77
79
  return this._runtime.textToSpeech?.(payload, options);
78
80
  }
79
81
 
82
+ async pullModel(params: PullModelParams, options?: ModelRequestOptions) {
83
+ return this._runtime.pullModel?.(params, options);
84
+ }
85
+
80
86
  /**
81
87
  * @description Initialize the runtime with the provider and the options
82
88
  * @param provider choose a model provider
@@ -8,11 +8,14 @@ import {
8
8
  Embeddings,
9
9
  EmbeddingsOptions,
10
10
  EmbeddingsPayload,
11
+ ModelRequestOptions,
12
+ PullModelParams,
11
13
  TextToImagePayload,
12
14
  TextToSpeechOptions,
13
15
  TextToSpeechPayload,
14
16
  } from './types';
15
17
 
18
+ /* eslint-disable sort-keys-fix/sort-keys-fix , typescript-sort-keys/interface */
16
19
  export interface LobeRuntimeAI {
17
20
  baseURL?: string;
18
21
  chat(payload: ChatStreamPayload, options?: ChatCompetitionOptions): Promise<Response>;
@@ -27,7 +30,11 @@ export interface LobeRuntimeAI {
27
30
  payload: TextToSpeechPayload,
28
31
  options?: TextToSpeechOptions,
29
32
  ) => Promise<ArrayBuffer>;
33
+
34
+ // 模型管理相关接口
35
+ pullModel?(params: PullModelParams, options?: ModelRequestOptions): Promise<Response>;
30
36
  }
37
+ /* eslint-enabled */
31
38
 
32
39
  export abstract class LobeOpenAICompatibleRuntime {
33
40
  abstract baseURL: string;
@@ -1,8 +1,9 @@
1
1
  import { Ollama, Tool } from 'ollama/browser';
2
2
  import { ClientOptions } from 'openai';
3
3
 
4
- import { OpenAIChatMessage } from '@/libs/agent-runtime';
4
+ import { ModelRequestOptions, OpenAIChatMessage } from '@/libs/agent-runtime';
5
5
  import { ChatModelCard } from '@/types/llm';
6
+ import { createErrorResponse } from '@/utils/errorResponse';
6
7
 
7
8
  import { LobeRuntimeAI } from '../BaseAI';
8
9
  import { AgentRuntimeErrorType } from '../error';
@@ -12,11 +13,12 @@ import {
12
13
  Embeddings,
13
14
  EmbeddingsPayload,
14
15
  ModelProvider,
16
+ PullModelParams,
15
17
  } from '../types';
16
18
  import { AgentRuntimeError } from '../utils/createError';
17
19
  import { debugStream } from '../utils/debugStream';
18
20
  import { StreamingResponse } from '../utils/response';
19
- import { OllamaStream, convertIterableToStream } from '../utils/streams';
21
+ import { OllamaStream, convertIterableToStream, createModelPullStream } from '../utils/streams';
20
22
  import { parseDataUri } from '../utils/uriParser';
21
23
  import { OllamaMessage } from './type';
22
24
 
@@ -193,6 +195,86 @@ export class LobeOllamaAI implements LobeRuntimeAI {
193
195
 
194
196
  return ollamaMessage;
195
197
  };
198
+
199
+ async pullModel(params: PullModelParams, options?: ModelRequestOptions): Promise<Response> {
200
+ const { model, insecure } = params;
201
+ const signal = options?.signal; // 获取传入的 AbortSignal
202
+
203
+ // eslint-disable-next-line unicorn/consistent-function-scoping
204
+ const abortOllama = () => {
205
+ // 假设 this.client.abort() 是幂等的或者可以安全地多次调用
206
+ this.client.abort();
207
+ };
208
+
209
+ // 如果有 AbortSignal,监听 abort 事件
210
+ // 使用 { once: true } 确保监听器只触发一次
211
+ signal?.addEventListener('abort', abortOllama, { once: true });
212
+
213
+ try {
214
+ // 获取 Ollama pull 的迭代器
215
+ const iterable = await this.client.pull({
216
+ insecure: insecure ?? false,
217
+ model,
218
+ stream: true,
219
+ });
220
+
221
+ // 使用专门的模型下载流转换方法
222
+ const progressStream = createModelPullStream(iterable, model, {
223
+ onCancel: () => {
224
+ // 当流被取消时,调用 abortOllama
225
+ // 移除 signal 的监听器,避免重复调用(如果 abortOllama 不是幂等的)
226
+ signal?.removeEventListener('abort', abortOllama);
227
+ abortOllama(); // 执行中止逻辑
228
+ },
229
+ });
230
+
231
+ // 返回标准响应
232
+ return new Response(progressStream, {
233
+ headers: { 'Content-Type': 'application/json' },
234
+ });
235
+ } catch (error) {
236
+ // 如果在调用 client.pull 或创建流的初始阶段出错,需要移除监听器
237
+ signal?.removeEventListener('abort', abortOllama);
238
+
239
+ // 处理错误
240
+ if ((error as Error).message === 'fetch failed') {
241
+ return createErrorResponse(AgentRuntimeErrorType.OllamaServiceUnavailable, {
242
+ message: 'please check whether your ollama service is available',
243
+ provider: ModelProvider.Ollama,
244
+ });
245
+ }
246
+
247
+ console.error('model download error:', error);
248
+
249
+ // 检查是否是取消操作
250
+ if ((error as Error).name === 'AbortError') {
251
+ return new Response(
252
+ JSON.stringify({
253
+ model,
254
+ status: 'cancelled',
255
+ }),
256
+ {
257
+ headers: { 'Content-Type': 'application/json' },
258
+ status: 499,
259
+ },
260
+ );
261
+ }
262
+
263
+ // 返回错误响应
264
+ const errorMessage = error instanceof Error ? error.message : String(error);
265
+ return new Response(
266
+ JSON.stringify({
267
+ error: errorMessage,
268
+ model,
269
+ status: 'error',
270
+ }),
271
+ {
272
+ headers: { 'Content-Type': 'application/json' },
273
+ status: 500,
274
+ },
275
+ );
276
+ }
277
+ }
196
278
  }
197
279
 
198
280
  export default LobeOllamaAI;