@lobehub/chat 1.77.16 → 1.77.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100) hide show
  1. package/CHANGELOG.md +25 -0
  2. package/changelog/v1.json +9 -0
  3. package/docker-compose/local/docker-compose.yml +2 -1
  4. package/locales/ar/components.json +4 -0
  5. package/locales/ar/modelProvider.json +1 -0
  6. package/locales/ar/models.json +8 -5
  7. package/locales/bg-BG/components.json +4 -0
  8. package/locales/bg-BG/modelProvider.json +1 -0
  9. package/locales/bg-BG/models.json +8 -5
  10. package/locales/de-DE/components.json +4 -0
  11. package/locales/de-DE/modelProvider.json +1 -0
  12. package/locales/de-DE/models.json +8 -5
  13. package/locales/en-US/components.json +4 -0
  14. package/locales/en-US/modelProvider.json +1 -0
  15. package/locales/en-US/models.json +8 -5
  16. package/locales/es-ES/components.json +4 -0
  17. package/locales/es-ES/modelProvider.json +1 -0
  18. package/locales/es-ES/models.json +7 -4
  19. package/locales/fa-IR/components.json +4 -0
  20. package/locales/fa-IR/modelProvider.json +1 -0
  21. package/locales/fa-IR/models.json +7 -4
  22. package/locales/fr-FR/components.json +4 -0
  23. package/locales/fr-FR/modelProvider.json +1 -0
  24. package/locales/fr-FR/models.json +8 -5
  25. package/locales/it-IT/components.json +4 -0
  26. package/locales/it-IT/modelProvider.json +1 -0
  27. package/locales/it-IT/models.json +7 -4
  28. package/locales/ja-JP/components.json +4 -0
  29. package/locales/ja-JP/modelProvider.json +1 -0
  30. package/locales/ja-JP/models.json +8 -5
  31. package/locales/ko-KR/components.json +4 -0
  32. package/locales/ko-KR/modelProvider.json +1 -0
  33. package/locales/ko-KR/models.json +8 -5
  34. package/locales/nl-NL/components.json +4 -0
  35. package/locales/nl-NL/modelProvider.json +1 -0
  36. package/locales/nl-NL/models.json +8 -5
  37. package/locales/pl-PL/components.json +4 -0
  38. package/locales/pl-PL/modelProvider.json +1 -0
  39. package/locales/pl-PL/models.json +8 -5
  40. package/locales/pt-BR/components.json +4 -0
  41. package/locales/pt-BR/modelProvider.json +1 -0
  42. package/locales/pt-BR/models.json +7 -4
  43. package/locales/ru-RU/components.json +4 -0
  44. package/locales/ru-RU/modelProvider.json +1 -0
  45. package/locales/ru-RU/models.json +7 -4
  46. package/locales/tr-TR/components.json +4 -0
  47. package/locales/tr-TR/modelProvider.json +1 -0
  48. package/locales/tr-TR/models.json +8 -5
  49. package/locales/vi-VN/components.json +4 -0
  50. package/locales/vi-VN/modelProvider.json +1 -0
  51. package/locales/vi-VN/models.json +8 -5
  52. package/locales/zh-CN/components.json +4 -0
  53. package/locales/zh-CN/modelProvider.json +1 -0
  54. package/locales/zh-CN/models.json +9 -6
  55. package/locales/zh-TW/components.json +4 -0
  56. package/locales/zh-TW/modelProvider.json +1 -0
  57. package/locales/zh-TW/models.json +7 -4
  58. package/package.json +1 -1
  59. package/src/app/(backend)/webapi/models/[provider]/pull/route.ts +34 -0
  60. package/src/app/(backend)/webapi/{chat/models → models}/[provider]/route.ts +1 -2
  61. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/index.tsx +0 -7
  62. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/CheckError.tsx +1 -1
  63. package/src/components/FormAction/index.tsx +1 -1
  64. package/src/database/models/__tests__/aiProvider.test.ts +100 -0
  65. package/src/database/models/aiProvider.ts +11 -1
  66. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel.tsx +43 -0
  67. package/src/features/Conversation/Error/OllamaDesktopSetupGuide/index.tsx +61 -0
  68. package/src/features/Conversation/Error/index.tsx +7 -0
  69. package/src/features/DevPanel/SystemInspector/ServerConfig.tsx +18 -2
  70. package/src/features/DevPanel/SystemInspector/index.tsx +25 -6
  71. package/src/features/OllamaModelDownloader/index.tsx +149 -0
  72. package/src/libs/agent-runtime/AgentRuntime.ts +6 -0
  73. package/src/libs/agent-runtime/BaseAI.ts +7 -0
  74. package/src/libs/agent-runtime/ollama/index.ts +84 -2
  75. package/src/libs/agent-runtime/openrouter/__snapshots__/index.test.ts.snap +24 -3263
  76. package/src/libs/agent-runtime/openrouter/fixtures/frontendModels.json +25 -0
  77. package/src/libs/agent-runtime/openrouter/fixtures/models.json +0 -3353
  78. package/src/libs/agent-runtime/openrouter/index.test.ts +56 -1
  79. package/src/libs/agent-runtime/openrouter/index.ts +9 -4
  80. package/src/libs/agent-runtime/types/index.ts +1 -0
  81. package/src/libs/agent-runtime/types/model.ts +44 -0
  82. package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
  83. package/src/libs/agent-runtime/utils/streams/model.ts +110 -0
  84. package/src/locales/default/components.ts +4 -0
  85. package/src/locales/default/modelProvider.ts +1 -0
  86. package/src/services/__tests__/models.test.ts +21 -0
  87. package/src/services/_url.ts +4 -1
  88. package/src/services/chat.ts +1 -1
  89. package/src/services/models.ts +153 -7
  90. package/src/store/aiInfra/slices/aiModel/action.ts +1 -1
  91. package/src/store/aiInfra/slices/aiProvider/action.ts +2 -1
  92. package/src/store/user/slices/modelList/action.test.ts +2 -2
  93. package/src/store/user/slices/modelList/action.ts +1 -1
  94. package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/Checker.tsx +0 -73
  95. package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -127
  96. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx +0 -154
  97. package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts +0 -29
  98. package/src/services/__tests__/ollama.test.ts +0 -28
  99. package/src/services/ollama.ts +0 -83
  100. /package/src/{app/[variants]/(main)/settings/provider/(detail)/ollama → features}/OllamaModelDownloader/useDownloadMonitor.ts +0 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lobehub/chat",
3
- "version": "1.77.16",
3
+ "version": "1.77.17",
4
4
  "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
5
5
  "keywords": [
6
6
  "framework",
@@ -0,0 +1,34 @@
1
+ import { checkAuth } from '@/app/(backend)/middleware/auth';
2
+ import { ChatCompletionErrorPayload, PullModelParams } from '@/libs/agent-runtime';
3
+ import { initAgentRuntimeWithUserPayload } from '@/server/modules/AgentRuntime';
4
+ import { ChatErrorType } from '@/types/fetch';
5
+ import { createErrorResponse } from '@/utils/errorResponse';
6
+
7
+ export const runtime = 'edge';
8
+
9
+ export const POST = checkAuth(async (req, { params, jwtPayload }) => {
10
+ const { provider } = await params;
11
+
12
+ try {
13
+ const agentRuntime = await initAgentRuntimeWithUserPayload(provider, jwtPayload);
14
+
15
+ const data = (await req.json()) as PullModelParams;
16
+
17
+ const res = await agentRuntime.pullModel(data, { signal: req.signal });
18
+ if (res) return res;
19
+
20
+ throw new Error('No response');
21
+ } catch (e) {
22
+ const {
23
+ errorType = ChatErrorType.InternalServerError,
24
+ error: errorContent,
25
+ ...res
26
+ } = e as ChatCompletionErrorPayload;
27
+
28
+ const error = errorContent || e;
29
+ // track the error at server side
30
+ console.error(`Route: [${provider}] ${errorType}:`, error);
31
+
32
+ return createErrorResponse(errorType, { error, ...res, provider });
33
+ }
34
+ });
@@ -8,8 +8,7 @@ import { createErrorResponse } from '@/utils/errorResponse';
8
8
 
9
9
  export const runtime = 'edge';
10
10
 
11
- const noNeedAPIKey = (provider: string) =>
12
- [ModelProvider.OpenRouter].includes(provider as any);
11
+ const noNeedAPIKey = (provider: string) => [ModelProvider.OpenRouter].includes(provider as any);
13
12
 
14
13
  export const GET = checkAuth(async (req, { params, jwtPayload }) => {
15
14
  const { provider } = await params;
@@ -5,19 +5,12 @@ import { useTranslation } from 'react-i18next';
5
5
  import { OllamaProviderCard } from '@/config/modelProviders';
6
6
 
7
7
  import { ProviderItem } from '../../type';
8
- import Checker from './Checker';
9
8
 
10
9
  export const useOllamaProvider = (): ProviderItem => {
11
10
  const { t } = useTranslation('modelProvider');
12
11
 
13
12
  return {
14
13
  ...OllamaProviderCard,
15
- checkerItem: {
16
- children: <Checker />,
17
- desc: t('ollama.checker.desc'),
18
- label: t('ollama.checker.title'),
19
- minWidth: undefined,
20
- },
21
14
  proxyUrl: {
22
15
  desc: t('ollama.endpoint.desc'),
23
16
  placeholder: 'http://127.0.0.1:11434',
@@ -13,7 +13,7 @@ const OllamaSetupGuide = dynamic(() => import('@/components/OllamaSetupGuide'),
13
13
  ssr: false,
14
14
  });
15
15
 
16
- const InvalidModel = dynamic(() => import('./OllamaModelDownloader'), {
16
+ const InvalidModel = dynamic(() => import('@/features/OllamaModelDownloader'), {
17
17
  loading,
18
18
  ssr: false,
19
19
  });
@@ -25,7 +25,7 @@ const FormAction = memo<
25
25
  animation?: boolean;
26
26
  avatar: ReactNode;
27
27
  background?: string;
28
- description: string;
28
+ description: ReactNode;
29
29
  title: string;
30
30
  } & CenterProps
31
31
  >(
@@ -372,5 +372,105 @@ describe('AiProviderModel', () => {
372
372
  settings: {},
373
373
  });
374
374
  });
375
+
376
+ it('should handle decrypt error gracefully', async () => {
377
+ const failingDecryptor = vi.fn().mockImplementation(() => {
378
+ throw new Error('Decryption failed');
379
+ });
380
+
381
+ await serverDB.insert(aiProviders).values({
382
+ id: 'provider-with-bad-keys',
383
+ keyVaults: 'invalid-encrypted-data',
384
+ name: 'Bad Provider',
385
+ source: 'custom',
386
+ userId,
387
+ });
388
+
389
+ const config = await aiProviderModel.getAiProviderRuntimeConfig(failingDecryptor);
390
+
391
+ expect(config['provider-with-bad-keys'].keyVaults).toEqual({});
392
+ expect(failingDecryptor).toHaveBeenCalled();
393
+ });
394
+
395
+ it('should handle null keyVaults gracefully', async () => {
396
+ await serverDB.insert(aiProviders).values({
397
+ id: 'provider-no-keys',
398
+ keyVaults: null,
399
+ name: 'No Keys Provider',
400
+ source: 'custom',
401
+ userId,
402
+ });
403
+
404
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
405
+
406
+ expect(config['provider-no-keys'].keyVaults).toEqual({});
407
+ });
408
+
409
+ it('should respect fetchOnClient property', async () => {
410
+ await serverDB.insert(aiProviders).values([
411
+ {
412
+ fetchOnClient: true,
413
+ id: 'client-provider',
414
+ name: 'Client Provider',
415
+ source: 'custom',
416
+ userId,
417
+ },
418
+ {
419
+ fetchOnClient: false,
420
+ id: 'server-provider',
421
+ name: 'Server Provider',
422
+ source: 'custom',
423
+ userId,
424
+ },
425
+ {
426
+ id: 'undefined-provider',
427
+ name: 'Undefined Provider',
428
+ source: 'custom',
429
+ userId,
430
+ },
431
+ ]);
432
+
433
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
434
+
435
+ expect(config['client-provider'].fetchOnClient).toBe(true);
436
+ expect(config['server-provider'].fetchOnClient).toBe(false);
437
+ expect(config['undefined-provider'].fetchOnClient).toBeUndefined();
438
+ });
439
+
440
+ it('should use empty object as default for settings', async () => {
441
+ await serverDB.insert(aiProviders).values({
442
+ id: 'no-settings-provider',
443
+ name: 'No Settings Provider',
444
+ settings: null as any,
445
+ source: 'custom',
446
+ userId,
447
+ });
448
+
449
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
450
+
451
+ expect(config['no-settings-provider'].settings).toEqual({});
452
+ });
453
+
454
+ it('should only include providers for the current user', async () => {
455
+ await serverDB.insert(aiProviders).values([
456
+ {
457
+ id: 'user1-provider',
458
+ name: 'User 1 Provider',
459
+ source: 'custom',
460
+ userId,
461
+ },
462
+ {
463
+ id: 'user2-provider',
464
+ name: 'User 2 Provider',
465
+ source: 'custom',
466
+ userId: 'user2',
467
+ },
468
+ ]);
469
+
470
+ const config = await aiProviderModel.getAiProviderRuntimeConfig();
471
+
472
+ expect(config['user1-provider']).toBeDefined();
473
+ expect(config['user2-provider']).toBeUndefined();
474
+ });
375
475
  });
376
476
  });
@@ -251,9 +251,19 @@ export class AiProviderModel {
251
251
  const builtin = DEFAULT_MODEL_PROVIDER_LIST.find((provider) => provider.id === item.id);
252
252
 
253
253
  const userSettings = item.settings || {};
254
+
255
+ let keyVaults = {};
256
+ if (!!item.keyVaults) {
257
+ try {
258
+ keyVaults = await decrypt(item.keyVaults);
259
+ } catch {
260
+ /* empty */
261
+ }
262
+ }
263
+
254
264
  runtimeConfig[item.id] = {
255
265
  fetchOnClient: typeof item.fetchOnClient === 'boolean' ? item.fetchOnClient : undefined,
256
- keyVaults: !!item.keyVaults ? await decrypt(item.keyVaults) : {},
266
+ keyVaults,
257
267
  settings: !!builtin ? merge(builtin.settings, userSettings) : userSettings,
258
268
  };
259
269
  }
@@ -0,0 +1,43 @@
1
+ import { Button } from 'antd';
2
+ import { memo } from 'react';
3
+ import { useTranslation } from 'react-i18next';
4
+
5
+ import OllamaModelDownloader from '@/features/OllamaModelDownloader';
6
+ import { useChatStore } from '@/store/chat';
7
+
8
+ import { ErrorActionContainer } from '../style';
9
+
10
+ interface InvalidOllamaModelProps {
11
+ id: string;
12
+ model: string;
13
+ }
14
+
15
+ const InvalidOllamaModel = memo<InvalidOllamaModelProps>(({ id, model }) => {
16
+ const { t } = useTranslation('error');
17
+
18
+ const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
19
+ s.delAndRegenerateMessage,
20
+ s.deleteMessage,
21
+ ]);
22
+ return (
23
+ <ErrorActionContainer>
24
+ <OllamaModelDownloader
25
+ extraAction={
26
+ <Button
27
+ onClick={() => {
28
+ deleteMessage(id);
29
+ }}
30
+ >
31
+ {t('unlock.closeMessage')}
32
+ </Button>
33
+ }
34
+ model={model}
35
+ onSuccessDownload={() => {
36
+ delAndRegenerateMessage(id);
37
+ }}
38
+ />
39
+ </ErrorActionContainer>
40
+ );
41
+ });
42
+
43
+ export default InvalidOllamaModel;
@@ -0,0 +1,61 @@
1
+ import { Ollama } from '@lobehub/icons';
2
+ import { Button } from 'antd';
3
+ import { useTheme } from 'antd-style';
4
+ import Link from 'next/link';
5
+ import { memo } from 'react';
6
+ import { Trans, useTranslation } from 'react-i18next';
7
+ import { Center } from 'react-layout-kit';
8
+
9
+ import FormAction from '@/components/FormAction';
10
+ import { useChatStore } from '@/store/chat';
11
+
12
+ import { ErrorActionContainer } from '../style';
13
+
14
+ const OllamaDesktopSetupGuide = memo<{ id: string }>(({ id }) => {
15
+ const theme = useTheme();
16
+ const { t } = useTranslation('components');
17
+
18
+ const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
19
+ s.delAndRegenerateMessage,
20
+ s.deleteMessage,
21
+ ]);
22
+
23
+ return (
24
+ <ErrorActionContainer style={{ paddingBlock: 0 }}>
25
+ <Center gap={16} paddingBlock={32} style={{ maxWidth: 300, width: '100%' }}>
26
+ <FormAction
27
+ avatar={<Ollama color={theme.colorPrimary} size={64} />}
28
+ description={
29
+ <span>
30
+ <Trans i18nKey={'OllamaSetupGuide.install.description'} ns={'components'}>
31
+ 请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
32
+ <Link href={'https://ollama.com/download'}>下载</Link>
33
+ </Trans>
34
+ </span>
35
+ }
36
+ title={t('OllamaSetupGuide.install.title')}
37
+ />
38
+ <Button
39
+ block
40
+ onClick={() => {
41
+ delAndRegenerateMessage(id);
42
+ }}
43
+ style={{ marginTop: 8 }}
44
+ type={'primary'}
45
+ >
46
+ {t('OllamaSetupGuide.action.start')}
47
+ </Button>
48
+ <Button
49
+ block
50
+ onClick={() => {
51
+ deleteMessage(id);
52
+ }}
53
+ >
54
+ {t('OllamaSetupGuide.action.close')}
55
+ </Button>
56
+ </Center>
57
+ </ErrorActionContainer>
58
+ );
59
+ });
60
+
61
+ export default OllamaDesktopSetupGuide;
@@ -5,6 +5,7 @@ import dynamic from 'next/dynamic';
5
5
  import { Suspense, memo, useMemo } from 'react';
6
6
  import { useTranslation } from 'react-i18next';
7
7
 
8
+ import { isDesktop } from '@/const/version';
8
9
  import { useProviderName } from '@/hooks/useProviderName';
9
10
  import { AgentRuntimeErrorType, ILobeAgentRuntimeErrorType } from '@/libs/agent-runtime';
10
11
  import { ChatErrorType, ErrorType } from '@/types/fetch';
@@ -23,6 +24,10 @@ const OllamaSetupGuide = dynamic(() => import('./OllamaBizError/SetupGuide'), {
23
24
  loading,
24
25
  ssr: false,
25
26
  });
27
+ const OllamaDesktopSetupGuide = dynamic(() => import('./OllamaDesktopSetupGuide'), {
28
+ loading,
29
+ ssr: false,
30
+ });
26
31
 
27
32
  // Config for the errorMessage display
28
33
  const getErrorAlertConfig = (
@@ -92,6 +97,8 @@ const ErrorMessageExtra = memo<{ data: ChatMessage }>(({ data }) => {
92
97
  switch (error.type) {
93
98
  // TODO: 优化 Ollama setup 的流程,isDesktop 模式下可以直接做到端到端检测
94
99
  case AgentRuntimeErrorType.OllamaServiceUnavailable: {
100
+ if (isDesktop) return <OllamaDesktopSetupGuide id={data.id} />;
101
+
95
102
  return <OllamaSetupGuide />;
96
103
  }
97
104
 
@@ -2,10 +2,26 @@ import { useServerConfigStore } from '@/store/serverConfig';
2
2
 
3
3
  import JsonViewer from './JsonViewer';
4
4
 
5
- const ServerConfig = () => {
5
+ export const ServerConfig = () => {
6
6
  const serverConfig = useServerConfigStore((s) => s.serverConfig);
7
7
 
8
8
  return <JsonViewer data={serverConfig} />;
9
9
  };
10
10
 
11
- export default ServerConfig;
11
+ export const SystemAgent = () => {
12
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
13
+
14
+ return <JsonViewer data={serverConfig.systemAgent || {}} />;
15
+ };
16
+
17
+ export const DefaultAgentConfig = () => {
18
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
19
+
20
+ return <JsonViewer data={serverConfig.defaultAgent || {}} />;
21
+ };
22
+
23
+ export const AIProvider = () => {
24
+ const serverConfig = useServerConfigStore((s) => s.serverConfig);
25
+
26
+ return <JsonViewer data={serverConfig.aiProvider || {}} />;
27
+ };
@@ -5,11 +5,14 @@ import { useState } from 'react';
5
5
  import { Flexbox } from 'react-layout-kit';
6
6
 
7
7
  import AiProviderRuntimeConfig from './AiProviderRuntimeConfig';
8
- import ServerConfig from './ServerConfig';
8
+ import { AIProvider, DefaultAgentConfig, ServerConfig, SystemAgent } from './ServerConfig';
9
9
 
10
10
  enum TabKey {
11
+ AIProvider = 'aiProvider',
11
12
  AiProviderRuntimeConfig = 'aiProviderRuntimeConfig',
13
+ DefaultAgentConfig = 'defaultAgentConfig',
12
14
  ServerConfig = 'serverConfig',
15
+ SystemAgent = 'systemAgent',
13
16
  }
14
17
 
15
18
  const SystemInspector = () => {
@@ -20,21 +23,37 @@ const SystemInspector = () => {
20
23
  <TabsNav
21
24
  activeKey={activeTab}
22
25
  items={[
23
- {
24
- key: TabKey.ServerConfig,
25
- label: 'Server Config',
26
- },
27
26
  {
28
27
  key: TabKey.AiProviderRuntimeConfig,
29
28
  label: 'Ai Provider Runtime Config',
30
29
  },
30
+ {
31
+ key: TabKey.AIProvider,
32
+ label: 'AI Provider Config',
33
+ },
34
+
35
+ {
36
+ key: TabKey.DefaultAgentConfig,
37
+ label: 'Default Agent Config',
38
+ },
39
+ {
40
+ key: TabKey.SystemAgent,
41
+ label: 'System Agent',
42
+ },
43
+ {
44
+ key: TabKey.ServerConfig,
45
+ label: 'Server Config',
46
+ },
31
47
  ]}
32
48
  onChange={(activeTab) => setActiveTab(activeTab as TabKey)}
33
49
  variant={'compact'}
34
50
  />
35
51
 
36
- {activeTab === TabKey.ServerConfig && <ServerConfig />}
37
52
  {activeTab === TabKey.AiProviderRuntimeConfig && <AiProviderRuntimeConfig />}
53
+ {activeTab === TabKey.DefaultAgentConfig && <DefaultAgentConfig />}
54
+ {activeTab === TabKey.SystemAgent && <SystemAgent />}
55
+ {activeTab === TabKey.AIProvider && <AIProvider />}
56
+ {activeTab === TabKey.ServerConfig && <ServerConfig />}
38
57
  </Flexbox>
39
58
  );
40
59
  };
@@ -0,0 +1,149 @@
1
+ import { Ollama } from '@lobehub/icons';
2
+ import { Alert } from '@lobehub/ui';
3
+ import { Button, Input, Progress } from 'antd';
4
+ import { useTheme } from 'antd-style';
5
+ import { ReactNode, memo, useCallback, useMemo, useState } from 'react';
6
+ import { useTranslation } from 'react-i18next';
7
+ import { Center, Flexbox } from 'react-layout-kit';
8
+
9
+ import FormAction from '@/components/FormAction';
10
+ import { useActionSWR } from '@/libs/swr';
11
+ import { ModelProgressInfo, modelsService } from '@/services/models';
12
+ import { formatSize } from '@/utils/format';
13
+
14
+ import { useDownloadMonitor } from './useDownloadMonitor';
15
+
16
+ interface OllamaModelDownloaderProps {
17
+ extraAction?: ReactNode;
18
+ model: string;
19
+ onSuccessDownload?: () => void;
20
+ }
21
+
22
+ const OllamaModelDownloader = memo<OllamaModelDownloaderProps>(
23
+ ({ model, onSuccessDownload, extraAction }) => {
24
+ const { t } = useTranslation(['modelProvider', 'error']);
25
+
26
+ const [modelToPull, setModelToPull] = useState(model);
27
+ const [completed, setCompleted] = useState(0);
28
+ const [total, setTotal] = useState(0);
29
+ const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed);
30
+ const percent = useMemo(() => {
31
+ return total ? Number(((completed / total) * 100).toFixed(1)) : 0;
32
+ }, [completed, total]);
33
+
34
+ const theme = useTheme();
35
+
36
+ // 定义进度回调函数
37
+ const handleProgress = useCallback((progress: ModelProgressInfo) => {
38
+ if (progress.completed) setCompleted(progress.completed);
39
+ if (progress.total) setTotal(progress.total);
40
+ }, []);
41
+
42
+ const {
43
+ mutate,
44
+ isValidating: isDownloading,
45
+ error,
46
+ } = useActionSWR(
47
+ [modelToPull],
48
+ async ([model]) => {
49
+ await modelsService.downloadModel(
50
+ { model, provider: 'ollama' },
51
+ { onProgress: handleProgress },
52
+ );
53
+
54
+ return true;
55
+ },
56
+ {
57
+ onSuccess: onSuccessDownload,
58
+ },
59
+ );
60
+
61
+ return (
62
+ <Center gap={16} paddingBlock={32} style={{ width: '100%' }}>
63
+ <FormAction
64
+ avatar={<Ollama color={theme.colorPrimary} size={64} />}
65
+ description={isDownloading ? t('ollama.download.desc') : t('ollama.unlock.description')}
66
+ title={
67
+ isDownloading
68
+ ? t('ollama.download.title', { model: modelToPull })
69
+ : t('ollama.unlock.title')
70
+ }
71
+ >
72
+ {!isDownloading && (
73
+ <Input
74
+ onChange={(e) => {
75
+ setModelToPull(e.target.value);
76
+ }}
77
+ value={modelToPull}
78
+ />
79
+ )}
80
+ </FormAction>
81
+ {isDownloading && (
82
+ <Flexbox flex={1} gap={8} style={{ maxWidth: 300 }} width={'100%'}>
83
+ <Progress
84
+ percent={percent}
85
+ showInfo
86
+ strokeColor={theme.colorSuccess}
87
+ trailColor={theme.colorSuccessBg}
88
+ />
89
+ <Flexbox
90
+ distribution={'space-between'}
91
+ horizontal
92
+ style={{ color: theme.colorTextDescription, fontSize: 12 }}
93
+ >
94
+ <span>
95
+ {t('ollama.download.remainingTime')}: {remainingTime}
96
+ </span>
97
+ <span>
98
+ {t('ollama.download.speed')}: {downloadSpeed}
99
+ </span>
100
+ </Flexbox>
101
+ </Flexbox>
102
+ )}
103
+ <Flexbox gap={12} style={{ maxWidth: 300 }} width={'100%'}>
104
+ {error?.message && (
105
+ <Alert
106
+ closable
107
+ description={error.message}
108
+ message={t('ollama.download.failed')}
109
+ showIcon={false}
110
+ type={'error'}
111
+ />
112
+ )}
113
+ <Button
114
+ block
115
+ loading={isDownloading}
116
+ onClick={() => {
117
+ mutate();
118
+ }}
119
+ style={{ marginTop: 8 }}
120
+ type={'primary'}
121
+ >
122
+ {!isDownloading
123
+ ? t('ollama.unlock.confirm')
124
+ : // if total is 0, show starting, else show downloaded
125
+ !total
126
+ ? t('ollama.unlock.starting')
127
+ : t('ollama.unlock.downloaded', {
128
+ completed: formatSize(completed, 2),
129
+ total: formatSize(total, 2),
130
+ })}
131
+ </Button>
132
+ {isDownloading ? (
133
+ <Button
134
+ onClick={() => {
135
+ modelsService.abortPull();
136
+ }}
137
+ >
138
+ {t('ollama.unlock.cancel')}
139
+ </Button>
140
+ ) : (
141
+ extraAction
142
+ )}
143
+ </Flexbox>
144
+ </Center>
145
+ );
146
+ },
147
+ );
148
+
149
+ export default OllamaModelDownloader;
@@ -12,6 +12,8 @@ import {
12
12
  ChatStreamPayload,
13
13
  EmbeddingsOptions,
14
14
  EmbeddingsPayload,
15
+ ModelRequestOptions,
16
+ PullModelParams,
15
17
  TextToImagePayload,
16
18
  TextToSpeechPayload,
17
19
  } from './types';
@@ -77,6 +79,10 @@ class AgentRuntime {
77
79
  return this._runtime.textToSpeech?.(payload, options);
78
80
  }
79
81
 
82
+ async pullModel(params: PullModelParams, options?: ModelRequestOptions) {
83
+ return this._runtime.pullModel?.(params, options);
84
+ }
85
+
80
86
  /**
81
87
  * @description Initialize the runtime with the provider and the options
82
88
  * @param provider choose a model provider
@@ -8,11 +8,14 @@ import {
8
8
  Embeddings,
9
9
  EmbeddingsOptions,
10
10
  EmbeddingsPayload,
11
+ ModelRequestOptions,
12
+ PullModelParams,
11
13
  TextToImagePayload,
12
14
  TextToSpeechOptions,
13
15
  TextToSpeechPayload,
14
16
  } from './types';
15
17
 
18
+ /* eslint-disable sort-keys-fix/sort-keys-fix , typescript-sort-keys/interface */
16
19
  export interface LobeRuntimeAI {
17
20
  baseURL?: string;
18
21
  chat(payload: ChatStreamPayload, options?: ChatCompetitionOptions): Promise<Response>;
@@ -27,7 +30,11 @@ export interface LobeRuntimeAI {
27
30
  payload: TextToSpeechPayload,
28
31
  options?: TextToSpeechOptions,
29
32
  ) => Promise<ArrayBuffer>;
33
+
34
+ // 模型管理相关接口
35
+ pullModel?(params: PullModelParams, options?: ModelRequestOptions): Promise<Response>;
30
36
  }
37
+ /* eslint-enable */
31
38
 
32
39
  export abstract class LobeOpenAICompatibleRuntime {
33
40
  abstract baseURL: string;