@lobehub/chat 0.135.4 → 0.137.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/CHANGELOG.md +50 -0
  2. package/README.md +8 -8
  3. package/README.zh-CN.md +8 -8
  4. package/docs/self-hosting/advanced/authentication.mdx +14 -81
  5. package/docs/self-hosting/advanced/authentication.zh-CN.mdx +14 -75
  6. package/docs/self-hosting/advanced/sso-providers/auth0.mdx +91 -0
  7. package/docs/self-hosting/advanced/sso-providers/auth0.zh-CN.mdx +85 -0
  8. package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.mdx +69 -0
  9. package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.zh-CN.mdx +75 -0
  10. package/docs/self-hosting/environment-variables/basic.mdx +37 -1
  11. package/docs/self-hosting/environment-variables/basic.zh-CN.mdx +36 -1
  12. package/docs/self-hosting/environment-variables/model-provider.mdx +0 -2
  13. package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx +0 -1
  14. package/docs/self-hosting/start.mdx +1 -9
  15. package/docs/usage/features/agent-market.mdx +1 -1
  16. package/docs/usage/features/tts.mdx +0 -3
  17. package/docs/usage/features/vision.mdx +0 -1
  18. package/docs/usage/providers/ollama/gemma.mdx +0 -1
  19. package/docs/usage/providers/ollama.mdx +0 -3
  20. package/docs/usage/start.mdx +1 -1
  21. package/docs/usage/start.zh-CN.mdx +1 -1
  22. package/locales/ar/error.json +8 -0
  23. package/locales/ar/setting.json +9 -1
  24. package/locales/de-DE/error.json +8 -0
  25. package/locales/de-DE/setting.json +9 -1
  26. package/locales/en-US/error.json +8 -0
  27. package/locales/en-US/setting.json +9 -1
  28. package/locales/es-ES/error.json +8 -0
  29. package/locales/es-ES/setting.json +9 -1
  30. package/locales/fr-FR/error.json +8 -0
  31. package/locales/fr-FR/setting.json +9 -1
  32. package/locales/it-IT/error.json +8 -0
  33. package/locales/it-IT/setting.json +9 -1
  34. package/locales/ja-JP/error.json +8 -0
  35. package/locales/ja-JP/setting.json +9 -1
  36. package/locales/ko-KR/error.json +8 -0
  37. package/locales/ko-KR/setting.json +9 -1
  38. package/locales/nl-NL/error.json +8 -0
  39. package/locales/nl-NL/setting.json +9 -1
  40. package/locales/pl-PL/error.json +8 -0
  41. package/locales/pl-PL/setting.json +9 -1
  42. package/locales/pt-BR/error.json +8 -0
  43. package/locales/pt-BR/setting.json +9 -1
  44. package/locales/ru-RU/error.json +8 -0
  45. package/locales/ru-RU/setting.json +9 -1
  46. package/locales/tr-TR/error.json +8 -0
  47. package/locales/tr-TR/setting.json +9 -1
  48. package/locales/vi-VN/error.json +8 -0
  49. package/locales/vi-VN/setting.json +9 -1
  50. package/locales/zh-CN/error.json +8 -0
  51. package/locales/zh-CN/setting.json +9 -1
  52. package/locales/zh-TW/error.json +8 -0
  53. package/locales/zh-TW/setting.json +9 -1
  54. package/package.json +3 -2
  55. package/src/app/api/auth/next-auth.ts +39 -12
  56. package/src/app/api/config/route.ts +3 -2
  57. package/src/app/api/errorResponse.ts +2 -1
  58. package/src/app/settings/common/Common.tsx +1 -1
  59. package/src/app/settings/llm/Ollama/Checker.tsx +73 -0
  60. package/src/app/settings/llm/Ollama/index.tsx +2 -4
  61. package/src/app/settings/llm/components/Checker.tsx +23 -17
  62. package/src/components/ModelIcon/index.tsx +2 -0
  63. package/src/components/ModelTag/ModelIcon.tsx +2 -0
  64. package/src/config/modelProviders/ollama.ts +14 -0
  65. package/src/config/server/app.ts +4 -0
  66. package/src/config/server/provider.ts +2 -0
  67. package/src/features/Conversation/Error/InvalidOllamaModel/index.tsx +138 -0
  68. package/src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts +48 -0
  69. package/src/features/Conversation/Error/OAuthForm.tsx +1 -1
  70. package/src/features/Conversation/Error/OllamaBizError.tsx +34 -0
  71. package/src/features/Conversation/Error/index.tsx +5 -0
  72. package/src/features/Conversation/Error/style.tsx +2 -2
  73. package/src/locales/default/error.ts +8 -0
  74. package/src/locales/default/setting.ts +9 -1
  75. package/src/services/__tests__/ollama.test.ts +26 -0
  76. package/src/services/ollama.ts +64 -0
  77. package/src/store/global/slices/settings/selectors/modelProvider.ts +11 -10
  78. package/src/store/middleware/createHyperStorage/index.ts +1 -2
  79. package/src/store/middleware/createHyperStorage/indexedDB.ts +1 -1
  80. package/src/store/middleware/createHyperStorage/localStorage.ts +1 -1
  81. package/src/store/middleware/createHyperStorage/urlStorage.ts +1 -1
  82. package/src/types/fetch.ts +1 -0
  83. package/tsconfig.json +1 -1
@@ -5,6 +5,7 @@ import {
5
5
  Claude,
6
6
  Gemini,
7
7
  Gemma,
8
+ LLaVA,
8
9
  Meta,
9
10
  Minimax,
10
11
  Mistral,
@@ -29,6 +30,7 @@ const ModelIcon = memo<ModelProviderIconProps>(({ model, size = 12 }) => {
29
30
  if (model.includes('claude')) return <Claude.Avatar size={size} />;
30
31
  if (model.includes('titan')) return <Aws.Avatar size={size} />;
31
32
  if (model.includes('llama')) return <Meta.Avatar size={size} />;
33
+ if (model.includes('llava')) return <LLaVA.Avatar size={size} />;
32
34
  if (model.includes('gemini')) return <Gemini.Avatar size={size} />;
33
35
  if (model.includes('gemma')) return <Gemma.Avatar size={size} />;
34
36
  if (model.includes('qwen')) return <Tongyi.Avatar background={Tongyi.colorPrimary} size={size} />;
@@ -5,6 +5,7 @@ import {
5
5
  Claude,
6
6
  Gemini,
7
7
  Gemma,
8
+ LLaVA,
8
9
  Meta,
9
10
  Minimax,
10
11
  Mistral,
@@ -28,6 +29,7 @@ const ModelIcon = memo<ModelIconProps>(({ model, size = 12 }) => {
28
29
  if (model.includes('claude')) return <Claude size={size} />;
29
30
  if (model.includes('titan')) return <Aws size={size} />;
30
31
  if (model.includes('llama')) return <Meta size={size} />;
32
+ if (model.includes('llava')) return <LLaVA size={size} />;
31
33
  if (model.includes('gemini')) return <Gemini size={size} />;
32
34
  if (model.includes('gemma')) return <Gemma.Simple size={size} />;
33
35
  if (model.includes('moonshot')) return <Moonshot size={size} />;
@@ -92,6 +92,20 @@ const Ollama: ModelProviderCard = {
92
92
  tokens: 4800,
93
93
  vision: false,
94
94
  },
95
+ {
96
+ displayName: 'Mixtral 8x7B',
97
+ functionCall: false,
98
+ id: 'mixtral',
99
+ tokens: 32_000,
100
+ vision: false,
101
+ },
102
+ {
103
+ displayName: 'Qwen Chat 4B',
104
+ functionCall: false,
105
+ id: 'qwen',
106
+ tokens: 32_768,
107
+ vision: false,
108
+ },
95
109
  {
96
110
  displayName: 'Qwen Chat 7B',
97
111
  functionCall: false,
@@ -58,9 +58,13 @@ export const getAppConfig = () => {
58
58
  PLUGIN_SETTINGS: process.env.PLUGIN_SETTINGS,
59
59
 
60
60
  ENABLE_OAUTH_SSO: !!process.env.ENABLE_OAUTH_SSO,
61
+ SSO_PROVIDERS: process.env.SSO_PROVIDERS || 'auth0',
61
62
  AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID || '',
62
63
  AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET || '',
63
64
  AUTH0_ISSUER: process.env.AUTH0_ISSUER || '',
65
+ AZURE_AD_CLIENT_ID: process.env.AZURE_AD_CLIENT_ID || '',
66
+ AZURE_AD_CLIENT_SECRET: process.env.AZURE_AD_CLIENT_SECRET || '',
67
+ AZURE_AD_TENANT_ID: process.env.AZURE_AD_TENANT_ID || '',
64
68
  NEXTAUTH_SECRET: process.env.NEXTAUTH_SECRET || '',
65
69
 
66
70
  ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1',
@@ -47,6 +47,7 @@ declare global {
47
47
 
48
48
  // Ollama Provider;
49
49
  OLLAMA_PROXY_URL?: string;
50
+ OLLAMA_CUSTOM_MODELS?: string;
50
51
  }
51
52
  }
52
53
  }
@@ -116,5 +117,6 @@ export const getProviderConfig = () => {
116
117
 
117
118
  ENABLE_OLLAMA: !!process.env.OLLAMA_PROXY_URL,
118
119
  OLLAMA_PROXY_URL: process.env.OLLAMA_PROXY_URL || '',
120
+ OLLAMA_CUSTOM_MODELS: process.env.OLLAMA_CUSTOM_MODELS,
119
121
  };
120
122
  };
@@ -0,0 +1,138 @@
1
+ import { Ollama } from '@lobehub/icons';
2
+ import { Button, Input, Progress } from 'antd';
3
+ import { useTheme } from 'antd-style';
4
+ import { memo, useMemo, useState } from 'react';
5
+ import { useTranslation } from 'react-i18next';
6
+ import { Center, Flexbox } from 'react-layout-kit';
7
+ import useSWR from 'swr';
8
+
9
+ import { ollamaService } from '@/services/ollama';
10
+ import { useChatStore } from '@/store/chat';
11
+
12
+ import { ErrorActionContainer, FormAction } from '../style';
13
+ import { useDownloadMonitor } from './useDownloadMonitor';
14
+
15
+ interface OllamaModelFormProps {
16
+ id: string;
17
+ model: string;
18
+ }
19
+
20
+ const OllamaModelForm = memo<OllamaModelFormProps>(({ id, model }) => {
21
+ const { t } = useTranslation('error');
22
+ const { t: settingT } = useTranslation('setting');
23
+
24
+ const [modelToPull, setModelToPull] = useState(model);
25
+ const [completed, setCompleted] = useState(0);
26
+ const [total, setTotal] = useState(0);
27
+ const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed);
28
+ const percent = useMemo(() => {
29
+ return total ? Number(((completed / total) * 100).toFixed(0)) : 0;
30
+ }, [completed, total]);
31
+
32
+ const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
33
+ s.delAndRegenerateMessage,
34
+ s.deleteMessage,
35
+ ]);
36
+ const theme = useTheme();
37
+
38
+ const { mutate, isLoading: isDownloading } = useSWR(
39
+ [id, modelToPull],
40
+ async ([, model]) => {
41
+ const generator = await ollamaService.pullModel(model);
42
+ for await (const progress of generator) {
43
+ if (progress.completed) {
44
+ setCompleted(progress.completed);
45
+ setTotal(progress.total);
46
+ }
47
+ }
48
+ return null;
49
+ },
50
+ {
51
+ onSuccess: () => {
52
+ delAndRegenerateMessage(id);
53
+ },
54
+ revalidateOnFocus: false,
55
+ revalidateOnMount: false,
56
+ },
57
+ );
58
+
59
+ return (
60
+ <Center gap={16} style={{ maxWidth: 300, width: '100%' }}>
61
+ <FormAction
62
+ avatar={<Ollama color={theme.colorPrimary} size={64} />}
63
+ description={
64
+ isDownloading ? settingT('ollama.download.desc') : t('unlock.model.Ollama.description')
65
+ }
66
+ title={
67
+ isDownloading
68
+ ? settingT('ollama.download.title', { model: modelToPull })
69
+ : t('unlock.model.Ollama.title')
70
+ }
71
+ >
72
+ {!isDownloading && (
73
+ <Input
74
+ onChange={(e) => {
75
+ setModelToPull(e.target.value);
76
+ }}
77
+ value={modelToPull}
78
+ />
79
+ )}
80
+ </FormAction>
81
+ {isDownloading && (
82
+ <Flexbox flex={1} gap={8} width={'100%'}>
83
+ <Progress
84
+ percent={percent}
85
+ showInfo
86
+ strokeColor={theme.colorSuccess}
87
+ trailColor={theme.colorSuccessBg}
88
+ />
89
+ <Flexbox
90
+ distribution={'space-between'}
91
+ horizontal
92
+ style={{ color: theme.colorTextDescription, fontSize: 12 }}
93
+ >
94
+ <span>
95
+ {settingT('ollama.download.remainingTime')}: {remainingTime}
96
+ </span>
97
+ <span>
98
+ {settingT('ollama.download.speed')}: {downloadSpeed}
99
+ </span>
100
+ </Flexbox>
101
+ </Flexbox>
102
+ )}
103
+ <Flexbox gap={12} width={'100%'}>
104
+ <Button
105
+ block
106
+ loading={isDownloading}
107
+ onClick={() => {
108
+ mutate();
109
+ }}
110
+ style={{ marginTop: 8 }}
111
+ type={'primary'}
112
+ >
113
+ {t('unlock.model.Ollama.confirm')}
114
+ </Button>
115
+ <Button
116
+ onClick={() => {
117
+ deleteMessage(id);
118
+ }}
119
+ >
120
+ {t('unlock.closeMessage')}
121
+ </Button>
122
+ </Flexbox>
123
+ </Center>
124
+ );
125
+ });
126
+
127
+ interface InvalidOllamaModelProps {
128
+ id: string;
129
+ model: string;
130
+ }
131
+
132
+ const InvalidOllamaModel = memo<InvalidOllamaModelProps>(({ id, model }) => (
133
+ <ErrorActionContainer>
134
+ <OllamaModelForm id={id} model={model} />
135
+ </ErrorActionContainer>
136
+ ));
137
+
138
+ export default InvalidOllamaModel;
@@ -0,0 +1,48 @@
1
+ import { useEffect, useMemo, useState } from 'react';
2
+
3
+ const formatSpeed = (speed: number): string => {
4
+ const kbPerSecond = speed / 1024;
5
+ if (kbPerSecond < 1024) {
6
+ return `${kbPerSecond.toFixed(1)} KB/s`;
7
+ } else {
8
+ const mbPerSecond = kbPerSecond / 1024;
9
+ return `${mbPerSecond.toFixed(1)} MB/s`;
10
+ }
11
+ };
12
+
13
+ const formatTime = (timeInSeconds: number): string => {
14
+ if (timeInSeconds < 60) {
15
+ return `${timeInSeconds.toFixed(1)} s`;
16
+ } else if (timeInSeconds < 3600) {
17
+ return `${(timeInSeconds / 60).toFixed(1)} min`;
18
+ } else {
19
+ return `${(timeInSeconds / 3600).toFixed(2)} h`;
20
+ }
21
+ };
22
+
23
+ export const useDownloadMonitor = (totalSize: number, completedSize: number) => {
24
+ const [startTime, setStartTime] = useState<number>(Date.now());
25
+ const [downloadSpeed, setDownloadSpeed] = useState<string>('0 KB/s');
26
+ const [remainingTime, setRemainingTime] = useState<string>('-');
27
+
28
+ const isReady = useMemo(() => completedSize > 0, [completedSize]);
29
+
30
+ useEffect(() => {
31
+ const currentTime = Date.now();
32
+ // mark as start download
33
+ if (isReady) {
34
+ const elapsedTime = (currentTime - startTime) / 1000; // in seconds
35
+ const speed = completedSize / elapsedTime; // in bytes per second
36
+
37
+ const remainingSize = totalSize - completedSize;
38
+ const time = remainingSize / speed; // in seconds
39
+
40
+ setDownloadSpeed(formatSpeed(speed));
41
+ setRemainingTime(formatTime(time));
42
+ } else {
43
+ setStartTime(currentTime);
44
+ }
45
+ }, [isReady, completedSize]);
46
+
47
+ return { downloadSpeed, remainingTime };
48
+ };
@@ -57,7 +57,7 @@ const OAuthForm = memo<{ id: string }>(({ id }) => {
57
57
  block
58
58
  icon={<Icon icon={ScanFace} />}
59
59
  loading={status === 'loading'}
60
- onClick={() => signIn('auth0')}
60
+ onClick={() => signIn()}
61
61
  style={{ marginTop: 8 }}
62
62
  type={'primary'}
63
63
  >
@@ -0,0 +1,34 @@
import { memo } from 'react';

import { ChatMessage } from '@/types/message';

import ErrorJsonViewer from './ErrorJsonViewer';
import InvalidModel from './InvalidOllamaModel';

interface OllamaError {
  code: string | null;
  message: string;
  param?: any;
  type: string;
}

interface OllamaErrorResponse {
  error: OllamaError;
}

// Matches ollama's "model 'name:tag' not found" message and captures the tag.
// Model identifiers may contain letters, digits and . : / , + _ -
// (e.g. "llama3.1:8b"). The original class used an accidental `,-_` character
// range; the characters are spelled out explicitly here.
const UNRESOLVED_MODEL_REGEXP = /model '([\w+.,:/-]+)' not found/;

/**
 * Renders an Ollama business error. When the error says the requested model
 * is not present locally, show the interactive pull form; otherwise fall back
 * to the raw JSON error viewer.
 */
const OllamaBizError = memo<ChatMessage>(({ error, id }) => {
  const errorBody: OllamaErrorResponse | undefined = (error as any)?.body;

  // Optional-chain on errorBody as well: `error.body` may be absent, and the
  // original `errorBody.error` threw a TypeError in that case.
  const errorMessage = errorBody?.error?.message;

  const unresolvedModel = errorMessage?.match(UNRESOLVED_MODEL_REGEXP)?.[1];
  if (unresolvedModel) {
    return <InvalidModel id={id} model={unresolvedModel} />;
  }

  return <ErrorJsonViewer error={error} id={id} />;
});

export default OllamaBizError;
@@ -9,6 +9,7 @@ import { ChatMessage, ChatMessageError } from '@/types/message';
9
9
  import ErrorJsonViewer from './ErrorJsonViewer';
10
10
  import InvalidAPIKey from './InvalidAPIKey';
11
11
  import InvalidAccessCode from './InvalidAccessCode';
12
+ import OllamaBizError from './OllamaBizError';
12
13
  import OpenAiBizError from './OpenAiBizError';
13
14
  import PluginSettings from './PluginSettings';
14
15
 
@@ -58,6 +59,10 @@ const ErrorMessageExtra = memo<{ data: ChatMessage }>(({ data }) => {
58
59
  return <OpenAiBizError {...data} />;
59
60
  }
60
61
 
62
+ case AgentRuntimeErrorType.OllamaBizError: {
63
+ return <OllamaBizError {...data} />;
64
+ }
65
+
61
66
  case ChatErrorType.InvalidAccessCode: {
62
67
  return <InvalidAccessCode id={data.id} provider={data.error?.body?.provider} />;
63
68
  }
@@ -36,14 +36,14 @@ export const FormAction = memo<{
36
36
  const { styles, theme } = useStyles();
37
37
 
38
38
  return (
39
- <Center gap={16} style={{ maxWidth: 300 }}>
39
+ <Center gap={16} style={{ maxWidth: 300, width: '100%' }}>
40
40
  <Avatar
41
41
  avatar={avatar}
42
42
  background={background ?? theme.colorFillContent}
43
43
  gap={12}
44
44
  size={80}
45
45
  />
46
- <Flexbox style={{ fontSize: 20 }}>{title}</Flexbox>
46
+ <Flexbox style={{ fontSize: 20, textAlign: 'center' }}>{title}</Flexbox>
47
47
  <Flexbox className={styles.desc}>{description}</Flexbox>
48
48
  {children}
49
49
  </Center>
@@ -84,6 +84,7 @@ export default {
84
84
 
85
85
  InvalidOllamaArgs: 'Ollama 配置不正确,请检查 Ollama 配置后重试',
86
86
  OllamaBizError: '请求 Ollama 服务出错,请根据以下信息排查或重试',
87
+ OllamaServiceUnavailable: '未检测到 Ollama 服务,请检查是否正常启动',
87
88
 
88
89
  AgentRuntimeError: 'Lobe 语言模型运行时执行出错,请根据以下信息排查或重试',
89
90
  /* eslint-enable */
@@ -134,6 +135,13 @@ export default {
134
135
  },
135
136
  closeMessage: '关闭提示',
136
137
  confirm: '确认并重试',
138
+ model: {
139
+ Ollama: {
140
+ confirm: '下载',
141
+ description: '输入你的 Ollama 模型标签,完成即可继续会话',
142
+ title: '下载指定的 Ollama 模型',
143
+ },
144
+ },
137
145
  oauth: {
138
146
  description: '管理员已开启统一登录认证,点击下方按钮登录,即可解锁应用',
139
147
  success: '登录成功',
@@ -195,6 +195,14 @@ export default {
195
195
  },
196
196
  waitingForMore: '更多模型正在 <1>计划接入</1> 中,敬请期待 ✨',
197
197
  },
198
+ ollama: {
199
+ download: {
200
+ desc: 'Ollama 正在下载该模型,请尽量不要关闭本页面。重新下载时将会中断处继续',
201
+ remainingTime: '剩余时间',
202
+ speed: '下载速度',
203
+ title: '正在下载模型 {{model}} ',
204
+ },
205
+ },
198
206
  plugin: {
199
207
  addTooltip: '自定义插件',
200
208
  clearDeprecated: '移除无效插件',
@@ -410,6 +418,7 @@ export default {
410
418
  placeholder: '请输入助手的标识符,需要是唯一的,比如 web-development',
411
419
  tooltips: '分享到助手市场',
412
420
  },
421
+
413
422
  tab: {
414
423
  about: '关于',
415
424
  agent: '默认助手',
@@ -417,7 +426,6 @@ export default {
417
426
  llm: '语言模型',
418
427
  tts: '语音服务',
419
428
  },
420
-
421
429
  tools: {
422
430
  builtins: {
423
431
  groupName: '内置插件',
@@ -0,0 +1,26 @@
1
+ import { Mock, describe, expect, it, vi } from 'vitest';
2
+
3
+ import { ollamaService } from '../ollama';
4
+
5
+ vi.stubGlobal('fetch', vi.fn());
6
+
7
+ describe('OllamaService', () => {
8
+ describe('list models', async () => {
9
+ it('should make a GET request with the correct payload', async () => {
10
+ (fetch as Mock).mockResolvedValueOnce(new Response(JSON.stringify({ models: [] })));
11
+
12
+ expect(await ollamaService.getModels()).toEqual({ models: [] });
13
+
14
+ expect(global.fetch).toHaveBeenCalled();
15
+ });
16
+
17
+ it('should make a GET request with the error', async () => {
18
+ const mockResponse = new Response(null, { status: 503 });
19
+ (fetch as Mock).mockResolvedValueOnce(mockResponse);
20
+
21
+ await expect(ollamaService.getModels()).rejects.toThrow();
22
+
23
+ expect(global.fetch).toHaveBeenCalled();
24
+ });
25
+ });
26
+ });
@@ -0,0 +1,64 @@
1
+ import { ListResponse, Ollama as OllamaBrowser, ProgressResponse } from 'ollama/browser';
2
+
3
+ import { createErrorResponse } from '@/app/api/errorResponse';
4
+ import { ModelProvider } from '@/libs/agent-runtime';
5
+ import { useGlobalStore } from '@/store/global';
6
+ import { modelProviderSelectors } from '@/store/global/selectors';
7
+ import { ChatErrorType } from '@/types/fetch';
8
+ import { getMessageError } from '@/utils/fetch';
9
+
10
+ const DEFAULT_BASE_URL = 'http://127.0.0.1:11434/v1';
11
+
12
+ class OllamaService {
13
+ getHost = (): string => {
14
+ const endpoint = modelProviderSelectors.ollamaProxyUrl(useGlobalStore.getState());
15
+ const url = new URL(endpoint || DEFAULT_BASE_URL);
16
+ return url.host;
17
+ };
18
+
19
+ getOllamaClient = () => {
20
+ return new OllamaBrowser({ host: this.getHost() });
21
+ };
22
+
23
+ pullModel = async (model: string): Promise<AsyncGenerator<ProgressResponse>> => {
24
+ let response: Response | AsyncGenerator<ProgressResponse>;
25
+ try {
26
+ response = await this.getOllamaClient().pull({ insecure: true, model, stream: true });
27
+ return response;
28
+ } catch {
29
+ response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
30
+ host: this.getHost(),
31
+ message: 'please check whether your ollama service is available',
32
+ provider: ModelProvider.Ollama,
33
+ });
34
+ }
35
+
36
+ if (!response.ok) {
37
+ const messageError = await getMessageError(response);
38
+ throw messageError;
39
+ }
40
+ return response.json();
41
+ };
42
+
43
+ getModels = async (): Promise<ListResponse> => {
44
+ let response: Response | ListResponse;
45
+ try {
46
+ const response = await this.getOllamaClient().list();
47
+ return response;
48
+ } catch {
49
+ response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
50
+ host: this.getHost(),
51
+ message: 'please check whether your ollama service is available',
52
+ provider: ModelProvider.Ollama,
53
+ });
54
+ }
55
+
56
+ if (!response.ok) {
57
+ const messageError = await getMessageError(response);
58
+ throw messageError;
59
+ }
60
+ return response.json();
61
+ };
62
+ }
63
+
64
+ export const ollamaService = new OllamaService();
@@ -48,9 +48,6 @@ const mistralAPIKey = (s: GlobalStore) => modelProvider(s).mistral.apiKey;
48
48
  const enableMoonshot = (s: GlobalStore) => modelProvider(s).moonshot.enabled;
49
49
  const moonshotAPIKey = (s: GlobalStore) => modelProvider(s).moonshot.apiKey;
50
50
 
51
- const enableOllamaConfigInSettings = (s: GlobalStore) =>
52
- s.serverConfig.languageModel?.ollama?.enabled || false;
53
-
54
51
  const enableOllama = (s: GlobalStore) => modelProvider(s).ollama.enabled;
55
52
  const ollamaProxyUrl = (s: GlobalStore) => modelProvider(s).ollama.endpoint;
56
53
 
@@ -118,27 +115,32 @@ const processChatModels = (
118
115
  };
119
116
 
120
117
  const modelSelectList = (s: GlobalStore): ModelProviderCard[] => {
121
- const string = [
118
+ const openaiModelString = [
122
119
  s.serverConfig.customModelName,
123
120
  currentSettings(s).languageModel.openAI.customModelName,
124
121
  ]
125
122
  .filter(Boolean)
126
123
  .join(',');
127
124
 
128
- const modelConfig = parseModelString(string);
125
+ const openaiModelConfig = parseModelString(openaiModelString);
129
126
 
130
- const chatModels = processChatModels(modelConfig);
127
+ const openaiChatModels = processChatModels(openaiModelConfig);
131
128
 
132
- const ollamaModelConfig = parseModelString(
129
+ const ollamaModelString = [
130
+ s.serverConfig.languageModel?.ollama?.customModelName,
133
131
  currentSettings(s).languageModel.ollama.customModelName,
134
- );
132
+ ]
133
+ .filter(Boolean)
134
+ .join(',');
135
+
136
+ const ollamaModelConfig = parseModelString(ollamaModelString);
135
137
 
136
138
  const ollamaChatModels = processChatModels(ollamaModelConfig, OllamaProvider.chatModels);
137
139
 
138
140
  return [
139
141
  {
140
142
  ...OpenAIProvider,
141
- chatModels,
143
+ chatModels: openaiChatModels,
142
144
  },
143
145
  // { ...azureModelList(s), enabled: enableAzure(s) },
144
146
  { ...ZhiPuProvider, enabled: enableZhipu(s) },
@@ -216,7 +218,6 @@ export const modelProviderSelectors = {
216
218
  moonshotAPIKey,
217
219
 
218
220
  // Ollama
219
- enableOllamaConfigInSettings,
220
221
  enableOllama,
221
222
  ollamaProxyUrl,
222
223
 
@@ -1,5 +1,4 @@
1
- import { PersistStorage } from 'zustand/middleware';
2
- import { StorageValue } from 'zustand/middleware/persist';
1
+ import { PersistStorage, StorageValue } from 'zustand/middleware';
3
2
 
4
3
  import { createIndexedDB } from './indexedDB';
5
4
  import { createKeyMapper } from './keyMapper';
@@ -1,5 +1,5 @@
1
1
  import { createStore, delMany, getMany, setMany } from 'idb-keyval';
2
- import { StorageValue } from 'zustand/middleware/persist';
2
+ import { StorageValue } from 'zustand/middleware';
3
3
 
4
4
  export const createIndexedDB = <State extends any>(dbName: string = 'indexedDB') => ({
5
5
  getItem: async <T extends State>(name: string): Promise<StorageValue<T> | undefined> => {
@@ -1,4 +1,4 @@
1
- import { StorageValue } from 'zustand/middleware/persist';
1
+ import { StorageValue } from 'zustand/middleware';
2
2
 
3
3
  export const createLocalStorage = <State extends any>() => ({
4
4
  getItem: <T extends State>(name: string): StorageValue<T> | undefined => {
@@ -1,5 +1,5 @@
1
1
  import { isEmpty } from 'lodash-es';
2
- import { StorageValue } from 'zustand/middleware/persist';
2
+ import { StorageValue } from 'zustand/middleware';
3
3
 
4
4
  interface UrlSearchHelper {
5
5
  getUrlSearch: () => string;
@@ -7,6 +7,7 @@ export const ChatErrorType = {
7
7
  InvalidAccessCode: 'InvalidAccessCode', // 密码无效
8
8
  OpenAIBizError: 'OpenAIBizError', // OpenAI 返回的业务错误
9
9
  NoOpenAIAPIKey: 'NoOpenAIAPIKey',
10
+ OllamaServiceUnavailable: 'OllamaServiceUnavailable', // 未启动/检测到 Ollama 服务
10
11
 
11
12
  // ******* 客户端错误 ******* //
12
13
  BadRequest: 400,
package/tsconfig.json CHANGED
@@ -10,7 +10,7 @@
10
10
  "noEmit": true,
11
11
  "esModuleInterop": true,
12
12
  "module": "esnext",
13
- "moduleResolution": "node",
13
+ "moduleResolution": "bundler",
14
14
  "resolveJsonModule": true,
15
15
  "isolatedModules": true,
16
16
  "jsx": "preserve",