@lobehub/chat 1.49.9 → 1.49.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/CHANGELOG.md +50 -0
  2. package/changelog/v1.json +18 -0
  3. package/locales/ar/components.json +24 -0
  4. package/locales/ar/modelProvider.json +0 -24
  5. package/locales/ar/models.json +15 -0
  6. package/locales/bg-BG/components.json +24 -0
  7. package/locales/bg-BG/modelProvider.json +0 -24
  8. package/locales/bg-BG/models.json +15 -0
  9. package/locales/de-DE/components.json +24 -0
  10. package/locales/de-DE/modelProvider.json +0 -24
  11. package/locales/de-DE/models.json +15 -0
  12. package/locales/en-US/components.json +24 -0
  13. package/locales/en-US/modelProvider.json +0 -24
  14. package/locales/en-US/models.json +15 -0
  15. package/locales/es-ES/components.json +24 -0
  16. package/locales/es-ES/modelProvider.json +0 -24
  17. package/locales/es-ES/models.json +15 -0
  18. package/locales/fa-IR/components.json +24 -0
  19. package/locales/fa-IR/modelProvider.json +0 -24
  20. package/locales/fa-IR/models.json +15 -0
  21. package/locales/fr-FR/components.json +24 -0
  22. package/locales/fr-FR/modelProvider.json +0 -24
  23. package/locales/fr-FR/models.json +15 -0
  24. package/locales/it-IT/components.json +24 -0
  25. package/locales/it-IT/modelProvider.json +0 -24
  26. package/locales/it-IT/models.json +15 -0
  27. package/locales/ja-JP/components.json +24 -0
  28. package/locales/ja-JP/modelProvider.json +0 -24
  29. package/locales/ja-JP/models.json +15 -0
  30. package/locales/ko-KR/components.json +24 -0
  31. package/locales/ko-KR/modelProvider.json +0 -24
  32. package/locales/ko-KR/models.json +4 -0
  33. package/locales/nl-NL/components.json +24 -0
  34. package/locales/nl-NL/modelProvider.json +0 -24
  35. package/locales/nl-NL/models.json +15 -0
  36. package/locales/pl-PL/components.json +24 -0
  37. package/locales/pl-PL/modelProvider.json +0 -24
  38. package/locales/pl-PL/models.json +15 -0
  39. package/locales/pt-BR/components.json +24 -0
  40. package/locales/pt-BR/modelProvider.json +0 -24
  41. package/locales/pt-BR/models.json +15 -0
  42. package/locales/ru-RU/components.json +24 -0
  43. package/locales/ru-RU/modelProvider.json +0 -24
  44. package/locales/ru-RU/models.json +15 -0
  45. package/locales/tr-TR/components.json +24 -0
  46. package/locales/tr-TR/modelProvider.json +0 -24
  47. package/locales/tr-TR/models.json +15 -0
  48. package/locales/vi-VN/components.json +24 -0
  49. package/locales/vi-VN/modelProvider.json +0 -24
  50. package/locales/vi-VN/models.json +15 -0
  51. package/locales/zh-CN/components.json +24 -0
  52. package/locales/zh-CN/modelProvider.json +0 -24
  53. package/locales/zh-CN/models.json +16 -1
  54. package/locales/zh-TW/components.json +24 -0
  55. package/locales/zh-TW/modelProvider.json +0 -24
  56. package/locales/zh-TW/models.json +15 -0
  57. package/package.json +1 -1
  58. package/src/app/(main)/settings/provider/(detail)/[id]/page.tsx +10 -3
  59. package/src/app/(main)/settings/provider/(detail)/ollama/CheckError.tsx +70 -0
  60. package/src/app/(main)/settings/provider/(detail)/ollama/Container.tsx +57 -0
  61. package/src/app/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +127 -0
  62. package/src/app/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/useDownloadMonitor.ts +29 -0
  63. package/src/app/(main)/settings/provider/(detail)/ollama/page.tsx +2 -7
  64. package/src/app/(main)/settings/provider/features/ProviderConfig/Checker.tsx +90 -69
  65. package/src/app/(main)/settings/provider/features/ProviderConfig/index.tsx +6 -6
  66. package/src/components/FormAction/index.tsx +66 -0
  67. package/src/components/OllamaSetupGuide/index.tsx +217 -0
  68. package/src/config/aiModels/ollama.ts +12 -19
  69. package/src/config/modelProviders/ollama.ts +1 -0
  70. package/src/database/repositories/aiInfra/index.ts +33 -2
  71. package/src/database/server/models/aiProvider.ts +5 -1
  72. package/src/features/Conversation/Error/OllamaBizError/SetupGuide.tsx +2 -209
  73. package/src/features/Conversation/components/MarkdownElements/Thinking/remarkPlugin.ts +8 -1
  74. package/src/libs/agent-runtime/ollama/index.ts +1 -1
  75. package/src/locales/default/components.ts +26 -0
  76. package/src/locales/default/modelProvider.ts +0 -26
  77. package/src/server/routers/lambda/aiProvider.ts +2 -10
  78. package/src/services/aiProvider/client.ts +2 -8
  79. package/src/store/serverConfig/selectors.test.ts +3 -0
  80. package/src/store/serverConfig/store.test.ts +3 -2
  81. package/src/store/serverConfig/store.ts +1 -1
  82. package/src/store/user/slices/common/action.test.ts +1 -0
  83. package/src/types/serverConfig.ts +1 -1
  84. package/src/app/(main)/settings/provider/(detail)/ollama/Checker.tsx +0 -73
--- a/package/src/database/repositories/aiInfra/index.ts
+++ b/package/src/database/repositories/aiInfra/index.ts
@@ -5,9 +5,16 @@ import { AiModelModel } from '@/database/server/models/aiModel';
 import { AiProviderModel } from '@/database/server/models/aiProvider';
 import { LobeChatDatabase } from '@/database/type';
 import { AIChatModelCard, AiModelSourceEnum, AiProviderModelListItem } from '@/types/aiModel';
-import { AiProviderListItem, EnabledAiModel } from '@/types/aiProvider';
+import {
+  AiProviderDetailItem,
+  AiProviderListItem,
+  AiProviderRuntimeState,
+  EnabledAiModel,
+} from '@/types/aiProvider';
 import { ProviderConfig } from '@/types/user/settings';
-import { mergeArrayById } from '@/utils/merge';
+import { merge, mergeArrayById } from '@/utils/merge';
+
+type DecryptUserKeyVaults = (encryptKeyVaultsStr: string | null) => Promise<any>;
 
 export class AiInfraRepos {
   private userId: string;
@@ -112,6 +119,30 @@ export class AiInfraRepos {
     return mergeArrayById(defaultModels, aiModels) as AiProviderModelListItem[];
   };
 
+  getAiProviderRuntimeState = async (
+    decryptor?: DecryptUserKeyVaults,
+  ): Promise<AiProviderRuntimeState> => {
+    const result = await this.aiProviderModel.getAiProviderRuntimeConfig(decryptor);
+
+    const runtimeConfig = result;
+
+    Object.entries(result).forEach(([key, value]) => {
+      runtimeConfig[key] = merge(this.providerConfigs[key] || {}, value);
+    });
+
+    const enabledAiProviders = await this.getUserEnabledProviderList();
+
+    const enabledAiModels = await this.getEnabledModels();
+
+    return { enabledAiModels, enabledAiProviders, runtimeConfig };
+  };
+
+  getAiProviderDetail = async (id: string, decryptor?: DecryptUserKeyVaults) => {
+    const config = await this.aiProviderModel.getAiProviderById(id, decryptor);
+
+    return merge(this.providerConfigs[id] || {}, config) as AiProviderDetailItem;
+  };
+
   /**
    * Fetch builtin models from config
    */
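The two methods added above gather what the runtime needs in one place: `getAiProviderRuntimeState` layers the builtin provider defaults (`this.providerConfigs`) under each persisted runtime entry and returns them together with the enabled providers and models, and `getAiProviderDetail` applies the same layering to a single provider. A minimal TypeScript sketch of that layering step, with illustrative names and a shallow spread standing in for the package's `merge` utility:

```ts
// Illustrative only: builtin defaults are layered under persisted values,
// so anything the user has saved wins over the shipped provider config.
type ProviderConfig = Record<string, unknown>;

const layerRuntimeConfig = (
  builtin: Record<string, ProviderConfig>,
  persisted: Record<string, ProviderConfig>,
): Record<string, ProviderConfig> => {
  const result: Record<string, ProviderConfig> = {};

  for (const [id, value] of Object.entries(persisted)) {
    // mirrors merge(this.providerConfigs[id] || {}, value), but shallow
    result[id] = { ...(builtin[id] ?? {}), ...value };
  }

  return result;
};

// layerRuntimeConfig({ ollama: { fetchOnClient: true } }, { ollama: { keyVaults: {} } })
// -> { ollama: { fetchOnClient: true, keyVaults: {} } }
```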
--- a/package/src/database/server/models/aiProvider.ts
+++ b/package/src/database/server/models/aiProvider.ts
@@ -202,7 +202,11 @@ export class AiProviderModel {
 
     const keyVaults = !!result.keyVaults ? await decrypt(result.keyVaults) : {};
 
-    return { ...result, keyVaults } as AiProviderDetailItem;
+    return {
+      ...result,
+      fetchOnClient: typeof result.fetchOnClient === 'boolean' ? result.fetchOnClient : undefined,
+      keyVaults,
+    } as AiProviderDetailItem;
   };
 
   getAiProviderRuntimeConfig = async (decryptor?: DecryptUserKeyVaults) => {
--- a/package/src/features/Conversation/Error/OllamaBizError/SetupGuide.tsx
+++ b/package/src/features/Conversation/Error/OllamaBizError/SetupGuide.tsx
@@ -1,219 +1,12 @@
-import { Highlighter, Snippet, TabsNav } from '@lobehub/ui';
-import { Steps } from 'antd';
-import { createStyles } from 'antd-style';
-import Link from 'next/link';
-import { readableColor } from 'polished';
 import { memo } from 'react';
-import { Trans, useTranslation } from 'react-i18next';
-import { Flexbox } from 'react-layout-kit';
 
+import OllamaSetupGuide from '@/components/OllamaSetupGuide';
 import { ErrorActionContainer } from '@/features/Conversation/Error/style';
 
-const useStyles = createStyles(({ css, prefixCls, token }) => ({
-  steps: css`
-    margin-block-start: 32px;
-    &.${prefixCls}-steps-small .${prefixCls}-steps-item-title {
-      margin-block-end: 16px;
-      font-size: 16px;
-      font-weight: bold;
-    }
-
-    .${prefixCls}-steps-item-description {
-      margin-block-end: 24px;
-    }
-
-    .${prefixCls}-steps-icon {
-      color: ${readableColor(token.colorPrimary)} !important;
-    }
-  `,
-}));
-
 const SetupGuide = memo(() => {
-  const { styles } = useStyles();
-  const { t } = useTranslation('modelProvider');
   return (
     <ErrorActionContainer style={{ paddingBlock: 0 }}>
-      <TabsNav
-        items={[
-          {
-            children: (
-              <Steps
-                className={styles.steps}
-                direction={'vertical'}
-                items={[
-                  {
-                    description: (
-                      <Trans i18nKey={'ollama.setup.install.description'} ns={'modelProvider'}>
-                        请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
-                        <Link href={'https://ollama.com/download'}>下载</Link>
-                      </Trans>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.install.title'),
-                  },
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        {t('ollama.setup.cors.description')}
-
-                        <Flexbox gap={8}>
-                          {t('ollama.setup.cors.macos')}
-                          <Snippet language={'bash'}>
-                            {/* eslint-disable-next-line react/no-unescaped-entities */}
-                            launchctl setenv OLLAMA_ORIGINS "*"
-                          </Snippet>
-                          {t('ollama.setup.cors.reboot')}
-                        </Flexbox>
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.cors.title'),
-                  },
-                ]}
-                size={'small'}
-              />
-            ),
-            key: 'macos',
-            label: 'macOS',
-          },
-          {
-            children: (
-              <Steps
-                className={styles.steps}
-                direction={'vertical'}
-                items={[
-                  {
-                    description: (
-                      <Trans i18nKey={'ollama.setup.install.description'} ns={'modelProvider'}>
-                        请确认你已经开启 Ollama ,如果没有安装 Ollama ,请前往官网
-                        <Link href={'https://ollama.com/download'}>下载</Link>
-                      </Trans>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.install.title'),
-                  },
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        {t('ollama.setup.cors.description')}
-                        <div>{t('ollama.setup.cors.windows')}</div>
-                        <div>{t('ollama.setup.cors.reboot')}</div>
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.cors.title'),
-                  },
-                ]}
-                size={'small'}
-              />
-            ),
-            key: 'windows',
-            label: t('ollama.setup.install.windowsTab'),
-          },
-          {
-            children: (
-              <Steps
-                className={styles.steps}
-                direction={'vertical'}
-                items={[
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        {t('ollama.setup.install.linux.command')}
-                        <Snippet language={'bash'}>
-                          curl -fsSL https://ollama.com/install.sh | sh
-                        </Snippet>
-                        <div>
-                          <Trans i18nKey={'ollama.setup.install.linux.manual'} ns={'modelProvider'}>
-                            或者,你也可以参考
-                            <Link href={'https://github.com/ollama/ollama/blob/main/docs/linux.md'}>
-                              Linux 手动安装指南
-                            </Link>
-
-                          </Trans>
-                        </div>
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.install.title'),
-                  },
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        <div>{t('ollama.setup.cors.description')}</div>
-
-                        <div>{t('ollama.setup.cors.linux.systemd')}</div>
-                        {/* eslint-disable-next-line react/no-unescaped-entities */}
-                        <Snippet language={'bash'}> sudo systemctl edit ollama.service</Snippet>
-                        {t('ollama.setup.cors.linux.env')}
-                        <Highlighter
-                          // eslint-disable-next-line react/no-children-prop
-                          children={`[Service]
-
-Environment="OLLAMA_ORIGINS=*"`}
-                          fileName={'ollama.service'}
-                          fullFeatured
-                          language={'bash'}
-                          showLanguage
-                        />
-                        {t('ollama.setup.cors.linux.reboot')}
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.cors.title'),
-                  },
-                ]}
-                size={'small'}
-              />
-            ),
-            key: 'linux',
-            label: 'Linux',
-          },
-          {
-            children: (
-              <Steps
-                className={styles.steps}
-                direction={'vertical'}
-                items={[
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        {t('ollama.setup.install.description')}
-                        <div>{t('ollama.setup.install.docker')}</div>
-                        <Snippet language={'bash'}>docker pull ollama/ollama</Snippet>
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.install.title'),
-                  },
-                  {
-                    description: (
-                      <Flexbox gap={8}>
-                        {t('ollama.setup.cors.description')}
-                        <Highlighter
-                          fileName={'ollama.service'}
-                          fullFeatured
-                          language={'bash'}
-                          showLanguage
-                        >
-                          {/* eslint-disable-next-line react/no-unescaped-entities */}
-                          docker run -d --gpus=all -v ollama:/root/.ollama -e OLLAMA_ORIGINS="*" -p
-                          11434:11434 --name ollama ollama/ollama
-                        </Highlighter>
-                      </Flexbox>
-                    ),
-                    status: 'process',
-                    title: t('ollama.setup.cors.title'),
-                  },
-                ]}
-                size={'small'}
-              />
-            ),
-            key: 'docker',
-            label: 'Docker',
-          },
-        ]}
-      />
+      <OllamaSetupGuide />
     </ErrorActionContainer>
   );
 });
--- a/package/src/features/Conversation/components/MarkdownElements/Thinking/remarkPlugin.ts
+++ b/package/src/features/Conversation/components/MarkdownElements/Thinking/remarkPlugin.ts
@@ -32,7 +32,14 @@ export const createRemarkCustomTagPlugin = (tag: string) => () => {
 
       // 转换为 Markdown 字符串
      const content = contentNodes
-        .map((n: any) => toMarkdown(n))
+        .map((n: any) => {
+          // fix https://github.com/lobehub/lobe-chat/issues/5668
+          if (n.type === 'paragraph') {
+            return n.children.map((child: any) => child.value).join('');
+          }
+
+          return toMarkdown(n);
+        })
        .join('\n\n')
        .trim();
 
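The paragraph branch added above skips `toMarkdown` for plain paragraphs inside the custom tag and joins the children's raw `value`s instead, presumably so the captured thinking text is not rewritten by the markdown serializer, which backslash-escapes markdown-significant characters. A standalone illustration of the difference, not taken from the package:

```ts
import { toMarkdown } from 'mdast-util-to-markdown';

// A minimal mdast paragraph node containing markdown-significant characters.
const paragraph = {
  type: 'paragraph' as const,
  children: [{ type: 'text' as const, value: '2 * 3 = 6' }],
};

// Serializing re-encodes the text as markdown, typically adding escapes, e.g. "2 \* 3 = 6".
console.log(toMarkdown(paragraph).trim());

// Joining the children's raw values keeps the text exactly as written: "2 * 3 = 6".
console.log(paragraph.children.map((child) => child.value).join(''));
```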
--- a/package/src/libs/agent-runtime/ollama/index.ts
+++ b/package/src/libs/agent-runtime/ollama/index.ts
@@ -79,7 +79,7 @@ export class LobeOllamaAI implements LobeRuntimeAI {
 
       throw AgentRuntimeError.chat({
         error: {
-          ...e.error,
+          ...(typeof e.error !== 'string' ? e.error : undefined),
           message: String(e.error?.message || e.message),
           name: e.name,
           status_code: e.status_code,
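The guard added above handles the case where `e.error` is a plain string: spreading a string into an object literal produces one numeric key per character, which would pollute the error payload. A small standalone demonstration of that JavaScript behaviour (not package code):

```ts
// e.error can arrive as a plain string; spreading it yields index keys,
// not a useful error shape.
const raw: any = 'boom';
const fromString = { ...raw };
console.log(fromString); // { '0': 'b', '1': 'o', '2': 'o', '3': 'm' }

// The guarded spread keeps object errors and drops string ones, leaving
// message/name/status_code to carry the details instead.
const spreadErrorSafely = (error: unknown) => ({
  ...(typeof error !== 'string' ? (error as object) : undefined),
});

console.log(spreadErrorSafely('boom')); // {}
console.log(spreadErrorSafely({ code: 'ECONNREFUSED' })); // { code: 'ECONNREFUSED' }
```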
--- a/package/src/locales/default/components.ts
+++ b/package/src/locales/default/components.ts
@@ -88,6 +88,32 @@ export default {
     emptyModel: '没有启用的模型,请前往设置开启',
     provider: '提供商',
   },
+  OllamaSetupGuide: {
+    cors: {
+      description: '因浏览器安全限制,你需要为 Ollama 进行跨域配置后方可正常使用。',
+      linux: {
+        env: '在 [Service] 部分下添加 `Environment`,添加 OLLAMA_ORIGINS 环境变量:',
+        reboot: '重载 systemd 并重启 Ollama',
+        systemd: '调用 systemd 编辑 ollama 服务:',
+      },
+      macos: '请打开「终端」应用程序,并粘贴以下指令,并按回车运行',
+      reboot: '请在执行完成后重启 Ollama 服务',
+      title: '配置 Ollama 允许跨域访问',
+      windows:
+        '在 Windows 上,点击「控制面板」,进入编辑系统环境变量。为您的用户账户新建名为 「OLLAMA_ORIGINS」 的环境变量,值为 * ,点击 「OK/应用」 保存',
+    },
+    install: {
+      description: '请确认你已经开启 Ollama ,如果没有下载 Ollama ,请前往官网<1>下载</1>',
+      docker:
+        '如果你更倾向于使用 Docker,Ollama 也提供了官方 Docker 镜像,你可以通过以下命令拉取:',
+      linux: {
+        command: '通过以下命令安装:',
+        manual: '或者,你也可以参考 <1>Linux 手动安装指南</1> 自行安装',
+      },
+      title: '在本地安装并开启 Ollama 应用',
+      windowsTab: 'Windows (预览版)',
+    },
+  },
   Thinking: {
     thinking: '深度思考中...',
     thought: '已深度思考(用时 {{duration}} 秒)',
--- a/package/src/locales/default/modelProvider.ts
+++ b/package/src/locales/default/modelProvider.ts
@@ -155,32 +155,6 @@ export default {
       desc: '必须包含http(s)://,本地未额外指定可留空',
       title: 'Ollama 服务地址',
     },
-    setup: {
-      cors: {
-        description: '因浏览器安全限制,你需要为 Ollama 进行跨域配置后方可正常使用。',
-        linux: {
-          env: '在 [Service] 部分下添加 `Environment`,添加 OLLAMA_ORIGINS 环境变量:',
-          reboot: '重载 systemd 并重启 Ollama',
-          systemd: '调用 systemd 编辑 ollama 服务:',
-        },
-        macos: '请打开「终端」应用程序,并粘贴以下指令,并按回车运行',
-        reboot: '请在执行完成后重启 Ollama 服务',
-        title: '配置 Ollama 允许跨域访问',
-        windows:
-          '在 Windows 上,点击「控制面板」,进入编辑系统环境变量。为您的用户账户新建名为 「OLLAMA_ORIGINS」 的环境变量,值为 * ,点击 「OK/应用」 保存',
-      },
-      install: {
-        description: '请确认你已经开启 Ollama ,如果没有下载 Ollama ,请前往官网<1>下载</1>',
-        docker:
-          '如果你更倾向于使用 Docker,Ollama 也提供了官方 Docker 镜像,你可以通过以下命令拉取:',
-        linux: {
-          command: '通过以下命令安装:',
-          manual: '或者,你也可以参考 <1>Linux 手动安装指南</1> 自行安装',
-        },
-        title: '在本地安装并开启 Ollama 应用',
-        windowsTab: 'Windows (预览版)',
-      },
-    },
     title: 'Ollama',
     unlock: {
       cancel: '取消下载',
--- a/package/src/server/routers/lambda/aiProvider.ts
+++ b/package/src/server/routers/lambda/aiProvider.ts
@@ -48,7 +48,7 @@ export const aiProviderRouter = router({
     .input(z.object({ id: z.string() }))
 
     .query(async ({ input, ctx }): Promise<AiProviderDetailItem | undefined> => {
-      return ctx.aiProviderModel.getAiProviderById(input.id, KeyVaultsGateKeeper.getUserKeyVaults);
+      return ctx.aiInfraRepos.getAiProviderDetail(input.id, KeyVaultsGateKeeper.getUserKeyVaults);
     }),
 
   getAiProviderList: aiProviderProcedure.query(async ({ ctx }) => {
@@ -58,15 +58,7 @@ export const aiProviderRouter = router({
   getAiProviderRuntimeState: aiProviderProcedure
     .input(z.object({ isLogin: z.boolean().optional() }))
     .query(async ({ ctx }): Promise<AiProviderRuntimeState> => {
-      const runtimeConfig = await ctx.aiProviderModel.getAiProviderRuntimeConfig(
-        KeyVaultsGateKeeper.getUserKeyVaults,
-      );
-
-      const enabledAiProviders = await ctx.aiInfraRepos.getUserEnabledProviderList();
-
-      const enabledAiModels = await ctx.aiInfraRepos.getEnabledModels();
-
-      return { enabledAiModels, enabledAiProviders, runtimeConfig };
+      return ctx.aiInfraRepos.getAiProviderRuntimeState(KeyVaultsGateKeeper.getUserKeyVaults);
     }),
 
   removeAiProvider: aiProviderProcedure
--- a/package/src/services/aiProvider/client.ts
+++ b/package/src/services/aiProvider/client.ts
@@ -25,7 +25,7 @@ export class ClientService extends BaseClientService implements IAiProviderServi
   };
 
   getAiProviderById: IAiProviderService['getAiProviderById'] = async (id) => {
-    return this.aiProviderModel.getAiProviderById(id);
+    return this.aiInfraRepos.getAiProviderDetail(id);
   };
 
   getAiProviderList: IAiProviderService['getAiProviderList'] = async () => {
@@ -33,13 +33,7 @@ export class ClientService extends BaseClientService implements IAiProviderServi
   };
 
   getAiProviderRuntimeState: IAiProviderService['getAiProviderRuntimeState'] = async () => {
-    const runtimeConfig = await this.aiProviderModel.getAiProviderRuntimeConfig();
-
-    const enabledAiProviders = await this.aiInfraRepos.getUserEnabledProviderList();
-
-    const enabledAiModels = await this.aiInfraRepos.getEnabledModels();
-
-    return { enabledAiModels, enabledAiProviders, runtimeConfig };
+    return await this.aiInfraRepos.getAiProviderRuntimeState();
   };
 
   toggleProviderEnabled: IAiProviderService['toggleProviderEnabled'] = async (id, enabled) => {
--- a/package/src/store/serverConfig/selectors.test.ts
+++ b/package/src/store/serverConfig/selectors.test.ts
@@ -49,6 +49,7 @@ describe('serverConfigSelectors', () => {
         serverConfig: {
           enabledOAuthSSO: true,
           telemetry: {},
+          aiProvider: {},
         },
       });
 
@@ -63,6 +64,7 @@ describe('serverConfigSelectors', () => {
       const store = initServerConfigStore({
         serverConfig: {
           telemetry: { langfuse: true },
+          aiProvider: {},
         },
       });
 
@@ -75,6 +77,7 @@ describe('serverConfigSelectors', () => {
      const store = initServerConfigStore({
        serverConfig: {
          telemetry: {},
+         aiProvider: {},
        },
      });
 
--- a/package/src/store/serverConfig/store.test.ts
+++ b/package/src/store/serverConfig/store.test.ts
@@ -23,14 +23,14 @@ describe('createServerConfigStore', () => {
 
     expect(store.getState()).toEqual({
       featureFlags: DEFAULT_FEATURE_FLAGS,
-      serverConfig: { telemetry: {} },
+      serverConfig: { telemetry: {}, aiProvider: {} },
     });
   });
 
   it('should initialize store with custom initial state', () => {
     const initialState: Partial<ServerConfigStore> = {
       featureFlags: { edit_agent: false },
-      serverConfig: { telemetry: { langfuse: true } },
+      serverConfig: { telemetry: { langfuse: true }, aiProvider: {} },
     };
 
     const store = initServerConfigStore(initialState);
@@ -38,6 +38,7 @@ describe('createServerConfigStore', () => {
     expect(store.getState().featureFlags.edit_agent).toBeFalsy();
     expect(store.getState().serverConfig).toEqual({
       telemetry: { langfuse: true },
+      aiProvider: {},
     });
   });
 
--- a/package/src/store/serverConfig/store.ts
+++ b/package/src/store/serverConfig/store.ts
@@ -12,7 +12,7 @@ import { StoreApiWithSelector } from '@/utils/zustand';
 
 const initialState: ServerConfigStore = {
   featureFlags: DEFAULT_FEATURE_FLAGS,
-  serverConfig: { telemetry: {} },
+  serverConfig: { aiProvider: {}, telemetry: {} },
 };
 
 // =============== 聚合 createStoreFn ============ //
--- a/package/src/store/user/slices/common/action.test.ts
+++ b/package/src/store/user/slices/common/action.test.ts
@@ -55,6 +55,7 @@ describe('createCommonSlice', () => {
     defaultAgent: 'agent1',
     languageModel: 'model1',
     telemetry: {},
+    aiProvider: {},
   } as GlobalServerConfig;
 
   it('should not fetch user state if user is not login', async () => {
--- a/package/src/types/serverConfig.ts
+++ b/package/src/types/serverConfig.ts
@@ -20,7 +20,7 @@ export interface ServerModelProviderConfig {
 export type ServerLanguageModel = Partial<Record<GlobalLLMProviderKey, ServerModelProviderConfig>>;
 
 export interface GlobalServerConfig {
-  aiProvider?: ServerLanguageModel;
+  aiProvider: ServerLanguageModel;
   defaultAgent?: DeepPartial<UserDefaultAgent>;
   enableUploadFileToServer?: boolean;
   enabledAccessCode?: boolean;
--- a/package/src/app/(main)/settings/provider/(detail)/ollama/Checker.tsx
+++ /dev/null
@@ -1,73 +0,0 @@
-import { CheckCircleFilled } from '@ant-design/icons';
-import { Alert, Highlighter } from '@lobehub/ui';
-import { Button } from 'antd';
-import { useTheme } from 'antd-style';
-import { ListResponse } from 'ollama/browser';
-import { memo } from 'react';
-import { useTranslation } from 'react-i18next';
-import { Flexbox } from 'react-layout-kit';
-import useSWR from 'swr';
-
-import { useIsMobile } from '@/hooks/useIsMobile';
-import { ollamaService } from '@/services/ollama';
-
-const OllamaChecker = memo(() => {
-  const { t } = useTranslation('setting');
-
-  const theme = useTheme();
-
-  const { data, error, isLoading, mutate } = useSWR<ListResponse>(
-    'ollama.list',
-    ollamaService.getModels,
-    {
-      revalidateOnFocus: false,
-      revalidateOnMount: false,
-      revalidateOnReconnect: false,
-    },
-  );
-
-  const checkConnection = () => {
-    mutate().catch();
-  };
-
-  const isMobile = useIsMobile();
-
-  return (
-    <Flexbox align={isMobile ? 'flex-start' : 'flex-end'} gap={8}>
-      <Flexbox align={'center'} direction={isMobile ? 'horizontal-reverse' : 'horizontal'} gap={12}>
-        {!error && data?.models && (
-          <Flexbox gap={4} horizontal>
-            <CheckCircleFilled
-              style={{
-                color: theme.colorSuccess,
-              }}
-            />
-            {t('llm.checker.pass')}
-          </Flexbox>
-        )}
-        <Button loading={isLoading} onClick={checkConnection}>
-          {t('llm.checker.button')}
-        </Button>
-      </Flexbox>
-      {error && (
-        <Flexbox gap={8} style={{ maxWidth: '600px', width: '100%' }}>
-          <Alert
-            banner
-            extra={
-              <Flexbox>
-                <Highlighter copyButtonSize={'small'} language={'json'} type={'pure'}>
-                  {JSON.stringify(error.body || error, null, 2)}
-                </Highlighter>
-              </Flexbox>
-            }
-            message={t(`response.${error.type}` as any, { ns: 'error' })}
-            showIcon
-            type={'error'}
-          />
-        </Flexbox>
-      )}
-    </Flexbox>
-  );
-});
-
-export default OllamaChecker;