@lobehub/chat 0.136.0 → 0.137.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/README.md +8 -8
- package/README.zh-CN.md +8 -8
- package/docs/self-hosting/advanced/authentication.mdx +6 -7
- package/docs/self-hosting/advanced/authentication.zh-CN.mdx +6 -7
- package/docs/self-hosting/advanced/sso-providers/auth0.mdx +13 -14
- package/docs/self-hosting/advanced/sso-providers/auth0.zh-CN.mdx +13 -13
- package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.mdx +16 -21
- package/docs/self-hosting/advanced/sso-providers/microsoft-entra-id.zh-CN.mdx +39 -40
- package/docs/self-hosting/environment-variables/basic.zh-CN.mdx +1 -1
- package/docs/self-hosting/environment-variables/model-provider.mdx +0 -2
- package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx +0 -1
- package/docs/self-hosting/start.mdx +1 -9
- package/docs/usage/features/agent-market.mdx +1 -1
- package/docs/usage/features/tts.mdx +0 -3
- package/docs/usage/features/vision.mdx +0 -1
- package/docs/usage/providers/ollama/gemma.mdx +0 -1
- package/docs/usage/providers/ollama.mdx +0 -3
- package/docs/usage/start.mdx +1 -1
- package/docs/usage/start.zh-CN.mdx +1 -1
- package/locales/ar/error.json +8 -0
- package/locales/ar/setting.json +9 -1
- package/locales/de-DE/error.json +8 -0
- package/locales/de-DE/setting.json +9 -1
- package/locales/en-US/error.json +8 -0
- package/locales/en-US/setting.json +9 -1
- package/locales/es-ES/error.json +8 -0
- package/locales/es-ES/setting.json +9 -1
- package/locales/fr-FR/error.json +8 -0
- package/locales/fr-FR/setting.json +9 -1
- package/locales/it-IT/error.json +8 -0
- package/locales/it-IT/setting.json +9 -1
- package/locales/ja-JP/error.json +8 -0
- package/locales/ja-JP/setting.json +9 -1
- package/locales/ko-KR/error.json +8 -0
- package/locales/ko-KR/setting.json +9 -1
- package/locales/nl-NL/error.json +8 -0
- package/locales/nl-NL/setting.json +9 -1
- package/locales/pl-PL/error.json +8 -0
- package/locales/pl-PL/setting.json +9 -1
- package/locales/pt-BR/error.json +8 -0
- package/locales/pt-BR/setting.json +9 -1
- package/locales/ru-RU/error.json +8 -0
- package/locales/ru-RU/setting.json +9 -1
- package/locales/tr-TR/error.json +8 -0
- package/locales/tr-TR/setting.json +9 -1
- package/locales/vi-VN/error.json +8 -0
- package/locales/vi-VN/setting.json +9 -1
- package/locales/zh-CN/error.json +8 -0
- package/locales/zh-CN/setting.json +9 -1
- package/locales/zh-TW/error.json +8 -0
- package/locales/zh-TW/setting.json +9 -1
- package/package.json +2 -1
- package/src/app/api/config/route.ts +3 -2
- package/src/app/api/errorResponse.ts +2 -1
- package/src/app/settings/llm/Ollama/Checker.tsx +73 -0
- package/src/app/settings/llm/Ollama/index.tsx +2 -4
- package/src/app/settings/llm/components/Checker.tsx +23 -17
- package/src/components/ModelIcon/index.tsx +2 -0
- package/src/components/ModelTag/ModelIcon.tsx +2 -0
- package/src/config/modelProviders/ollama.ts +14 -0
- package/src/config/server/provider.ts +2 -0
- package/src/features/Conversation/Error/InvalidOllamaModel/index.tsx +138 -0
- package/src/features/Conversation/Error/InvalidOllamaModel/useDownloadMonitor.ts +48 -0
- package/src/features/Conversation/Error/OllamaBizError.tsx +34 -0
- package/src/features/Conversation/Error/index.tsx +5 -0
- package/src/features/Conversation/Error/style.tsx +2 -2
- package/src/locales/default/error.ts +8 -0
- package/src/locales/default/setting.ts +9 -1
- package/src/services/__tests__/ollama.test.ts +26 -0
- package/src/services/ollama.ts +64 -0
- package/src/store/global/slices/settings/selectors/modelProvider.ts +11 -10
- package/src/store/middleware/createHyperStorage/index.ts +1 -2
- package/src/store/middleware/createHyperStorage/indexedDB.ts +1 -1
- package/src/store/middleware/createHyperStorage/localStorage.ts +1 -1
- package/src/store/middleware/createHyperStorage/urlStorage.ts +1 -1
- package/src/types/fetch.ts +1 -0
- package/tsconfig.json +1 -1
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { ListResponse, Ollama as OllamaBrowser, ProgressResponse } from 'ollama/browser';
|
|
2
|
+
|
|
3
|
+
import { createErrorResponse } from '@/app/api/errorResponse';
|
|
4
|
+
import { ModelProvider } from '@/libs/agent-runtime';
|
|
5
|
+
import { useGlobalStore } from '@/store/global';
|
|
6
|
+
import { modelProviderSelectors } from '@/store/global/selectors';
|
|
7
|
+
import { ChatErrorType } from '@/types/fetch';
|
|
8
|
+
import { getMessageError } from '@/utils/fetch';
|
|
9
|
+
|
|
10
|
+
const DEFAULT_BASE_URL = 'http://127.0.0.1:11434/v1';
|
|
11
|
+
|
|
12
|
+
class OllamaService {
|
|
13
|
+
getHost = (): string => {
|
|
14
|
+
const endpoint = modelProviderSelectors.ollamaProxyUrl(useGlobalStore.getState());
|
|
15
|
+
const url = new URL(endpoint || DEFAULT_BASE_URL);
|
|
16
|
+
return url.host;
|
|
17
|
+
};
|
|
18
|
+
|
|
19
|
+
getOllamaClient = () => {
|
|
20
|
+
return new OllamaBrowser({ host: this.getHost() });
|
|
21
|
+
};
|
|
22
|
+
|
|
23
|
+
pullModel = async (model: string): Promise<AsyncGenerator<ProgressResponse>> => {
|
|
24
|
+
let response: Response | AsyncGenerator<ProgressResponse>;
|
|
25
|
+
try {
|
|
26
|
+
response = await this.getOllamaClient().pull({ insecure: true, model, stream: true });
|
|
27
|
+
return response;
|
|
28
|
+
} catch {
|
|
29
|
+
response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
|
|
30
|
+
host: this.getHost(),
|
|
31
|
+
message: 'please check whether your ollama service is available',
|
|
32
|
+
provider: ModelProvider.Ollama,
|
|
33
|
+
});
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
if (!response.ok) {
|
|
37
|
+
const messageError = await getMessageError(response);
|
|
38
|
+
throw messageError;
|
|
39
|
+
}
|
|
40
|
+
return response.json();
|
|
41
|
+
};
|
|
42
|
+
|
|
43
|
+
getModels = async (): Promise<ListResponse> => {
|
|
44
|
+
let response: Response | ListResponse;
|
|
45
|
+
try {
|
|
46
|
+
const response = await this.getOllamaClient().list();
|
|
47
|
+
return response;
|
|
48
|
+
} catch {
|
|
49
|
+
response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
|
|
50
|
+
host: this.getHost(),
|
|
51
|
+
message: 'please check whether your ollama service is available',
|
|
52
|
+
provider: ModelProvider.Ollama,
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
if (!response.ok) {
|
|
57
|
+
const messageError = await getMessageError(response);
|
|
58
|
+
throw messageError;
|
|
59
|
+
}
|
|
60
|
+
return response.json();
|
|
61
|
+
};
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
export const ollamaService = new OllamaService();
|
|
@@ -48,9 +48,6 @@ const mistralAPIKey = (s: GlobalStore) => modelProvider(s).mistral.apiKey;
|
|
|
48
48
|
const enableMoonshot = (s: GlobalStore) => modelProvider(s).moonshot.enabled;
|
|
49
49
|
const moonshotAPIKey = (s: GlobalStore) => modelProvider(s).moonshot.apiKey;
|
|
50
50
|
|
|
51
|
-
const enableOllamaConfigInSettings = (s: GlobalStore) =>
|
|
52
|
-
s.serverConfig.languageModel?.ollama?.enabled || false;
|
|
53
|
-
|
|
54
51
|
const enableOllama = (s: GlobalStore) => modelProvider(s).ollama.enabled;
|
|
55
52
|
const ollamaProxyUrl = (s: GlobalStore) => modelProvider(s).ollama.endpoint;
|
|
56
53
|
|
|
@@ -118,27 +115,32 @@ const processChatModels = (
|
|
|
118
115
|
};
|
|
119
116
|
|
|
120
117
|
const modelSelectList = (s: GlobalStore): ModelProviderCard[] => {
|
|
121
|
-
const
|
|
118
|
+
const openaiModelString = [
|
|
122
119
|
s.serverConfig.customModelName,
|
|
123
120
|
currentSettings(s).languageModel.openAI.customModelName,
|
|
124
121
|
]
|
|
125
122
|
.filter(Boolean)
|
|
126
123
|
.join(',');
|
|
127
124
|
|
|
128
|
-
const
|
|
125
|
+
const openaiModelConfig = parseModelString(openaiModelString);
|
|
129
126
|
|
|
130
|
-
const
|
|
127
|
+
const openaiChatModels = processChatModels(openaiModelConfig);
|
|
131
128
|
|
|
132
|
-
const
|
|
129
|
+
const ollamaModelString = [
|
|
130
|
+
s.serverConfig.languageModel?.ollama?.customModelName,
|
|
133
131
|
currentSettings(s).languageModel.ollama.customModelName,
|
|
134
|
-
|
|
132
|
+
]
|
|
133
|
+
.filter(Boolean)
|
|
134
|
+
.join(',');
|
|
135
|
+
|
|
136
|
+
const ollamaModelConfig = parseModelString(ollamaModelString);
|
|
135
137
|
|
|
136
138
|
const ollamaChatModels = processChatModels(ollamaModelConfig, OllamaProvider.chatModels);
|
|
137
139
|
|
|
138
140
|
return [
|
|
139
141
|
{
|
|
140
142
|
...OpenAIProvider,
|
|
141
|
-
chatModels,
|
|
143
|
+
chatModels: openaiChatModels,
|
|
142
144
|
},
|
|
143
145
|
// { ...azureModelList(s), enabled: enableAzure(s) },
|
|
144
146
|
{ ...ZhiPuProvider, enabled: enableZhipu(s) },
|
|
@@ -216,7 +218,6 @@ export const modelProviderSelectors = {
|
|
|
216
218
|
moonshotAPIKey,
|
|
217
219
|
|
|
218
220
|
// Ollama
|
|
219
|
-
enableOllamaConfigInSettings,
|
|
220
221
|
enableOllama,
|
|
221
222
|
ollamaProxyUrl,
|
|
222
223
|
|
|
@@ -1,5 +1,4 @@
|
|
|
1
|
-
import { PersistStorage } from 'zustand/middleware';
|
|
2
|
-
import { StorageValue } from 'zustand/middleware/persist';
|
|
1
|
+
import { PersistStorage, StorageValue } from 'zustand/middleware';
|
|
3
2
|
|
|
4
3
|
import { createIndexedDB } from './indexedDB';
|
|
5
4
|
import { createKeyMapper } from './keyMapper';
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { createStore, delMany, getMany, setMany } from 'idb-keyval';
|
|
2
|
-
import { StorageValue } from 'zustand/middleware/persist';
|
|
2
|
+
import { StorageValue } from 'zustand/middleware';
|
|
3
3
|
|
|
4
4
|
export const createIndexedDB = <State extends any>(dbName: string = 'indexedDB') => ({
|
|
5
5
|
getItem: async <T extends State>(name: string): Promise<StorageValue<T> | undefined> => {
|
package/src/types/fetch.ts
CHANGED
|
@@ -7,6 +7,7 @@ export const ChatErrorType = {
|
|
|
7
7
|
InvalidAccessCode: 'InvalidAccessCode', // 密码无效
|
|
8
8
|
OpenAIBizError: 'OpenAIBizError', // OpenAI 返回的业务错误
|
|
9
9
|
NoOpenAIAPIKey: 'NoOpenAIAPIKey',
|
|
10
|
+
OllamaServiceUnavailable: 'OllamaServiceUnavailable', // 未启动/检测到 Ollama 服务
|
|
10
11
|
|
|
11
12
|
// ******* 客户端错误 ******* //
|
|
12
13
|
BadRequest: 400,
|