@lobehub/chat 1.77.16 → 1.77.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/docker-compose/local/docker-compose.yml +2 -1
- package/locales/ar/components.json +4 -0
- package/locales/ar/modelProvider.json +1 -0
- package/locales/ar/models.json +8 -5
- package/locales/bg-BG/components.json +4 -0
- package/locales/bg-BG/modelProvider.json +1 -0
- package/locales/bg-BG/models.json +8 -5
- package/locales/de-DE/components.json +4 -0
- package/locales/de-DE/modelProvider.json +1 -0
- package/locales/de-DE/models.json +8 -5
- package/locales/en-US/components.json +4 -0
- package/locales/en-US/modelProvider.json +1 -0
- package/locales/en-US/models.json +8 -5
- package/locales/es-ES/components.json +4 -0
- package/locales/es-ES/modelProvider.json +1 -0
- package/locales/es-ES/models.json +7 -4
- package/locales/fa-IR/components.json +4 -0
- package/locales/fa-IR/modelProvider.json +1 -0
- package/locales/fa-IR/models.json +7 -4
- package/locales/fr-FR/components.json +4 -0
- package/locales/fr-FR/modelProvider.json +1 -0
- package/locales/fr-FR/models.json +8 -5
- package/locales/it-IT/components.json +4 -0
- package/locales/it-IT/modelProvider.json +1 -0
- package/locales/it-IT/models.json +7 -4
- package/locales/ja-JP/components.json +4 -0
- package/locales/ja-JP/modelProvider.json +1 -0
- package/locales/ja-JP/models.json +8 -5
- package/locales/ko-KR/components.json +4 -0
- package/locales/ko-KR/modelProvider.json +1 -0
- package/locales/ko-KR/models.json +8 -5
- package/locales/nl-NL/components.json +4 -0
- package/locales/nl-NL/modelProvider.json +1 -0
- package/locales/nl-NL/models.json +8 -5
- package/locales/pl-PL/components.json +4 -0
- package/locales/pl-PL/modelProvider.json +1 -0
- package/locales/pl-PL/models.json +8 -5
- package/locales/pt-BR/components.json +4 -0
- package/locales/pt-BR/modelProvider.json +1 -0
- package/locales/pt-BR/models.json +7 -4
- package/locales/ru-RU/components.json +4 -0
- package/locales/ru-RU/modelProvider.json +1 -0
- package/locales/ru-RU/models.json +7 -4
- package/locales/tr-TR/components.json +4 -0
- package/locales/tr-TR/modelProvider.json +1 -0
- package/locales/tr-TR/models.json +8 -5
- package/locales/vi-VN/components.json +4 -0
- package/locales/vi-VN/modelProvider.json +1 -0
- package/locales/vi-VN/models.json +8 -5
- package/locales/zh-CN/components.json +4 -0
- package/locales/zh-CN/modelProvider.json +1 -0
- package/locales/zh-CN/models.json +9 -6
- package/locales/zh-TW/components.json +4 -0
- package/locales/zh-TW/modelProvider.json +1 -0
- package/locales/zh-TW/models.json +7 -4
- package/package.json +1 -1
- package/src/app/(backend)/webapi/models/[provider]/pull/route.ts +34 -0
- package/src/app/(backend)/webapi/{chat/models → models}/[provider]/route.ts +1 -2
- package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/index.tsx +0 -7
- package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/CheckError.tsx +1 -1
- package/src/components/FormAction/index.tsx +1 -1
- package/src/database/models/__tests__/aiProvider.test.ts +100 -0
- package/src/database/models/aiProvider.ts +11 -1
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel.tsx +43 -0
- package/src/features/Conversation/Error/OllamaDesktopSetupGuide/index.tsx +61 -0
- package/src/features/Conversation/Error/index.tsx +7 -0
- package/src/features/DevPanel/SystemInspector/ServerConfig.tsx +18 -2
- package/src/features/DevPanel/SystemInspector/index.tsx +25 -6
- package/src/features/OllamaModelDownloader/index.tsx +149 -0
- package/src/libs/agent-runtime/AgentRuntime.ts +6 -0
- package/src/libs/agent-runtime/BaseAI.ts +7 -0
- package/src/libs/agent-runtime/ollama/index.ts +84 -2
- package/src/libs/agent-runtime/openrouter/__snapshots__/index.test.ts.snap +24 -3263
- package/src/libs/agent-runtime/openrouter/fixtures/frontendModels.json +25 -0
- package/src/libs/agent-runtime/openrouter/fixtures/models.json +0 -3353
- package/src/libs/agent-runtime/openrouter/index.test.ts +56 -1
- package/src/libs/agent-runtime/openrouter/index.ts +9 -4
- package/src/libs/agent-runtime/types/index.ts +1 -0
- package/src/libs/agent-runtime/types/model.ts +44 -0
- package/src/libs/agent-runtime/utils/streams/index.ts +1 -0
- package/src/libs/agent-runtime/utils/streams/model.ts +110 -0
- package/src/locales/default/components.ts +4 -0
- package/src/locales/default/modelProvider.ts +1 -0
- package/src/services/__tests__/models.test.ts +21 -0
- package/src/services/_url.ts +4 -1
- package/src/services/chat.ts +1 -1
- package/src/services/models.ts +153 -7
- package/src/store/aiInfra/slices/aiModel/action.ts +1 -1
- package/src/store/aiInfra/slices/aiProvider/action.ts +2 -1
- package/src/store/user/slices/modelList/action.test.ts +2 -2
- package/src/store/user/slices/modelList/action.ts +1 -1
- package/src/app/[variants]/(main)/settings/llm/ProviderList/Ollama/Checker.tsx +0 -73
- package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx +0 -127
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx +0 -154
- package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts +0 -29
- package/src/services/__tests__/ollama.test.ts +0 -28
- package/src/services/ollama.ts +0 -83
- /package/src/{app/[variants]/(main)/settings/provider/(detail)/ollama → features}/OllamaModelDownloader/useDownloadMonitor.ts +0 -0
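Reading the file list, the deleted client-side `src/services/ollama.ts` (shown below) appears to be superseded by a backend pull endpoint (`webapi/models/[provider]/pull/route.ts`) plus streaming helpers in `agent-runtime/utils/streams/model.ts` and a relocated `OllamaModelDownloader` feature. As a rough, hedged sketch only — the endpoint path is taken from the file list, but the request body and the newline-delimited JSON progress shape are assumptions, not the package's actual API — a client might consume such a streaming pull route like this:

// Hypothetical sketch: consuming a streaming "pull model" endpoint.
// The URL pattern comes from the file list above; field names are assumed.
interface PullProgress {
  completed?: number; // bytes downloaded so far (assumed field name)
  total?: number; // total bytes to download (assumed field name)
}

async function pullModel(
  provider: string,
  model: string,
  onProgress: (p: PullProgress) => void,
) {
  const res = await fetch(`/webapi/models/${provider}/pull`, {
    body: JSON.stringify({ model }),
    headers: { 'Content-Type': 'application/json' },
    method: 'POST',
  });
  if (!res.ok || !res.body) throw new Error(`pull failed: ${res.status}`);

  // Assume the route streams newline-delimited JSON progress events.
  const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
  let buffer = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += value;
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      if (line.trim()) onProgress(JSON.parse(line) as PullProgress);
    }
  }
}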
package/src/app/[variants]/(main)/settings/provider/(detail)/ollama/OllamaModelDownloader/index.tsx
DELETED
@@ -1,127 +0,0 @@
- import { Ollama } from '@lobehub/icons';
- import { Button, Input, Progress } from 'antd';
- import { useTheme } from 'antd-style';
- import { memo, useMemo, useState } from 'react';
- import { useTranslation } from 'react-i18next';
- import { Center, Flexbox } from 'react-layout-kit';
- import useSWR from 'swr';
-
- import FormAction from '@/components/FormAction';
- import { ollamaService } from '@/services/ollama';
- import { formatSize } from '@/utils/format';
-
- import { useDownloadMonitor } from './useDownloadMonitor';
-
- interface OllamaModelDownloaderProps {
-   model: string;
- }
-
- const OllamaModelDownloader = memo<OllamaModelDownloaderProps>(({ model }) => {
-   const { t } = useTranslation(['modelProvider', 'error']);
-
-   const [modelToPull, setModelToPull] = useState(model);
-   const [completed, setCompleted] = useState(0);
-   const [total, setTotal] = useState(0);
-   const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed);
-   const percent = useMemo(() => {
-     return total ? Number(((completed / total) * 100).toFixed(1)) : 0;
-   }, [completed, total]);
-
-   const theme = useTheme();
-
-   const { mutate, isLoading: isDownloading } = useSWR(
-     [modelToPull],
-     async ([model]) => {
-       const generator = await ollamaService.pullModel(model);
-       for await (const progress of generator) {
-         if (progress.completed) {
-           setCompleted(progress.completed);
-           setTotal(progress.total);
-         }
-       }
-       return null;
-     },
-     {
-       onSuccess: () => {},
-       revalidateOnFocus: false,
-       revalidateOnMount: false,
-     },
-   );
-
-   return (
-     <Center gap={16} paddingBlock={32} style={{ width: '100%' }}>
-       <FormAction
-         avatar={<Ollama color={theme.colorPrimary} size={64} />}
-         description={isDownloading ? t('ollama.download.desc') : t('ollama.unlock.description')}
-         title={
-           isDownloading
-             ? t('ollama.download.title', { model: modelToPull })
-             : t('ollama.unlock.title')
-         }
-       >
-         {!isDownloading && (
-           <Input
-             onChange={(e) => {
-               setModelToPull(e.target.value);
-             }}
-             value={modelToPull}
-           />
-         )}
-       </FormAction>
-       {isDownloading && (
-         <Flexbox flex={1} gap={8} style={{ maxWidth: 300 }} width={'100%'}>
-           <Progress
-             percent={percent}
-             showInfo
-             strokeColor={theme.colorSuccess}
-             trailColor={theme.colorSuccessBg}
-           />
-           <Flexbox
-             distribution={'space-between'}
-             horizontal
-             style={{ color: theme.colorTextDescription, fontSize: 12 }}
-           >
-             <span>
-               {t('ollama.download.remainingTime')}: {remainingTime}
-             </span>
-             <span>
-               {t('ollama.download.speed')}: {downloadSpeed}
-             </span>
-           </Flexbox>
-         </Flexbox>
-       )}
-       <Flexbox gap={12} style={{ maxWidth: 300 }} width={'100%'}>
-         <Button
-           block
-           loading={isDownloading}
-           onClick={() => {
-             mutate();
-           }}
-           style={{ marginTop: 8 }}
-           type={'primary'}
-         >
-           {!isDownloading
-             ? t('ollama.unlock.confirm')
-             : // if total is 0, show starting, else show downloaded
-               !total
-               ? t('ollama.unlock.starting')
-               : t('ollama.unlock.downloaded', {
-                   completed: formatSize(completed, 2),
-                   total: formatSize(total, 2),
-                 })}
-         </Button>
-         {isDownloading && (
-           <Button
-             onClick={() => {
-               ollamaService.abort();
-             }}
-           >
-             {t('ollama.unlock.cancel')}
-           </Button>
-         )}
-       </Flexbox>
-     </Center>
-   );
- });
-
- export default OllamaModelDownloader;
package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/index.tsx
DELETED
@@ -1,154 +0,0 @@
- import { Ollama } from '@lobehub/icons';
- import { Button, Input, Progress } from 'antd';
- import { useTheme } from 'antd-style';
- import { memo, useMemo, useState } from 'react';
- import { useTranslation } from 'react-i18next';
- import { Center, Flexbox } from 'react-layout-kit';
- import useSWR from 'swr';
-
- import { ollamaService } from '@/services/ollama';
- import { useChatStore } from '@/store/chat';
- import { formatSize } from '@/utils/format';
-
- import { ErrorActionContainer, FormAction } from '../../style';
- import { useDownloadMonitor } from './useDownloadMonitor';
-
- interface OllamaModelFormProps {
-   id: string;
-   model: string;
- }
-
- const OllamaModelForm = memo<OllamaModelFormProps>(({ id, model }) => {
-   const { t } = useTranslation(['modelProvider', 'error']);
-
-   const [modelToPull, setModelToPull] = useState(model);
-   const [completed, setCompleted] = useState(0);
-   const [total, setTotal] = useState(0);
-   const { remainingTime, downloadSpeed } = useDownloadMonitor(total, completed);
-   const percent = useMemo(() => {
-     return total ? Number(((completed / total) * 100).toFixed(1)) : 0;
-   }, [completed, total]);
-
-   const [delAndRegenerateMessage, deleteMessage] = useChatStore((s) => [
-     s.delAndRegenerateMessage,
-     s.deleteMessage,
-   ]);
-   const theme = useTheme();
-
-   const { mutate, isLoading: isDownloading } = useSWR(
-     [id, modelToPull],
-     async ([, model]) => {
-       const generator = await ollamaService.pullModel(model);
-       for await (const progress of generator) {
-         if (progress.completed) {
-           setCompleted(progress.completed);
-           setTotal(progress.total);
-         }
-       }
-       return null;
-     },
-     {
-       onSuccess: () => {
-         delAndRegenerateMessage(id);
-       },
-       revalidateOnFocus: false,
-       revalidateOnMount: false,
-     },
-   );
-
-   return (
-     <Center gap={16} style={{ maxWidth: 300, width: '100%' }}>
-       <FormAction
-         avatar={<Ollama color={theme.colorPrimary} size={64} />}
-         description={isDownloading ? t('ollama.download.desc') : t('ollama.unlock.description')}
-         title={
-           isDownloading
-             ? t('ollama.download.title', { model: modelToPull })
-             : t('ollama.unlock.title')
-         }
-       >
-         {!isDownloading && (
-           <Input
-             onChange={(e) => {
-               setModelToPull(e.target.value);
-             }}
-             value={modelToPull}
-           />
-         )}
-       </FormAction>
-       {isDownloading && (
-         <Flexbox flex={1} gap={8} width={'100%'}>
-           <Progress
-             percent={percent}
-             showInfo
-             strokeColor={theme.colorSuccess}
-             trailColor={theme.colorSuccessBg}
-           />
-           <Flexbox
-             distribution={'space-between'}
-             horizontal
-             style={{ color: theme.colorTextDescription, fontSize: 12 }}
-           >
-             <span>
-               {t('ollama.download.remainingTime')}: {remainingTime}
-             </span>
-             <span>
-               {t('ollama.download.speed')}: {downloadSpeed}
-             </span>
-           </Flexbox>
-         </Flexbox>
-       )}
-       <Flexbox gap={12} width={'100%'}>
-         <Button
-           block
-           loading={isDownloading}
-           onClick={() => {
-             mutate();
-           }}
-           style={{ marginTop: 8 }}
-           type={'primary'}
-         >
-           {!isDownloading
-             ? t('ollama.unlock.confirm')
-             : // if total is 0, show starting, else show downloaded
-               !total
-               ? t('ollama.unlock.starting')
-               : t('ollama.unlock.downloaded', {
-                   completed: formatSize(completed, 2),
-                   total: formatSize(total, 2),
-                 })}
-         </Button>
-         {isDownloading ? (
-           <Button
-             onClick={() => {
-               ollamaService.abort();
-             }}
-           >
-             {t('ollama.unlock.cancel')}
-           </Button>
-         ) : (
-           <Button
-             onClick={() => {
-               deleteMessage(id);
-             }}
-           >
-             {t('unlock.closeMessage', { ns: 'error' })}
-           </Button>
-         )}
-       </Flexbox>
-     </Center>
-   );
- });
-
- interface InvalidOllamaModelProps {
-   id: string;
-   model: string;
- }
-
- const InvalidOllamaModel = memo<InvalidOllamaModelProps>(({ id, model }) => (
-   <ErrorActionContainer>
-     <OllamaModelForm id={id} model={model} />
-   </ErrorActionContainer>
- ));
-
- export default InvalidOllamaModel;
package/src/features/Conversation/Error/OllamaBizError/InvalidOllamaModel/useDownloadMonitor.ts
DELETED
@@ -1,29 +0,0 @@
- import { useEffect, useRef, useState } from 'react';
-
- import { formatSpeed, formatTime } from '@/utils/format';
-
- export const useDownloadMonitor = (totalSize: number, completedSize: number) => {
-   const [downloadSpeed, setDownloadSpeed] = useState<string>('0 KB/s');
-   const [remainingTime, setRemainingTime] = useState<string>('-');
-
-   const lastCompletedRef = useRef(completedSize);
-   const lastTimedRef = useRef(Date.now());
-
-   useEffect(() => {
-     const currentTime = Date.now();
-     const elapsedTime = (currentTime - lastTimedRef.current) / 1000; // in seconds
-     if (completedSize > 0 && elapsedTime > 1) {
-       const speed = Math.max(0, (completedSize - lastCompletedRef.current) / elapsedTime); // in bytes per second
-       setDownloadSpeed(formatSpeed(speed));
-
-       const remainingSize = totalSize - completedSize;
-       const time = remainingSize / speed; // in seconds
-       setRemainingTime(formatTime(time));
-
-       lastCompletedRef.current = completedSize;
-       lastTimedRef.current = currentTime;
-     }
-   }, [completedSize]);
-
-   return { downloadSpeed, remainingTime };
- };
package/src/services/__tests__/ollama.test.ts
DELETED
@@ -1,28 +0,0 @@
- import { Mock, describe, expect, it, vi } from 'vitest';
-
- import { OllamaService } from '../ollama';
-
- vi.stubGlobal('fetch', vi.fn());
-
- const ollamaService = new OllamaService({ fetch });
-
- describe('OllamaService', () => {
-   describe('list models', async () => {
-     it('should make a GET request with the correct payload', async () => {
-       (fetch as Mock).mockResolvedValueOnce(new Response(JSON.stringify({ models: [] })));
-
-       expect(await ollamaService.getModels()).toEqual({ models: [] });
-
-       expect(fetch).toHaveBeenCalled();
-     });
-
-     it('should make a GET request with the error', async () => {
-       const mockResponse = new Response(null, { status: 503 });
-       (fetch as Mock).mockResolvedValueOnce(mockResponse);
-
-       await expect(ollamaService.getModels()).rejects.toThrow();
-
-       expect(fetch).toHaveBeenCalled();
-     });
-   });
- });
package/src/services/ollama.ts
DELETED
@@ -1,83 +0,0 @@
- import { ListResponse, Ollama as OllamaBrowser, ProgressResponse } from 'ollama/browser';
-
- import { ModelProvider } from '@/libs/agent-runtime';
- import { useUserStore } from '@/store/user';
- import { keyVaultsConfigSelectors } from '@/store/user/selectors';
- import { ChatErrorType } from '@/types/fetch';
- import { createErrorResponse } from '@/utils/errorResponse';
- import { getMessageError } from '@/utils/fetch';
-
- const DEFAULT_BASE_URL = 'http://127.0.0.1:11434';
-
- interface OllamaServiceParams {
-   fetch?: typeof fetch;
- }
-
- export class OllamaService {
-   private _host: string;
-   private _client: OllamaBrowser;
-   private _fetch?: typeof fetch;
-
-   constructor(params: OllamaServiceParams = {}) {
-     this._host = this.getHost();
-     this._fetch = params.fetch;
-     this._client = new OllamaBrowser({ fetch: params?.fetch, host: this._host });
-   }
-
-   getHost = (): string => {
-     const config = keyVaultsConfigSelectors.ollamaConfig(useUserStore.getState());
-
-     return config.baseURL || DEFAULT_BASE_URL;
-   };
-
-   getOllamaClient = () => {
-     if (this.getHost() !== this._host) {
-       this._host = this.getHost();
-       this._client = new OllamaBrowser({ fetch: this._fetch, host: this.getHost() });
-     }
-     return this._client;
-   };
-
-   abort = () => {
-     this._client.abort();
-   };
-
-   pullModel = async (model: string): Promise<AsyncIterable<ProgressResponse>> => {
-     let response: Response | AsyncIterable<ProgressResponse>;
-     try {
-       response = await this.getOllamaClient().pull({ insecure: true, model, stream: true });
-       return response;
-     } catch {
-       response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
-         host: this.getHost(),
-         message: 'please check whether your ollama service is available or set the CORS rules',
-         provider: ModelProvider.Ollama,
-       });
-     }
-
-     if (!response.ok) {
-       throw await getMessageError(response);
-     }
-     return response.json();
-   };
-
-   getModels = async (): Promise<ListResponse> => {
-     let response: Response | ListResponse;
-     try {
-       return await this.getOllamaClient().list();
-     } catch {
-       response = createErrorResponse(ChatErrorType.OllamaServiceUnavailable, {
-         host: this.getHost(),
-         message: 'please check whether your ollama service is available or set the CORS rules',
-         provider: ModelProvider.Ollama,
-       });
-     }
-
-     if (!response.ok) {
-       throw await getMessageError(response);
-     }
-     return response.json();
-   };
- }
-
- export const ollamaService = new OllamaService();