@lobehub/chat 1.77.10 → 1.77.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/package.json +1 -1
- package/packages/web-crawler/src/crawler.ts +3 -1
- package/src/app/(backend)/webapi/assistant/store/route.ts +7 -1
- package/src/app/(backend)/webapi/plugin/store/route.ts +6 -1
- package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ChatList/WelcomeChatItem/InboxWelcome/AgentsSuggest.tsx +3 -0
- package/src/features/Conversation/Error/index.tsx +10 -0
- package/src/libs/agent-runtime/error.ts +1 -0
- package/src/libs/agent-runtime/ollama/index.ts +16 -12
- package/src/server/modules/AssistantStore/index.ts +8 -0
- package/src/utils/errorResponse.ts +2 -0
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
  
  # Changelog
  
+ ### [Version 1.77.11](https://github.com/lobehub/lobe-chat/compare/v1.77.10...v1.77.11)
+
+ <sup>Released on **2025-04-04**</sup>
+
+ #### 🐛 Bug Fixes
+
+ - **misc**: Fix fetch issue in offline mode and make jina crawler first.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### What's fixed
+
+ - **misc**: Fix fetch issue in offline mode and make jina crawler first, closes [#7288](https://github.com/lobehub/lobe-chat/issues/7288) ([6bed7a3](https://github.com/lobehub/lobe-chat/commit/6bed7a3))
+
+ </details>
+
+ <div align="right">
+
+ [](#readme-top)
+
+ </div>
+
  ### [Version 1.77.10](https://github.com/lobehub/lobe-chat/compare/v1.77.9...v1.77.10)
  
  <sup>Released on **2025-04-03**</sup>
package/changelog/v1.json
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.77.10",
+   "version": "1.77.11",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
package/packages/web-crawler/src/crawler.ts
CHANGED
@@ -3,6 +3,8 @@ import { CrawlUrlRule } from './type';
  import { crawUrlRules } from './urlRules';
  import { applyUrlRules } from './utils/appUrlRules';
  
+ const defaultImpls = ['jina', 'naive', 'search1api', 'browserless'] as CrawlImplType[];
+
  interface CrawlOptions {
    impls?: string[];
  }
@@ -13,7 +15,7 @@ export class Crawler {
    constructor(options: CrawlOptions = {}) {
      this.impls = !!options.impls?.length
        ? (options.impls.filter((impl) => Object.keys(crawlImpls).includes(impl)) as CrawlImplType[])
-       :
+       : defaultImpls;
    }
  
    /**
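For context on the "make jina crawler first" part of this release: explicitly passed implementations still take priority and unknown names are still filtered out; only the fallback order changes. A minimal illustration of the constructor behavior above (the import path and reading `impls` directly are assumptions made purely for illustration):

```ts
import { Crawler } from './crawler';

// No impls passed: falls back to the new jina-first default order.
const crawler = new Crawler();
// crawler.impls -> ['jina', 'naive', 'search1api', 'browserless']

// Explicit impls still win; names not present in crawlImpls are dropped.
const custom = new Crawler({ impls: ['browserless', 'unknown-impl'] });
// custom.impls -> ['browserless']
```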
package/src/app/(backend)/webapi/assistant/store/route.ts
CHANGED
@@ -13,7 +13,13 @@ export const GET = async (req: Request) => {
      const data = await market.getAgentIndex(locale as any);
  
      return NextResponse.json(data);
-   } catch {
+   } catch (e) {
+     // it means failed to fetch
+     if ((e as Error).message.includes('fetch failed')) {
+       return NextResponse.json([]);
+     }
+
+     console.error(e);
      return new Response(`failed to fetch agent market index`, {
        headers: {
          'Access-Control-Allow-Origin': '*',
package/src/app/(backend)/webapi/plugin/store/route.ts
CHANGED
@@ -26,8 +26,13 @@ export const GET = async (req: Request) => {
  
      return res;
    } catch (e) {
+     // it means failed to fetch
+     if ((e as Error).message.includes('fetch failed')) {
+       return NextResponse.json([]);
+     }
+
      console.error(e);
-     return new Response(`failed to fetch
+     return new Response(`failed to fetch plugin market index`, {
        headers: {
          'Access-Control-Allow-Origin': '*',
          'Content-Type': 'application/json',
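Both store routes above now special-case the "fetch failed" message thrown when the upstream market index is unreachable, so offline deployments receive an empty index instead of an error response. A minimal sketch of that shared pattern as a standalone helper (the helper itself is hypothetical and not part of the package):

```ts
import { NextResponse } from 'next/server';

// Hypothetical helper mirroring the offline fallback used by both routes:
// an unreachable upstream ("fetch failed") yields an empty payload, any
// other error is logged and surfaced as a plain failure response.
const withOfflineFallback = async <T>(
  fetcher: () => Promise<T>,
  fallback: T,
  label: string,
): Promise<Response> => {
  try {
    return NextResponse.json(await fetcher());
  } catch (e) {
    if ((e as Error).message.includes('fetch failed')) {
      return NextResponse.json(fallback);
    }
    console.error(e);
    return new Response(`failed to fetch ${label}`, { status: 500 });
  }
};
```

Usage would look roughly like `withOfflineFallback(() => market.getAgentIndex(locale), [], 'agent market index')`.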
package/src/app/[variants]/(main)/chat/(workspace)/@conversation/features/ChatList/WelcomeChatItem/InboxWelcome/AgentsSuggest.tsx
CHANGED
@@ -88,6 +88,9 @@ const AgentsSuggest = memo<{ mobile?: boolean }>(({ mobile }) => {
      setSliceStart(Math.floor((Math.random() * assistantList.length) / 2));
    };
  
+   // if no assistant data, just hide the component
+   if (!isLoading && assistantList?.length === 0) return null;
+
    return (
      <Flexbox gap={8} width={'100%'}>
        <Flexbox align={'center'} horizontal justify={'space-between'}>
package/src/features/Conversation/Error/index.tsx
CHANGED
@@ -19,6 +19,10 @@ import { ErrorActionContainer } from './style';
  const loading = () => <Skeleton active />;
  
  const OllamaBizError = dynamic(() => import('./OllamaBizError'), { loading, ssr: false });
+ const OllamaSetupGuide = dynamic(() => import('./OllamaBizError/SetupGuide'), {
+   loading,
+   ssr: false,
+ });
  
  // Config for the errorMessage display
  const getErrorAlertConfig = (
@@ -49,6 +53,7 @@ const getErrorAlertConfig = (
      };
    }
  
+   case AgentRuntimeErrorType.OllamaServiceUnavailable:
    case AgentRuntimeErrorType.NoOpenAIAPIKey: {
      return {
        extraDefaultExpand: true,
@@ -85,6 +90,11 @@ const ErrorMessageExtra = memo<{ data: ChatMessage }>(({ data }) => {
    if (!error?.type) return;
  
    switch (error.type) {
+     // TODO: improve the Ollama setup flow; in isDesktop mode we can do end-to-end detection directly
+     case AgentRuntimeErrorType.OllamaServiceUnavailable: {
+       return <OllamaSetupGuide />;
+     }
+
      case AgentRuntimeErrorType.OllamaBizError: {
        return <OllamaBizError {...data} />;
      }
package/src/libs/agent-runtime/error.ts
CHANGED
@@ -17,6 +17,7 @@ export const AgentRuntimeErrorType = {
  
    InvalidOllamaArgs: 'InvalidOllamaArgs',
    OllamaBizError: 'OllamaBizError',
+   OllamaServiceUnavailable: 'OllamaServiceUnavailable',
  
    InvalidBedrockCredentials: 'InvalidBedrockCredentials',
    InvalidVertexCredentials: 'InvalidVertexCredentials',
package/src/libs/agent-runtime/ollama/index.ts
CHANGED
@@ -2,6 +2,7 @@ import { Ollama, Tool } from 'ollama/browser';
  import { ClientOptions } from 'openai';
  
  import { OpenAIChatMessage } from '@/libs/agent-runtime';
+ import { ChatModelCard } from '@/types/llm';
  
  import { LobeRuntimeAI } from '../BaseAI';
  import { AgentRuntimeErrorType } from '../error';
@@ -19,8 +20,6 @@ import { OllamaStream, convertIterableToStream } from '../utils/streams';
  import { parseDataUri } from '../utils/uriParser';
  import { OllamaMessage } from './type';
  
- import { ChatModelCard } from '@/types/llm';
-
  export interface OllamaModelCard {
    name: string;
  }
@@ -81,6 +80,15 @@ export class LobeOllamaAI implements LobeRuntimeAI {
        name: string;
        status_code: number;
      };
+     if (e.message === 'fetch failed') {
+       throw AgentRuntimeError.chat({
+         error: {
+           message: 'please check whether your ollama service is available',
+         },
+         errorType: AgentRuntimeErrorType.OllamaServiceUnavailable,
+         provider: ModelProvider.Ollama,
+       });
+     }
  
      throw AgentRuntimeError.chat({
        error: {
@@ -116,22 +124,18 @@ export class LobeOllamaAI implements LobeRuntimeAI {
  
      return modelList
        .map((model) => {
-         const knownModel = LOBE_DEFAULT_MODEL_LIST.find(
+         const knownModel = LOBE_DEFAULT_MODEL_LIST.find(
+           (m) => model.name.toLowerCase() === m.id.toLowerCase(),
+         );
  
          return {
            contextWindowTokens: knownModel?.contextWindowTokens ?? undefined,
            displayName: knownModel?.displayName ?? undefined,
            enabled: knownModel?.enabled || false,
-           functionCall:
-             knownModel?.abilities?.functionCall
-             || false,
+           functionCall: knownModel?.abilities?.functionCall || false,
            id: model.name,
-           reasoning:
-
-             || false,
-           vision:
-             knownModel?.abilities?.functionCall
-             || false,
+           reasoning: knownModel?.abilities?.functionCall || false,
+           vision: knownModel?.abilities?.functionCall || false,
          };
        })
        .filter(Boolean) as ChatModelCard[];
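The net effect of the runtime change above is that a low-level connection failure ("fetch failed") is now reported as `OllamaServiceUnavailable` rather than falling through to the generic `OllamaBizError` path. A condensed sketch of that mapping, with the error shapes simplified for illustration (this is not the package's actual API):

```ts
// Simplified view of the new branch in the chat() catch block.
type MappedError = { errorType: string; message: string; provider: 'ollama' };

const mapOllamaError = (e: Error): MappedError => {
  if (e.message === 'fetch failed') {
    // The local Ollama server could not be reached at all.
    return {
      errorType: 'OllamaServiceUnavailable',
      message: 'please check whether your ollama service is available',
      provider: 'ollama',
    };
  }
  // Everything else remains a generic Ollama business error.
  return { errorType: 'OllamaBizError', message: e.message, provider: 'ollama' };
};
```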
package/src/server/modules/AssistantStore/index.ts
CHANGED
@@ -66,6 +66,14 @@ export class AssistantStore {
  
        return data;
      } catch (e) {
+       // it means failed to fetch
+       if ((e as Error).message.includes('fetch failed')) {
+         return {
+           agents: [],
+           schemaVersion: 1,
+         };
+       }
+
        console.error('[AgentIndexFetchError] failed to fetch agent index, error detail:');
        console.error(e);
  
package/src/utils/errorResponse.ts
CHANGED
@@ -40,6 +40,8 @@ const getStatus = (errorType: ILobeAgentRuntimeErrorType | ErrorType) => {
        return 471;
      }
  
+     // all local provider connection error
+     case AgentRuntimeErrorType.OllamaServiceUnavailable:
      case ChatErrorType.OllamaServiceUnavailable:
      case AgentRuntimeErrorType.OllamaBizError: {
        return 472;
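With `getStatus` mapping the new error type onto the 472 status already used for other local-provider connection errors, a client can detect an unreachable Ollama instance from the response alone. A hypothetical client-side check built on that convention (the response body shape assumed here is not defined in this diff):

```ts
// Hypothetical: treat HTTP 472 with an OllamaServiceUnavailable errorType as
// "local provider unreachable" and show the setup guide instead of a raw error.
const isOllamaUnavailable = async (res: Response): Promise<boolean> => {
  if (res.status !== 472) return false;
  const body = (await res.json().catch(() => ({}))) as { errorType?: string };
  return body.errorType === 'OllamaServiceUnavailable';
};
```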