@lobehub/chat 1.67.2 → 1.68.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46):
  1. package/.env.example +4 -0
  2. package/CHANGELOG.md +58 -0
  3. package/Dockerfile +2 -0
  4. package/Dockerfile.database +2 -0
  5. package/README.md +6 -5
  6. package/README.zh-CN.md +4 -3
  7. package/changelog/v1.json +21 -0
  8. package/docs/self-hosting/advanced/auth.mdx +6 -5
  9. package/docs/self-hosting/advanced/auth.zh-CN.mdx +6 -5
  10. package/docs/self-hosting/environment-variables/model-provider.mdx +16 -0
  11. package/docs/self-hosting/environment-variables/model-provider.zh-CN.mdx +16 -0
  12. package/docs/usage/providers/ppio.mdx +57 -0
  13. package/docs/usage/providers/ppio.zh-CN.mdx +55 -0
  14. package/locales/en-US/providers.json +3 -0
  15. package/locales/zh-CN/providers.json +4 -0
  16. package/package.json +5 -5
  17. package/packages/web-crawler/README.md +47 -20
  18. package/packages/web-crawler/README.zh-CN.md +61 -0
  19. package/packages/web-crawler/src/__test__/crawler.test.ts +179 -0
  20. package/packages/web-crawler/src/crawler.ts +8 -3
  21. package/packages/web-crawler/src/type.ts +2 -3
  22. package/packages/web-crawler/src/urlRules.ts +5 -0
  23. package/packages/web-crawler/src/utils/appUrlRules.test.ts +76 -0
  24. package/src/app/[variants]/(main)/settings/llm/ProviderList/providers.tsx +2 -0
  25. package/src/config/aiModels/index.ts +3 -0
  26. package/src/config/aiModels/ppio.ts +276 -0
  27. package/src/config/llm.ts +6 -0
  28. package/src/config/modelProviders/index.ts +4 -0
  29. package/src/config/modelProviders/ppio.ts +249 -0
  30. package/src/libs/agent-runtime/AgentRuntime.ts +7 -0
  31. package/src/libs/agent-runtime/ppio/__snapshots__/index.test.ts.snap +26 -0
  32. package/src/libs/agent-runtime/ppio/fixtures/models.json +42 -0
  33. package/src/libs/agent-runtime/ppio/index.test.ts +264 -0
  34. package/src/libs/agent-runtime/ppio/index.ts +51 -0
  35. package/src/libs/agent-runtime/ppio/type.ts +12 -0
  36. package/src/libs/agent-runtime/types/type.ts +1 -0
  37. package/src/libs/agent-runtime/utils/anthropicHelpers.ts +2 -2
  38. package/src/server/routers/tools/__test__/search.test.ts +146 -0
  39. package/src/store/chat/slices/builtinTool/actions/searXNG.test.ts +67 -0
  40. package/src/store/tool/slices/builtin/selectors.test.ts +12 -0
  41. package/src/store/tool/slices/builtin/selectors.ts +4 -1
  42. package/src/tools/web-browsing/Portal/PageContent/index.tsx +37 -2
  43. package/src/tools/web-browsing/Render/PageContent/Result.tsx +36 -3
  44. package/src/tools/web-browsing/Render/PageContent/index.tsx +11 -1
  45. package/src/types/tool/crawler.ts +2 -2
  46. package/src/types/user/settings/keyVaults.ts +1 -0
@@ -0,0 +1,51 @@
1
+ import { ModelProvider } from '../types';
2
+ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
3
+ import { PPIOModelCard } from './type';
4
+
5
+ import type { ChatModelCard } from '@/types/llm';
6
+
7
+ export const LobePPIOAI = LobeOpenAICompatibleFactory({
8
+ baseURL: 'https://api.ppinfra.com/v3/openai',
9
+ constructorOptions: {
10
+ defaultHeaders: {
11
+ 'X-API-Source': 'lobechat',
12
+ },
13
+ },
14
+ debug: {
15
+ chatCompletion: () => process.env.DEBUG_PPIO_CHAT_COMPLETION === '1',
16
+ },
17
+ models: async ({ client }) => {
18
+ const { LOBE_DEFAULT_MODEL_LIST } = await import('@/config/aiModels');
19
+
20
+ const reasoningKeywords = [
21
+ 'deepseek-r1',
22
+ ];
23
+
24
+ const modelsPage = await client.models.list() as any;
25
+ const modelList: PPIOModelCard[] = modelsPage.data;
26
+
27
+ return modelList
28
+ .map((model) => {
29
+ const knownModel = LOBE_DEFAULT_MODEL_LIST.find((m) => model.id.toLowerCase() === m.id.toLowerCase());
30
+
31
+ return {
32
+ contextWindowTokens: model.context_size,
33
+ description: model.description,
34
+ displayName: model.display_name?.replace("(", " (").replace(")", ")").replace("\t", "") || model.title || model.id,
35
+ enabled: knownModel?.enabled || false,
36
+ functionCall: knownModel?.abilities?.functionCall || false,
37
+ id: model.id,
38
+ reasoning:
39
+ reasoningKeywords.some(keyword => model.id.toLowerCase().includes(keyword))
40
+ || knownModel?.abilities?.reasoning
41
+ || false,
42
+ vision:
43
+ model.description.toLowerCase().includes('视觉')
44
+ || knownModel?.abilities?.vision
45
+ || false,
46
+ };
47
+ })
48
+ .filter(Boolean) as ChatModelCard[];
49
+ },
50
+ provider: ModelProvider.PPIO,
51
+ });
@@ -0,0 +1,12 @@
1
/**
 * One model entry as returned by PPIO's model-list endpoint.
 * Field names mirror the raw API payload, hence snake_case.
 */
export interface PPIOModelCard {
  // Maximum context window, in tokens.
  context_size: number;
  // Creation timestamp — presumably Unix epoch seconds; TODO confirm against the PPIO API docs.
  created: number;
  // Human-readable model description (may contain Chinese text).
  description: string;
  // Display name for UIs; may contain tabs/parentheses that callers normalize.
  display_name: string;
  // Provider-scoped model identifier.
  id: string;
  // Input price — per million tokens judging by the `_per_m` suffix; verify units with PPIO.
  input_token_price_per_m: number;
  // Output price — same presumed per-million-token unit.
  output_token_price_per_m: number;
  // Availability/status flag; exact semantics not visible here.
  status: number;
  tags: string[];
  title: string;
}
@@ -54,6 +54,7 @@ export enum ModelProvider {
54
54
  Ollama = 'ollama',
55
55
  OpenAI = 'openai',
56
56
  OpenRouter = 'openrouter',
57
+ PPIO = 'ppio',
57
58
  Perplexity = 'perplexity',
58
59
  Qwen = 'qwen',
59
60
  SambaNova = 'sambanova',
@@ -28,7 +28,7 @@ export const buildAnthropicBlock = async (
28
28
  return {
29
29
  source: {
30
30
  data: base64 as string,
31
- media_type: mimeType as Anthropic.ImageBlockParam.Source['media_type'],
31
+ media_type: mimeType as Anthropic.Base64ImageSource['media_type'],
32
32
  type: 'base64',
33
33
  },
34
34
  type: 'image',
@@ -39,7 +39,7 @@ export const buildAnthropicBlock = async (
39
39
  return {
40
40
  source: {
41
41
  data: base64 as string,
42
- media_type: mimeType as Anthropic.ImageBlockParam.Source['media_type'],
42
+ media_type: mimeType as Anthropic.Base64ImageSource['media_type'],
43
43
  type: 'base64',
44
44
  },
45
45
  type: 'image',
@@ -0,0 +1,146 @@
1
// @vitest-environment node
import { TRPCError } from '@trpc/server';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { toolsEnv } from '@/config/tools';
import { SearXNGClient } from '@/server/modules/SearXNG';
import { SEARCH_SEARXNG_NOT_CONFIG } from '@/types/tool/search';

import { searchRouter } from '../search';

// Mock JWT verification
vi.mock('@/utils/server/jwt', () => ({
  getJWTPayload: vi.fn().mockResolvedValue({ userId: '1' }),
}));

// Stub the web crawler so crawlPages tests never hit the network;
// every crawl resolves with the same canned payload.
vi.mock('@lobechat/web-crawler', () => ({
  Crawler: vi.fn().mockImplementation(() => ({
    crawl: vi.fn().mockResolvedValue({ content: 'test content' }),
  })),
}));

// Auto-mock; individual `query` tests install their own search behavior.
vi.mock('@/server/modules/SearXNG');

// Unit tests for the tools search tRPC router: `crawlPages` (page crawling)
// and `query` (SearXNG web search).
describe('searchRouter', () => {
  // Minimal tRPC context: an authorized request plus a decoded JWT payload.
  const mockContext = {
    req: {
      headers: {
        authorization: 'Bearer mock-token',
      },
    },
    authorizationHeader: 'Bearer mock-token',
    jwtPayload: { userId: '1' },
  };

  beforeEach(() => {
    vi.clearAllMocks();
    // Point the router at a fake SearXNG endpoint; tests exercising the
    // unconfigured path overwrite this with undefined.
    // @ts-ignore
    toolsEnv.SEARXNG_URL = 'http://test-searxng.com';
  });

  describe('crawlPages', () => {
    it('should crawl multiple pages successfully', async () => {
      const caller = searchRouter.createCaller(mockContext as any);

      const result = await caller.crawlPages({
        urls: ['http://test1.com', 'http://test2.com'],
        impls: ['naive'],
      });

      // One result per input URL, each produced by the stubbed crawler.
      expect(result.results).toHaveLength(2);
      expect(result.results[0]).toEqual({ content: 'test content' });
      expect(result.results[1]).toEqual({ content: 'test content' });
    });

    it('should work without specifying impls', async () => {
      const caller = searchRouter.createCaller(mockContext as any);

      const result = await caller.crawlPages({
        urls: ['http://test.com'],
      });

      expect(result.results).toHaveLength(1);
      expect(result.results[0]).toEqual({ content: 'test content' });
    });
  });

  describe('query', () => {
    it('should throw error if SEARXNG_URL is not configured', async () => {
      // Simulate a deployment without SearXNG configured.
      // @ts-ignore
      toolsEnv.SEARXNG_URL = undefined;

      const caller = searchRouter.createCaller(mockContext as any);

      await expect(
        caller.query({
          query: 'test query',
        }),
      ).rejects.toThrow(
        new TRPCError({ code: 'NOT_IMPLEMENTED', message: SEARCH_SEARXNG_NOT_CONFIG }),
      );
    });

    it('should return search results successfully', async () => {
      const mockSearchResult = {
        results: [
          {
            title: 'Test Result',
            url: 'http://test.com',
            content: 'Test content',
          },
        ],
      };

      (SearXNGClient as any).mockImplementation(() => ({
        search: vi.fn().mockResolvedValue(mockSearchResult),
      }));

      const caller = searchRouter.createCaller(mockContext as any);

      const result = await caller.query({
        query: 'test query',
        searchEngine: ['google'],
      });

      // The router should pass the client's response through untouched.
      expect(result).toEqual(mockSearchResult);
    });

    it('should work without specifying search engines', async () => {
      const mockSearchResult = {
        results: [
          {
            title: 'Test Result',
            url: 'http://test.com',
            content: 'Test content',
          },
        ],
      };

      (SearXNGClient as any).mockImplementation(() => ({
        search: vi.fn().mockResolvedValue(mockSearchResult),
      }));

      const caller = searchRouter.createCaller(mockContext as any);

      const result = await caller.query({
        query: 'test query',
      });

      expect(result).toEqual(mockSearchResult);
    });

    it('should handle search errors', async () => {
      // Client failures must surface as SERVICE_UNAVAILABLE tRPC errors.
      (SearXNGClient as any).mockImplementation(() => ({
        search: vi.fn().mockRejectedValue(new Error('Search failed')),
      }));

      const caller = searchRouter.createCaller(mockContext as any);

      await expect(
        caller.query({
          query: 'test query',
        }),
      ).rejects.toThrow(new TRPCError({ code: 'SERVICE_UNAVAILABLE', message: 'Search failed' }));
    });
  });
});
@@ -4,6 +4,7 @@ import { Mock, beforeEach, describe, expect, it, vi } from 'vitest';
4
4
  import { searchService } from '@/services/search';
5
5
  import { useChatStore } from '@/store/chat';
6
6
  import { chatSelectors } from '@/store/chat/selectors';
7
+ import { CRAWL_CONTENT_LIMITED_COUNT } from '@/tools/web-browsing/const';
7
8
  import { ChatMessage } from '@/types/message';
8
9
  import { SearchContent, SearchQuery, SearchResponse } from '@/types/tool/search';
9
10
 
@@ -11,6 +12,7 @@ import { SearchContent, SearchQuery, SearchResponse } from '@/types/tool/search'
11
12
  vi.mock('@/services/search', () => ({
12
13
  searchService: {
13
14
  search: vi.fn(),
15
+ crawlPages: vi.fn(),
14
16
  },
15
17
  }));
16
18
 
@@ -181,6 +183,71 @@ describe('searXNG actions', () => {
181
183
  });
182
184
  });
183
185
 
186
// Tests for crawlMultiPages: crawling pages via searchService and writing
// the (possibly truncated) results back into the tool message.
describe('crawlMultiPages', () => {
  it('should truncate content that exceeds limit', async () => {
    // Content 1000 chars past the limit must be clipped to exactly the limit.
    const longContent = 'a'.repeat(CRAWL_CONTENT_LIMITED_COUNT + 1000);
    const mockResponse = {
      results: [
        {
          data: {
            content: longContent,
            title: 'Test Page',
          },
          crawler: 'naive',
          originalUrl: 'https://test.com',
        },
      ],
    };

    (searchService.crawlPages as Mock).mockResolvedValue(mockResponse);

    const { result } = renderHook(() => useChatStore());
    const messageId = 'test-message-id';

    await act(async () => {
      await result.current.crawlMultiPages(messageId, { urls: ['https://test.com'] });
    });

    // Only the truncated content and title are persisted to the message.
    const expectedContent = [
      {
        content: longContent.slice(0, CRAWL_CONTENT_LIMITED_COUNT),
        title: 'Test Page',
      },
    ];

    expect(result.current.internal_updateMessageContent).toHaveBeenCalledWith(
      messageId,
      JSON.stringify(expectedContent),
    );
  });

  it('should handle crawl errors', async () => {
    // A failed crawl carries error fields instead of a `data` payload.
    const mockResponse = {
      results: [
        {
          errorMessage: 'Failed to crawl',
          errorType: 'CRAWL_ERROR',
          originalUrl: 'https://test.com',
        },
      ],
    };

    (searchService.crawlPages as Mock).mockResolvedValue(mockResponse);

    const { result } = renderHook(() => useChatStore());
    const messageId = 'test-message-id';

    await act(async () => {
      await result.current.crawlMultiPages(messageId, { urls: ['https://test.com'] });
    });

    // Error results are written through to the message unchanged.
    expect(result.current.internal_updateMessageContent).toHaveBeenCalledWith(
      messageId,
      JSON.stringify(mockResponse.results),
    );
  });
});
+ });
250
+
184
251
  describe('reSearchWithSearXNG', () => {
185
252
  it('should update arguments and perform search', async () => {
186
253
  const { result } = renderHook(() => useChatStore());
@@ -41,6 +41,18 @@ describe('builtinToolSelectors', () => {
41
41
  ]);
42
42
  });
43
43
 
44
+ it('should hide tool when not need visible with hidden', () => {
45
+ const state = {
46
+ ...initialState,
47
+ builtinTools: [
48
+ { identifier: 'tool-1', hidden: true, manifest: { meta: { title: 'Tool 1' } } },
49
+ { identifier: DalleManifest.identifier, manifest: { meta: { title: 'Dalle' } } },
50
+ ],
51
+ } as ToolStoreState;
52
+ const result = builtinToolSelectors.metaList(false)(state);
53
+ expect(result).toEqual([]);
54
+ });
55
+
44
56
  it('should return an empty list if no builtin tools are available', () => {
45
57
  const state: ToolStoreState = {
46
58
  ...initialState,
@@ -7,7 +7,10 @@ const metaList =
7
7
  (showDalle?: boolean) =>
8
8
  (s: ToolStoreState): LobeToolMeta[] =>
9
9
  s.builtinTools
10
- .filter((item) => (!showDalle ? item.identifier !== DalleManifest.identifier : !item.hidden))
10
+ .filter(
11
+ (item) =>
12
+ !item.hidden && (!showDalle ? item.identifier !== DalleManifest.identifier : true),
13
+ )
11
14
  .map((t) => ({
12
15
  author: 'LobeHub',
13
16
  identifier: t.identifier,
@@ -1,4 +1,4 @@
1
- import { Alert, CopyButton, Icon, Markdown } from '@lobehub/ui';
1
+ import { Alert, CopyButton, Highlighter, Icon, Markdown } from '@lobehub/ui';
2
2
  import { Descriptions, Segmented, Typography } from 'antd';
3
3
  import { createStyles } from 'antd-style';
4
4
  import { ExternalLink } from 'lucide-react';
@@ -90,7 +90,42 @@ const PageContent = memo<PageContentProps>(({ result }) => {
90
90
  const { styles } = useStyles();
91
91
  const [display, setDisplay] = useState('render');
92
92
 
93
- if (!result) return undefined;
93
+ if (!result || !result.data) return undefined;
94
+
95
+ if ('errorType' in result.data) {
96
+ return (
97
+ <Flexbox className={styles.footer} gap={4}>
98
+ <div>
99
+ <Descriptions
100
+ classNames={{
101
+ content: styles.footerText,
102
+ }}
103
+ column={1}
104
+ items={[
105
+ {
106
+ children: result.crawler,
107
+ label: t('search.crawPages.meta.crawler'),
108
+ },
109
+ ]}
110
+ size="small"
111
+ />
112
+ </div>
113
+ <Alert
114
+ extra={
115
+ <div style={{ maxWidth: 500, overflowX: 'scroll' }}>
116
+ <Highlighter language={'json'}>{JSON.stringify(result.data, null, 2)}</Highlighter>
117
+ </div>
118
+ }
119
+ message={
120
+ <div style={{ textAlign: 'start' }}>
121
+ {result.data.errorMessage || result.data.content}
122
+ </div>
123
+ }
124
+ type={'error'}
125
+ />
126
+ </Flexbox>
127
+ );
128
+ }
94
129
 
95
130
  const { url, title, description, content } = result.data;
96
131
  return (
@@ -1,7 +1,7 @@
1
1
  'use client';
2
2
 
3
- import { CrawlSuccessResult } from '@lobechat/web-crawler';
4
- import { Icon } from '@lobehub/ui';
3
+ import { CrawlErrorResult, CrawlSuccessResult } from '@lobechat/web-crawler';
4
+ import { Alert, Highlighter, Icon } from '@lobehub/ui';
5
5
  import { Descriptions, Typography } from 'antd';
6
6
  import { createStyles } from 'antd-style';
7
7
  import { ExternalLink } from 'lucide-react';
@@ -82,7 +82,7 @@ interface CrawlerData {
82
82
  crawler: string;
83
83
  messageId: string;
84
84
  originalUrl: string;
85
- result: CrawlSuccessResult;
85
+ result: CrawlSuccessResult | CrawlErrorResult;
86
86
  }
87
87
 
88
88
  const CrawlerResultCard = memo<CrawlerData>(({ result, messageId, crawler, originalUrl }) => {
@@ -90,6 +90,39 @@ const CrawlerResultCard = memo<CrawlerData>(({ result, messageId, crawler, origi
90
90
  const { styles } = useStyles();
91
91
  const [openToolUI, togglePageContent] = useChatStore((s) => [s.openToolUI, s.togglePageContent]);
92
92
 
93
+ if ('errorType' in result) {
94
+ return (
95
+ <Flexbox className={styles.footer} gap={4}>
96
+ <div>
97
+ <Descriptions
98
+ classNames={{
99
+ content: styles.footerText,
100
+ }}
101
+ column={1}
102
+ items={[
103
+ {
104
+ children: crawler,
105
+ label: t('search.crawPages.meta.crawler'),
106
+ },
107
+ ]}
108
+ size="small"
109
+ />
110
+ </div>
111
+ <Alert
112
+ extra={
113
+ <div style={{ maxWidth: 500, overflowX: 'scroll' }}>
114
+ <Highlighter language={'json'}>{JSON.stringify(result, null, 2)}</Highlighter>
115
+ </div>
116
+ }
117
+ message={
118
+ <div style={{ textAlign: 'start' }}>{result.errorMessage || result.content}</div>
119
+ }
120
+ type={'error'}
121
+ />
122
+ </Flexbox>
123
+ );
124
+ }
125
+
93
126
  const { url, title, description } = result;
94
127
 
95
128
  return (
@@ -1,3 +1,4 @@
1
+ import { CrawlErrorResult } from '@lobechat/web-crawler';
1
2
  import { memo } from 'react';
2
3
  import { Flexbox } from 'react-layout-kit';
3
4
 
@@ -31,7 +32,16 @@ const PagesContent = memo<PagesContentProps>(({ results, messageId, urls }) => {
31
32
  key={result.originalUrl}
32
33
  messageId={messageId}
33
34
  originalUrl={result.originalUrl}
34
- result={result.data}
35
+ result={
36
+ result.data ||
37
+ // TODO: Remove this in v2 as it's deprecated
38
+ ({
39
+ content: (result as any)?.content,
40
+ errorMessage: (result as any)?.errorMessage,
41
+ errorType: (result as any)?.errorType,
42
+ url: result.originalUrl,
43
+ } as CrawlErrorResult)
44
+ }
35
45
  />
36
46
  ))}
37
47
  </Flexbox>
@@ -1,4 +1,4 @@
1
- import { CrawlSuccessResult } from '@lobechat/web-crawler';
1
+ import { CrawlErrorResult, CrawlSuccessResult } from '@lobechat/web-crawler';
2
2
 
3
3
  export interface CrawlSinglePageQuery {
4
4
  url: string;
@@ -10,7 +10,7 @@ export interface CrawlMultiPagesQuery {
10
10
 
11
11
  export interface CrawlResult {
12
12
  crawler: string;
13
- data: CrawlSuccessResult;
13
+ data: CrawlSuccessResult | CrawlErrorResult;
14
14
  originalUrl: string;
15
15
  }
16
16
 
@@ -65,6 +65,7 @@ export interface UserKeyVaults extends SearchEngineKeyVaults {
65
65
  openrouter?: OpenAICompatibleKeyVault;
66
66
  password?: string;
67
67
  perplexity?: OpenAICompatibleKeyVault;
68
+ ppio?: OpenAICompatibleKeyVault;
68
69
  qwen?: OpenAICompatibleKeyVault;
69
70
  sambanova?: OpenAICompatibleKeyVault;
70
71
  sensenova?: OpenAICompatibleKeyVault;