@lobehub/chat 1.80.5 → 1.81.0

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (52)
  1. package/CHANGELOG.md +25 -0
  2. package/changelog/v1.json +9 -0
  3. package/package.json +1 -1
  4. package/packages/electron-client-ipc/src/events/index.ts +6 -2
  5. package/packages/electron-client-ipc/src/events/remoteServer.ts +28 -0
  6. package/packages/electron-client-ipc/src/types/index.ts +1 -0
  7. package/packages/electron-client-ipc/src/types/remoteServer.ts +8 -0
  8. package/packages/electron-server-ipc/package.json +7 -1
  9. package/packages/electron-server-ipc/src/ipcClient.ts +54 -20
  10. package/packages/electron-server-ipc/src/ipcServer.ts +42 -9
  11. package/packages/web-crawler/src/crawImpl/__tests__/search1api.test.ts +33 -39
  12. package/packages/web-crawler/src/crawImpl/search1api.ts +1 -7
  13. package/packages/web-crawler/src/index.ts +1 -0
  14. package/packages/web-crawler/src/urlRules.ts +3 -1
  15. package/src/config/tools.ts +2 -0
  16. package/src/features/Conversation/Messages/Assistant/Tool/Inspector/Debug.tsx +9 -3
  17. package/src/features/Conversation/Messages/Assistant/Tool/Inspector/PluginState.tsx +21 -0
  18. package/src/features/Conversation/Messages/Assistant/Tool/Render/Arguments.tsx +1 -1
  19. package/src/locales/default/plugin.ts +1 -0
  20. package/src/server/routers/tools/{__test__/search.test.ts → search.test.ts} +27 -5
  21. package/src/server/routers/tools/search.ts +3 -44
  22. package/src/server/services/search/impls/index.ts +30 -0
  23. package/src/server/services/search/impls/search1api/index.ts +154 -0
  24. package/src/server/services/search/impls/search1api/type.ts +81 -0
  25. package/src/server/{modules/SearXNG.ts → services/search/impls/searxng/client.ts} +32 -2
  26. package/src/server/{routers/tools/__tests__ → services/search/impls/searxng}/fixtures/searXNG.ts +2 -2
  27. package/src/server/services/search/impls/searxng/index.test.ts +26 -0
  28. package/src/server/services/search/impls/searxng/index.ts +62 -0
  29. package/src/server/services/search/impls/type.ts +11 -0
  30. package/src/server/services/search/index.ts +59 -0
  31. package/src/store/chat/slices/builtinTool/actions/index.ts +1 -1
  32. package/src/store/chat/slices/builtinTool/actions/{searXNG.test.ts → search.test.ts} +30 -55
  33. package/src/store/chat/slices/builtinTool/actions/{searXNG.ts → search.ts} +25 -32
  34. package/src/tools/web-browsing/Portal/Search/Footer.tsx +1 -1
  35. package/src/tools/web-browsing/Portal/Search/ResultList/SearchItem/TitleExtra.tsx +2 -2
  36. package/src/tools/web-browsing/Portal/Search/ResultList/SearchItem/Video.tsx +9 -7
  37. package/src/tools/web-browsing/Portal/Search/ResultList/SearchItem/index.tsx +2 -2
  38. package/src/tools/web-browsing/Portal/Search/ResultList/index.tsx +3 -3
  39. package/src/tools/web-browsing/Portal/Search/index.tsx +4 -4
  40. package/src/tools/web-browsing/Portal/index.tsx +3 -1
  41. package/src/tools/web-browsing/Render/Search/SearchQuery/SearchView.tsx +4 -2
  42. package/src/tools/web-browsing/Render/Search/SearchQuery/index.tsx +6 -13
  43. package/src/tools/web-browsing/Render/Search/SearchResult/SearchResultItem.tsx +2 -2
  44. package/src/tools/web-browsing/Render/Search/SearchResult/index.tsx +5 -5
  45. package/src/tools/web-browsing/Render/Search/index.tsx +2 -2
  46. package/src/tools/web-browsing/Render/index.tsx +4 -3
  47. package/src/tools/web-browsing/components/SearchBar.tsx +4 -6
  48. package/src/tools/web-browsing/index.ts +54 -60
  49. package/src/tools/web-browsing/systemRole.ts +22 -13
  50. package/src/types/tool/search/index.ts +44 -0
  51. package/src/server/routers/tools/__tests__/search.test.ts +0 -48
  52. package/src/types/tool/search.ts +0 -48
package/packages/web-crawler/src/crawImpl/__tests__/search1api.test.ts

@@ -1,14 +1,14 @@
  import { describe, expect, it, vi } from 'vitest';

- import * as withTimeoutModule from '../../utils/withTimeout';
  import { NetworkConnectionError, PageNotFoundError, TimeoutError } from '../../utils/errorType';
+ import * as withTimeoutModule from '../../utils/withTimeout';
  import { search1api } from '../search1api';

  describe('search1api crawler', () => {
  // Mock fetch function
  const mockFetch = vi.fn();
  global.fetch = mockFetch;
-
+
  // Original env
  let originalEnv: NodeJS.ProcessEnv;

@@ -16,7 +16,7 @@ describe('search1api crawler', () => {
  vi.resetAllMocks();
  originalEnv = { ...process.env };
  process.env.SEARCH1API_API_KEY = 'test-api-key';
-
+
  // Mock withTimeout to directly return the promise
  vi.spyOn(withTimeoutModule, 'withTimeout').mockImplementation((promise) => promise);
  });
@@ -25,17 +25,9 @@ describe('search1api crawler', () => {
  process.env = originalEnv;
  });

- it('should throw error when API key is not set', async () => {
- delete process.env.SEARCH1API_API_KEY;
-
- await expect(search1api('https://example.com', { filterOptions: {} })).rejects.toThrow(
- 'SEARCH1API_API_KEY environment variable is not set',
- );
- });
-
  it('should throw NetworkConnectionError when fetch fails', async () => {
  mockFetch.mockRejectedValue(new Error('fetch failed'));
-
+
  await expect(search1api('https://example.com', { filterOptions: {} })).rejects.toThrow(
  NetworkConnectionError,
  );
@@ -44,12 +36,12 @@ describe('search1api crawler', () => {
  it('should throw TimeoutError when request times out', async () => {
  // Restore original withTimeout implementation for this test
  vi.spyOn(withTimeoutModule, 'withTimeout').mockRestore();
-
+
  // Mock withTimeout to throw TimeoutError
  vi.spyOn(withTimeoutModule, 'withTimeout').mockImplementation(() => {
  throw new TimeoutError('Request timeout after 10000ms');
  });
-
+
  await expect(search1api('https://example.com', { filterOptions: {} })).rejects.toThrow(
  TimeoutError,
  );
@@ -61,7 +53,7 @@ describe('search1api crawler', () => {
  status: 404,
  statusText: 'Not Found',
  });
-
+
  await expect(search1api('https://example.com', { filterOptions: {} })).rejects.toThrow(
  PageNotFoundError,
  );
@@ -73,7 +65,7 @@ describe('search1api crawler', () => {
  status: 500,
  statusText: 'Internal Server Error',
  });
-
+
  await expect(search1api('https://example.com', { filterOptions: {} })).rejects.toThrow(
  'Search1API request failed with status 500: Internal Server Error',
  );
@@ -82,37 +74,39 @@ describe('search1api crawler', () => {
  it('should return undefined when content is too short', async () => {
  mockFetch.mockResolvedValue({
  ok: true,
- json: () => Promise.resolve({
- crawlParameters: { url: 'https://example.com' },
- results: {
- title: 'Test Title',
- link: 'https://example.com',
- content: 'Short', // Less than 100 characters
- },
- }),
+ json: () =>
+ Promise.resolve({
+ crawlParameters: { url: 'https://example.com' },
+ results: {
+ title: 'Test Title',
+ link: 'https://example.com',
+ content: 'Short', // Less than 100 characters
+ },
+ }),
  });
-
+
  const result = await search1api('https://example.com', { filterOptions: {} });
  expect(result).toBeUndefined();
  });

  it('should return crawl result on successful fetch', async () => {
  const mockContent = 'This is a test content that is longer than 100 characters. '.repeat(3);
-
+
  mockFetch.mockResolvedValue({
  ok: true,
- json: () => Promise.resolve({
- crawlParameters: { url: 'https://example.com' },
- results: {
- title: 'Test Title',
- link: 'https://example.com',
- content: mockContent,
- },
- }),
+ json: () =>
+ Promise.resolve({
+ crawlParameters: { url: 'https://example.com' },
+ results: {
+ title: 'Test Title',
+ link: 'https://example.com',
+ content: mockContent,
+ },
+ }),
  });
-
+
  const result = await search1api('https://example.com', { filterOptions: {} });
-
+
  expect(mockFetch).toHaveBeenCalledWith('https://api.search1api.com/crawl', {
  method: 'POST',
  headers: {
@@ -123,7 +117,7 @@ describe('search1api crawler', () => {
  url: 'https://example.com',
  }),
  });
-
+
  expect(result).toEqual({
  content: mockContent,
  contentType: 'text',
@@ -140,8 +134,8 @@ describe('search1api crawler', () => {
  ok: true,
  json: () => Promise.reject(new Error('Invalid JSON')),
  });
-
+
  const result = await search1api('https://example.com', { filterOptions: {} });
  expect(result).toBeUndefined();
  });
- });
+ });
package/packages/web-crawler/src/crawImpl/search1api.ts

@@ -17,12 +17,6 @@ export const search1api: CrawlImpl = async (url) => {
  // Get API key from environment variable
  const apiKey = process.env.SEARCH1API_CRAWL_API_KEY || process.env.SEARCH1API_API_KEY;

- if (!apiKey) {
- throw new Error(
- 'SEARCH1API_API_KEY environment variable is not set. Visit https://www.search1api.com to get free quota.',
- );
- }
-
  let res: Response;

  try {
@@ -32,7 +26,7 @@ export const search1api: CrawlImpl = async (url) => {
  url,
  }),
  headers: {
- 'Authorization': `Bearer ${apiKey}`,
+ 'Authorization': !apiKey ? '' : `Bearer ${apiKey}`,
  'Content-Type': 'application/json',
  },
  method: 'POST',
package/packages/web-crawler/src/index.ts

@@ -1,2 +1,3 @@
+ export type { CrawlImplType } from './crawImpl';
  export { Crawler } from './crawler';
  export * from './type';
package/packages/web-crawler/src/urlRules.ts

@@ -31,6 +31,7 @@ export const crawUrlRules: CrawlUrlRule[] = [
  filterOptions: {
  enableReadability: false,
  },
+ impls: ['naive', 'jina'],
  urlPattern: 'https://github.com/([^/]+)/([^/]+)/blob/([^/]+)/(.*)',
  urlTransform: 'https://github.com/$1/$2/raw/refs/heads/$3/$4',
  },
@@ -38,6 +39,7 @@ export const crawUrlRules: CrawlUrlRule[] = [
  filterOptions: {
  enableReadability: false,
  },
+ impls: ['naive', 'jina'],
  // GitHub discussion
  urlPattern: 'https://github.com/(.*)/discussions/(.*)',
  },
@@ -79,9 +81,9 @@ export const crawUrlRules: CrawlUrlRule[] = [
  enableReadability: false,
  pureText: true,
  },
+ impls: ['naive'],
  urlPattern: 'https://www.qiumiwu.com/standings/(.*)',
  },
-
  // mozilla use jina
  {
  impls: ['jina'],
package/src/config/tools.ts

@@ -5,11 +5,13 @@ export const getToolsConfig = () => {
  return createEnv({
  runtimeEnv: {
  CRAWLER_IMPLS: process.env.CRAWLER_IMPLS,
+ SEARCH_PROVIDERS: process.env.SEARCH_PROVIDERS,
  SEARXNG_URL: process.env.SEARXNG_URL,
  },

  server: {
  CRAWLER_IMPLS: z.string().optional(),
+ SEARCH_PROVIDERS: z.string().optional(),
  SEARXNG_URL: z.string().url().optional(),
  },
  });
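The new SEARCH_PROVIDERS variable mirrors CRAWLER_IMPLS: a comma-separated list of provider ids (here 'searxng' and 'search1api'). A minimal sketch of how the service layer could turn that string into a provider list, assuming the same tolerance for full-width commas that the old CRAWLER_IMPLS parsing in search.ts had; the parseProviders helper is illustrative and not part of the package:

import { toolsEnv } from '@/config/tools';

// Hypothetical helper: normalize full-width commas, split, trim, and drop empty entries.
const parseProviders = (raw: string = ''): string[] =>
  raw
    .replaceAll('，', ',')
    .split(',')
    .map((item) => item.trim())
    .filter(Boolean);

// e.g. SEARCH_PROVIDERS=searxng,search1api -> ['searxng', 'search1api']
const providers = parseProviders(toolsEnv.SEARCH_PROVIDERS);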
package/src/features/Conversation/Messages/Assistant/Tool/Inspector/Debug.tsx

@@ -4,6 +4,7 @@ import { memo } from 'react';
  import { useTranslation } from 'react-i18next';

  import PluginResult from './PluginResultJSON';
+ import PluginState from './PluginState';

  interface DebugProps {
  payload: object;
@@ -28,15 +29,20 @@ const Debug = memo<DebugProps>(({ payload, requestArgs, toolCallId }) => {
  key: 'arguments',
  label: t('debug.arguments'),
  },
+ {
+ children: <PluginResult toolCallId={toolCallId} />,
+ key: 'response',
+ label: t('debug.response'),
+ },
  {
  children: <Highlighter language={'json'}>{JSON.stringify(payload, null, 2)}</Highlighter>,
  key: 'function_call',
  label: t('debug.function_call'),
  },
  {
- children: <PluginResult toolCallId={toolCallId} />,
- key: 'response',
- label: t('debug.response'),
+ children: <PluginState toolCallId={toolCallId} />,
+ key: 'pluginState',
+ label: t('debug.pluginState'),
  },
  ]}
  style={{ display: 'grid', maxWidth: 800, minWidth: 400 }}
package/src/features/Conversation/Messages/Assistant/Tool/Inspector/PluginState.tsx

@@ -0,0 +1,21 @@
+ import { Highlighter } from '@lobehub/ui';
+ import { memo } from 'react';
+
+ import { useChatStore } from '@/store/chat';
+ import { chatSelectors } from '@/store/chat/selectors';
+
+ export interface FunctionMessageProps {
+ toolCallId: string;
+ }
+
+ const PluginState = memo<FunctionMessageProps>(({ toolCallId }) => {
+ const toolMessage = useChatStore(chatSelectors.getMessageByToolCallId(toolCallId));
+
+ return (
+ <Highlighter language={'json'} style={{ maxHeight: 200, maxWidth: 800, overflow: 'scroll' }}>
+ {JSON.stringify(toolMessage?.pluginState, null, 2)}
+ </Highlighter>
+ );
+ });
+
+ export default PluginState;
package/src/features/Conversation/Messages/Assistant/Tool/Render/Arguments.tsx

@@ -93,7 +93,7 @@ const ObjectDisplay = memo(({ data, shine }: ObjectDisplayProps) => {
  <div className={styles.row} key={key}>
  <span
  className={styles.key}
- style={{ minWidth: hasMinWidth ? (isMobile ? 60 : 80) : undefined }}
+ style={{ minWidth: hasMinWidth ? (isMobile ? 60 : 140) : undefined }}
  >
  {key}
  </span>
package/src/locales/default/plugin.ts

@@ -5,6 +5,7 @@ export default {
  off: '关闭调试',
  on: '查看插件调用信息',
  payload: '插件载荷',
+ pluginState: '插件 State',
  response: '返回结果',
  tool_call: '工具调用请求',
  },
package/src/server/routers/tools/search.test.ts

@@ -3,10 +3,10 @@ import { TRPCError } from '@trpc/server';
  import { beforeEach, describe, expect, it, vi } from 'vitest';

  import { toolsEnv } from '@/config/tools';
- import { SearXNGClient } from '@/server/modules/SearXNG';
+ import { SearXNGClient } from '@/server/services/search/impls/searxng/client';
  import { SEARCH_SEARXNG_NOT_CONFIG } from '@/types/tool/search';

- import { searchRouter } from '../search';
+ import { searchRouter } from './search';

  // Mock JWT verification
  vi.mock('@/utils/server/jwt', () => ({
@@ -19,7 +19,7 @@ vi.mock('@lobechat/web-crawler', () => ({
  })),
  }));

- vi.mock('@/server/modules/SearXNG');
+ vi.mock('@/server/services/search/impls/searxng/client');

  describe('searchRouter', () => {
  const mockContext = {
@@ -104,7 +104,18 @@ describe('searchRouter', () => {
  query: 'test query',
  });

- expect(result).toEqual(mockSearchResult);
+ expect(result).toEqual({
+ costTime: 0,
+ query: 'test query',
+ results: [
+ {
+ title: 'Test Result',
+ parsedUrl: 'test.com',
+ url: 'http://test.com',
+ content: 'Test content',
+ },
+ ],
+ });
  });

  it('should work without specifying search engines', async () => {
@@ -128,7 +139,18 @@ describe('searchRouter', () => {
  query: 'test query',
  });

- expect(result).toEqual(mockSearchResult);
+ expect(result).toEqual({
+ costTime: 0,
+ query: 'test query',
+ results: [
+ {
+ title: 'Test Result',
+ parsedUrl: 'test.com',
+ url: 'http://test.com',
+ content: 'Test content',
+ },
+ ],
+ });
  });

  it('should handle search errors', async () => {
package/src/server/routers/tools/search.ts

@@ -1,14 +1,9 @@
- import { Crawler } from '@lobechat/web-crawler';
- import { TRPCError } from '@trpc/server';
- import pMap from 'p-map';
  import { z } from 'zod';

- import { toolsEnv } from '@/config/tools';
  import { isServerMode } from '@/const/version';
  import { passwordProcedure } from '@/libs/trpc/edge';
  import { authedProcedure, router } from '@/libs/trpc/lambda';
- import { SearXNGClient } from '@/server/modules/SearXNG';
- import { SEARCH_SEARXNG_NOT_CONFIG } from '@/types/tool/search';
+ import { searchService } from '@/server/services/search';

  // TODO: password procedure 未来的处理方式可能要思考下
  const searchProcedure = isServerMode ? authedProcedure : passwordProcedure;
@@ -22,24 +17,7 @@ export const searchRouter = router({
  }),
  )
  .mutation(async ({ input }) => {
- const envString = toolsEnv.CRAWLER_IMPLS || '';
-
- // 处理全角逗号和多余空格
- let envValue = envString.replaceAll('，', ',').trim();
-
- const impls = envValue.split(',').filter(Boolean);
-
- const crawler = new Crawler({ impls });
-
- const results = await pMap(
- input.urls,
- async (url) => {
- return await crawler.crawl({ impls: input.impls, url });
- },
- { concurrency: 3 },
- );
-
- return { results };
+ return searchService.crawlPages(input);
  }),

  query: searchProcedure
@@ -56,25 +34,6 @@ export const searchRouter = router({
  }),
  )
  .query(async ({ input }) => {
- if (!toolsEnv.SEARXNG_URL) {
- throw new TRPCError({ code: 'NOT_IMPLEMENTED', message: SEARCH_SEARXNG_NOT_CONFIG });
- }
-
- const client = new SearXNGClient(toolsEnv.SEARXNG_URL);
-
- try {
- return await client.search(input.query, {
- categories: input.optionalParams?.searchCategories,
- engines: input.optionalParams?.searchEngines,
- time_range: input.optionalParams?.searchTimeRange,
- });
- } catch (e) {
- console.error(e);
-
- throw new TRPCError({
- code: 'SERVICE_UNAVAILABLE',
- message: (e as Error).message,
- });
- }
+ return await searchService.query(input.query, input.optionalParams);
  }),
  });
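Both endpoints now delegate to the new search service in src/server/services/search/index.ts (+59 lines, not shown in this diff). A minimal sketch of crawlPages under the assumption that it keeps the removed inline behaviour (a Crawler configured from CRAWLER_IMPLS, crawling up to three URLs concurrently with p-map); the class body here is illustrative, not the published implementation:

import { Crawler } from '@lobechat/web-crawler';
import pMap from 'p-map';

import { toolsEnv } from '@/config/tools';

class SearchService {
  async crawlPages(input: { impls?: string[]; urls: string[] }) {
    // Same normalization the router used to do inline: tolerate full-width commas.
    const impls = (toolsEnv.CRAWLER_IMPLS || '').replaceAll('，', ',').split(',').filter(Boolean);
    const crawler = new Crawler({ impls });

    // Crawl up to three URLs at a time, as the removed router code did.
    const results = await pMap(input.urls, async (url) => crawler.crawl({ impls: input.impls, url }), {
      concurrency: 3,
    });

    return { results };
  }
}

export const searchService = new SearchService();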
package/src/server/services/search/impls/index.ts

@@ -0,0 +1,30 @@
+ import { Search1APIImpl } from './search1api';
+ import { SearXNGImpl } from './searxng';
+ import { SearchServiceImpl } from './type';
+
+ /**
+ * Available search service implementations
+ */
+ export enum SearchImplType {
+ SearXNG = 'searxng',
+ Search1API = 'search1api',
+ }
+
+ /**
+ * Create a search service implementation instance
+ */
+ export const createSearchServiceImpl = (
+ type: SearchImplType = SearchImplType.SearXNG,
+ ): SearchServiceImpl => {
+ switch (type) {
+ case SearchImplType.SearXNG: {
+ return new SearXNGImpl();
+ }
+
+ default: {
+ return new Search1APIImpl();
+ }
+ }
+ };
+
+ export type { SearchServiceImpl } from './type';
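A usage sketch of the factory: pick an implementation and run a query. The query call matches the Search1APIImpl signature shown below; how the type is chosen from SEARCH_PROVIDERS in the service layer is not part of this diff:

import { SearchImplType, createSearchServiceImpl } from '@/server/services/search/impls';

const impl = createSearchServiceImpl(SearchImplType.Search1API);

// Resolves to a UniformSearchResponse: { costTime, query, resultNumbers, results }
const response = await impl.query('lobehub chat');
console.log(response.results.map((result) => result.title));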
package/src/server/services/search/impls/search1api/index.ts

@@ -0,0 +1,154 @@
+ import { TRPCError } from '@trpc/server';
+ import debug from 'debug';
+ import urlJoin from 'url-join';
+
+ import { SearchParams, UniformSearchResponse, UniformSearchResult } from '@/types/tool/search';
+
+ import { SearchServiceImpl } from '../type';
+ import { Search1ApiResponse } from './type';
+
+ interface Search1APIQueryParams {
+ crawl_results?: 0 | 1;
+ exclude_sites?: string[];
+ image?: boolean;
+ include_sites?: string[];
+ language?: string;
+ max_results: number;
+ query: string;
+ search_service?: string;
+ time_range?: string;
+ }
+
+ const log = debug('lobe-search:search1api');
+
+ /**
+ * Search1API implementation of the search service
+ * Primarily used for web crawling
+ */
+ export class Search1APIImpl implements SearchServiceImpl {
+ private get apiKey(): string | undefined {
+ return process.env.SEARCH1API_SEARCH_API_KEY || process.env.SEARCH1API_API_KEY;
+ }
+
+ private get baseUrl(): string {
+ // Assuming the base URL is consistent with the crawl endpoint
+ return 'https://api.search1api.com';
+ }
+
+ async query(query: string, params: SearchParams = {}): Promise<UniformSearchResponse> {
+ log('Starting Search1API query with query: "%s", params: %o', query, params);
+ const endpoint = urlJoin(this.baseUrl, '/search');
+
+ const { searchEngines } = params;
+
+ const defaultQueryParams: Search1APIQueryParams = {
+ crawl_results: 0, // 默认不做抓取
+ image: false,
+ max_results: 15, // Default max results
+ query,
+ };
+
+ let body: Search1APIQueryParams[] = [
+ {
+ ...defaultQueryParams,
+ time_range:
+ params?.searchTimeRange && params.searchTimeRange !== 'anytime'
+ ? params.searchTimeRange
+ : undefined,
+ },
+ ];
+
+ if (searchEngines && searchEngines.length > 0) {
+ body = searchEngines.map((searchEngine) => ({
+ ...defaultQueryParams,
+
+ max_results: parseInt((20 / searchEngines.length).toFixed(0)),
+ search_service: searchEngine,
+ time_range:
+ params?.searchTimeRange && params.searchTimeRange !== 'anytime'
+ ? params.searchTimeRange
+ : undefined,
+ }));
+ }
+
+ // Note: Other SearchParams like searchCategories, searchEngines (beyond the first one)
+ // and Search1API specific params like include_sites, exclude_sites, language
+ // are not currently mapped.
+
+ log('Constructed request body: %o', body);
+
+ let response: Response;
+ const startAt = Date.now();
+ let costTime = 0;
+ try {
+ log('Sending request to endpoint: %s', endpoint);
+ response = await fetch(endpoint, {
+ body: JSON.stringify(body),
+ headers: {
+ 'Authorization': this.apiKey ? `Bearer ${this.apiKey}` : '',
+ 'Content-Type': 'application/json',
+ },
+ method: 'POST',
+ });
+ log('Received response with status: %d', response.status);
+ costTime = Date.now() - startAt;
+ } catch (error) {
+ log.extend('error')('Search1API fetch error: %o', error);
+ throw new TRPCError({
+ cause: error,
+ code: 'SERVICE_UNAVAILABLE',
+ message: 'Failed to connect to Search1API.',
+ });
+ }
+
+ if (!response.ok) {
+ const errorBody = await response.text();
+ log.extend('error')(
+ `Search1API request failed with status ${response.status}: %s`,
+ errorBody.length > 200 ? `${errorBody.slice(0, 200)}...` : errorBody,
+ );
+ throw new TRPCError({
+ cause: errorBody,
+ code: 'SERVICE_UNAVAILABLE',
+ message: `Search1API request failed: ${response.statusText}`,
+ });
+ }
+
+ try {
+ const search1ApiResponse = (await response.json()) as Search1ApiResponse[]; // Use a specific type if defined elsewhere
+
+ log('Parsed Search1API response: %o', search1ApiResponse);
+
+ const mappedResults = search1ApiResponse.flatMap((response) => {
+ // Map Search1API response to SearchResponse
+ return (response.results || []).map(
+ (result): UniformSearchResult => ({
+ category: 'general', // Default category
+ content: result.content || result.snippet || '', // Prioritize content, fallback to snippet
+ engines: [response.searchParameters?.search_service || ''],
+ parsedUrl: result.link ? new URL(result.link).hostname : '', // Basic URL parsing
+ score: 1, // Default score
+ title: result.title || '',
+ url: result.link,
+ }),
+ );
+ });
+
+ log('Mapped %d results to SearchResult format', mappedResults.length);
+
+ return {
+ costTime,
+ query: query,
+ resultNumbers: mappedResults.length,
+ results: mappedResults,
+ };
+ } catch (error) {
+ log.extend('error')('Error parsing Search1API response: %o', error);
+ throw new TRPCError({
+ cause: error,
+ code: 'INTERNAL_SERVER_ERROR',
+ message: 'Failed to parse Search1API response.',
+ });
+ }
+ }
+ }
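Search1APIImpl implements the SearchServiceImpl contract from impls/type.ts (+11 lines, not included in this diff). Judging from the query method above, the interface presumably reduces to a single uniform query method; an assumed sketch of that shape:

import { SearchParams, UniformSearchResponse } from '@/types/tool/search';

// Assumed shape of src/server/services/search/impls/type.ts; the real file is not shown in this diff.
export interface SearchServiceImpl {
  query(query: string, params?: SearchParams): Promise<UniformSearchResponse>;
}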