@librechat/agents 2.4.30 → 2.4.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/cjs/common/enum.cjs +1 -0
  2. package/dist/cjs/common/enum.cjs.map +1 -1
  3. package/dist/cjs/main.cjs +2 -0
  4. package/dist/cjs/main.cjs.map +1 -1
  5. package/dist/cjs/tools/search/firecrawl.cjs +149 -0
  6. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -0
  7. package/dist/cjs/tools/search/format.cjs +116 -0
  8. package/dist/cjs/tools/search/format.cjs.map +1 -0
  9. package/dist/cjs/tools/search/highlights.cjs +194 -0
  10. package/dist/cjs/tools/search/highlights.cjs.map +1 -0
  11. package/dist/cjs/tools/search/rerankers.cjs +187 -0
  12. package/dist/cjs/tools/search/rerankers.cjs.map +1 -0
  13. package/dist/cjs/tools/search/search.cjs +410 -0
  14. package/dist/cjs/tools/search/search.cjs.map +1 -0
  15. package/dist/cjs/tools/search/tool.cjs +103 -0
  16. package/dist/cjs/tools/search/tool.cjs.map +1 -0
  17. package/dist/esm/common/enum.mjs +1 -0
  18. package/dist/esm/common/enum.mjs.map +1 -1
  19. package/dist/esm/main.mjs +1 -0
  20. package/dist/esm/main.mjs.map +1 -1
  21. package/dist/esm/tools/search/firecrawl.mjs +145 -0
  22. package/dist/esm/tools/search/firecrawl.mjs.map +1 -0
  23. package/dist/esm/tools/search/format.mjs +114 -0
  24. package/dist/esm/tools/search/format.mjs.map +1 -0
  25. package/dist/esm/tools/search/highlights.mjs +192 -0
  26. package/dist/esm/tools/search/highlights.mjs.map +1 -0
  27. package/dist/esm/tools/search/rerankers.mjs +181 -0
  28. package/dist/esm/tools/search/rerankers.mjs.map +1 -0
  29. package/dist/esm/tools/search/search.mjs +407 -0
  30. package/dist/esm/tools/search/search.mjs.map +1 -0
  31. package/dist/esm/tools/search/tool.mjs +101 -0
  32. package/dist/esm/tools/search/tool.mjs.map +1 -0
  33. package/dist/types/common/enum.d.ts +1 -0
  34. package/dist/types/index.d.ts +1 -0
  35. package/dist/types/scripts/search.d.ts +1 -0
  36. package/dist/types/tools/search/firecrawl.d.ts +117 -0
  37. package/dist/types/tools/search/format.d.ts +2 -0
  38. package/dist/types/tools/search/highlights.d.ts +13 -0
  39. package/dist/types/tools/search/index.d.ts +2 -0
  40. package/dist/types/tools/search/rerankers.d.ts +32 -0
  41. package/dist/types/tools/search/search.d.ts +9 -0
  42. package/dist/types/tools/search/tool.d.ts +12 -0
  43. package/dist/types/tools/search/types.d.ts +150 -0
  44. package/package.json +2 -1
  45. package/src/common/enum.ts +1 -0
  46. package/src/index.ts +1 -0
  47. package/src/scripts/search.ts +141 -0
  48. package/src/tools/search/firecrawl.ts +270 -0
  49. package/src/tools/search/format.ts +121 -0
  50. package/src/tools/search/highlights.ts +238 -0
  51. package/src/tools/search/index.ts +2 -0
  52. package/src/tools/search/rerankers.ts +248 -0
  53. package/src/tools/search/search.ts +567 -0
  54. package/src/tools/search/tool.ts +151 -0
  55. package/src/tools/search/types.ts +179 -0
package/dist/esm/tools/search/search.mjs
@@ -0,0 +1,407 @@
+ import axios from 'axios';
+ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
+ import { getAttribution } from './firecrawl.mjs';
+
+ /* eslint-disable no-console */
+ const chunker = {
+     cleanText: (text) => {
+         if (!text)
+             return '';
+         /** Normalized all line endings to '\n' */
+         const normalizedText = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+         /** Handle multiple backslashes followed by newlines
+          * This replaces patterns like '\\\\\\n' with a single newline */
+         const fixedBackslashes = normalizedText.replace(/\\+\n/g, '\n');
+         /** Cleaned up consecutive newlines, tabs, and spaces around newlines */
+         const cleanedNewlines = fixedBackslashes.replace(/[\t ]*\n[\t \n]*/g, '\n');
+         /** Cleaned up excessive spaces and tabs */
+         const cleanedSpaces = cleanedNewlines.replace(/[ \t]+/g, ' ');
+         return cleanedSpaces.trim();
+     },
+     splitText: async (text, options) => {
+         const chunkSize = options?.chunkSize ?? 150;
+         const chunkOverlap = options?.chunkOverlap ?? 50;
+         const separators = options?.separators || ['\n\n', '\n'];
+         const splitter = new RecursiveCharacterTextSplitter({
+             separators,
+             chunkSize,
+             chunkOverlap,
+         });
+         return await splitter.splitText(text);
+     },
+     splitTexts: async (texts, options) => {
+         // Split multiple texts
+         const promises = texts.map((text) => chunker.splitText(text, options).catch((error) => {
+             console.error('Error splitting text:', error);
+             return [text];
+         }));
+         return Promise.all(promises);
+     },
+ };
+ const createSourceUpdateCallback = (sourceMap) => {
+     return (link, update) => {
+         const source = sourceMap.get(link);
+         if (source) {
+             sourceMap.set(link, {
+                 ...source,
+                 ...update,
+             });
+         }
+     };
+ };
+ const getHighlights = async ({ query, content, reranker, topResults = 5, }) => {
+     if (!content) {
+         console.warn('No content provided for highlights');
+         return;
+     }
+     if (!reranker) {
+         console.warn('No reranker provided for highlights');
+         return;
+     }
+     try {
+         const documents = await chunker.splitText(content);
+         if (Array.isArray(documents)) {
+             return await reranker.rerank(query, documents, topResults);
+         }
+         else {
+             console.error('Expected documents to be an array, got:', typeof documents);
+             return;
+         }
+     }
+     catch (error) {
+         console.error('Error in content processing:', error);
+         return;
+     }
+ };
+ const createSerperAPI = (apiKey) => {
+     const config = {
+         apiKey: apiKey ?? process.env.SERPER_API_KEY,
+         apiUrl: 'https://google.serper.dev/search',
+         defaultLocation: 'us',
+         timeout: 10000,
+     };
+     if (config.apiKey == null || config.apiKey === '') {
+         throw new Error('SERPER_API_KEY is required for SerperAPI');
+     }
+     const getSources = async (query, numResults = 8, storedLocation) => {
+         if (!query.trim()) {
+             return { success: false, error: 'Query cannot be empty' };
+         }
+         try {
+             const searchLocation = (storedLocation ?? config.defaultLocation).toLowerCase();
+             const payload = {
+                 q: query,
+                 num: Math.min(Math.max(1, numResults), 10),
+                 gl: searchLocation,
+             };
+             const response = await axios.post(config.apiUrl, payload, {
+                 headers: {
+                     'X-API-KEY': config.apiKey,
+                     'Content-Type': 'application/json',
+                 },
+                 timeout: config.timeout,
+             });
+             const data = response.data;
+             const results = {
+                 organic: data.organic,
+                 images: data.images ?? [],
+                 topStories: data.topStories ?? [],
+                 knowledgeGraph: data.knowledgeGraph,
+                 answerBox: data.answerBox,
+                 peopleAlsoAsk: data.peopleAlsoAsk,
+                 relatedSearches: data.relatedSearches,
+             };
+             return { success: true, data: results };
+         }
+         catch (error) {
+             const errorMessage = error instanceof Error ? error.message : String(error);
+             return { success: false, error: `API request failed: ${errorMessage}` };
+         }
+     };
+     return { getSources };
+ };
+ const createSearXNGAPI = (instanceUrl, apiKey) => {
+     const config = {
+         instanceUrl: instanceUrl ?? process.env.SEARXNG_INSTANCE_URL,
+         apiKey: apiKey ?? process.env.SEARXNG_API_KEY,
+         timeout: 10000,
+     };
+     if (config.instanceUrl == null || config.instanceUrl === '') {
+         throw new Error('SEARXNG_INSTANCE_URL is required for SearXNG API');
+     }
+     const getSources = async (query, numResults = 8, storedLocation) => {
+         if (!query.trim()) {
+             return { success: false, error: 'Query cannot be empty' };
+         }
+         try {
+             // Ensure the instance URL ends with /search
+             if (config.instanceUrl == null || config.instanceUrl === '') {
+                 return { success: false, error: 'Instance URL is not defined' };
+             }
+             let searchUrl = config.instanceUrl;
+             if (!searchUrl.endsWith('/search')) {
+                 searchUrl = searchUrl.replace(/\/$/, '') + '/search';
+             }
+             // Prepare parameters for SearXNG
+             const params = {
+                 q: query,
+                 format: 'json',
+                 pageno: 1,
+                 categories: 'general',
+                 language: 'all',
+                 safesearch: 0,
+                 engines: 'google,bing,duckduckgo',
+                 max_results: Math.min(Math.max(1, numResults), 20),
+             };
+             if (storedLocation != null && storedLocation !== 'all') {
+                 params.language = storedLocation;
+             }
+             const headers = {
+                 'Content-Type': 'application/json',
+             };
+             if (config.apiKey != null && config.apiKey !== '') {
+                 headers['X-API-Key'] = config.apiKey;
+             }
+             const response = await axios.get(searchUrl, {
+                 headers,
+                 params,
+                 timeout: config.timeout,
+             });
+             const data = response.data;
+             // Transform SearXNG results to match SerperAPI format
+             const organicResults = (data.results ?? [])
+                 .slice(0, numResults)
+                 .map((result) => ({
+                     title: result.title ?? '',
+                     link: result.url ?? '',
+                     snippet: result.content ?? '',
+                     date: result.publishedDate ?? '',
+                 }));
+             // Extract image results if available
+             const imageResults = (data.results ?? [])
+                 .filter((result) => result.img_src)
+                 .slice(0, 6)
+                 .map((result) => ({
+                     title: result.title ?? '',
+                     imageUrl: result.img_src ?? '',
+                 }));
+             // Format results to match SerperAPI structure
+             const results = {
+                 organic: organicResults,
+                 images: imageResults,
+                 topStories: [],
+                 // Use undefined instead of null for optional properties
+                 relatedSearches: data.suggestions ?? [],
+             };
+             return { success: true, data: results };
+         }
+         catch (error) {
+             const errorMessage = error instanceof Error ? error.message : String(error);
+             return {
+                 success: false,
+                 error: `SearXNG API request failed: ${errorMessage}`,
+             };
+         }
+     };
+     return { getSources };
+ };
+ const createSearchAPI = (config) => {
+     const { searchProvider = 'serper', serperApiKey, searxngInstanceUrl, searxngApiKey, } = config;
+     if (searchProvider.toLowerCase() === 'serper') {
+         return createSerperAPI(serperApiKey);
+     }
+     else if (searchProvider.toLowerCase() === 'searxng') {
+         return createSearXNGAPI(searxngInstanceUrl, searxngApiKey);
+     }
+     else {
+         throw new Error(`Invalid search provider: ${searchProvider}. Must be 'serper' or 'searxng'`);
+     }
+ };
+ const createSourceProcessor = (config = {}, scraperInstance) => {
+     if (!scraperInstance) {
+         throw new Error('Firecrawl scraper instance is required');
+     }
+     const { topResults = 5,
+     // strategies = ['no_extraction'],
+     // filterContent = true,
+     reranker, } = config;
+     const firecrawlScraper = scraperInstance;
+     const webScraper = {
+         scrapeMany: async ({ query, links, }) => {
+             console.log(`Scraping ${links.length} links with Firecrawl`);
+             const promises = [];
+             try {
+                 for (const currentLink of links) {
+                     const promise = firecrawlScraper
+                         .scrapeUrl(currentLink, {})
+                         .then(([url, response]) => {
+                             const attribution = getAttribution(url, response.data?.metadata);
+                             if (response.success && response.data) {
+                                 const content = firecrawlScraper.extractContent(response);
+                                 return {
+                                     url,
+                                     attribution,
+                                     content: chunker.cleanText(content),
+                                 };
+                             }
+                             return {
+                                 url,
+                                 attribution,
+                                 error: true,
+                                 content: `Failed to scrape ${url}: ${response.error ?? 'Unknown error'}`,
+                             };
+                         })
+                         .then(async (result) => {
+                             try {
+                                 if (result.error != null) {
+                                     console.error(`Error scraping ${result.url}: ${result.content}`);
+                                     return {
+                                         ...result,
+                                     };
+                                 }
+                                 const highlights = await getHighlights({
+                                     query,
+                                     reranker,
+                                     content: result.content,
+                                 });
+                                 return {
+                                     ...result,
+                                     highlights,
+                                 };
+                             }
+                             catch (error) {
+                                 console.error('Error processing scraped content:', error);
+                                 return {
+                                     ...result,
+                                 };
+                             }
+                         })
+                         .catch((error) => {
+                             console.error(`Error scraping ${currentLink}:`, error);
+                             return {
+                                 url: currentLink,
+                                 error: true,
+                                 content: `Failed to scrape ${currentLink}: ${error.message ?? 'Unknown error'}`,
+                             };
+                         });
+                     promises.push(promise);
+                 }
+                 return await Promise.all(promises);
+             }
+             catch (error) {
+                 console.error('Error in scrapeMany:', error);
+                 return [];
+             }
+         },
+     };
+     const fetchContents = async ({ links, query, target, onContentScraped, }) => {
+         const initialLinks = links.slice(0, target);
+         // const remainingLinks = links.slice(target).reverse();
+         const results = await webScraper.scrapeMany({ query, links: initialLinks });
+         for (const result of results) {
+             if (result.error === true) {
+                 continue;
+             }
+             const { url, content, attribution, highlights } = result;
+             onContentScraped?.(url, {
+                 content,
+                 attribution,
+                 highlights,
+             });
+         }
+     };
+     const processSources = async (result, numElements, query, proMode = false) => {
+         try {
+             if (!result.data) {
+                 return {
+                     organic: [],
+                     topStories: [],
+                     images: [],
+                     relatedSearches: [],
+                 };
+             }
+             else if (!result.data.organic) {
+                 return result.data;
+             }
+             if (!proMode) {
+                 const wikiSources = result.data.organic.filter((source) => source.link.includes('wikipedia.org'));
+                 if (!wikiSources.length) {
+                     return result.data;
+                 }
+                 const wikiSourceMap = new Map();
+                 wikiSourceMap.set(wikiSources[0].link, wikiSources[0]);
+                 const onContentScraped = createSourceUpdateCallback(wikiSourceMap);
+                 await fetchContents({
+                     query,
+                     target: 1,
+                     onContentScraped,
+                     links: [wikiSources[0].link],
+                 });
+                 for (let i = 0; i < result.data.organic.length; i++) {
+                     const source = result.data.organic[i];
+                     const updatedSource = wikiSourceMap.get(source.link);
+                     if (updatedSource) {
+                         result.data.organic[i] = {
+                             ...source,
+                             ...updatedSource,
+                         };
+                     }
+                 }
+                 return result.data;
+             }
+             const sourceMap = new Map();
+             const allLinks = [];
+             for (const source of result.data.organic) {
+                 if (source.link) {
+                     allLinks.push(source.link);
+                     sourceMap.set(source.link, source);
+                 }
+             }
+             if (allLinks.length === 0) {
+                 return result.data;
+             }
+             const onContentScraped = createSourceUpdateCallback(sourceMap);
+             await fetchContents({
+                 links: allLinks,
+                 query,
+                 onContentScraped,
+                 target: numElements,
+             });
+             for (let i = 0; i < result.data.organic.length; i++) {
+                 const source = result.data.organic[i];
+                 const updatedSource = sourceMap.get(source.link);
+                 if (updatedSource) {
+                     result.data.organic[i] = {
+                         ...source,
+                         ...updatedSource,
+                     };
+                 }
+             }
+             const successfulSources = result.data.organic
+                 .filter((source) => source.content != null && !source.content.startsWith('Failed'))
+                 .slice(0, numElements);
+             if (successfulSources.length > 0) {
+                 result.data.organic = successfulSources;
+             }
+             return result.data;
+         }
+         catch (error) {
+             console.error('Error in processSources:', error);
+             return {
+                 organic: [],
+                 topStories: [],
+                 images: [],
+                 relatedSearches: [],
+                 ...result.data,
+                 error: error instanceof Error ? error.message : String(error),
+             };
+         }
+     };
+     return {
+         processSources,
+         topResults,
+     };
+ };
+
+ export { createSearchAPI, createSourceProcessor };
+ //# sourceMappingURL=search.mjs.map
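
The new search.mjs module exports two factories: createSearchAPI, which puts Serper and self-hosted SearXNG behind a common getSources interface, and createSourceProcessor, which scrapes result links with Firecrawl and reranks chunked page content into highlights. A minimal consumer sketch of the Serper path, assuming the root re-export added in index.d.ts below and a SERPER_API_KEY in the environment:

import { createSearchAPI } from '@librechat/agents';

// With no explicit key, the factory falls back to process.env.SERPER_API_KEY
// and throws if neither is present; numResults is clamped to 1-10 for Serper.
const searchAPI = createSearchAPI({ searchProvider: 'serper' });

const result = await searchAPI.getSources('functional programming', 5);
if (result.success && result.data) {
  for (const source of result.data.organic ?? []) {
    console.log(source.title, source.link);
  }
} else {
  console.error(result.error);
}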
package/dist/esm/tools/search/search.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"search.mjs","sources":["../../../../src/tools/search/search.ts"],"sourcesContent":["…"],"names":[],"mappings":"…"}
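
createSearXNGAPI reshapes SearXNG responses into the same structure Serper returns, so the provider is swappable through config alone. A sketch with a hypothetical instance URL (SEARXNG_INSTANCE_URL serves as the environment fallback):

import { createSearchAPI } from '@librechat/agents';

// The factory appends '/search' to the instance URL when missing and requests
// format=json, so the target instance must have its JSON API enabled.
const searxng = createSearchAPI({
  searchProvider: 'searxng',
  searxngInstanceUrl: 'https://searx.example.org',
});

const res = await searxng.getSources('rust async runtimes', 8);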
package/dist/esm/tools/search/tool.mjs
@@ -0,0 +1,101 @@
+ import { z } from 'zod';
+ import { tool } from '@langchain/core/tools';
+ import { createSearchAPI, createSourceProcessor } from './search.mjs';
+ import { createFirecrawlScraper } from './firecrawl.mjs';
+ import { expandHighlights } from './highlights.mjs';
+ import { formatResultsForLLM } from './format.mjs';
+ import { createReranker } from './rerankers.mjs';
+ import { Constants } from '../../common/enum.mjs';
+
+ /* eslint-disable no-console */
+ const SearchToolSchema = z.object({
+     query: z
+         .string()
+         .describe('The search query string that specifies what should be searched for.'),
+ });
+ const createSearchTool = (config = {}) => {
+     const { searchProvider = 'serper', serperApiKey, searxngInstanceUrl, searxngApiKey, rerankerType = 'cohere', topResults = 5, strategies = ['no_extraction'], filterContent = true, firecrawlApiKey, firecrawlApiUrl, firecrawlFormats = ['markdown', 'html'], jinaApiKey, cohereApiKey, onSearchResults: _onSearchResults, } = config;
+     const searchAPI = createSearchAPI({
+         searchProvider,
+         serperApiKey,
+         searxngInstanceUrl,
+         searxngApiKey,
+     });
+     const firecrawlScraper = createFirecrawlScraper({
+         apiKey: firecrawlApiKey ?? process.env.FIRECRAWL_API_KEY,
+         apiUrl: firecrawlApiUrl,
+         formats: firecrawlFormats,
+     });
+     const selectedReranker = createReranker({
+         rerankerType,
+         jinaApiKey,
+         cohereApiKey,
+     });
+     if (!selectedReranker) {
+         console.warn('No reranker selected. Using default ranking.');
+     }
+     const sourceProcessor = createSourceProcessor({
+         reranker: selectedReranker,
+         topResults }, firecrawlScraper);
+     const search = async ({ query, proMode = true, maxSources = 5, onSearchResults, }) => {
+         try {
+             const sources = await searchAPI.getSources(query);
+             onSearchResults?.(sources);
+             if (!sources.success) {
+                 throw new Error(sources.error ?? 'Search failed');
+             }
+             const processedSources = await sourceProcessor.processSources(sources, maxSources, query, proMode);
+             return expandHighlights(processedSources);
+         }
+         catch (error) {
+             console.error('Error in search:', error);
+             return {
+                 organic: [],
+                 topStories: [],
+                 images: [],
+                 relatedSearches: [],
+                 error: error instanceof Error ? error.message : String(error),
+             };
+         }
+     };
+     return tool(async ({ query }, runnableConfig) => {
+         const searchResult = await search({
+             query,
+             onSearchResults: _onSearchResults
+                 ? (result) => {
+                     _onSearchResults(result, runnableConfig);
+                 }
+                 : undefined,
+         });
+         const output = formatResultsForLLM(searchResult);
+         return [output, { [Constants.WEB_SEARCH]: { ...searchResult } }];
+     }, {
+         name: Constants.WEB_SEARCH,
+         description: `
+ Real-time search. Results have required unique citation anchors.
+
+ Anchors:
+ - \\ue202turn0searchN (web), \\ue202turn0newsN (news), \\ue202turn0imageN (image)
+
+ Special Markers:
+ - \\ue203...\\ue204 — mark start/end of cited span
+ - \\ue200...\\ue201 — composite/group block (e.g. \\ue200cite\\ue202turn0search1\\ue202turn0news2\\ue201)
+ - \\ue206 — marks grouped/summary citation areas
+
+ **CITE EVERY NON-OBVIOUS FACT/QUOTE:**
+ Insert the anchor marker(s) immediately after the statement:
+ - "Pure functions produce same output \\ue202turn0search0."
+ - Multiple: "Benefits \\ue202turn0search0\\ue202turn0news0."
+ - Span: \\ue203Key: first-class functions\\ue204\\ue202turn0news1
+ - Group: "Functional languages."\\ue206 or \\ue200cite\\ue202turn0search0\\ue202turn0news1\\ue201
+ - Image: "See photo \\ue202turn0image0."
+
+ **NEVER use markdown links, [1], or footnotes. CITE ONLY with anchors provided.**
+ `.trim(),
+         schema: SearchToolSchema,
+         responseFormat: Constants.CONTENT_AND_ARTIFACT,
+     });
+ };
+
+ export { createSearchTool };
+ //# sourceMappingURL=tool.mjs.map
package/dist/esm/tools/search/tool.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"tool.mjs","sources":["../../../../src/tools/search/tool.ts"],"sourcesContent":["…"],"names":[],"mappings":"…"}
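
tool.mjs assembles the search API, Firecrawl scraper, and reranker into a LangChain tool named web_search with a content_and_artifact response format. A sketch of standalone invocation, assuming SERPER_API_KEY, FIRECRAWL_API_KEY, and COHERE_API_KEY are exported (every key in the config falls back to an environment variable):

import { createSearchTool } from '@librechat/agents';

const webSearch = createSearchTool({ rerankerType: 'cohere' });

// Invoked with plain input, a content_and_artifact tool resolves to the
// formatted string from formatResultsForLLM; invoked with a full ToolCall,
// it resolves to a ToolMessage whose artifact carries the raw results.
const output = await webSearch.invoke({ query: 'latest TypeScript release' });
console.log(output);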
package/dist/types/common/enum.d.ts
@@ -112,6 +112,7 @@ export declare enum Callback {
  export declare enum Constants {
      OFFICIAL_CODE_BASEURL = "https://api.librechat.ai/v1",
      EXECUTE_CODE = "execute_code",
+     WEB_SEARCH = "web_search",
      CONTENT_AND_ARTIFACT = "content_and_artifact"
  }
  export declare enum EnvVar {
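
Constants.WEB_SEARCH is the lone enum addition and does double duty: it names the tool and keys the artifact half of the [content, artifact] tuple built in tool.mjs above. A sketch of unwrapping that artifact without hard-coding the string:

import { Constants } from '@librechat/agents';

// tool.mjs returns [output, { [Constants.WEB_SEARCH]: { ...searchResult } }],
// so a handler can look the results up by the enum member.
function getSearchArtifact(artifact) {
  return artifact?.[Constants.WEB_SEARCH];
}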
package/dist/types/index.d.ts
@@ -5,6 +5,7 @@ export * from './events';
  export * from './messages';
  export * from './graphs';
  export * from './tools/CodeExecutor';
+ export * from './tools/search';
  export * from './common';
  export * from './utils';
  export type * from './types';
package/dist/types/scripts/search.d.ts
@@ -0,0 +1 @@
+ export {};