@librechat/agents 3.1.75-dev.1 → 3.1.76
This diff compares the content of two publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/dist/cjs/llm/openai/index.cjs +43 -0
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/utils/index.cjs +19 -10
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
- package/dist/cjs/messages/format.cjs +67 -10
- package/dist/cjs/messages/format.cjs.map +1 -1
- package/dist/cjs/tools/search/search.cjs +55 -66
- package/dist/cjs/tools/search/search.cjs.map +1 -1
- package/dist/cjs/tools/search/tavily-scraper.cjs +189 -0
- package/dist/cjs/tools/search/tavily-scraper.cjs.map +1 -0
- package/dist/cjs/tools/search/tavily-search.cjs +372 -0
- package/dist/cjs/tools/search/tavily-search.cjs.map +1 -0
- package/dist/cjs/tools/search/tool.cjs +26 -4
- package/dist/cjs/tools/search/tool.cjs.map +1 -1
- package/dist/cjs/tools/search/utils.cjs +10 -3
- package/dist/cjs/tools/search/utils.cjs.map +1 -1
- package/dist/esm/llm/openai/index.mjs +43 -0
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/openai/utils/index.mjs +19 -10
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
- package/dist/esm/messages/format.mjs +67 -10
- package/dist/esm/messages/format.mjs.map +1 -1
- package/dist/esm/tools/search/search.mjs +55 -66
- package/dist/esm/tools/search/search.mjs.map +1 -1
- package/dist/esm/tools/search/tavily-scraper.mjs +186 -0
- package/dist/esm/tools/search/tavily-scraper.mjs.map +1 -0
- package/dist/esm/tools/search/tavily-search.mjs +370 -0
- package/dist/esm/tools/search/tavily-search.mjs.map +1 -0
- package/dist/esm/tools/search/tool.mjs +26 -4
- package/dist/esm/tools/search/tool.mjs.map +1 -1
- package/dist/esm/tools/search/utils.mjs +10 -3
- package/dist/esm/tools/search/utils.mjs.map +1 -1
- package/dist/types/messages/format.d.ts +4 -1
- package/dist/types/tools/search/tavily-scraper.d.ts +19 -0
- package/dist/types/tools/search/tavily-search.d.ts +4 -0
- package/dist/types/tools/search/types.d.ts +99 -5
- package/dist/types/tools/search/utils.d.ts +2 -2
- package/package.json +1 -1
- package/src/llm/custom-chat-models.smoke.test.ts +175 -1
- package/src/llm/openai/index.ts +124 -0
- package/src/llm/openai/utils/index.ts +23 -14
- package/src/llm/openai/utils/messages.test.ts +159 -0
- package/src/messages/format.ts +90 -13
- package/src/messages/formatAgentMessages.test.ts +166 -1
- package/src/tools/search/search.ts +83 -73
- package/src/tools/search/tavily-scraper.ts +235 -0
- package/src/tools/search/tavily-search.ts +424 -0
- package/src/tools/search/tavily.test.ts +965 -0
- package/src/tools/search/tool.ts +36 -26
- package/src/tools/search/types.ts +134 -11
- package/src/tools/search/utils.ts +13 -5
package/src/tools/search/search.ts
@@ -2,6 +2,7 @@ import axios from 'axios';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
 import type * as t from './types';
 import { getAttribution, createDefaultLogger } from './utils';
+import { createTavilyAPI } from './tavily-search';
 import { BaseReranker } from './rerankers';
 
 const chunker = {
@@ -418,15 +419,20 @@ export const createSearchAPI = (
     serperApiKey,
     searxngInstanceUrl,
     searxngApiKey,
+    tavilyApiKey,
+    tavilySearchUrl,
+    tavilySearchOptions,
   } = config;
 
   if (searchProvider.toLowerCase() === 'serper') {
     return createSerperAPI(serperApiKey);
   } else if (searchProvider.toLowerCase() === 'searxng') {
     return createSearXNGAPI(searxngInstanceUrl, searxngApiKey);
+  } else if (searchProvider.toLowerCase() === 'tavily') {
+    return createTavilyAPI(tavilyApiKey, tavilySearchUrl, tavilySearchOptions);
   } else {
     throw new Error(
-      `Invalid search provider: ${searchProvider}. Must be 'serper' or 'searxng'`
+      `Invalid search provider: ${searchProvider}. Must be 'serper', 'searxng', or 'tavily'`
     );
   }
 };
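For orientation, this is roughly how the new provider would be selected through createSearchAPI. A minimal sketch, not the package's documented public API: the relative import path is assumed, the config fields mirror the destructuring in the hunk above, and the shape of tavilySearchOptions is defined in the expanded types module.

// Sketch only; import path and exact option shapes are assumptions.
import { createSearchAPI } from './search';

const searchAPI = createSearchAPI({
  searchProvider: 'tavily', // now accepted alongside 'serper' and 'searxng'
  tavilyApiKey: process.env.TAVILY_API_KEY,
  // tavilySearchUrl: '...', // optional endpoint override
  tavilySearchOptions: {}, // provider-specific options (see types.d.ts, +99 -5)
});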
@@ -454,6 +460,56 @@ export const createSourceProcessor = (
   const logger_ = logger || createDefaultLogger();
   const scraper = scraperInstance;
 
+  const processResponse = (
+    url: string,
+    response: t.AnyScraperResponse
+  ): t.ScrapeResult => {
+    const rawMetadata = scraper.extractMetadata(response);
+    const metadata =
+      Object.keys(rawMetadata).length > 0 ? rawMetadata : undefined;
+    const attribution = getAttribution(url, metadata, logger_);
+
+    if (response.success && response.data) {
+      const [content, references] = scraper.extractContent(response);
+      return {
+        url,
+        references,
+        attribution,
+        content: chunker.cleanText(content),
+      };
+    }
+
+    logger_.error(
+      `Error scraping ${url}: ${response.error ?? 'Unknown error'}`
+    );
+    return { url, attribution, error: true, content: '' };
+  };
+
+  const addHighlights = async (
+    result: t.ScrapeResult,
+    query: string,
+    onGetHighlights: t.SearchToolConfig['onGetHighlights']
+  ): Promise<t.ScrapeResult> => {
+    if (result.error != null) {
+      return result;
+    }
+    try {
+      const highlights = await getHighlights({
+        query,
+        reranker,
+        content: result.content,
+        logger: logger_,
+      });
+      if (onGetHighlights) {
+        onGetHighlights(result.url);
+      }
+      return { ...result, highlights };
+    } catch (error) {
+      logger_.error('Error processing scraped content:', error);
+      return result;
+    }
+  };
+
   const webScraper = {
     scrapeMany: async ({
       query,
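The two helpers above replace the inline promise chain removed in the next hunk: processResponse synchronously converts a raw scraper response into a t.ScrapeResult, and addHighlights then attaches reranked highlights. A minimal sketch of how they compose for a single link (the URL, query, and callback are illustrative):

// Hypothetical single-link flow using the helpers defined above;
// scrapeUrl resolves to a [url, response] pair, as in the hunks below.
const [url, response] = await scraper.scrapeUrl('https://example.com', {});
const result = await addHighlights(
  processResponse(url, response), // response -> t.ScrapeResult
  'example query',
  (scrapedUrl) => console.log(`highlights ready for ${scrapedUrl}`)
);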
@@ -465,80 +521,34 @@ export const createSourceProcessor = (
       onGetHighlights: t.SearchToolConfig['onGetHighlights'];
     }): Promise<Array<t.ScrapeResult>> => {
       logger_.debug(`Scraping ${links.length} links`);
-      const promises: Array<Promise<t.ScrapeResult>> = [];
       try {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-              attribution,
-              content: chunker.cleanText(content),
-            } as t.ScrapeResult;
-          } else {
-            logger_.error(
-              `Error scraping ${url}: ${response.error ?? 'Unknown error'}`
-            );
-          }
-
-          return {
-            url,
-            attribution,
-            error: true,
-            content: '',
-          } as t.ScrapeResult;
-        })
-          .then(async (result) => {
-            try {
-              if (result.error != null) {
-                logger_.error(
-                  `Error scraping ${result.url}: ${result.content}`
-                );
-                return {
-                  ...result,
-                };
-              }
-              const highlights = await getHighlights({
-                query,
-                reranker,
-                content: result.content,
-                logger: logger_,
-              });
-              if (onGetHighlights) {
-                onGetHighlights(result.url);
-              }
-              return {
-                ...result,
-                highlights,
-              };
-            } catch (error) {
-              logger_.error('Error processing scraped content:', error);
-              return {
-                ...result,
-              };
-            }
-          })
-          .catch((error) => {
-            logger_.error(`Error scraping ${currentLink}:`, error);
-            return {
-              url: currentLink,
-              error: true,
-              content: '',
-            };
-          });
-        promises.push(promise);
+        let responses: Array<[string, t.AnyScraperResponse]>;
+
+        if (scraper.scrapeUrls) {
+          responses = await scraper.scrapeUrls(links);
+        } else {
+          responses = await Promise.all(
+            links.map((link) =>
+              scraper
+                .scrapeUrl(link, {})
+                .catch((error): [string, t.AnyScraperResponse] => {
+                  logger_.error(`Error scraping ${link}:`, error);
+                  return [link, { success: false, error: String(error) }];
+                })
+            )
+          );
         }
-
+
+        const withHighlights = await Promise.all(
+          responses.map(([url, response]) =>
+            addHighlights(
+              processResponse(url, response),
+              query,
+              onGetHighlights
+            )
+          )
+        );
+        return withHighlights;
     } catch (error) {
       logger_.error('Error in scrapeMany:', error);
       return [];
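With this hunk, scrapeMany prefers a batch scrapeUrls method when the scraper provides one, and only falls back to per-link scrapeUrl calls otherwise, so a single Tavily Extract request can cover all links. A sketch of the contract this relies on, with placeholder types standing in for the real definitions:

// Inferred shape; the actual interfaces live in package/src/tools/search/types.ts.
type AnyScraperResponse = { success: boolean; data?: unknown; error?: string };

interface BatchCapableScraper {
  // Required single-URL path; resolves to a [url, response] pair.
  scrapeUrl(url: string, options: object): Promise<[string, AnyScraperResponse]>;
  // Optional batch path; when present, scrapeMany sends all links in one call.
  scrapeUrls?(urls: string[]): Promise<Array<[string, AnyScraperResponse]>>;
}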
package/src/tools/search/tavily-scraper.ts
@@ -0,0 +1,235 @@
+import axios from 'axios';
+import type * as t from './types';
+import { createDefaultLogger } from './utils';
+
+const DEFAULT_BASIC_TIMEOUT = 15000;
+const DEFAULT_ADVANCED_TIMEOUT = 30000;
+const MAX_BATCH_SIZE = 20;
+
+const getDefaultTimeout = (extractDepth: 'basic' | 'advanced'): number =>
+  extractDepth === 'advanced'
+    ? DEFAULT_ADVANCED_TIMEOUT
+    : DEFAULT_BASIC_TIMEOUT;
+
+const normalizeUrlKey = (url: string): string => {
+  try {
+    const parsedUrl = new URL(url);
+    parsedUrl.hash = '';
+    if (parsedUrl.pathname.length > 1) {
+      parsedUrl.pathname = parsedUrl.pathname.replace(/\/+$/, '');
+    }
+    return parsedUrl.toString();
+  } catch {
+    return url;
+  }
+};
+
+const setUrlResult = (
+  map: Map<string, t.TavilyExtractResult>,
+  result: t.TavilyExtractResult
+): void => {
+  map.set(result.url, result);
+  const normalizedUrl = normalizeUrlKey(result.url);
+  if (!map.has(normalizedUrl)) {
+    map.set(normalizedUrl, result);
+  }
+};
+
+export class TavilyScraper implements t.BaseScraper {
+  private apiKey: string;
+  private apiUrl: string;
+  private timeout: number;
+  private payloadTimeout: number | undefined;
+  private logger: t.Logger;
+  private extractDepth: 'basic' | 'advanced';
+  private includeImages: boolean;
+  private includeFavicon: boolean;
+  private format: 'markdown' | 'text' | undefined;
+
+  constructor(config: t.TavilyScraperConfig = {}) {
+    this.apiKey = config.apiKey ?? process.env.TAVILY_API_KEY ?? '';
+    this.apiUrl =
+      config.apiUrl ??
+      process.env.TAVILY_EXTRACT_URL ??
+      'https://api.tavily.com/extract';
+    this.payloadTimeout = config.timeout;
+    this.extractDepth = config.extractDepth ?? 'basic';
+    this.timeout = config.timeout ?? getDefaultTimeout(this.extractDepth);
+    this.includeImages = config.includeImages ?? false;
+    this.includeFavicon = config.includeFavicon ?? false;
+    this.format = config.format;
+    this.logger = config.logger || createDefaultLogger();
+
+    if (!this.apiKey) {
+      this.logger.warn('TAVILY_API_KEY is not set. Scraping will not work.');
+    }
+  }
+
+  async scrapeUrl(
+    url: string,
+    options: t.TavilyScrapeOptions = {}
+  ): Promise<[string, t.TavilyScrapeResponse]> {
+    const results = await this.scrapeUrls([url], options);
+    return results[0];
+  }
+
+  async scrapeUrls(
+    urls: string[],
+    options: t.TavilyScrapeOptions = {}
+  ): Promise<Array<[string, t.TavilyScrapeResponse]>> {
+    if (!this.apiKey) {
+      return urls.map((url) => [
+        url,
+        { success: false, error: 'TAVILY_API_KEY is not set' },
+      ]);
+    }
+
+    const batches: string[][] = [];
+    for (let i = 0; i < urls.length; i += MAX_BATCH_SIZE) {
+      batches.push(urls.slice(i, i + MAX_BATCH_SIZE));
+    }
+
+    const allResults: Array<[string, t.TavilyScrapeResponse]> = [];
+
+    for (const batch of batches) {
+      const batchResults = await this.extractBatch(batch, options);
+      allResults.push(...batchResults);
+    }
+
+    return allResults;
+  }
+
+  private async extractBatch(
+    urls: string[],
+    options: t.TavilyScrapeOptions = {}
+  ): Promise<Array<[string, t.TavilyScrapeResponse]>> {
+    try {
+      const includeFavicon = options.includeFavicon ?? this.includeFavicon;
+      const format = options.format ?? this.format;
+      const extractDepth = options.extractDepth ?? this.extractDepth;
+      const payload: t.TavilyExtractPayload = {
+        urls,
+        extract_depth: extractDepth,
+        include_images: options.includeImages ?? this.includeImages,
+      };
+
+      if (includeFavicon) {
+        payload.include_favicon = true;
+      }
+      if (format != null) {
+        payload.format = format;
+      }
+
+      const effectiveTimeout =
+        options.timeout ??
+        this.payloadTimeout ??
+        (options.extractDepth != null
+          ? getDefaultTimeout(extractDepth)
+          : this.timeout);
+      const payloadTimeout = options.timeout ?? this.payloadTimeout;
+      if (payloadTimeout != null) {
+        payload.timeout = Math.min(Math.max(payloadTimeout / 1000, 1), 60);
+      }
+
+      const response = await axios.post<{
+        results?: t.TavilyExtractResult[];
+        failed_results?: t.TavilyExtractResult[];
+      }>(this.apiUrl, payload, {
+        headers: {
+          Authorization: `Bearer ${this.apiKey}`,
+          'Content-Type': 'application/json',
+        },
+        timeout: effectiveTimeout,
+      });
+
+      const data = response.data;
+      const successMap = new Map<string, t.TavilyExtractResult>();
+      const failedMap = new Map<string, t.TavilyExtractResult>();
+
+      for (const result of data.results ?? []) {
+        setUrlResult(successMap, result);
+      }
+      for (const result of data.failed_results ?? []) {
+        setUrlResult(failedMap, result);
+      }
+
+      return urls.map((url): [string, t.TavilyScrapeResponse] => {
+        const success =
+          successMap.get(url) ?? successMap.get(normalizeUrlKey(url));
+        if (success && success.error == null) {
+          return [
+            url,
+            {
+              success: true,
+              data: {
+                rawContent: success.raw_content ?? '',
+                images: success.images ?? [],
+                favicon: success.favicon,
+              },
+            },
+          ];
+        }
+
+        const failed =
+          failedMap.get(url) ?? failedMap.get(normalizeUrlKey(url));
+        const error =
+          success?.error ??
+          failed?.error ??
+          'URL not found in Tavily Extract response';
+        return [url, { success: false, error }];
+      });
+    } catch (error) {
+      const errorMessage =
+        error instanceof Error ? error.message : String(error);
+      return urls.map((url) => [
+        url,
+        {
+          success: false,
+          error: `Tavily Extract API request failed: ${errorMessage}`,
+        },
+      ]);
+    }
+  }
+
+  extractContent(
+    response: t.TavilyScrapeResponse
+  ): [string, undefined | t.References] {
+    if (!response.success || !response.data) {
+      return ['', undefined];
+    }
+
+    const content = response.data.rawContent ?? '';
+    const images = response.data.images ?? [];
+
+    const references: t.References | undefined =
+      images.length > 0
+        ? {
+            links: [],
+            images: images.map((imageUrl) => ({ originalUrl: imageUrl })),
+            videos: [],
+          }
+        : undefined;
+
+    return [content, references];
+  }
+
+  extractMetadata(response: t.TavilyScrapeResponse): t.GenericScrapeMetadata {
+    if (!response.success || !response.data) {
+      return {};
+    }
+
+    const metadata: t.GenericScrapeMetadata = {
+      images_count: response.data.images?.length ?? 0,
+    };
+    if (response.data.favicon != null) {
+      metadata.favicon = response.data.favicon;
+    }
+    return metadata;
+  }
+}
+
+export const createTavilyScraper = (
+  config: t.TavilyScraperConfig = {}
+): TavilyScraper => {
+  return new TavilyScraper(config);
+};
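A minimal usage sketch of the new scraper, assuming TAVILY_API_KEY is set in the environment; createTavilyScraper and the option names come straight from the file above, while the URLs are illustrative:

// Sketch only; run as an ES module so top-level await is available.
import { createTavilyScraper } from './tavily-scraper';

const scraper = createTavilyScraper({
  extractDepth: 'advanced', // raises the default request timeout from 15s to 30s
  includeImages: true,
});

// URLs are sent to the Tavily Extract API in batches of up to 20.
const results = await scraper.scrapeUrls([
  'https://example.com',
  'https://example.com/docs/', // trailing-slash variants matched via normalizeUrlKey
]);

for (const [url, response] of results) {
  if (response.success && response.data) {
    console.log(url, response.data.rawContent.slice(0, 80));
  } else {
    console.error(url, response.error);
  }
}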