@librechat/agents 2.4.316 → 2.4.318

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/cjs/tools/search/content.cjs +140 -0
  2. package/dist/cjs/tools/search/content.cjs.map +1 -0
  3. package/dist/cjs/tools/search/firecrawl.cjs +17 -37
  4. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  5. package/dist/cjs/tools/search/format.cjs +79 -29
  6. package/dist/cjs/tools/search/format.cjs.map +1 -1
  7. package/dist/cjs/tools/search/highlights.cjs +64 -13
  8. package/dist/cjs/tools/search/highlights.cjs.map +1 -1
  9. package/dist/cjs/tools/search/search.cjs +13 -15
  10. package/dist/cjs/tools/search/search.cjs.map +1 -1
  11. package/dist/cjs/tools/search/tool.cjs +44 -12
  12. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  13. package/dist/cjs/tools/search/utils.cjs +35 -0
  14. package/dist/cjs/tools/search/utils.cjs.map +1 -0
  15. package/dist/esm/tools/search/content.mjs +119 -0
  16. package/dist/esm/tools/search/content.mjs.map +1 -0
  17. package/dist/esm/tools/search/firecrawl.mjs +18 -37
  18. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  19. package/dist/esm/tools/search/format.mjs +79 -29
  20. package/dist/esm/tools/search/format.mjs.map +1 -1
  21. package/dist/esm/tools/search/highlights.mjs +64 -13
  22. package/dist/esm/tools/search/highlights.mjs.map +1 -1
  23. package/dist/esm/tools/search/search.mjs +12 -14
  24. package/dist/esm/tools/search/search.mjs.map +1 -1
  25. package/dist/esm/tools/search/tool.mjs +44 -12
  26. package/dist/esm/tools/search/tool.mjs.map +1 -1
  27. package/dist/esm/tools/search/utils.mjs +32 -0
  28. package/dist/esm/tools/search/utils.mjs.map +1 -0
  29. package/dist/types/tools/search/content.d.ts +4 -0
  30. package/dist/types/tools/search/firecrawl.d.ts +6 -86
  31. package/dist/types/tools/search/format.d.ts +4 -1
  32. package/dist/types/tools/search/highlights.d.ts +1 -1
  33. package/dist/types/tools/search/search.d.ts +1 -1
  34. package/dist/types/tools/search/test.d.ts +1 -0
  35. package/dist/types/tools/search/tool.d.ts +12 -4
  36. package/dist/types/tools/search/types.d.ts +380 -46
  37. package/dist/types/tools/search/utils.d.ts +3 -0
  38. package/package.json +3 -2
  39. package/src/scripts/search.ts +5 -3
  40. package/src/tools/search/content.test.ts +173 -0
  41. package/src/tools/search/content.ts +147 -0
  42. package/src/tools/search/firecrawl.ts +27 -144
  43. package/src/tools/search/format.ts +89 -31
  44. package/src/tools/search/highlights.ts +99 -17
  45. package/src/tools/search/output.md +2775 -0
  46. package/src/tools/search/search.ts +42 -54
  47. package/src/tools/search/test.html +884 -0
  48. package/src/tools/search/test.md +643 -0
  49. package/src/tools/search/test.ts +159 -0
  50. package/src/tools/search/tool.ts +54 -15
  51. package/src/tools/search/types.ts +430 -52
  52. package/src/tools/search/utils.ts +43 -0

package/src/tools/search/content.ts

@@ -0,0 +1,147 @@
+ import * as cheerio from 'cheerio';
+ import type { References, MediaReference } from './types';
+
+ export function processContent(
+   html: string,
+   markdown: string
+ ): {
+   markdown: string;
+ } & References {
+   const linkMap = new Map<string, MediaReference>();
+   const imageMap = new Map<string, MediaReference>();
+   const videoMap = new Map<string, MediaReference>();
+   const iframeMap = new Map<string, MediaReference>();
+
+   const $ = cheerio.load(html, {
+     xmlMode: false,
+   });
+
+   // Extract all media references
+   $('a[href]').each((_, el) => {
+     const href = $(el).attr('href');
+     if (href != null && href) {
+       linkMap.set(href, {
+         originalUrl: href,
+         title: $(el).attr('title'),
+         text: $(el).text().trim(),
+       });
+     }
+   });
+
+   $('img[src]').each((_, el) => {
+     const src = $(el).attr('src');
+     if (src != null && src) {
+       imageMap.set(src, {
+         originalUrl: src,
+         title: $(el).attr('alt') ?? $(el).attr('title'),
+       });
+     }
+   });
+
+   // Handle videos (dedicated video elements and video platforms in iframes)
+   $('video[src], iframe[src*="youtube"], iframe[src*="vimeo"]').each(
+     (_, el) => {
+       const src = $(el).attr('src');
+       if (src != null && src) {
+         videoMap.set(src, {
+           originalUrl: src,
+           title: $(el).attr('title'),
+         });
+       }
+     }
+   );
+
+   // Handle all other generic iframes that aren't already captured as videos
+   $('iframe').each((_, el) => {
+     const src = $(el).attr('src');
+     if (
+       src != null &&
+       src &&
+       !src.includes('youtube') &&
+       !src.includes('vimeo')
+     ) {
+       iframeMap.set(src, {
+         originalUrl: src,
+         title: $(el).attr('title'),
+       });
+     }
+   });
+
+   // Create lookup maps with indices
+   const linkIndexMap = new Map<string, number>();
+   const imageIndexMap = new Map<string, number>();
+   const videoIndexMap = new Map<string, number>();
+   const iframeIndexMap = new Map<string, number>();
+
+   Array.from(linkMap.keys()).forEach((url, i) => linkIndexMap.set(url, i + 1));
+   Array.from(imageMap.keys()).forEach((url, i) =>
+     imageIndexMap.set(url, i + 1)
+   );
+   Array.from(videoMap.keys()).forEach((url, i) =>
+     videoIndexMap.set(url, i + 1)
+   );
+   Array.from(iframeMap.keys()).forEach((url, i) =>
+     iframeIndexMap.set(url, i + 1)
+   );
+
+   // Process the markdown
+   let result = markdown;
+
+   // Replace each URL one by one, starting with the longest URLs first to avoid partial matches
+   const allUrls = [
+     ...Array.from(imageMap.keys()).map((url) => ({
+       url,
+       type: 'image',
+       idx: imageIndexMap.get(url),
+     })),
+     ...Array.from(videoMap.keys()).map((url) => ({
+       url,
+       type: 'video',
+       idx: videoIndexMap.get(url),
+     })),
+     ...Array.from(iframeMap.keys()).map((url) => ({
+       url,
+       type: 'iframe',
+       idx: iframeIndexMap.get(url),
+     })),
+     ...Array.from(linkMap.keys()).map((url) => ({
+       url,
+       type: 'link',
+       idx: linkIndexMap.get(url),
+     })),
+   ].sort((a, b) => b.url.length - a.url.length);
+
+   // Create a function to escape special characters in URLs for regex
+   function escapeRegex(string: string): string {
+     return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+   }
+
+   // Replace each URL in the markdown
+   for (const { url, type, idx } of allUrls) {
+     // Create a regex that captures URLs in markdown links
+     const regex = new RegExp(`\\(${escapeRegex(url)}(?:\\s+"[^"]*")?\\)`, 'g');
+
+     result = result.replace(regex, (match) => {
+       // Keep any title attribute that might exist
+       const titleMatch = match.match(/\s+"([^"]*)"/);
+       const titlePart = titleMatch ? ` "${titleMatch[1]}"` : '';
+
+       return `(${type}#${idx}${titlePart})`;
+     });
+   }
+
+   iframeMap.clear();
+   const links = Array.from(linkMap.values());
+   linkMap.clear();
+   const images = Array.from(imageMap.values());
+   imageMap.clear();
+   const videos = Array.from(videoMap.values());
+   videoMap.clear();
+
+   return {
+     markdown: result,
+     links,
+     images,
+     videos,
+   };
+ }
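
Note: the new `processContent` helper above rewrites every URL found in the scraped markdown to a typed index (`link#1`, `image#1`, and so on) and returns the extracted references alongside the rewritten text. A minimal usage sketch with hypothetical HTML/markdown inputs; the expected values in the comments follow from the replacement logic shown above:

```ts
import { processContent } from './content';

// Hypothetical page content, roughly as a scraper would return it.
const html = `
  <a href="https://example.com/docs" title="Docs">Read the docs</a>
  <img src="https://example.com/logo.png" alt="Example logo" />
`;
const markdown = [
  '[Read the docs](https://example.com/docs "Docs")',
  '![Example logo](https://example.com/logo.png)',
].join('\n');

const { markdown: rewritten, links, images, videos } = processContent(
  html,
  markdown
);

// rewritten: '[Read the docs](link#1 "Docs")\n![Example logo](image#1)'
// links:  [{ originalUrl: 'https://example.com/docs', title: 'Docs', text: 'Read the docs' }]
// images: [{ originalUrl: 'https://example.com/logo.png', title: 'Example logo' }]
// videos: []
console.log(rewritten, links, images, videos);
```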

package/src/tools/search/firecrawl.ts

@@ -1,136 +1,7 @@
  /* eslint-disable no-console */
  import axios from 'axios';
-
- export interface FirecrawlScrapeOptions {
-   formats?: string[];
-   includeTags?: string[];
-   excludeTags?: string[];
-   headers?: Record<string, string>;
-   waitFor?: number;
-   timeout?: number;
- }
-
- interface ScrapeMetadata {
-   // Core source information
-   sourceURL?: string;
-   url?: string;
-   scrapeId?: string;
-   statusCode?: number;
-   // Basic metadata
-   title?: string;
-   description?: string;
-   language?: string;
-   favicon?: string;
-   viewport?: string;
-   robots?: string;
-   'theme-color'?: string;
-   // Open Graph metadata
-   'og:url'?: string;
-   'og:title'?: string;
-   'og:description'?: string;
-   'og:type'?: string;
-   'og:image'?: string;
-   'og:image:width'?: string;
-   'og:image:height'?: string;
-   'og:site_name'?: string;
-   ogUrl?: string;
-   ogTitle?: string;
-   ogDescription?: string;
-   ogImage?: string;
-   ogSiteName?: string;
-   // Article metadata
-   'article:author'?: string;
-   'article:published_time'?: string;
-   'article:modified_time'?: string;
-   'article:section'?: string;
-   'article:tag'?: string;
-   'article:publisher'?: string;
-   publishedTime?: string;
-   modifiedTime?: string;
-   // Twitter metadata
-   'twitter:site'?: string;
-   'twitter:creator'?: string;
-   'twitter:card'?: string;
-   'twitter:image'?: string;
-   'twitter:dnt'?: string;
-   'twitter:app:name:iphone'?: string;
-   'twitter:app:id:iphone'?: string;
-   'twitter:app:url:iphone'?: string;
-   'twitter:app:name:ipad'?: string;
-   'twitter:app:id:ipad'?: string;
-   'twitter:app:url:ipad'?: string;
-   'twitter:app:name:googleplay'?: string;
-   'twitter:app:id:googleplay'?: string;
-   'twitter:app:url:googleplay'?: string;
-   // Facebook metadata
-   'fb:app_id'?: string;
-   // App links
-   'al:ios:url'?: string;
-   'al:ios:app_name'?: string;
-   'al:ios:app_store_id'?: string;
-   // Allow for additional properties that might be present
-   [key: string]: string | number | boolean | null | undefined;
- }
-
- export interface FirecrawlScrapeResponse {
-   success: boolean;
-   data?: {
-     markdown?: string;
-     html?: string;
-     rawHtml?: string;
-     screenshot?: string;
-     links?: string[];
-     metadata?: ScrapeMetadata;
-   };
-   error?: string;
- }
-
- export interface FirecrawlScraperConfig {
-   apiKey?: string;
-   apiUrl?: string;
-   formats?: string[];
-   timeout?: number;
- }
- const getDomainName = (
-   link: string,
-   metadata?: ScrapeMetadata
- ): string | undefined => {
-   try {
-     const url = metadata?.sourceURL ?? metadata?.url ?? (link || '');
-     const domain = new URL(url).hostname.replace(/^www\./, '');
-     if (domain) {
-       return domain;
-     }
-   } catch (e) {
-     // URL parsing failed
-     console.error('Error parsing URL:', e);
-   }
-
-   return;
- };
-
- export function getAttribution(
-   link: string,
-   metadata?: ScrapeMetadata
- ): string | undefined {
-   if (!metadata) return getDomainName(link, metadata);
-
-   const possibleAttributions = [
-     metadata.ogSiteName,
-     metadata['og:site_name'],
-     metadata.title?.split('|').pop()?.trim(),
-     metadata['twitter:site']?.replace(/^@/, ''),
-   ];
-
-   const attribution = possibleAttributions.find(
-     (attr) => attr != null && typeof attr === 'string' && attr.trim() !== ''
-   );
-   if (attribution != null) {
-     return attribution;
-   }
-
-   return getDomainName(link, metadata);
- }
+ import { processContent } from './content';
+ import type * as t from './types';

  /**
   * Firecrawl scraper implementation
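
Note: the `getDomainName`/`getAttribution` helpers deleted in the hunk above do not disappear from the package; `format.ts` below imports `getDomainName` from `./utils`, and the new `package/src/tools/search/utils.ts` (+43 lines, not shown in this excerpt) is the likely new home. A hedged sketch of the moved domain helper, assuming it keeps the removed logic but now takes only a URL; the actual published source may differ:

```ts
// Hypothetical reconstruction of getDomainName in utils.ts; it mirrors the
// logic deleted from firecrawl.ts above, minus the metadata parameter.
export const getDomainName = (link: string): string | undefined => {
  try {
    // "https://www.example.com/page" -> "example.com"
    const domain = new URL(link).hostname.replace(/^www\./, '');
    if (domain) {
      return domain;
    }
  } catch (e) {
    // URL parsing failed
    console.error('Error parsing URL:', e);
  }
  return;
};
```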
@@ -142,7 +13,7 @@ export class FirecrawlScraper {
    private defaultFormats: string[];
    private timeout: number;

-   constructor(config: FirecrawlScraperConfig = {}) {
+   constructor(config: t.FirecrawlScraperConfig = {}) {
      this.apiKey = config.apiKey ?? process.env.FIRECRAWL_API_KEY ?? '';

      const baseUrl =
@@ -169,8 +40,8 @@ export class FirecrawlScraper {
     */
    async scrapeUrl(
      url: string,
-     options: FirecrawlScrapeOptions = {}
-   ): Promise<[string, FirecrawlScrapeResponse]> {
+     options: t.FirecrawlScrapeOptions = {}
+   ): Promise<[string, t.FirecrawlScrapeResponse]> {
      if (!this.apiKey) {
        return [
          url,
@@ -221,27 +92,39 @@ export class FirecrawlScraper {
     * @param response Scrape response
     * @returns Extracted content or empty string if not available
     */
-   extractContent(response: FirecrawlScrapeResponse): string {
+   extractContent(
+     response: t.FirecrawlScrapeResponse
+   ): [string, undefined | t.References] {
      if (!response.success || !response.data) {
-       return '';
+       return ['', undefined];
      }

-     // Prefer markdown content if available
-     if (response.data.markdown != null) {
-       return response.data.markdown;
+     if (response.data.markdown != null && response.data.html != null) {
+       try {
+         const { markdown, ...rest } = processContent(
+           response.data.html,
+           response.data.markdown
+         );
+         return [markdown, rest];
+       } catch (error) {
+         console.error('Error processing content:', error);
+         return [response.data.markdown, undefined];
+       }
+     } else if (response.data.markdown != null) {
+       return [response.data.markdown, undefined];
      }

      // Fall back to HTML content
      if (response.data.html != null) {
-       return response.data.html;
+       return [response.data.html, undefined];
      }

      // Fall back to raw HTML content
      if (response.data.rawHtml != null) {
-       return response.data.rawHtml;
+       return [response.data.rawHtml, undefined];
      }

-     return '';
+     return ['', undefined];
    }

    /**
@@ -249,7 +132,7 @@ export class FirecrawlScraper {
     * @param response Scrape response
     * @returns Metadata object
     */
-   extractMetadata(response: FirecrawlScrapeResponse): ScrapeMetadata {
+   extractMetadata(response: t.FirecrawlScrapeResponse): t.ScrapeMetadata {
      if (!response.success || !response.data || !response.data.metadata) {
        return {};
      }
@@ -264,7 +147,7 @@ export class FirecrawlScraper {
   * @returns Firecrawl scraper instance
   */
  export const createFirecrawlScraper = (
-   config: FirecrawlScraperConfig = {}
+   config: t.FirecrawlScraperConfig = {}
  ): FirecrawlScraper => {
    return new FirecrawlScraper(config);
  };
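
Note: `extractContent` now returns a `[content, references]` tuple instead of a bare string, so existing callers need a small adjustment. A sketch of how the updated scraper might be driven; the URL and option values are illustrative, and the `References` fields follow the `processContent` return shape above:

```ts
import { createFirecrawlScraper } from './firecrawl';

const scraper = createFirecrawlScraper({ formats: ['markdown', 'html'] });

async function run(): Promise<void> {
  // scrapeUrl still resolves to [url, response].
  const [url, response] = await scraper.scrapeUrl('https://example.com');

  // extractContent now yields [content, references]; references is undefined
  // unless both markdown and html came back and processContent succeeded.
  const [content, references] = scraper.extractContent(response);
  const metadata = scraper.extractMetadata(response);

  console.log(url, metadata.title);
  console.log(content.slice(0, 200));
  console.log(references?.links?.length ?? 0, 'links extracted');
}

run().catch(console.error);
```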

package/src/tools/search/format.ts

@@ -1,37 +1,69 @@
  import type * as t from './types';
+ import { getDomainName } from './utils';

  export function formatResultsForLLM(
    turn: number,
    results: t.SearchResultData
- ): string {
+ ): { output: string; references: t.ResultReference[] } {
    let output = '';

    const addSection = (title: string): void => {
      output += `\n=== ${title} ===\n`;
    };

+   const references: t.ResultReference[] = [];
    // Organic (web) results
-   const organic = results.organic ?? [];
-   if (organic.length) {
-     addSection('Web Results, Turn ' + turn);
-     organic.forEach((r, i) => {
+   if (results.organic?.length != null && results.organic.length > 0) {
+     addSection(`Web Results, Turn ${turn}`);
+     for (let i = 0; i < results.organic.length; i++) {
+       const r = results.organic[i];
        output += [
-         `Source ${i}: ${r.title ?? '(no title)'}`,
-         `Citation Anchor: \\ue202turn${turn}search${i}`,
+         `# Source ${i}: "${r.title ?? '(no title)'}"`,
+         `Anchor: \\ue202turn${turn}search${i}`,
          `URL: ${r.link}`,
          r.snippet != null ? `Summary: ${r.snippet}` : '',
          r.date != null ? `Date: ${r.date}` : '',
          r.attribution != null ? `Source: ${r.attribution}` : '',
          '',
-         '--- Content Highlights ---',
-         ...(r.highlights ?? [])
-           .filter((h) => h.text.trim().length > 0)
-           .map((h) => `[Relevance: ${h.score.toFixed(2)}]\n${h.text.trim()}`),
+         '\n## Highlights\n\n',
+         '',
          '',
        ]
          .filter(Boolean)
          .join('\n');
-     });
+
+       (r.highlights ?? [])
+         .filter((h) => h.text.trim().length > 0)
+         .forEach((h, hIndex) => {
+           output += `### Highlight ${hIndex + 1} [Relevance: ${h.score.toFixed(2)}]\n\n`;
+           output += '```text\n' + h.text.trim() + '\n```\n\n';
+
+           if (h.references != null && h.references.length) {
+             output += 'Core References:\n';
+             output += h.references
+               .map((ref) => {
+                 references.push({
+                   link: ref.reference.originalUrl,
+                   attribution: getDomainName(ref.reference.originalUrl),
+                   title: (
+                     ((ref.reference.title ?? '') || ref.reference.text) ??
+                     ''
+                   ).split('\n')[0],
+                 });
+                 return `- ${ref.type}#${ref.originalIndex + 1}: ${ref.reference.originalUrl}\n\t- Anchor: \\ue202turn${turn}ref${references.length - 1}`;
+               })
+               .join('\n');
+             output += '\n\n';
+           }
+
+           if (hIndex < (r.highlights?.length ?? 0) - 1) {
+             output += '---\n\n';
+           }
+         });
+
+       delete results.organic[i].highlights;
+       output += '\n';
+     }
    }

    // Ignoring these sections for now
@@ -70,23 +102,36 @@ export function formatResultsForLLM(
    if (results.knowledgeGraph != null) {
      addSection('Knowledge Graph');
      output += [
-       `Title: ${results.knowledgeGraph.title ?? '(no title)'}`,
+       `**Title:** ${results.knowledgeGraph.title ?? '(no title)'}`,
+       results.knowledgeGraph.type != null
+         ? `**Type:** ${results.knowledgeGraph.type}`
+         : '',
        results.knowledgeGraph.description != null
-         ? `Description: ${results.knowledgeGraph.description}`
+         ? `**Description:** ${results.knowledgeGraph.description}`
          : '',
-       results.knowledgeGraph.type != null
-         ? `Type: ${results.knowledgeGraph.type}`
+       results.knowledgeGraph.descriptionSource != null
+         ? `**Description Source:** ${results.knowledgeGraph.descriptionSource}`
+         : '',
+       results.knowledgeGraph.descriptionLink != null
+         ? `**Description Link:** ${results.knowledgeGraph.descriptionLink}`
          : '',
        results.knowledgeGraph.imageUrl != null
-         ? `Image URL: ${results.knowledgeGraph.imageUrl}`
+         ? `**Image URL:** ${results.knowledgeGraph.imageUrl}`
+         : '',
+       results.knowledgeGraph.website != null
+         ? `**Website:** ${results.knowledgeGraph.website}`
          : '',
        results.knowledgeGraph.attributes != null
-         ? `Attributes: ${JSON.stringify(results.knowledgeGraph.attributes, null, 2)}`
+         ? `**Attributes:**\n\`\`\`json\n${JSON.stringify(
+             results.knowledgeGraph.attributes,
+             null,
+             2
+           )}\n\`\`\``
          : '',
        '',
      ]
        .filter(Boolean)
-       .join('\n');
+       .join('\n\n');
    }

    // Answer Box
@@ -94,31 +139,44 @@ export function formatResultsForLLM(
      addSection('Answer Box');
      output += [
        results.answerBox.title != null
-         ? `Title: ${results.answerBox.title}`
-         : '',
-       results.answerBox.answer != null
-         ? `Answer: ${results.answerBox.answer}`
+         ? `**Title:** ${results.answerBox.title}`
          : '',
        results.answerBox.snippet != null
-         ? `Snippet: ${results.answerBox.snippet}`
+         ? `**Snippet:** ${results.answerBox.snippet}`
+         : '',
+       results.answerBox.snippetHighlighted != null
+         ? `**Snippet Highlighted:** ${results.answerBox.snippetHighlighted
+             .map((s) => `\`${s}\``)
+             .join(' ')}`
+         : '',
+       results.answerBox.link != null
+         ? `**Link:** ${results.answerBox.link}`
          : '',
-       results.answerBox.date != null ? `Date: ${results.answerBox.date}` : '',
        '',
      ]
        .filter(Boolean)
-       .join('\n');
+       .join('\n\n');
    }

    // People also ask
    const peopleAlsoAsk = results.peopleAlsoAsk ?? [];
    if (peopleAlsoAsk.length) {
      addSection('People Also Ask');
-     peopleAlsoAsk.forEach((p, _i) => {
-       output += [`Q: ${p.question}`, `A: ${p.answer}`, '']
+     peopleAlsoAsk.forEach((p, i) => {
+       output += [
+         `### Question ${i + 1}:`,
+         `"${p.question}"`,
+         `${p.snippet != null && p.snippet ? `Snippet: ${p.snippet}}` : ''}`,
+         `${p.title != null && p.title ? `Title: ${p.title}` : ''}`,
+         `${p.link != null && p.link ? `Link: ${p.link}` : ''}`,
+         '',
+       ]
          .filter(Boolean)
-         .join('\n');
+         .join('\n\n');
      });
    }
-
-   return output.trim();
+   return {
+     output: output.trim(),
+     references,
+   };
  }
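
Note: `formatResultsForLLM` now returns `{ output, references }` rather than a plain string, with per-source `\ue202turn{N}search{i}` anchors and per-highlight `\ue202turn{N}ref{j}` anchors baked into the text. A small consumption sketch; the `results` object is illustrative and cast loosely, since the full `t.SearchResultData` shape lives in the expanded types.ts listed above:

```ts
import { formatResultsForLLM } from './format';
import type * as t from './types';

// Illustrative search data; field names follow the usage in the diff above.
const results = {
  organic: [
    {
      title: 'Example Article',
      link: 'https://example.com/article',
      snippet: 'A short summary of the page.',
      highlights: [],
    },
  ],
} as unknown as t.SearchResultData;

const { output, references } = formatResultsForLLM(1, results);

// `output` is the LLM-facing text ("# Source 0 ..." with \ue202turn1search0 anchors);
// `references` accumulates highlight references in anchor order (empty here).
console.log(output);
console.log(references.length);
```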