@librechat/agents 3.0.0-rc10 → 3.0.0-rc12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (147)
  1. package/dist/cjs/agents/AgentContext.cjs +6 -2
  2. package/dist/cjs/agents/AgentContext.cjs.map +1 -1
  3. package/dist/cjs/graphs/Graph.cjs +23 -2
  4. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  5. package/dist/cjs/llm/anthropic/index.cjs +21 -2
  6. package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
  7. package/dist/cjs/llm/google/index.cjs +3 -0
  8. package/dist/cjs/llm/google/index.cjs.map +1 -1
  9. package/dist/cjs/llm/google/utils/common.cjs +13 -0
  10. package/dist/cjs/llm/google/utils/common.cjs.map +1 -1
  11. package/dist/cjs/llm/ollama/index.cjs +3 -0
  12. package/dist/cjs/llm/ollama/index.cjs.map +1 -1
  13. package/dist/cjs/llm/openai/index.cjs +20 -1
  14. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  15. package/dist/cjs/llm/openai/utils/index.cjs +6 -1
  16. package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
  17. package/dist/cjs/llm/openrouter/index.cjs +5 -1
  18. package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
  19. package/dist/cjs/llm/vertexai/index.cjs +1 -1
  20. package/dist/cjs/llm/vertexai/index.cjs.map +1 -1
  21. package/dist/cjs/main.cjs +7 -2
  22. package/dist/cjs/main.cjs.map +1 -1
  23. package/dist/cjs/messages/cache.cjs +49 -0
  24. package/dist/cjs/messages/cache.cjs.map +1 -0
  25. package/dist/cjs/messages/content.cjs +53 -0
  26. package/dist/cjs/messages/content.cjs.map +1 -0
  27. package/dist/cjs/messages/core.cjs +5 -1
  28. package/dist/cjs/messages/core.cjs.map +1 -1
  29. package/dist/cjs/messages/format.cjs +50 -59
  30. package/dist/cjs/messages/format.cjs.map +1 -1
  31. package/dist/cjs/messages/prune.cjs +28 -0
  32. package/dist/cjs/messages/prune.cjs.map +1 -1
  33. package/dist/cjs/tools/ToolNode.cjs +2 -0
  34. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  35. package/dist/cjs/tools/search/firecrawl.cjs +3 -1
  36. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  37. package/dist/cjs/tools/search/rerankers.cjs +8 -6
  38. package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
  39. package/dist/cjs/tools/search/search.cjs +5 -5
  40. package/dist/cjs/tools/search/search.cjs.map +1 -1
  41. package/dist/cjs/tools/search/serper-scraper.cjs +132 -0
  42. package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -0
  43. package/dist/cjs/tools/search/tool.cjs +46 -9
  44. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  45. package/dist/cjs/utils/handlers.cjs +70 -0
  46. package/dist/cjs/utils/handlers.cjs.map +1 -0
  47. package/dist/esm/agents/AgentContext.mjs +6 -2
  48. package/dist/esm/agents/AgentContext.mjs.map +1 -1
  49. package/dist/esm/graphs/Graph.mjs +23 -2
  50. package/dist/esm/graphs/Graph.mjs.map +1 -1
  51. package/dist/esm/llm/anthropic/index.mjs +21 -2
  52. package/dist/esm/llm/anthropic/index.mjs.map +1 -1
  53. package/dist/esm/llm/google/index.mjs +3 -0
  54. package/dist/esm/llm/google/index.mjs.map +1 -1
  55. package/dist/esm/llm/google/utils/common.mjs +13 -0
  56. package/dist/esm/llm/google/utils/common.mjs.map +1 -1
  57. package/dist/esm/llm/ollama/index.mjs +3 -0
  58. package/dist/esm/llm/ollama/index.mjs.map +1 -1
  59. package/dist/esm/llm/openai/index.mjs +20 -1
  60. package/dist/esm/llm/openai/index.mjs.map +1 -1
  61. package/dist/esm/llm/openai/utils/index.mjs +6 -1
  62. package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
  63. package/dist/esm/llm/openrouter/index.mjs +5 -1
  64. package/dist/esm/llm/openrouter/index.mjs.map +1 -1
  65. package/dist/esm/llm/vertexai/index.mjs +1 -1
  66. package/dist/esm/llm/vertexai/index.mjs.map +1 -1
  67. package/dist/esm/main.mjs +4 -1
  68. package/dist/esm/main.mjs.map +1 -1
  69. package/dist/esm/messages/cache.mjs +47 -0
  70. package/dist/esm/messages/cache.mjs.map +1 -0
  71. package/dist/esm/messages/content.mjs +51 -0
  72. package/dist/esm/messages/content.mjs.map +1 -0
  73. package/dist/esm/messages/core.mjs +5 -1
  74. package/dist/esm/messages/core.mjs.map +1 -1
  75. package/dist/esm/messages/format.mjs +50 -58
  76. package/dist/esm/messages/format.mjs.map +1 -1
  77. package/dist/esm/messages/prune.mjs +28 -0
  78. package/dist/esm/messages/prune.mjs.map +1 -1
  79. package/dist/esm/tools/ToolNode.mjs +2 -0
  80. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  81. package/dist/esm/tools/search/firecrawl.mjs +3 -1
  82. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  83. package/dist/esm/tools/search/rerankers.mjs +8 -6
  84. package/dist/esm/tools/search/rerankers.mjs.map +1 -1
  85. package/dist/esm/tools/search/search.mjs +5 -5
  86. package/dist/esm/tools/search/search.mjs.map +1 -1
  87. package/dist/esm/tools/search/serper-scraper.mjs +129 -0
  88. package/dist/esm/tools/search/serper-scraper.mjs.map +1 -0
  89. package/dist/esm/tools/search/tool.mjs +46 -9
  90. package/dist/esm/tools/search/tool.mjs.map +1 -1
  91. package/dist/esm/utils/handlers.mjs +68 -0
  92. package/dist/esm/utils/handlers.mjs.map +1 -0
  93. package/dist/types/agents/AgentContext.d.ts +4 -1
  94. package/dist/types/llm/anthropic/index.d.ts +3 -0
  95. package/dist/types/llm/google/index.d.ts +1 -0
  96. package/dist/types/llm/ollama/index.d.ts +1 -0
  97. package/dist/types/llm/openai/index.d.ts +4 -0
  98. package/dist/types/llm/openrouter/index.d.ts +4 -2
  99. package/dist/types/llm/vertexai/index.d.ts +1 -1
  100. package/dist/types/messages/cache.d.ts +8 -0
  101. package/dist/types/messages/content.d.ts +7 -0
  102. package/dist/types/messages/format.d.ts +22 -25
  103. package/dist/types/messages/index.d.ts +2 -0
  104. package/dist/types/tools/search/firecrawl.d.ts +2 -1
  105. package/dist/types/tools/search/rerankers.d.ts +4 -1
  106. package/dist/types/tools/search/search.d.ts +1 -2
  107. package/dist/types/tools/search/serper-scraper.d.ts +59 -0
  108. package/dist/types/tools/search/tool.d.ts +25 -4
  109. package/dist/types/tools/search/types.d.ts +31 -1
  110. package/dist/types/types/graph.d.ts +2 -0
  111. package/dist/types/types/messages.d.ts +4 -0
  112. package/dist/types/utils/handlers.d.ts +34 -0
  113. package/dist/types/utils/index.d.ts +1 -0
  114. package/package.json +2 -2
  115. package/src/agents/AgentContext.ts +8 -0
  116. package/src/graphs/Graph.ts +31 -2
  117. package/src/llm/anthropic/index.ts +23 -2
  118. package/src/llm/google/index.ts +4 -0
  119. package/src/llm/google/utils/common.ts +14 -0
  120. package/src/llm/ollama/index.ts +3 -0
  121. package/src/llm/openai/index.ts +19 -1
  122. package/src/llm/openai/utils/index.ts +7 -1
  123. package/src/llm/openrouter/index.ts +15 -6
  124. package/src/llm/vertexai/index.ts +2 -2
  125. package/src/messages/cache.test.ts +262 -0
  126. package/src/messages/cache.ts +56 -0
  127. package/src/messages/content.test.ts +362 -0
  128. package/src/messages/content.ts +63 -0
  129. package/src/messages/core.ts +5 -2
  130. package/src/messages/format.ts +65 -71
  131. package/src/messages/formatMessage.test.ts +418 -2
  132. package/src/messages/index.ts +2 -0
  133. package/src/messages/prune.ts +51 -0
  134. package/src/scripts/search.ts +5 -1
  135. package/src/scripts/tools.ts +4 -1
  136. package/src/tools/search/firecrawl.ts +5 -2
  137. package/src/tools/search/jina-reranker.test.ts +126 -0
  138. package/src/tools/search/rerankers.ts +11 -5
  139. package/src/tools/search/search.ts +6 -8
  140. package/src/tools/search/serper-scraper.ts +155 -0
  141. package/src/tools/search/tool.ts +49 -8
  142. package/src/tools/search/types.ts +46 -0
  143. package/src/types/graph.ts +2 -0
  144. package/src/types/messages.ts +4 -0
  145. package/src/utils/handlers.ts +107 -0
  146. package/src/utils/index.ts +2 -1
  147. package/src/utils/llmConfig.ts +35 -1
package/src/tools/search/rerankers.ts
@@ -28,15 +28,20 @@ export abstract class BaseReranker {
 }
 
 export class JinaReranker extends BaseReranker {
+  private apiUrl: string;
+
   constructor({
     apiKey = process.env.JINA_API_KEY,
+    apiUrl = process.env.JINA_API_URL || 'https://api.jina.ai/v1/rerank',
     logger,
   }: {
     apiKey?: string;
+    apiUrl?: string;
     logger?: t.Logger;
   }) {
     super(logger);
     this.apiKey = apiKey;
+    this.apiUrl = apiUrl;
   }
 
   async rerank(
@@ -44,7 +49,7 @@ export class JinaReranker extends BaseReranker {
     documents: string[],
     topK: number = 5
   ): Promise<t.Highlight[]> {
-    this.logger.debug(`Reranking ${documents.length} chunks with Jina`);
+    this.logger.debug(`Reranking ${documents.length} chunks with Jina using API URL: ${this.apiUrl}`);
 
     try {
       if (this.apiKey == null || this.apiKey === '') {
@@ -61,7 +66,7 @@ export class JinaReranker extends BaseReranker {
       };
 
       const response = await axios.post<t.JinaRerankerResponse | undefined>(
-        'https://api.jina.ai/v1/rerank',
+        this.apiUrl,
         requestData,
         {
           headers: {
@@ -201,17 +206,18 @@ export class InfinityReranker extends BaseReranker {
 export const createReranker = (config: {
   rerankerType: t.RerankerType;
   jinaApiKey?: string;
+  jinaApiUrl?: string;
   cohereApiKey?: string;
   logger?: t.Logger;
 }): BaseReranker | undefined => {
-  const { rerankerType, jinaApiKey, cohereApiKey, logger } = config;
+  const { rerankerType, jinaApiKey, jinaApiUrl, cohereApiKey, logger } = config;
 
   // Create a default logger if none is provided
   const defaultLogger = logger || createDefaultLogger();
 
   switch (rerankerType.toLowerCase()) {
     case 'jina':
-      return new JinaReranker({ apiKey: jinaApiKey, logger: defaultLogger });
+      return new JinaReranker({ apiKey: jinaApiKey, apiUrl: jinaApiUrl, logger: defaultLogger });
     case 'cohere':
       return new CohereReranker({
         apiKey: cohereApiKey,
@@ -226,7 +232,7 @@ export const createReranker = (config: {
       defaultLogger.warn(
         `Unknown reranker type: ${rerankerType}. Defaulting to InfinityReranker.`
       );
-      return new JinaReranker({ apiKey: jinaApiKey, logger: defaultLogger });
+      return new JinaReranker({ apiKey: jinaApiKey, apiUrl: jinaApiUrl, logger: defaultLogger });
   }
 };
 
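The `jinaApiUrl` option threads through both `JinaReranker` and `createReranker`. A minimal sketch of how it might be used, assuming `createReranker` is re-exported from your build of the package (the proxy URL is a placeholder, and `rerank`'s first parameter is inferred from the hunk above to be the query string):

```typescript
import { createReranker } from '@librechat/agents';

async function main(): Promise<void> {
  // Point JinaReranker at a self-hosted or proxied endpoint instead of
  // the default https://api.jina.ai/v1/rerank.
  const reranker = createReranker({
    rerankerType: 'jina',
    jinaApiKey: process.env.JINA_API_KEY,
    jinaApiUrl: 'https://jina.internal.example.com/v1/rerank', // placeholder
  });

  const highlights = await reranker?.rerank(
    'what changed in rc12?',
    ['first document chunk', 'second document chunk'],
    1
  );
  console.log(highlights);
}

main().catch(console.error);
```

Setting `JINA_API_URL` in the environment achieves the same result without passing `jinaApiUrl`, per the constructor default above.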
package/src/tools/search/search.ts
@@ -2,7 +2,6 @@ import axios from 'axios';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
 import type * as t from './types';
 import { getAttribution, createDefaultLogger } from './utils';
-import { FirecrawlScraper } from './firecrawl';
 import { BaseReranker } from './rerankers';
 
 const chunker = {
@@ -434,7 +433,7 @@ export const createSearchAPI = (
 
 export const createSourceProcessor = (
   config: t.ProcessSourcesConfig = {},
-  scraperInstance?: FirecrawlScraper
+  scraperInstance?: t.BaseScraper
 ): {
   processSources: (
     fields: t.ProcessSourcesFields
@@ -442,7 +441,7 @@ export const createSourceProcessor = (
   topResults: number;
 } => {
   if (!scraperInstance) {
-    throw new Error('Firecrawl scraper instance is required');
+    throw new Error('Scraper instance is required');
   }
   const {
     topResults = 5,
@@ -453,7 +452,7 @@ export const createSourceProcessor = (
   } = config;
 
   const logger_ = logger || createDefaultLogger();
-  const firecrawlScraper = scraperInstance;
+  const scraper = scraperInstance;
 
   const webScraper = {
     scrapeMany: async ({
@@ -465,12 +464,12 @@ export const createSourceProcessor = (
       links: string[];
      onGetHighlights: t.SearchToolConfig['onGetHighlights'];
     }): Promise<Array<t.ScrapeResult>> => {
-      logger_.debug(`Scraping ${links.length} links with Firecrawl`);
+      logger_.debug(`Scraping ${links.length} links`);
       const promises: Array<Promise<t.ScrapeResult>> = [];
       try {
         for (let i = 0; i < links.length; i++) {
           const currentLink = links[i];
-          const promise: Promise<t.ScrapeResult> = firecrawlScraper
+          const promise: Promise<t.ScrapeResult> = scraper
             .scrapeUrl(currentLink, {})
             .then(([url, response]) => {
               const attribution = getAttribution(
@@ -479,8 +478,7 @@ export const createSourceProcessor = (
                 logger_
               );
               if (response.success && response.data) {
-                const [content, references] =
-                  firecrawlScraper.extractContent(response);
+                const [content, references] = scraper.extractContent(response);
                 return {
                   url,
                   references,
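The practical effect of widening `scraperInstance` to `t.BaseScraper` is that any conforming scraper can now drive source processing. A hedged sketch using the Serper scraper added in this release (relative import paths assumed):

```typescript
import { createSourceProcessor } from './search';
import { createSerperScraper } from './serper-scraper';

// createSourceProcessor previously required a FirecrawlScraper;
// any t.BaseScraper implementation now satisfies the parameter.
const { processSources, topResults } = createSourceProcessor(
  { topResults: 3 },
  createSerperScraper({ apiKey: process.env.SERPER_API_KEY })
);

console.log(`Source processor ready (top ${topResults} results)`);
```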
package/src/tools/search/serper-scraper.ts (new file)
@@ -0,0 +1,155 @@
+import axios from 'axios';
+import type * as t from './types';
+import { createDefaultLogger } from './utils';
+
+/**
+ * Serper scraper implementation
+ * Uses the Serper Scrape API (https://scrape.serper.dev) to scrape web pages
+ *
+ * Features:
+ * - Simple API with single endpoint
+ * - Returns both text and markdown content
+ * - Includes metadata from scraped pages
+ * - Credits-based pricing model
+ *
+ * @example
+ * ```typescript
+ * const scraper = createSerperScraper({
+ *   apiKey: 'your-serper-api-key',
+ *   includeMarkdown: true,
+ *   timeout: 10000
+ * });
+ *
+ * const [url, response] = await scraper.scrapeUrl('https://example.com');
+ * if (response.success) {
+ *   const [content] = scraper.extractContent(response);
+ *   console.log(content);
+ * }
+ * ```
+ */
+export class SerperScraper implements t.BaseScraper {
+  private apiKey: string;
+  private apiUrl: string;
+  private timeout: number;
+  private logger: t.Logger;
+  private includeMarkdown: boolean;
+
+  constructor(config: t.SerperScraperConfig = {}) {
+    this.apiKey = config.apiKey ?? process.env.SERPER_API_KEY ?? '';
+
+    this.apiUrl =
+      config.apiUrl ??
+      process.env.SERPER_SCRAPE_URL ??
+      'https://scrape.serper.dev';
+
+    this.timeout = config.timeout ?? 7500;
+    this.includeMarkdown = config.includeMarkdown ?? true;
+
+    this.logger = config.logger || createDefaultLogger();
+
+    if (!this.apiKey) {
+      this.logger.warn('SERPER_API_KEY is not set. Scraping will not work.');
+    }
+
+    this.logger.debug(
+      `Serper scraper initialized with API URL: ${this.apiUrl}`
+    );
+  }
+
+  /**
+   * Scrape a single URL
+   * @param url URL to scrape
+   * @param options Scrape options
+   * @returns Scrape response
+   */
+  async scrapeUrl(
+    url: string,
+    options: t.SerperScrapeOptions = {}
+  ): Promise<[string, t.SerperScrapeResponse]> {
+    if (!this.apiKey) {
+      return [
+        url,
+        {
+          success: false,
+          error: 'SERPER_API_KEY is not set',
+        },
+      ];
+    }
+
+    try {
+      const payload = {
+        url,
+        includeMarkdown: options.includeMarkdown ?? this.includeMarkdown,
+      };
+
+      const response = await axios.post(this.apiUrl, payload, {
+        headers: {
+          'X-API-KEY': this.apiKey,
+          'Content-Type': 'application/json',
+        },
+        timeout: options.timeout ?? this.timeout,
+      });
+
+      return [url, { success: true, data: response.data }];
+    } catch (error) {
+      const errorMessage =
+        error instanceof Error ? error.message : String(error);
+      return [
+        url,
+        {
+          success: false,
+          error: `Serper Scrape API request failed: ${errorMessage}`,
+        },
+      ];
+    }
+  }
+
+  /**
+   * Extract content from scrape response
+   * @param response Scrape response
+   * @returns Extracted content or empty string if not available
+   */
+  extractContent(
+    response: t.SerperScrapeResponse
+  ): [string, undefined | t.References] {
+    if (!response.success || !response.data) {
+      return ['', undefined];
+    }
+
+    if (response.data.markdown != null) {
+      return [response.data.markdown, undefined];
+    }
+
+    if (response.data.text != null) {
+      return [response.data.text, undefined];
+    }
+
+    return ['', undefined];
+  }
+
+  /**
+   * Extract metadata from scrape response
+   * @param response Scrape response
+   * @returns Metadata object
+   */
+  extractMetadata(
+    response: t.SerperScrapeResponse
+  ): Record<string, string | number | boolean | null | undefined> {
+    if (!response.success || !response.data || !response.data.metadata) {
+      return {};
+    }
+
+    return response.data.metadata;
+  }
+}
+
+/**
+ * Create a Serper scraper instance
+ * @param config Scraper configuration
+ * @returns Serper scraper instance
+ */
+export const createSerperScraper = (
+  config: t.SerperScraperConfig = {}
+): SerperScraper => {
+  return new SerperScraper(config);
+};
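Note from the implementation above that `scrapeUrl` never throws: a missing key or a failed request comes back as `{ success: false, error }`, so callers branch on `success`. A short sketch (relative import path assumed):

```typescript
import { createSerperScraper } from './serper-scraper';

async function demo(): Promise<void> {
  // With no config, the constructor falls back to the SERPER_API_KEY
  // and SERPER_SCRAPE_URL environment variables.
  const scraper = createSerperScraper();

  const [url, response] = await scraper.scrapeUrl('https://example.com', {
    includeMarkdown: false, // request plain text only
    timeout: 5000, // per-request override of the 7500 ms default
  });

  if (response.success) {
    const [content] = scraper.extractContent(response); // text, since markdown was skipped
    console.log(url, content.slice(0, 200));
  } else {
    console.error(`Scrape failed for ${url}: ${response.error}`);
  }
}

demo().catch(console.error);
```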
package/src/tools/search/tool.ts
@@ -12,6 +12,7 @@ import {
   newsSchema,
 } from './schema';
 import { createSearchAPI, createSourceProcessor } from './search';
+import { createSerperScraper } from './serper-scraper';
 import { createFirecrawlScraper } from './firecrawl';
 import { expandHighlights } from './highlights';
 import { formatResultsForLLM } from './format';
@@ -328,6 +329,27 @@ Use anchor marker(s) immediately after the statement:
  * Creates a search tool with a schema that dynamically includes the country field
  * only when the searchProvider is 'serper'.
  *
+ * Supports multiple scraper providers:
+ * - Firecrawl (default): Full-featured web scraping with multiple formats
+ * - Serper: Lightweight scraping using Serper's scrape API
+ *
+ * @example
+ * ```typescript
+ * // Using Firecrawl scraper (default)
+ * const searchTool = createSearchTool({
+ *   searchProvider: 'serper',
+ *   scraperProvider: 'firecrawl',
+ *   firecrawlApiKey: 'your-firecrawl-key'
+ * });
+ *
+ * // Using Serper scraper
+ * const searchTool = createSearchTool({
+ *   searchProvider: 'serper',
+ *   scraperProvider: 'serper',
+ *   serperApiKey: 'your-serper-key'
+ * });
+ * ```
+ *
  * @param config - The search tool configuration
  * @returns A DynamicStructuredTool with a schema that depends on the searchProvider
  */
@@ -344,11 +366,15 @@ export const createSearchTool = (
   strategies = ['no_extraction'],
   filterContent = true,
   safeSearch = 1,
+  scraperProvider = 'firecrawl',
   firecrawlApiKey,
   firecrawlApiUrl,
+  firecrawlVersion,
   firecrawlOptions,
+  serperScraperOptions,
   scraperTimeout,
   jinaApiKey,
+  jinaApiUrl,
   cohereApiKey,
   onSearchResults: _onSearchResults,
   onGetHighlights,
@@ -384,17 +410,32 @@ export const createSearchTool = (
     searxngApiKey,
   });
 
-  const firecrawlScraper = createFirecrawlScraper({
-    ...firecrawlOptions,
-    apiKey: firecrawlApiKey ?? process.env.FIRECRAWL_API_KEY,
-    apiUrl: firecrawlApiUrl,
-    timeout: scraperTimeout ?? firecrawlOptions?.timeout,
-    formats: firecrawlOptions?.formats ?? ['markdown', 'rawHtml'],
-  });
+  /** Create scraper based on scraperProvider */
+  let scraperInstance: t.BaseScraper;
+
+  if (scraperProvider === 'serper') {
+    scraperInstance = createSerperScraper({
+      ...serperScraperOptions,
+      apiKey: serperApiKey,
+      timeout: scraperTimeout ?? serperScraperOptions?.timeout,
+      logger,
+    });
+  } else {
+    scraperInstance = createFirecrawlScraper({
+      ...firecrawlOptions,
+      apiKey: firecrawlApiKey ?? process.env.FIRECRAWL_API_KEY,
+      apiUrl: firecrawlApiUrl,
+      version: firecrawlVersion,
+      timeout: scraperTimeout ?? firecrawlOptions?.timeout,
+      formats: firecrawlOptions?.formats ?? ['markdown', 'rawHtml'],
+      logger,
+    });
+  }
 
   const selectedReranker = createReranker({
     rerankerType,
     jinaApiKey,
+    jinaApiUrl,
     cohereApiKey,
     logger,
   });
@@ -411,7 +452,7 @@ export const createSearchTool = (
       filterContent,
       logger,
     },
-    firecrawlScraper
+    scraperInstance
   );
 
   const search = createSearchProcessor({
package/src/tools/search/types.ts
@@ -5,6 +5,7 @@ import type { BaseReranker } from './rerankers';
 import { DATE_RANGE } from './schema';
 
 export type SearchProvider = 'serper' | 'searxng';
+export type ScraperProvider = 'firecrawl' | 'serper';
 export type RerankerType = 'infinity' | 'jina' | 'cohere' | 'none';
 
 export interface Highlight {
@@ -94,9 +95,18 @@ export interface ProcessSourcesConfig {
 export interface FirecrawlConfig {
   firecrawlApiKey?: string;
   firecrawlApiUrl?: string;
+  firecrawlVersion?: string;
   firecrawlOptions?: FirecrawlScraperConfig;
 }
 
+export interface SerperScraperConfig {
+  apiKey?: string;
+  apiUrl?: string;
+  timeout?: number;
+  logger?: Logger;
+  includeMarkdown?: boolean;
+}
+
 export interface ScraperContentResult {
   content: string;
 }
@@ -148,9 +158,12 @@ export interface SearchToolConfig
   logger?: Logger;
   safeSearch?: SafeSearchLevel;
   jinaApiKey?: string;
+  jinaApiUrl?: string;
   cohereApiKey?: string;
   rerankerType?: RerankerType;
+  scraperProvider?: ScraperProvider;
   scraperTimeout?: number;
+  serperScraperOptions?: SerperScraperConfig;
   onSearchResults?: (
     results: SearchResult,
     runnableConfig?: RunnableConfig
@@ -169,9 +182,30 @@ export type UsedReferences = {
   reference: MediaReference;
 }[];
 
+/** Base Scraper Interface */
+export interface BaseScraper {
+  scrapeUrl(
+    url: string,
+    options?: unknown
+  ): Promise<[string, FirecrawlScrapeResponse | SerperScrapeResponse]>;
+  extractContent(
+    response: FirecrawlScrapeResponse | SerperScrapeResponse
+  ): [string, undefined | References];
+  extractMetadata(
+    response: FirecrawlScrapeResponse | SerperScrapeResponse
+  ):
+    | ScrapeMetadata
+    | Record<string, string | number | boolean | null | undefined>;
+}
+
 /** Firecrawl */
 export type FirecrawlScrapeOptions = Omit<
   FirecrawlScraperConfig,
+  'apiKey' | 'apiUrl' | 'version' | 'logger'
+>;
+
+export type SerperScrapeOptions = Omit<
+  SerperScraperConfig,
   'apiKey' | 'apiUrl' | 'logger'
 >;
 
@@ -250,9 +284,21 @@ export interface FirecrawlScrapeResponse {
   error?: string;
 }
 
+export interface SerperScrapeResponse {
+  success: boolean;
+  data?: {
+    text?: string;
+    markdown?: string;
+    metadata?: Record<string, string | number | boolean | null | undefined>;
+    credits?: number;
+  };
+  error?: string;
+}
+
 export interface FirecrawlScraperConfig {
   apiKey?: string;
   apiUrl?: string;
+  version?: string;
   formats?: string[];
   timeout?: number;
   logger?: Logger;
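Since `BaseScraper` is now the seam between scrapers and the search pipeline, a third-party or test scraper only needs these three methods. A hypothetical in-memory stub, reusing the `SerperScrapeResponse` shape defined above so the types line up (everything named here is illustrative):

```typescript
import type * as t from './types';

// Hypothetical fixture-backed scraper for tests; it satisfies
// t.BaseScraper by reusing the SerperScrapeResponse shape.
class StaticScraper implements t.BaseScraper {
  constructor(private pages: Record<string, string>) {}

  async scrapeUrl(url: string): Promise<[string, t.SerperScrapeResponse]> {
    const text = this.pages[url];
    return text != null
      ? [url, { success: true, data: { text } }]
      : [url, { success: false, error: `No fixture for ${url}` }];
  }

  extractContent(
    response: t.SerperScrapeResponse
  ): [string, undefined | t.References] {
    return [response.data?.text ?? '', undefined];
  }

  extractMetadata(
    response: t.SerperScrapeResponse
  ): Record<string, string | number | boolean | null | undefined> {
    return response.data?.metadata ?? {};
  }
}
```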
package/src/types/graph.ts
@@ -367,4 +367,6 @@ export interface AgentInputs {
   clientOptions?: ClientOptions;
   additional_instructions?: string;
   reasoningKey?: 'reasoning_content' | 'reasoning';
+  /** Format content blocks as strings (for legacy compatibility i.e. Ollama/Azure Serverless) */
+  useLegacyContent?: boolean;
 }
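A brief sketch of the new flag in use; only `useLegacyContent` and `reasoningKey` come from `AgentInputs`, and the `@/types` alias mirrors the package's internal imports:

```typescript
import type * as t from '@/types';

// Collapse content blocks to plain strings for providers that only
// accept string message content (e.g. Ollama, Azure Serverless).
const inputs: Partial<t.AgentInputs> = {
  reasoningKey: 'reasoning_content',
  useLegacyContent: true,
};

console.log(inputs.useLegacyContent);
```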
package/src/types/messages.ts (new file)
@@ -0,0 +1,4 @@
+import type Anthropic from '@anthropic-ai/sdk';
+import type { BaseMessage } from '@langchain/core/messages';
+export type AnthropicMessages = Array<AnthropicMessage | BaseMessage>;
+export type AnthropicMessage = Anthropic.MessageParam;
package/src/utils/handlers.ts (new file)
@@ -0,0 +1,107 @@
+/**
+ * Multi-Agent Handler Utilities
+ *
+ * Provides a simple helper to create handlers with content aggregation for multi-agent scripts.
+ *
+ * Usage:
+ * ```typescript
+ * const { contentParts, aggregateContent, handlers } = createHandlers();
+ *
+ * // With callbacks
+ * const { contentParts, aggregateContent, handlers } = createHandlers({
+ *   onRunStep: (event, data) => console.log('Step:', data),
+ *   onRunStepCompleted: (event, data) => console.log('Completed:', data)
+ * });
+ * ```
+ */
+
+import { GraphEvents } from '@/common';
+import { ChatModelStreamHandler, createContentAggregator } from '@/stream';
+import { ToolEndHandler, ModelEndHandler } from '@/events';
+import type * as t from '@/types';
+
+interface HandlerCallbacks {
+  onRunStep?: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData) => void;
+  onRunStepCompleted?: (
+    event: GraphEvents.ON_RUN_STEP_COMPLETED,
+    data: t.StreamEventData
+  ) => void;
+  onRunStepDelta?: (
+    event: GraphEvents.ON_RUN_STEP_DELTA,
+    data: t.StreamEventData
+  ) => void;
+  onMessageDelta?: (
+    event: GraphEvents.ON_MESSAGE_DELTA,
+    data: t.StreamEventData
+  ) => void;
+}
+
+/**
+ * Creates handlers with content aggregation for multi-agent scripts
+ */
+export function createHandlers(callbacks?: HandlerCallbacks): {
+  contentParts: Array<t.MessageContentComplex | undefined>;
+  aggregateContent: ReturnType<
+    typeof createContentAggregator
+  >['aggregateContent'];
+  handlers: Record<string, t.EventHandler>;
+} {
+  // Set up content aggregator
+  const { contentParts, aggregateContent } = createContentAggregator();
+
+  // Create the handlers object
+  const handlers = {
+    [GraphEvents.TOOL_END]: new ToolEndHandler(),
+    [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+    [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+
+    [GraphEvents.ON_RUN_STEP]: {
+      handle: (
+        event: GraphEvents.ON_RUN_STEP,
+        data: t.StreamEventData
+      ): void => {
+        aggregateContent({ event, data: data as t.RunStep });
+        callbacks?.onRunStep?.(event, data);
+      },
+    },
+
+    [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+      handle: (
+        event: GraphEvents.ON_RUN_STEP_COMPLETED,
+        data: t.StreamEventData
+      ): void => {
+        aggregateContent({
+          event,
+          data: data as unknown as { result: t.ToolEndEvent },
+        });
+        callbacks?.onRunStepCompleted?.(event, data);
+      },
+    },
+
+    [GraphEvents.ON_RUN_STEP_DELTA]: {
+      handle: (
+        event: GraphEvents.ON_RUN_STEP_DELTA,
+        data: t.StreamEventData
+      ): void => {
+        aggregateContent({ event, data: data as t.RunStepDeltaEvent });
+        callbacks?.onRunStepDelta?.(event, data);
+      },
+    },
+
+    [GraphEvents.ON_MESSAGE_DELTA]: {
+      handle: (
+        event: GraphEvents.ON_MESSAGE_DELTA,
+        data: t.StreamEventData
+      ): void => {
+        aggregateContent({ event, data: data as t.MessageDeltaEvent });
+        callbacks?.onMessageDelta?.(event, data);
+      },
+    },
+  };
+
+  return {
+    contentParts,
+    aggregateContent,
+    handlers,
+  };
+}
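Beyond the JSDoc usage above, the returned `handlers` map is keyed by `GraphEvents` values, so it can be passed wherever the package expects an event-handler record, while `contentParts` fills in as events are aggregated. A small sketch (the logging is illustrative):

```typescript
import { createHandlers } from '@/utils';

const { contentParts, handlers } = createHandlers({
  onRunStepCompleted: (_event, data) => {
    console.log('run step completed:', data);
  },
});

// Each handler both aggregates content and forwards to the optional
// callback supplied above.
console.log(Object.keys(handlers).length, 'handlers registered');
console.log(contentParts.length, 'content parts so far');
```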
package/src/utils/index.ts
@@ -1,5 +1,6 @@
 export * from './graph';
 export * from './llm';
 export * from './misc';
+export * from './handlers';
 export * from './run';
-export * from './tokens';
+export * from './tokens';
package/src/utils/llmConfig.ts
@@ -12,6 +12,30 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
     streamUsage: true,
     // disableStreaming: true,
   },
+  anthropicLITELLM: {
+    provider: Providers.OPENAI,
+    streaming: true,
+    streamUsage: false,
+    apiKey: 'sk-1234',
+    model: 'claude-sonnet-4',
+    maxTokens: 8192,
+    modelKwargs: {
+      metadata: {
+        user_id: 'some_user_id',
+      },
+      thinking: {
+        type: 'enabled',
+        budget_tokens: 2000,
+      },
+    },
+    configuration: {
+      baseURL: 'http://host.docker.internal:4000/v1',
+      defaultHeaders: {
+        'anthropic-beta': 'prompt-caching-2024-07-31,context-1m-2025-08-07',
+      },
+    },
+    // disableStreaming: true,
+  },
   [Providers.XAI]: {
     provider: Providers.XAI,
     model: 'grok-2-latest',
@@ -70,6 +94,16 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
       baseURL: 'http://192.168.254.183:1233/v1',
     },
   },
+  zhipu: {
+    provider: Providers.OPENAI,
+    streaming: true,
+    streamUsage: false,
+    model: 'glm-4.5-air',
+    apiKey: process.env.ZHIPU_API_KEY,
+    configuration: {
+      baseURL: 'https://open.bigmodel.cn/api/paas/v4',
+    },
+  },
   [Providers.DEEPSEEK]: {
     provider: Providers.DEEPSEEK,
     model: 'deepseek-reasoner',
@@ -78,7 +112,7 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
   },
   [Providers.ANTHROPIC]: {
     provider: Providers.ANTHROPIC,
-    model: 'claude-3-5-sonnet-20240620',
+    model: 'claude-sonnet-4-5',
     streaming: true,
     streamUsage: true,
   },
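The new entries follow the existing pattern in this module: OpenAI-compatible configs pointed at alternate backends (a LiteLLM proxy for Claude, Zhipu's endpoint for GLM). Selecting one is a plain key lookup; a sketch, assuming this module's `llmConfigs` export:

```typescript
import { llmConfigs } from './llmConfig';

// Claude routed through a local LiteLLM proxy via the OpenAI client,
// and Zhipu's OpenAI-compatible endpoint.
const liteLLM = llmConfigs['anthropicLITELLM'];
const zhipu = llmConfigs['zhipu'];

console.log(liteLLM?.model, zhipu?.model); // 'claude-sonnet-4', 'glm-4.5-air'
```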