@librechat/agents 3.1.75 → 3.1.76

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221)
  1. package/dist/cjs/graphs/Graph.cjs +13 -3
  2. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  3. package/dist/cjs/langchain/google-common.cjs +3 -0
  4. package/dist/cjs/langchain/google-common.cjs.map +1 -0
  5. package/dist/cjs/langchain/index.cjs +86 -0
  6. package/dist/cjs/langchain/index.cjs.map +1 -0
  7. package/dist/cjs/langchain/language_models/chat_models.cjs +3 -0
  8. package/dist/cjs/langchain/language_models/chat_models.cjs.map +1 -0
  9. package/dist/cjs/langchain/messages/tool.cjs +3 -0
  10. package/dist/cjs/langchain/messages/tool.cjs.map +1 -0
  11. package/dist/cjs/langchain/messages.cjs +51 -0
  12. package/dist/cjs/langchain/messages.cjs.map +1 -0
  13. package/dist/cjs/langchain/openai.cjs +3 -0
  14. package/dist/cjs/langchain/openai.cjs.map +1 -0
  15. package/dist/cjs/langchain/prompts.cjs +11 -0
  16. package/dist/cjs/langchain/prompts.cjs.map +1 -0
  17. package/dist/cjs/langchain/runnables.cjs +19 -0
  18. package/dist/cjs/langchain/runnables.cjs.map +1 -0
  19. package/dist/cjs/langchain/tools.cjs +23 -0
  20. package/dist/cjs/langchain/tools.cjs.map +1 -0
  21. package/dist/cjs/langchain/utils/env.cjs +11 -0
  22. package/dist/cjs/langchain/utils/env.cjs.map +1 -0
  23. package/dist/cjs/llm/anthropic/index.cjs +145 -52
  24. package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
  25. package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
  26. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +21 -14
  27. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
  28. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +84 -70
  29. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
  30. package/dist/cjs/llm/bedrock/index.cjs +1 -1
  31. package/dist/cjs/llm/bedrock/index.cjs.map +1 -1
  32. package/dist/cjs/llm/bedrock/utils/message_inputs.cjs +213 -3
  33. package/dist/cjs/llm/bedrock/utils/message_inputs.cjs.map +1 -1
  34. package/dist/cjs/llm/bedrock/utils/message_outputs.cjs +2 -1
  35. package/dist/cjs/llm/bedrock/utils/message_outputs.cjs.map +1 -1
  36. package/dist/cjs/llm/google/utils/common.cjs +5 -4
  37. package/dist/cjs/llm/google/utils/common.cjs.map +1 -1
  38. package/dist/cjs/llm/openai/index.cjs +519 -655
  39. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  40. package/dist/cjs/llm/openai/utils/index.cjs +20 -458
  41. package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
  42. package/dist/cjs/llm/openrouter/index.cjs +57 -175
  43. package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
  44. package/dist/cjs/llm/vertexai/index.cjs +5 -3
  45. package/dist/cjs/llm/vertexai/index.cjs.map +1 -1
  46. package/dist/cjs/main.cjs +83 -3
  47. package/dist/cjs/main.cjs.map +1 -1
  48. package/dist/cjs/messages/cache.cjs +2 -1
  49. package/dist/cjs/messages/cache.cjs.map +1 -1
  50. package/dist/cjs/messages/core.cjs +7 -6
  51. package/dist/cjs/messages/core.cjs.map +1 -1
  52. package/dist/cjs/messages/format.cjs +73 -15
  53. package/dist/cjs/messages/format.cjs.map +1 -1
  54. package/dist/cjs/messages/langchain.cjs +26 -0
  55. package/dist/cjs/messages/langchain.cjs.map +1 -0
  56. package/dist/cjs/messages/prune.cjs +7 -6
  57. package/dist/cjs/messages/prune.cjs.map +1 -1
  58. package/dist/cjs/tools/ToolNode.cjs +5 -1
  59. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  60. package/dist/cjs/tools/search/search.cjs +55 -66
  61. package/dist/cjs/tools/search/search.cjs.map +1 -1
  62. package/dist/cjs/tools/search/tavily-scraper.cjs +189 -0
  63. package/dist/cjs/tools/search/tavily-scraper.cjs.map +1 -0
  64. package/dist/cjs/tools/search/tavily-search.cjs +372 -0
  65. package/dist/cjs/tools/search/tavily-search.cjs.map +1 -0
  66. package/dist/cjs/tools/search/tool.cjs +26 -4
  67. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  68. package/dist/cjs/tools/search/utils.cjs +10 -3
  69. package/dist/cjs/tools/search/utils.cjs.map +1 -1
  70. package/dist/esm/graphs/Graph.mjs +13 -3
  71. package/dist/esm/graphs/Graph.mjs.map +1 -1
  72. package/dist/esm/langchain/google-common.mjs +2 -0
  73. package/dist/esm/langchain/google-common.mjs.map +1 -0
  74. package/dist/esm/langchain/index.mjs +5 -0
  75. package/dist/esm/langchain/index.mjs.map +1 -0
  76. package/dist/esm/langchain/language_models/chat_models.mjs +2 -0
  77. package/dist/esm/langchain/language_models/chat_models.mjs.map +1 -0
  78. package/dist/esm/langchain/messages/tool.mjs +2 -0
  79. package/dist/esm/langchain/messages/tool.mjs.map +1 -0
  80. package/dist/esm/langchain/messages.mjs +2 -0
  81. package/dist/esm/langchain/messages.mjs.map +1 -0
  82. package/dist/esm/langchain/openai.mjs +2 -0
  83. package/dist/esm/langchain/openai.mjs.map +1 -0
  84. package/dist/esm/langchain/prompts.mjs +2 -0
  85. package/dist/esm/langchain/prompts.mjs.map +1 -0
  86. package/dist/esm/langchain/runnables.mjs +2 -0
  87. package/dist/esm/langchain/runnables.mjs.map +1 -0
  88. package/dist/esm/langchain/tools.mjs +2 -0
  89. package/dist/esm/langchain/tools.mjs.map +1 -0
  90. package/dist/esm/langchain/utils/env.mjs +2 -0
  91. package/dist/esm/langchain/utils/env.mjs.map +1 -0
  92. package/dist/esm/llm/anthropic/index.mjs +146 -54
  93. package/dist/esm/llm/anthropic/index.mjs.map +1 -1
  94. package/dist/esm/llm/anthropic/types.mjs.map +1 -1
  95. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +21 -14
  96. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
  97. package/dist/esm/llm/anthropic/utils/message_outputs.mjs +84 -71
  98. package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
  99. package/dist/esm/llm/bedrock/index.mjs +1 -1
  100. package/dist/esm/llm/bedrock/index.mjs.map +1 -1
  101. package/dist/esm/llm/bedrock/utils/message_inputs.mjs +214 -4
  102. package/dist/esm/llm/bedrock/utils/message_inputs.mjs.map +1 -1
  103. package/dist/esm/llm/bedrock/utils/message_outputs.mjs +2 -1
  104. package/dist/esm/llm/bedrock/utils/message_outputs.mjs.map +1 -1
  105. package/dist/esm/llm/google/utils/common.mjs +5 -4
  106. package/dist/esm/llm/google/utils/common.mjs.map +1 -1
  107. package/dist/esm/llm/openai/index.mjs +520 -656
  108. package/dist/esm/llm/openai/index.mjs.map +1 -1
  109. package/dist/esm/llm/openai/utils/index.mjs +23 -459
  110. package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
  111. package/dist/esm/llm/openrouter/index.mjs +57 -175
  112. package/dist/esm/llm/openrouter/index.mjs.map +1 -1
  113. package/dist/esm/llm/vertexai/index.mjs +5 -3
  114. package/dist/esm/llm/vertexai/index.mjs.map +1 -1
  115. package/dist/esm/main.mjs +4 -0
  116. package/dist/esm/main.mjs.map +1 -1
  117. package/dist/esm/messages/cache.mjs +2 -1
  118. package/dist/esm/messages/cache.mjs.map +1 -1
  119. package/dist/esm/messages/core.mjs +7 -6
  120. package/dist/esm/messages/core.mjs.map +1 -1
  121. package/dist/esm/messages/format.mjs +73 -15
  122. package/dist/esm/messages/format.mjs.map +1 -1
  123. package/dist/esm/messages/langchain.mjs +23 -0
  124. package/dist/esm/messages/langchain.mjs.map +1 -0
  125. package/dist/esm/messages/prune.mjs +7 -6
  126. package/dist/esm/messages/prune.mjs.map +1 -1
  127. package/dist/esm/tools/ToolNode.mjs +5 -1
  128. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  129. package/dist/esm/tools/search/search.mjs +55 -66
  130. package/dist/esm/tools/search/search.mjs.map +1 -1
  131. package/dist/esm/tools/search/tavily-scraper.mjs +186 -0
  132. package/dist/esm/tools/search/tavily-scraper.mjs.map +1 -0
  133. package/dist/esm/tools/search/tavily-search.mjs +370 -0
  134. package/dist/esm/tools/search/tavily-search.mjs.map +1 -0
  135. package/dist/esm/tools/search/tool.mjs +26 -4
  136. package/dist/esm/tools/search/tool.mjs.map +1 -1
  137. package/dist/esm/tools/search/utils.mjs +10 -3
  138. package/dist/esm/tools/search/utils.mjs.map +1 -1
  139. package/dist/types/index.d.ts +1 -0
  140. package/dist/types/langchain/google-common.d.ts +1 -0
  141. package/dist/types/langchain/index.d.ts +8 -0
  142. package/dist/types/langchain/language_models/chat_models.d.ts +1 -0
  143. package/dist/types/langchain/messages/tool.d.ts +1 -0
  144. package/dist/types/langchain/messages.d.ts +2 -0
  145. package/dist/types/langchain/openai.d.ts +1 -0
  146. package/dist/types/langchain/prompts.d.ts +1 -0
  147. package/dist/types/langchain/runnables.d.ts +2 -0
  148. package/dist/types/langchain/tools.d.ts +2 -0
  149. package/dist/types/langchain/utils/env.d.ts +1 -0
  150. package/dist/types/llm/anthropic/index.d.ts +22 -9
  151. package/dist/types/llm/anthropic/types.d.ts +5 -1
  152. package/dist/types/llm/anthropic/utils/message_outputs.d.ts +13 -6
  153. package/dist/types/llm/anthropic/utils/output_parsers.d.ts +1 -1
  154. package/dist/types/llm/openai/index.d.ts +21 -24
  155. package/dist/types/llm/openrouter/index.d.ts +11 -9
  156. package/dist/types/llm/vertexai/index.d.ts +1 -0
  157. package/dist/types/messages/cache.d.ts +4 -1
  158. package/dist/types/messages/format.d.ts +4 -1
  159. package/dist/types/messages/langchain.d.ts +27 -0
  160. package/dist/types/tools/search/tavily-scraper.d.ts +19 -0
  161. package/dist/types/tools/search/tavily-search.d.ts +4 -0
  162. package/dist/types/tools/search/types.d.ts +99 -5
  163. package/dist/types/tools/search/utils.d.ts +2 -2
  164. package/dist/types/types/graph.d.ts +23 -37
  165. package/dist/types/types/llm.d.ts +3 -3
  166. package/dist/types/types/stream.d.ts +1 -1
  167. package/package.json +80 -17
  168. package/src/graphs/Graph.ts +24 -4
  169. package/src/graphs/__tests__/composition.smoke.test.ts +188 -0
  170. package/src/index.ts +3 -0
  171. package/src/langchain/google-common.ts +1 -0
  172. package/src/langchain/index.ts +8 -0
  173. package/src/langchain/language_models/chat_models.ts +1 -0
  174. package/src/langchain/messages/tool.ts +5 -0
  175. package/src/langchain/messages.ts +21 -0
  176. package/src/langchain/openai.ts +1 -0
  177. package/src/langchain/prompts.ts +1 -0
  178. package/src/langchain/runnables.ts +7 -0
  179. package/src/langchain/tools.ts +8 -0
  180. package/src/langchain/utils/env.ts +1 -0
  181. package/src/llm/anthropic/index.ts +252 -84
  182. package/src/llm/anthropic/llm.spec.ts +751 -102
  183. package/src/llm/anthropic/types.ts +9 -1
  184. package/src/llm/anthropic/utils/message_inputs.ts +37 -19
  185. package/src/llm/anthropic/utils/message_outputs.ts +119 -101
  186. package/src/llm/bedrock/index.ts +2 -2
  187. package/src/llm/bedrock/llm.spec.ts +341 -0
  188. package/src/llm/bedrock/utils/message_inputs.ts +303 -4
  189. package/src/llm/bedrock/utils/message_outputs.ts +2 -1
  190. package/src/llm/custom-chat-models.smoke.test.ts +836 -0
  191. package/src/llm/google/llm.spec.ts +339 -57
  192. package/src/llm/google/utils/common.ts +53 -48
  193. package/src/llm/openai/contentBlocks.test.ts +346 -0
  194. package/src/llm/openai/index.ts +856 -833
  195. package/src/llm/openai/utils/index.ts +107 -78
  196. package/src/llm/openai/utils/messages.test.ts +159 -0
  197. package/src/llm/openrouter/index.ts +124 -247
  198. package/src/llm/openrouter/reasoning.test.ts +8 -1
  199. package/src/llm/vertexai/index.ts +11 -5
  200. package/src/llm/vertexai/llm.spec.ts +28 -1
  201. package/src/messages/cache.test.ts +4 -3
  202. package/src/messages/cache.ts +3 -2
  203. package/src/messages/core.ts +16 -9
  204. package/src/messages/format.ts +96 -16
  205. package/src/messages/formatAgentMessages.test.ts +166 -1
  206. package/src/messages/langchain.ts +39 -0
  207. package/src/messages/prune.ts +12 -8
  208. package/src/scripts/caching.ts +2 -3
  209. package/src/specs/summarization.test.ts +51 -58
  210. package/src/tools/ToolNode.ts +5 -1
  211. package/src/tools/search/search.ts +83 -73
  212. package/src/tools/search/tavily-scraper.ts +235 -0
  213. package/src/tools/search/tavily-search.ts +424 -0
  214. package/src/tools/search/tavily.test.ts +965 -0
  215. package/src/tools/search/tool.ts +36 -26
  216. package/src/tools/search/types.ts +134 -11
  217. package/src/tools/search/utils.ts +13 -5
  218. package/src/types/graph.ts +32 -87
  219. package/src/types/llm.ts +3 -3
  220. package/src/types/stream.ts +1 -1
  221. package/src/utils/llmConfig.ts +1 -6
@@ -24,6 +24,7 @@ export * from './common';
24
24
  export * from './utils';
25
25
  export * from './hooks';
26
26
  export type * from './types';
27
+ export * from './langchain';
27
28
  export { CustomOpenAIClient } from './llm/openai';
28
29
  export { ChatOpenRouter } from './llm/openrouter';
29
30
  export type { OpenRouterReasoning, OpenRouterReasoningEffort, ChatOpenRouterCallOptions, } from './llm/openrouter';
@@ -0,0 +1 @@
1
+ export type { GoogleAIToolType } from '@langchain/google-common';
@@ -0,0 +1,8 @@
1
+ export * from './messages';
2
+ export * from './prompts';
3
+ export * from './runnables';
4
+ export * from './tools';
5
+ export * from './google-common';
6
+ export * from './language_models/chat_models';
7
+ export * from './messages/tool';
8
+ export * from './openai';
@@ -0,0 +1 @@
1
+ export type { BindToolsInput } from '@langchain/core/language_models/chat_models';
@@ -0,0 +1 @@
1
+ export type { InvalidToolCall, ToolCall, ToolCallChunk, } from '@langchain/core/messages/tool';
@@ -0,0 +1,2 @@
1
+ export { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, HumanMessage, SystemMessage, ToolMessage, getBufferString, isAIMessage, isBaseMessage, isToolMessage, } from '@langchain/core/messages';
2
+ export type { BaseMessageFields, MessageContent, MessageContentText, MessageContentImageUrl, UsageMetadata, } from '@langchain/core/messages';
@@ -0,0 +1 @@
1
+ export type { AzureOpenAIInput } from '@langchain/openai';
@@ -0,0 +1 @@
1
+ export { PromptTemplate } from '@langchain/core/prompts';
@@ -0,0 +1,2 @@
1
+ export { Runnable, RunnableLambda, RunnableSequence, } from '@langchain/core/runnables';
2
+ export type { RunnableConfig } from '@langchain/core/runnables';
@@ -0,0 +1,2 @@
1
+ export { DynamicStructuredTool, StructuredTool, Tool, tool, } from '@langchain/core/tools';
2
+ export type { StructuredToolInterface } from '@langchain/core/tools';
@@ -0,0 +1 @@
1
+ export { getEnvironmentVariable } from '@langchain/core/utils/env';
@@ -4,18 +4,31 @@ import type { BaseChatModelParams } from '@langchain/core/language_models/chat_m
4
4
  import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';
5
5
  import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
6
6
  import type { AnthropicInput } from '@langchain/anthropic';
7
- import type { AnthropicMessageCreateParams, AnthropicStreamingMessageCreateParams, AnthropicOutputConfig } from '@/llm/anthropic/types';
7
+ import type { Anthropic } from '@anthropic-ai/sdk';
8
+ import type { AnthropicMessageCreateParams, AnthropicStreamingMessageCreateParams, AnthropicOutputConfig, AnthropicBeta, AnthropicMCPServerURLDefinition, AnthropicContextManagementConfigParam } from '@/llm/anthropic/types';
9
+ export declare function _documentsInParams(params: AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams): boolean;
8
10
  export type CustomAnthropicInput = AnthropicInput & {
9
11
  _lc_stream_delay?: number;
10
12
  outputConfig?: AnthropicOutputConfig;
11
13
  inferenceGeo?: string;
12
- contextManagement?: any;
14
+ contextManagement?: AnthropicContextManagementConfigParam;
13
15
  } & BaseChatModelParams;
14
- /**
15
- * A type representing additional parameters that can be passed to the
16
- * Anthropic API.
17
- */
18
- type Kwargs = Record<string, any>;
16
+ export type CustomAnthropicCallOptions = {
17
+ outputConfig?: AnthropicOutputConfig;
18
+ outputFormat?: Anthropic.Messages.JSONOutputFormat;
19
+ inferenceGeo?: string;
20
+ betas?: AnthropicBeta[];
21
+ container?: string;
22
+ mcp_servers?: AnthropicMCPServerURLDefinition[];
23
+ };
24
+ type CustomAnthropicInvocationParams = {
25
+ betas?: AnthropicBeta[];
26
+ container?: string;
27
+ context_management?: AnthropicContextManagementConfigParam;
28
+ inference_geo?: string;
29
+ mcp_servers?: AnthropicMCPServerURLDefinition[];
30
+ output_config?: AnthropicOutputConfig;
31
+ };
19
32
  export declare class CustomAnthropic extends ChatAnthropicMessages {
20
33
  _lc_stream_delay: number;
21
34
  private message_start;
@@ -25,13 +38,13 @@ export declare class CustomAnthropic extends ChatAnthropicMessages {
25
38
  top_k: number | undefined;
26
39
  outputConfig?: AnthropicOutputConfig;
27
40
  inferenceGeo?: string;
28
- contextManagement?: any;
41
+ contextManagement?: AnthropicContextManagementConfigParam;
29
42
  constructor(fields?: CustomAnthropicInput);
30
43
  static lc_name(): 'LibreChatAnthropic';
31
44
  /**
32
45
  * Get the parameters used to invoke the model
33
46
  */
34
- invocationParams(options?: this['ParsedCallOptions']): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, 'messages'> & Kwargs;
47
+ invocationParams(options?: this['ParsedCallOptions'] & CustomAnthropicCallOptions): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, 'messages'> & CustomAnthropicInvocationParams;
35
48
  /**
36
49
  * Get stream usage as returned by this client's API response.
37
50
  * @returns The stream usage object.
@@ -1,5 +1,7 @@
1
1
  import Anthropic from '@anthropic-ai/sdk';
2
- import { BindToolsInput } from '@langchain/core/language_models/chat_models';
2
+ import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
3
+ import type { AnthropicBeta } from '@anthropic-ai/sdk/resources';
4
+ export type { AnthropicBeta };
3
5
  export type AnthropicStreamUsage = Anthropic.Usage;
4
6
  export type AnthropicMessageDeltaEvent = Anthropic.MessageDeltaEvent;
5
7
  export type AnthropicMessageStartEvent = Anthropic.MessageStartEvent;
@@ -14,8 +16,10 @@ export type AnthropicMessageResponse = Anthropic.ContentBlock | AnthropicToolRes
14
16
  export type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
15
17
  export type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
16
18
  export type AnthropicThinkingConfigParam = Anthropic.ThinkingConfigParam;
19
+ export type AnthropicContextManagementConfigParam = Anthropic.Beta.BetaContextManagementConfig;
17
20
  export type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
18
21
  export type AnthropicRequestOptions = Anthropic.RequestOptions;
22
+ export type AnthropicMCPServerURLDefinition = Anthropic.Beta.Messages.BetaRequestMCPServerURLDefinition;
19
23
  export type AnthropicToolChoice = {
20
24
  type: 'tool';
21
25
  name: string;
@@ -1,10 +1,16 @@
1
- /**
2
- * This util file contains functions for converting Anthropic messages to LangChain messages.
3
- */
4
- import Anthropic from '@anthropic-ai/sdk';
1
+ /** This util file contains functions for converting Anthropic messages to LangChain messages. */
5
2
  import { AIMessageChunk } from '@langchain/core/messages';
6
- import { ChatGeneration } from '@langchain/core/outputs';
7
- import { AnthropicMessageResponse } from '../types';
3
+ import type Anthropic from '@anthropic-ai/sdk';
4
+ import type { UsageMetadata } from '@langchain/core/messages';
5
+ import type { ChatGeneration } from '@langchain/core/outputs';
6
+ import type { AnthropicMessageResponse } from '../types';
7
+ interface AnthropicUsageData {
8
+ input_tokens?: number | null;
9
+ output_tokens?: number | null;
10
+ cache_creation_input_tokens?: number | null;
11
+ cache_read_input_tokens?: number | null;
12
+ }
13
+ export declare function getAnthropicUsageMetadata(usage: AnthropicUsageData | null | undefined): UsageMetadata | undefined;
8
14
  export declare function _makeMessageChunkFromAnthropicEvent(data: Anthropic.Beta.Messages.BetaRawMessageStreamEvent, fields: {
9
15
  streamUsage: boolean;
10
16
  coerceContentToString: boolean;
@@ -12,3 +18,4 @@ export declare function _makeMessageChunkFromAnthropicEvent(data: Anthropic.Beta
12
18
  chunk: AIMessageChunk;
13
19
  } | null;
14
20
  export declare function anthropicResponseToChatMessages(messages: AnthropicMessageResponse[], additionalKwargs: Record<string, unknown>): ChatGeneration[];
21
+ export {};
@@ -18,5 +18,5 @@ export declare class AnthropicToolsOutputParser<T extends Record<string, any> =
18
18
  protected _validateResult(result: unknown): Promise<T>;
19
19
  parseResult(generations: ChatGeneration[]): Promise<T>;
20
20
  }
21
- export declare function extractToolCalls(content: Record<string, any>[]): ToolCall[];
21
+ export declare function extractToolCalls(content: Record<string, any>[]): ToolCall<string, Record<string, any>>[];
22
22
  export {};
@@ -1,20 +1,26 @@
1
1
  import { AzureOpenAI as AzureOpenAIClient } from 'openai';
2
2
  import { ChatXAI as OriginalChatXAI } from '@langchain/xai';
3
3
  import { ChatGenerationChunk } from '@langchain/core/outputs';
4
- import { AIMessage } from '@langchain/core/messages';
5
4
  import { ChatDeepSeek as OriginalChatDeepSeek } from '@langchain/deepseek';
6
5
  import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
7
6
  import { OpenAIClient, ChatOpenAI as OriginalChatOpenAI, AzureChatOpenAI as OriginalAzureChatOpenAI } from '@langchain/openai';
8
7
  import type { HeaderValue, HeadersLike } from './types';
9
- import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
10
8
  import type { BaseMessage } from '@langchain/core/messages';
11
- import type { ChatResult } from '@langchain/core/outputs';
9
+ import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
12
10
  import type { ChatXAIInput } from '@langchain/xai';
13
11
  import type * as t from '@langchain/openai';
14
12
  export declare function isHeaders(headers: unknown): headers is Headers;
15
13
  export declare function normalizeHeaders(headers: HeadersLike): Record<string, HeaderValue | readonly HeaderValue[]>;
16
- type OpenAICompletionParam = OpenAIClient.Chat.Completions.ChatCompletionMessageParam;
17
14
  type OpenAICoreRequestOptions = OpenAIClient.RequestOptions;
15
+ type LibreChatOpenAIFields = t.ChatOpenAIFields & {
16
+ _lc_stream_delay?: number;
17
+ includeReasoningContent?: boolean;
18
+ includeReasoningDetails?: boolean;
19
+ convertReasoningDetailsToContent?: boolean;
20
+ };
21
+ type LibreChatAzureOpenAIFields = t.AzureOpenAIInput & {
22
+ _lc_stream_delay?: number;
23
+ };
18
24
  /**
19
25
  * Formats a tool in either OpenAI format, or LangChain structured tool format
20
26
  * into an OpenAI tool format. If the tool is already in OpenAI format, return without
@@ -41,15 +47,12 @@ export declare class CustomAzureOpenAIClient extends AzureOpenAIClient {
41
47
  abortHandler?: () => void;
42
48
  fetchWithTimeout(url: RequestInfo, init: RequestInit | undefined, ms: number, controller: AbortController): Promise<Response>;
43
49
  }
44
- /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
45
50
  export declare class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
46
51
  _lc_stream_delay?: number;
47
- constructor(fields?: t.ChatOpenAICallOptions & {
48
- _lc_stream_delay?: number;
49
- } & t.OpenAIChatInput['modelKwargs']);
52
+ constructor(fields?: LibreChatOpenAIFields & t.OpenAIChatInput['modelKwargs']);
50
53
  get exposedClient(): CustomOpenAIClient;
51
54
  static lc_name(): string;
52
- protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
55
+ _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
53
56
  /**
54
57
  * Returns backwards compatible reasoning parameters from constructor params and call options
55
58
  * @internal
@@ -57,14 +60,10 @@ export declare class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptio
57
60
  getReasoningParams(options?: this['ParsedCallOptions']): OpenAIClient.Reasoning | undefined;
58
61
  protected _getReasoningParams(options?: this['ParsedCallOptions']): OpenAIClient.Reasoning | undefined;
59
62
  _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
60
- _streamResponseChunks2(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
61
63
  }
62
- /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
63
64
  export declare class AzureChatOpenAI extends OriginalAzureChatOpenAI {
64
65
  _lc_stream_delay?: number;
65
- constructor(fields?: t.AzureOpenAIInput & {
66
- _lc_stream_delay?: number;
67
- });
66
+ constructor(fields?: LibreChatAzureOpenAIFields);
68
67
  get exposedClient(): CustomOpenAIClient;
69
68
  static lc_name(): 'LibreChatAzureOpenAI';
70
69
  /**
@@ -73,16 +72,17 @@ export declare class AzureChatOpenAI extends OriginalAzureChatOpenAI {
73
72
  */
74
73
  getReasoningParams(options?: this['ParsedCallOptions']): OpenAIClient.Reasoning | undefined;
75
74
  protected _getReasoningParams(options?: this['ParsedCallOptions']): OpenAIClient.Reasoning | undefined;
76
- protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
75
+ _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
77
76
  _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
78
77
  }
79
78
  export declare class ChatDeepSeek extends OriginalChatDeepSeek {
79
+ _lc_stream_delay?: number;
80
+ constructor(fields?: ConstructorParameters<typeof OriginalChatDeepSeek>[0] & {
81
+ _lc_stream_delay?: number;
82
+ });
80
83
  get exposedClient(): CustomOpenAIClient;
81
84
  static lc_name(): 'LibreChatDeepSeek';
82
- protected _convertMessages(messages: BaseMessage[]): OpenAICompletionParam[];
83
- _generate(messages: BaseMessage[], options: this['ParsedCallOptions'] | undefined, runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
84
- protected _convertResponseToMessage(choice: OpenAIClient.Chat.Completions.ChatCompletion.Choice, data: OpenAIClient.Chat.Completions.ChatCompletion): AIMessage;
85
- protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
85
+ _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
86
86
  _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
87
87
  }
88
88
  /** xAI-specific usage metadata type */
@@ -102,11 +102,8 @@ export interface XAIUsageMetadata extends OpenAIClient.Completions.CompletionUsa
102
102
  num_sources_used?: number;
103
103
  }
104
104
  export declare class ChatMoonshot extends ChatOpenAI {
105
+ constructor(fields?: LibreChatOpenAIFields & t.OpenAIChatInput['modelKwargs']);
105
106
  static lc_name(): 'LibreChatMoonshot';
106
- protected _convertMessages(messages: BaseMessage[]): OpenAICompletionParam[];
107
- _generate(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
108
- protected _convertResponseToMessage(choice: OpenAIClient.Chat.Completions.ChatCompletion.Choice, data: OpenAIClient.Chat.Completions.ChatCompletion): AIMessage;
109
- _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
110
107
  }
111
108
  export declare class ChatXAI extends OriginalChatXAI {
112
109
  _lc_stream_delay?: number;
@@ -121,7 +118,7 @@ export declare class ChatXAI extends OriginalChatXAI {
121
118
  });
122
119
  static lc_name(): 'LibreChatXAI';
123
120
  get exposedClient(): CustomOpenAIClient;
124
- protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
121
+ _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
125
122
  _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
126
123
  }
127
124
  export {};
@@ -1,7 +1,7 @@
1
1
  import { ChatOpenAI } from '@/llm/openai';
2
- import { ChatGenerationChunk } from '@langchain/core/outputs';
3
- import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
4
- import type { FunctionMessageChunk, SystemMessageChunk, HumanMessageChunk, ToolMessageChunk, ChatMessageChunk, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
2
+ import type { BaseMessage } from '@langchain/core/messages';
3
+ import type { ChatGenerationChunk } from '@langchain/core/outputs';
4
+ import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
5
5
  import type { ChatOpenAICallOptions, OpenAIChatInput, OpenAIClient } from '@langchain/openai';
6
6
  export type OpenRouterReasoningEffort = 'xhigh' | 'high' | 'medium' | 'low' | 'minimal' | 'none';
7
7
  export interface OpenRouterReasoning {
@@ -16,20 +16,22 @@ export interface ChatOpenRouterCallOptions extends Omit<ChatOpenAICallOptions, '
16
16
  reasoning?: OpenRouterReasoning;
17
17
  modelKwargs?: OpenAIChatInput['modelKwargs'];
18
18
  }
19
+ export type ChatOpenRouterInput = Partial<ChatOpenRouterCallOptions & OpenAIChatInput>;
19
20
  /** invocationParams return type extended with OpenRouter reasoning */
20
21
  export type OpenRouterInvocationParams = Omit<OpenAIClient.Chat.ChatCompletionCreateParams, 'messages'> & {
21
22
  reasoning?: OpenRouterReasoning;
22
23
  };
24
+ type InvocationParamsExtra = {
25
+ streaming?: boolean;
26
+ };
23
27
  export declare class ChatOpenRouter extends ChatOpenAI {
24
28
  private openRouterReasoning?;
25
29
  /** @deprecated Use `reasoning` object instead */
26
30
  private includeReasoning?;
27
- constructor(_fields: Partial<ChatOpenRouterCallOptions>);
31
+ constructor(_fields: ChatOpenRouterInput);
28
32
  static lc_name(): 'LibreChatOpenRouter';
29
- invocationParams(options?: this['ParsedCallOptions'], extra?: {
30
- streaming?: boolean;
31
- }): OpenRouterInvocationParams;
33
+ invocationParams(options?: this['ParsedCallOptions'], extra?: InvocationParamsExtra): OpenRouterInvocationParams;
32
34
  private buildOpenRouterReasoning;
33
- protected _convertOpenAIDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.ChatCompletionChunk, defaultRole?: 'function' | 'user' | 'system' | 'developer' | 'assistant' | 'tool'): AIMessageChunk | HumanMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
34
- _streamResponseChunks2(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
35
+ _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
35
36
  }
37
+ export {};
@@ -288,6 +288,7 @@ export declare class ChatVertexAI extends ChatGoogle {
288
288
  dynamicThinkingBudget: boolean;
289
289
  thinkingConfig?: GoogleThinkingConfig;
290
290
  static lc_name(): 'LibreChatVertexAI';
291
+ constructor(model: string, fields?: Omit<VertexAIClientOptions, 'model'>);
291
292
  constructor(fields?: VertexAIClientOptions);
292
293
  invocationParams(options?: this['ParsedCallOptions'] | undefined): GoogleAIModelRequestParams;
293
294
  buildConnection(fields: VertexAIClientOptions | undefined, client: GoogleAbstractedClient): void;
@@ -36,5 +36,8 @@ export declare function stripBedrockCacheControl<T extends MessageWithContent>(m
36
36
  * @param messages - The array of message objects.
37
37
  * @returns - A new array of message objects with cache points added.
38
38
  */
39
- export declare function addBedrockCacheControl<T extends Partial<BaseMessage> & MessageWithContent>(messages: T[]): T[];
39
+ export declare function addBedrockCacheControl<T extends MessageWithContent & {
40
+ getType?: () => string;
41
+ role?: string;
42
+ }>(messages: T[]): T[];
40
43
  export {};
@@ -84,6 +84,9 @@ interface LangChainMessage {
84
84
  * @returns - The formatted LangChain message.
85
85
  */
86
86
  export declare const formatFromLangChain: (message: LangChainMessage) => Record<string, any>;
87
+ interface FormatAgentMessagesOptions {
88
+ provider?: Providers;
89
+ }
87
90
  /**
88
91
  * Groups content parts by agent and formats them with agent labels
89
92
  * This preprocesses multi-agent content to prevent identity confusion
@@ -107,7 +110,7 @@ export declare const labelContentByAgent: (contentParts: MessageContentComplex[]
107
110
  * @param skills - Optional map of skill name to body for reconstructing skill HumanMessages.
108
111
  * @returns - Object containing formatted messages and updated indexTokenCountMap if provided.
109
112
  */
110
- export declare const formatAgentMessages: (payload: TPayload, indexTokenCountMap?: Record<number, number | undefined>, tools?: Set<string>, skills?: Map<string, string>) => {
113
+ export declare const formatAgentMessages: (payload: TPayload, indexTokenCountMap?: Record<number, number | undefined>, tools?: Set<string>, skills?: Map<string, string>, options?: FormatAgentMessagesOptions) => {
111
114
  messages: Array<HumanMessage | AIMessage | SystemMessage | ToolMessage>;
112
115
  indexTokenCountMap?: Record<number, number>;
113
116
  /** Cross-run summary extracted from the payload. Should be forwarded to the
@@ -0,0 +1,27 @@
1
+ import type { MessageContent } from '@langchain/core/messages';
2
+ import type * as t from '@/types';
3
+ type LibreChatMessageContent = MessageContent | string | t.MessageContentComplex[] | t.ExtendedMessageContent[];
4
+ type WithLangChainContent<T extends {
5
+ content: LibreChatMessageContent;
6
+ }> = Omit<T, 'content'> & {
7
+ content: MessageContent;
8
+ };
9
+ /**
10
+ * Bridges LibreChat's extended content blocks to LangChain 1.x MessageContent.
11
+ *
12
+ * LangChain 1.x narrowed message constructor types around ContentBlock, while
13
+ * LibreChat still carries provider-specific blocks through the same content
14
+ * field. This helper keeps the runtime shape unchanged during the dependency
15
+ * upgrade; tracking issue: https://github.com/danny-avila/agents/issues/130.
16
+ */
17
+ export declare function toLangChainContent(content: LibreChatMessageContent): MessageContent;
18
+ /**
19
+ * Applies the same LangChain 1.x content bridge to message constructor fields.
20
+ *
21
+ * Keep this cast-only helper local to constructor boundaries so follow-up work
22
+ * can replace it with aligned content types or explicit conversion logic.
23
+ */
24
+ export declare function toLangChainMessageFields<T extends {
25
+ content: LibreChatMessageContent;
26
+ }>(message: T): WithLangChainContent<T>;
27
+ export {};
@@ -0,0 +1,19 @@
1
+ import type * as t from './types';
2
+ export declare class TavilyScraper implements t.BaseScraper {
3
+ private apiKey;
4
+ private apiUrl;
5
+ private timeout;
6
+ private payloadTimeout;
7
+ private logger;
8
+ private extractDepth;
9
+ private includeImages;
10
+ private includeFavicon;
11
+ private format;
12
+ constructor(config?: t.TavilyScraperConfig);
13
+ scrapeUrl(url: string, options?: t.TavilyScrapeOptions): Promise<[string, t.TavilyScrapeResponse]>;
14
+ scrapeUrls(urls: string[], options?: t.TavilyScrapeOptions): Promise<Array<[string, t.TavilyScrapeResponse]>>;
15
+ private extractBatch;
16
+ extractContent(response: t.TavilyScrapeResponse): [string, undefined | t.References];
17
+ extractMetadata(response: t.TavilyScrapeResponse): t.GenericScrapeMetadata;
18
+ }
19
+ export declare const createTavilyScraper: (config?: t.TavilyScraperConfig) => TavilyScraper;
@@ -0,0 +1,4 @@
1
+ import type * as t from './types';
2
+ export declare const createTavilyAPI: (apiKey?: string, apiUrl?: string, options?: t.TavilySearchOptions) => {
3
+ getSources: (params: t.GetSourcesParams) => Promise<t.SearchResult>;
4
+ };
@@ -2,8 +2,8 @@ import type { Logger as WinstonLogger } from 'winston';
2
2
  import type { RunnableConfig } from '@langchain/core/runnables';
3
3
  import type { BaseReranker } from './rerankers';
4
4
  import { DATE_RANGE } from './schema';
5
- export type SearchProvider = 'serper' | 'searxng';
6
- export type ScraperProvider = 'firecrawl' | 'serper';
5
+ export type SearchProvider = 'serper' | 'searxng' | 'tavily';
6
+ export type ScraperProvider = 'firecrawl' | 'serper' | 'tavily';
7
7
  export type RerankerType = 'infinity' | 'jina' | 'cohere' | 'none';
8
8
  export interface Highlight {
9
9
  score: number;
@@ -56,11 +56,50 @@ export interface Source {
56
56
  snippet?: string;
57
57
  date?: string;
58
58
  }
59
+ export type TavilyTimeRange = 'day' | 'week' | 'month' | 'year';
60
+ export type TavilyTimeRangeInput = TavilyTimeRange | 'h' | 'd' | 'w' | 'm' | 'y';
61
+ export interface TavilySearchOptions {
62
+ searchDepth?: 'basic' | 'advanced' | 'fast' | 'ultra-fast';
63
+ maxResults?: number;
64
+ includeImages?: boolean;
65
+ includeAnswer?: boolean | 'basic' | 'advanced';
66
+ includeRawContent?: boolean | 'markdown' | 'text';
67
+ includeDomains?: string[];
68
+ excludeDomains?: string[];
69
+ topic?: 'general' | 'news' | 'finance';
70
+ timeRange?: TavilyTimeRangeInput;
71
+ includeImageDescriptions?: boolean;
72
+ includeFavicon?: boolean;
73
+ chunksPerSource?: number;
74
+ safeSearch?: boolean;
75
+ timeout?: number;
76
+ }
77
+ export interface TavilySearchPayload {
78
+ query: string;
79
+ search_depth: NonNullable<TavilySearchOptions['searchDepth']>;
80
+ topic: NonNullable<TavilySearchOptions['topic']>;
81
+ max_results: number;
82
+ safe_search?: boolean;
83
+ time_range?: TavilyTimeRange;
84
+ country?: string;
85
+ include_images?: boolean;
86
+ include_answer?: NonNullable<TavilySearchOptions['includeAnswer']>;
87
+ include_raw_content?: NonNullable<TavilySearchOptions['includeRawContent']>;
88
+ include_domains?: string[];
89
+ exclude_domains?: string[];
90
+ include_image_descriptions?: boolean;
91
+ include_favicon?: boolean;
92
+ chunks_per_source?: number;
93
+ }
59
94
  export interface SearchConfig {
60
95
  searchProvider?: SearchProvider;
61
96
  serperApiKey?: string;
62
97
  searxngInstanceUrl?: string;
63
98
  searxngApiKey?: string;
99
+ tavilyApiKey?: string;
100
+ tavilySearchUrl?: string;
101
+ tavilyExtractUrl?: string;
102
+ tavilySearchOptions?: TavilySearchOptions;
64
103
  }
65
104
  export type References = {
66
105
  links: MediaReference[];
@@ -95,6 +134,16 @@ export interface SerperScraperConfig {
95
134
  logger?: Logger;
96
135
  includeMarkdown?: boolean;
97
136
  }
137
+ export interface TavilyScraperConfig {
138
+ apiKey?: string;
139
+ apiUrl?: string;
140
+ timeout?: number;
141
+ logger?: Logger;
142
+ extractDepth?: 'basic' | 'advanced';
143
+ includeImages?: boolean;
144
+ includeFavicon?: boolean;
145
+ format?: 'markdown' | 'text';
146
+ }
98
147
  export interface ScraperContentResult {
99
148
  content: string;
100
149
  }
@@ -135,6 +184,7 @@ export interface CohereRerankerResponse {
135
184
  export type SafeSearchLevel = 0 | 1 | 2;
136
185
  export type Logger = WinstonLogger;
137
186
  export interface SearchToolConfig extends SearchConfig, ProcessSourcesConfig, FirecrawlConfig {
187
+ tavilyScraperOptions?: TavilyScraperConfig;
138
188
  logger?: Logger;
139
189
  safeSearch?: SafeSearchLevel;
140
190
  jinaApiKey?: string;
@@ -157,15 +207,27 @@ export type UsedReferences = {
157
207
  originalIndex: number;
158
208
  reference: MediaReference;
159
209
  }[];
210
+ export type AnyScraperResponse = FirecrawlScrapeResponse | SerperScrapeResponse | TavilyScrapeResponse;
160
211
  /** Base Scraper Interface */
161
212
  export interface BaseScraper {
162
- scrapeUrl(url: string, options?: unknown): Promise<[string, FirecrawlScrapeResponse | SerperScrapeResponse]>;
163
- extractContent(response: FirecrawlScrapeResponse | SerperScrapeResponse): [string, undefined | References];
164
- extractMetadata(response: FirecrawlScrapeResponse | SerperScrapeResponse): ScrapeMetadata | Record<string, string | number | boolean | null | undefined>;
213
+ scrapeUrl(url: string, options?: unknown): Promise<[string, AnyScraperResponse]>;
214
+ scrapeUrls?(urls: string[], options?: unknown): Promise<Array<[string, AnyScraperResponse]>>;
215
+ extractContent(response: AnyScraperResponse): [string, undefined | References];
216
+ extractMetadata(response: AnyScraperResponse): ScrapeMetadata | GenericScrapeMetadata;
165
217
  }
166
218
  /** Firecrawl */
167
219
  export type FirecrawlScrapeOptions = Omit<FirecrawlScraperConfig, 'apiKey' | 'apiUrl' | 'version' | 'logger'>;
168
220
  export type SerperScrapeOptions = Omit<SerperScraperConfig, 'apiKey' | 'apiUrl' | 'logger'>;
221
+ export type TavilyScrapeOptions = Omit<TavilyScraperConfig, 'apiKey' | 'apiUrl' | 'logger'>;
222
+ export interface TavilyExtractPayload {
223
+ urls: string[];
224
+ extract_depth: NonNullable<TavilyScraperConfig['extractDepth']>;
225
+ include_images: boolean;
226
+ include_favicon?: boolean;
227
+ format?: NonNullable<TavilyScraperConfig['format']>;
228
+ timeout?: number;
229
+ }
230
+ export type GenericScrapeMetadata = Record<string, string | number | boolean | null | undefined>;
169
231
  export interface ScrapeMetadata {
170
232
  sourceURL?: string;
171
233
  url?: string;
@@ -241,6 +303,38 @@ export interface SerperScrapeResponse {
241
303
  };
242
304
  error?: string;
243
305
  }
306
+ export interface TavilyScrapeResponse {
307
+ success: boolean;
308
+ data?: {
309
+ rawContent?: string;
310
+ images?: string[];
311
+ favicon?: string;
312
+ };
313
+ error?: string;
314
+ }
315
+ export interface TavilySearchResult {
316
+ title?: string;
317
+ url?: string;
318
+ content?: string;
319
+ score?: number;
320
+ published_date?: string;
321
+ }
322
+ export type TavilyImageResult = string | {
323
+ url?: string;
324
+ description?: string;
325
+ };
326
+ export interface TavilySearchResponse {
327
+ answer?: string;
328
+ images?: TavilyImageResult[];
329
+ results?: TavilySearchResult[];
330
+ }
331
+ export interface TavilyExtractResult {
332
+ url: string;
333
+ raw_content?: string;
334
+ images?: string[];
335
+ favicon?: string;
336
+ error?: string;
337
+ }
244
338
  export interface FirecrawlScraperConfig {
245
339
  apiKey?: string;
246
340
  apiUrl?: string;
@@ -6,5 +6,5 @@ import type * as t from './types';
6
6
  */
7
7
  export declare const createDefaultLogger: () => t.Logger;
8
8
  export declare const fileExtRegex: RegExp;
9
- export declare const getDomainName: (link: string, metadata?: t.ScrapeMetadata, logger?: t.Logger) => string | undefined;
10
- export declare function getAttribution(link: string, metadata?: t.ScrapeMetadata, logger?: t.Logger): string | undefined;
9
+ export declare const getDomainName: (link: string, metadata?: t.ScrapeMetadata | t.GenericScrapeMetadata, logger?: t.Logger) => string | undefined;
10
+ export declare function getAttribution(link: string, metadata?: t.ScrapeMetadata | t.GenericScrapeMetadata, logger?: t.Logger): string | undefined;