@librechat/agents 2.4.321 → 3.0.0-rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (266)
  1. package/dist/cjs/agents/AgentContext.cjs +218 -0
  2. package/dist/cjs/agents/AgentContext.cjs.map +1 -0
  3. package/dist/cjs/common/enum.cjs +14 -5
  4. package/dist/cjs/common/enum.cjs.map +1 -1
  5. package/dist/cjs/events.cjs +10 -6
  6. package/dist/cjs/events.cjs.map +1 -1
  7. package/dist/cjs/graphs/Graph.cjs +309 -212
  8. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  9. package/dist/cjs/graphs/MultiAgentGraph.cjs +322 -0
  10. package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -0
  11. package/dist/cjs/llm/anthropic/index.cjs +54 -9
  12. package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
  13. package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
  14. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +52 -6
  15. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
  16. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +22 -2
  17. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
  18. package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
  19. package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
  20. package/dist/cjs/llm/google/index.cjs +144 -0
  21. package/dist/cjs/llm/google/index.cjs.map +1 -0
  22. package/dist/cjs/llm/google/utils/common.cjs +477 -0
  23. package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
  24. package/dist/cjs/llm/ollama/index.cjs +67 -0
  25. package/dist/cjs/llm/ollama/index.cjs.map +1 -0
  26. package/dist/cjs/llm/ollama/utils.cjs +158 -0
  27. package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
  28. package/dist/cjs/llm/openai/index.cjs +389 -3
  29. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  30. package/dist/cjs/llm/openai/utils/index.cjs +672 -0
  31. package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
  32. package/dist/cjs/llm/providers.cjs +15 -15
  33. package/dist/cjs/llm/providers.cjs.map +1 -1
  34. package/dist/cjs/llm/text.cjs +14 -3
  35. package/dist/cjs/llm/text.cjs.map +1 -1
  36. package/dist/cjs/llm/vertexai/index.cjs +330 -0
  37. package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
  38. package/dist/cjs/main.cjs +11 -0
  39. package/dist/cjs/main.cjs.map +1 -1
  40. package/dist/cjs/run.cjs +120 -81
  41. package/dist/cjs/run.cjs.map +1 -1
  42. package/dist/cjs/stream.cjs +85 -51
  43. package/dist/cjs/stream.cjs.map +1 -1
  44. package/dist/cjs/tools/ToolNode.cjs +10 -4
  45. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  46. package/dist/cjs/tools/handlers.cjs +119 -13
  47. package/dist/cjs/tools/handlers.cjs.map +1 -1
  48. package/dist/cjs/tools/search/anthropic.cjs +40 -0
  49. package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
  50. package/dist/cjs/tools/search/firecrawl.cjs +61 -13
  51. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  52. package/dist/cjs/tools/search/format.cjs +9 -3
  53. package/dist/cjs/tools/search/format.cjs.map +1 -1
  54. package/dist/cjs/tools/search/rerankers.cjs +35 -50
  55. package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
  56. package/dist/cjs/tools/search/schema.cjs +70 -0
  57. package/dist/cjs/tools/search/schema.cjs.map +1 -0
  58. package/dist/cjs/tools/search/search.cjs +145 -38
  59. package/dist/cjs/tools/search/search.cjs.map +1 -1
  60. package/dist/cjs/tools/search/tool.cjs +165 -48
  61. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  62. package/dist/cjs/tools/search/utils.cjs +34 -5
  63. package/dist/cjs/tools/search/utils.cjs.map +1 -1
  64. package/dist/cjs/utils/events.cjs +31 -0
  65. package/dist/cjs/utils/events.cjs.map +1 -0
  66. package/dist/cjs/utils/title.cjs +57 -21
  67. package/dist/cjs/utils/title.cjs.map +1 -1
  68. package/dist/cjs/utils/tokens.cjs +54 -7
  69. package/dist/cjs/utils/tokens.cjs.map +1 -1
  70. package/dist/esm/agents/AgentContext.mjs +216 -0
  71. package/dist/esm/agents/AgentContext.mjs.map +1 -0
  72. package/dist/esm/common/enum.mjs +15 -6
  73. package/dist/esm/common/enum.mjs.map +1 -1
  74. package/dist/esm/events.mjs +10 -6
  75. package/dist/esm/events.mjs.map +1 -1
  76. package/dist/esm/graphs/Graph.mjs +311 -214
  77. package/dist/esm/graphs/Graph.mjs.map +1 -1
  78. package/dist/esm/graphs/MultiAgentGraph.mjs +320 -0
  79. package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -0
  80. package/dist/esm/llm/anthropic/index.mjs +54 -9
  81. package/dist/esm/llm/anthropic/index.mjs.map +1 -1
  82. package/dist/esm/llm/anthropic/types.mjs.map +1 -1
  83. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +52 -6
  84. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
  85. package/dist/esm/llm/anthropic/utils/message_outputs.mjs +22 -2
  86. package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
  87. package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
  88. package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
  89. package/dist/esm/llm/google/index.mjs +142 -0
  90. package/dist/esm/llm/google/index.mjs.map +1 -0
  91. package/dist/esm/llm/google/utils/common.mjs +471 -0
  92. package/dist/esm/llm/google/utils/common.mjs.map +1 -0
  93. package/dist/esm/llm/ollama/index.mjs +65 -0
  94. package/dist/esm/llm/ollama/index.mjs.map +1 -0
  95. package/dist/esm/llm/ollama/utils.mjs +155 -0
  96. package/dist/esm/llm/ollama/utils.mjs.map +1 -0
  97. package/dist/esm/llm/openai/index.mjs +388 -4
  98. package/dist/esm/llm/openai/index.mjs.map +1 -1
  99. package/dist/esm/llm/openai/utils/index.mjs +666 -0
  100. package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
  101. package/dist/esm/llm/providers.mjs +5 -5
  102. package/dist/esm/llm/providers.mjs.map +1 -1
  103. package/dist/esm/llm/text.mjs +14 -3
  104. package/dist/esm/llm/text.mjs.map +1 -1
  105. package/dist/esm/llm/vertexai/index.mjs +328 -0
  106. package/dist/esm/llm/vertexai/index.mjs.map +1 -0
  107. package/dist/esm/main.mjs +6 -5
  108. package/dist/esm/main.mjs.map +1 -1
  109. package/dist/esm/run.mjs +121 -83
  110. package/dist/esm/run.mjs.map +1 -1
  111. package/dist/esm/stream.mjs +87 -54
  112. package/dist/esm/stream.mjs.map +1 -1
  113. package/dist/esm/tools/ToolNode.mjs +10 -4
  114. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  115. package/dist/esm/tools/handlers.mjs +119 -15
  116. package/dist/esm/tools/handlers.mjs.map +1 -1
  117. package/dist/esm/tools/search/anthropic.mjs +37 -0
  118. package/dist/esm/tools/search/anthropic.mjs.map +1 -0
  119. package/dist/esm/tools/search/firecrawl.mjs +61 -13
  120. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  121. package/dist/esm/tools/search/format.mjs +10 -4
  122. package/dist/esm/tools/search/format.mjs.map +1 -1
  123. package/dist/esm/tools/search/rerankers.mjs +35 -50
  124. package/dist/esm/tools/search/rerankers.mjs.map +1 -1
  125. package/dist/esm/tools/search/schema.mjs +61 -0
  126. package/dist/esm/tools/search/schema.mjs.map +1 -0
  127. package/dist/esm/tools/search/search.mjs +146 -39
  128. package/dist/esm/tools/search/search.mjs.map +1 -1
  129. package/dist/esm/tools/search/tool.mjs +164 -47
  130. package/dist/esm/tools/search/tool.mjs.map +1 -1
  131. package/dist/esm/tools/search/utils.mjs +33 -6
  132. package/dist/esm/tools/search/utils.mjs.map +1 -1
  133. package/dist/esm/utils/events.mjs +29 -0
  134. package/dist/esm/utils/events.mjs.map +1 -0
  135. package/dist/esm/utils/title.mjs +57 -22
  136. package/dist/esm/utils/title.mjs.map +1 -1
  137. package/dist/esm/utils/tokens.mjs +54 -8
  138. package/dist/esm/utils/tokens.mjs.map +1 -1
  139. package/dist/types/agents/AgentContext.d.ts +91 -0
  140. package/dist/types/common/enum.d.ts +15 -6
  141. package/dist/types/events.d.ts +5 -4
  142. package/dist/types/graphs/Graph.d.ts +64 -67
  143. package/dist/types/graphs/MultiAgentGraph.d.ts +37 -0
  144. package/dist/types/graphs/index.d.ts +1 -0
  145. package/dist/types/llm/anthropic/index.d.ts +11 -0
  146. package/dist/types/llm/anthropic/types.d.ts +9 -3
  147. package/dist/types/llm/anthropic/utils/message_inputs.d.ts +1 -1
  148. package/dist/types/llm/anthropic/utils/output_parsers.d.ts +4 -4
  149. package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
  150. package/dist/types/llm/google/index.d.ts +13 -0
  151. package/dist/types/llm/google/types.d.ts +32 -0
  152. package/dist/types/llm/google/utils/common.d.ts +19 -0
  153. package/dist/types/llm/google/utils/tools.d.ts +10 -0
  154. package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
  155. package/dist/types/llm/ollama/index.d.ts +7 -0
  156. package/dist/types/llm/ollama/utils.d.ts +7 -0
  157. package/dist/types/llm/openai/index.d.ts +72 -3
  158. package/dist/types/llm/openai/types.d.ts +10 -0
  159. package/dist/types/llm/openai/utils/index.d.ts +20 -0
  160. package/dist/types/llm/text.d.ts +1 -1
  161. package/dist/types/llm/vertexai/index.d.ts +293 -0
  162. package/dist/types/messages/reducer.d.ts +9 -0
  163. package/dist/types/run.d.ts +19 -12
  164. package/dist/types/scripts/ant_web_search.d.ts +1 -0
  165. package/dist/types/scripts/args.d.ts +2 -1
  166. package/dist/types/scripts/handoff-test.d.ts +1 -0
  167. package/dist/types/scripts/multi-agent-conditional.d.ts +1 -0
  168. package/dist/types/scripts/multi-agent-parallel.d.ts +1 -0
  169. package/dist/types/scripts/multi-agent-sequence.d.ts +1 -0
  170. package/dist/types/scripts/multi-agent-test.d.ts +1 -0
  171. package/dist/types/stream.d.ts +10 -3
  172. package/dist/types/tools/CodeExecutor.d.ts +2 -2
  173. package/dist/types/tools/ToolNode.d.ts +1 -1
  174. package/dist/types/tools/handlers.d.ts +17 -4
  175. package/dist/types/tools/search/anthropic.d.ts +16 -0
  176. package/dist/types/tools/search/firecrawl.d.ts +16 -0
  177. package/dist/types/tools/search/rerankers.d.ts +8 -5
  178. package/dist/types/tools/search/schema.d.ts +16 -0
  179. package/dist/types/tools/search/tool.d.ts +13 -0
  180. package/dist/types/tools/search/types.d.ts +64 -9
  181. package/dist/types/tools/search/utils.d.ts +9 -2
  182. package/dist/types/types/graph.d.ts +95 -15
  183. package/dist/types/types/llm.d.ts +24 -10
  184. package/dist/types/types/run.d.ts +46 -8
  185. package/dist/types/types/stream.d.ts +16 -2
  186. package/dist/types/types/tools.d.ts +1 -1
  187. package/dist/types/utils/events.d.ts +6 -0
  188. package/dist/types/utils/title.d.ts +2 -1
  189. package/dist/types/utils/tokens.d.ts +24 -0
  190. package/package.json +35 -18
  191. package/src/agents/AgentContext.ts +315 -0
  192. package/src/common/enum.ts +14 -5
  193. package/src/events.ts +24 -13
  194. package/src/graphs/Graph.ts +495 -312
  195. package/src/graphs/MultiAgentGraph.ts +381 -0
  196. package/src/graphs/index.ts +2 -1
  197. package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
  198. package/src/llm/anthropic/index.ts +78 -13
  199. package/src/llm/anthropic/llm.spec.ts +491 -115
  200. package/src/llm/anthropic/types.ts +39 -3
  201. package/src/llm/anthropic/utils/message_inputs.ts +67 -11
  202. package/src/llm/anthropic/utils/message_outputs.ts +21 -2
  203. package/src/llm/anthropic/utils/output_parsers.ts +25 -6
  204. package/src/llm/anthropic/utils/tools.ts +29 -0
  205. package/src/llm/google/index.ts +218 -0
  206. package/src/llm/google/types.ts +43 -0
  207. package/src/llm/google/utils/common.ts +646 -0
  208. package/src/llm/google/utils/tools.ts +160 -0
  209. package/src/llm/google/utils/zod_to_genai_parameters.ts +86 -0
  210. package/src/llm/ollama/index.ts +89 -0
  211. package/src/llm/ollama/utils.ts +193 -0
  212. package/src/llm/openai/index.ts +600 -14
  213. package/src/llm/openai/types.ts +24 -0
  214. package/src/llm/openai/utils/index.ts +912 -0
  215. package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
  216. package/src/llm/providers.ts +10 -9
  217. package/src/llm/text.ts +26 -7
  218. package/src/llm/vertexai/index.ts +360 -0
  219. package/src/messages/reducer.ts +80 -0
  220. package/src/run.ts +181 -112
  221. package/src/scripts/ant_web_search.ts +158 -0
  222. package/src/scripts/args.ts +12 -8
  223. package/src/scripts/cli4.ts +29 -21
  224. package/src/scripts/cli5.ts +29 -21
  225. package/src/scripts/code_exec.ts +54 -23
  226. package/src/scripts/code_exec_files.ts +48 -17
  227. package/src/scripts/code_exec_simple.ts +46 -27
  228. package/src/scripts/handoff-test.ts +135 -0
  229. package/src/scripts/image.ts +52 -20
  230. package/src/scripts/multi-agent-conditional.ts +220 -0
  231. package/src/scripts/multi-agent-example-output.md +110 -0
  232. package/src/scripts/multi-agent-parallel.ts +337 -0
  233. package/src/scripts/multi-agent-sequence.ts +212 -0
  234. package/src/scripts/multi-agent-test.ts +186 -0
  235. package/src/scripts/search.ts +4 -12
  236. package/src/scripts/simple.ts +25 -10
  237. package/src/scripts/tools.ts +48 -18
  238. package/src/specs/anthropic.simple.test.ts +150 -34
  239. package/src/specs/azure.simple.test.ts +325 -0
  240. package/src/specs/openai.simple.test.ts +140 -33
  241. package/src/specs/openrouter.simple.test.ts +107 -0
  242. package/src/specs/prune.test.ts +4 -9
  243. package/src/specs/reasoning.test.ts +80 -44
  244. package/src/specs/token-memoization.test.ts +39 -0
  245. package/src/stream.test.ts +94 -0
  246. package/src/stream.ts +139 -60
  247. package/src/tools/ToolNode.ts +21 -7
  248. package/src/tools/handlers.ts +192 -18
  249. package/src/tools/search/anthropic.ts +51 -0
  250. package/src/tools/search/firecrawl.ts +78 -24
  251. package/src/tools/search/format.ts +10 -5
  252. package/src/tools/search/rerankers.ts +50 -62
  253. package/src/tools/search/schema.ts +63 -0
  254. package/src/tools/search/search.ts +167 -34
  255. package/src/tools/search/tool.ts +222 -46
  256. package/src/tools/search/types.ts +65 -10
  257. package/src/tools/search/utils.ts +37 -5
  258. package/src/types/graph.ts +272 -103
  259. package/src/types/llm.ts +25 -12
  260. package/src/types/run.ts +51 -13
  261. package/src/types/stream.ts +22 -1
  262. package/src/types/tools.ts +16 -10
  263. package/src/utils/events.ts +32 -0
  264. package/src/utils/llmConfig.ts +20 -8
  265. package/src/utils/title.ts +104 -30
  266. package/src/utils/tokens.ts +69 -10
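The hunks below appear to come from package/src/graphs/Graph.ts (entry 194 above). The headline change for 3.0.0-rc1 visible here is that StandardGraph no longer takes a single provider/tools/clientOptions/instructions set: it is constructed from an agents array, each entry wrapped in an AgentContext, and its dispatchers become async and route through safeDispatchCustomEvent. The following is a minimal sketch of the new constructor shape inferred from this diff only; apart from runId, signal, agents, agentId, tokenCounter, and indexTokenCountMap, the field names are assumptions rather than confirmed API.

    import { StandardGraph } from '@librechat/agents'; // assumes StandardGraph is re-exported from the package root

    const graph = new StandardGraph({
      runId: 'run-1',
      signal: new AbortController().signal,
      agents: [
        {
          // only agentId is visible in this hunk; the remaining fields are assumed per-agent options
          agentId: 'default',
          provider: 'openAI', // assumption
          clientOptions: { model: 'gpt-4o-mini' }, // assumption
          instructions: 'You are a helpful assistant.', // assumption
        },
      ],
      // passed straight through to AgentContext.fromConfig(agentConfig, tokenCounter, indexTokenCountMap)
      tokenCounter: (text: string) => Math.ceil(text.length / 4), // rough stand-in counter
      indexTokenCountMap: {},
    });

    // createWorkflow() compiles a LangGraph StateGraph around the default (first) agent
    const workflow = graph.createWorkflow();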
@@ -4,37 +4,44 @@ import { nanoid } from 'nanoid';
  import { concat } from '@langchain/core/utils/stream';
  import { ToolNode } from '@langchain/langgraph/prebuilt';
  import { ChatVertexAI } from '@langchain/google-vertexai';
- import { START, END, StateGraph } from '@langchain/langgraph';
- import { Runnable, RunnableConfig } from '@langchain/core/runnables';
- import { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';
  import {
- AIMessageChunk,
+ START,
+ END,
+ Command,
+ StateGraph,
+ Annotation,
+ messagesStateReducer,
+ } from '@langchain/langgraph';
+ import {
+ Runnable,
+ RunnableConfig,
+ RunnableLambda,
+ } from '@langchain/core/runnables';
+ import {
  ToolMessage,
  SystemMessage,
+ AIMessageChunk,
  } from '@langchain/core/messages';
  import type {
- BaseMessage,
  BaseMessageFields,
  UsageMetadata,
+ BaseMessage,
  } from '@langchain/core/messages';
+ import type { ToolCall } from '@langchain/core/messages/tool';
  import type * as t from '@/types';
  import {
- Providers,
- GraphEvents,
  GraphNodeKeys,
- StepTypes,
- Callback,
  ContentTypes,
+ GraphEvents,
+ Providers,
+ StepTypes,
  } from '@/common';
- import type { ToolCall } from '@langchain/core/messages/tool';
- import { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';
- import { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';
  import {
- createPruneMessages,
+ formatAnthropicArtifactContent,
+ convertMessagesToContent,
  modifyDeltaProperties,
  formatArtifactPayload,
- convertMessagesToContent,
- formatAnthropicArtifactContent,
+ createPruneMessages,
  } from '@/messages';
  import {
  resetIfNotEmpty,
@@ -43,38 +50,37 @@ import {
  joinKeys,
  sleep,
  } from '@/utils';
+ import { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';
+ import { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';
  import { ChatOpenAI, AzureChatOpenAI } from '@/llm/openai';
+ import { safeDispatchCustomEvent } from '@/utils/events';
+ import { AgentContext } from '@/agents/AgentContext';
  import { createFakeStreamingLLM } from '@/llm/fake';
  import { HandlerRegistry } from '@/events';
 
  const { AGENT, TOOLS } = GraphNodeKeys;
- export type GraphNode = GraphNodeKeys | typeof START;
- export type ClientCallback<T extends unknown[]> = (
- graph: StandardGraph,
- ...args: T
- ) => void;
- export type ClientCallbacks = {
- [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;
- [Callback.TOOL_START]?: ClientCallback<unknown[]>;
- [Callback.TOOL_END]?: ClientCallback<unknown[]>;
- };
- export type SystemCallbacks = {
- [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<
- infer Args
- >
- ? (...args: Args) => void
- : never;
- };
 
  export abstract class Graph<
  T extends t.BaseGraphState = t.BaseGraphState,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- TNodeName extends string = string,
+ _TNodeName extends string = string,
  > {
  abstract resetValues(): void;
- abstract createGraphState(): t.GraphStateChannels<T>;
- abstract initializeTools(): CustomToolNode<T> | ToolNode<T>;
- abstract initializeModel(): Runnable;
+ abstract initializeTools({
+ currentTools,
+ currentToolMap,
+ }: {
+ currentTools?: t.GraphTools;
+ currentToolMap?: t.ToolMap;
+ }): CustomToolNode<T> | ToolNode<T>;
+ abstract initializeModel({
+ currentModel,
+ tools,
+ clientOptions,
+ }: {
+ currentModel?: t.ChatModel;
+ tools?: t.GraphTools;
+ clientOptions?: t.ClientOptions;
+ }): Runnable;
  abstract getRunMessages(): BaseMessage[] | undefined;
  abstract getContentParts(): t.MessageContentComplex[] | undefined;
  abstract generateStepId(stepKey: string): [string, number];
@@ -85,28 +91,32 @@ export abstract class Graph<
  abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;
  abstract getStepIdByKey(stepKey: string, index?: number): string;
  abstract getRunStep(stepId: string): t.RunStep | undefined;
- abstract dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string;
- abstract dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void;
- abstract dispatchMessageDelta(id: string, delta: t.MessageDelta): void;
+ abstract dispatchRunStep(
+ stepKey: string,
+ stepDetails: t.StepDetails
+ ): Promise<string>;
+ abstract dispatchRunStepDelta(
+ id: string,
+ delta: t.ToolCallDelta
+ ): Promise<void>;
+ abstract dispatchMessageDelta(
+ id: string,
+ delta: t.MessageDelta
+ ): Promise<void>;
  abstract dispatchReasoningDelta(
  stepId: string,
  delta: t.ReasoningDelta
- ): void;
+ ): Promise<void>;
  abstract handleToolCallCompleted(
  data: t.ToolEndData,
- metadata?: Record<string, unknown>
- ): void;
-
- abstract createCallModel(): (
- state: T,
- config?: RunnableConfig
- ) => Promise<Partial<T>>;
- abstract createWorkflow(): t.CompiledWorkflow<T>;
- lastToken?: string;
- tokenTypeSwitch?: 'reasoning' | 'content';
- reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';
- currentTokenType: ContentTypes.TEXT | ContentTypes.THINK | 'think_and_text' =
- ContentTypes.TEXT;
+ metadata?: Record<string, unknown>,
+ omitOutput?: boolean
+ ): Promise<void>;
+
+ abstract createCallModel(
+ agentId?: string,
+ currentModel?: t.ChatModel
+ ): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;
  messageStepHasToolCalls: Map<string, boolean> = new Map();
  messageIdsByStepKey: Map<string, string> = new Map();
  prelimMessageIdsByStepKey: Map<string, string> = new Map();
@@ -115,95 +125,52 @@ export abstract class Graph<
  stepKeyIds: Map<string, string[]> = new Map<string, string[]>();
  contentIndexMap: Map<string, number> = new Map();
  toolCallStepIds: Map<string, string> = new Map();
- currentUsage: Partial<UsageMetadata> | undefined;
- indexTokenCountMap: Record<string, number | undefined> = {};
- maxContextTokens: number | undefined;
- pruneMessages?: ReturnType<typeof createPruneMessages>;
- /** The amount of time that should pass before another consecutive API call */
- streamBuffer: number | undefined;
- tokenCounter?: t.TokenCounter;
  signal?: AbortSignal;
+ /** Set of invoked tool call IDs from non-message run steps completed mid-run, if any */
+ invokedToolIds?: Set<string>;
+ handlerRegistry: HandlerRegistry | undefined;
  }
 
- export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
- private graphState: t.GraphStateChannels<t.BaseGraphState>;
- clientOptions: t.ClientOptions;
- boundModel?: Runnable;
- /** The last recorded timestamp that a stream API call was invoked */
- lastStreamCall: number | undefined;
- handlerRegistry: HandlerRegistry | undefined;
- systemMessage: SystemMessage | undefined;
+ export class StandardGraph extends Graph<t.BaseGraphState, t.GraphNode> {
+ overrideModel?: t.ChatModel;
+ /** Optional compile options passed into workflow.compile() */
+ compileOptions?: t.CompileOptions | undefined;
  messages: BaseMessage[] = [];
  runId: string | undefined;
- tools?: t.GenericTool[];
- toolMap?: t.ToolMap;
  startIndex: number = 0;
- provider: Providers;
- toolEnd: boolean;
  signal?: AbortSignal;
+ /** Map of agent contexts by agent ID */
+ agentContexts: Map<string, AgentContext> = new Map();
+ /** Default agent ID to use */
+ defaultAgentId: string;
 
  constructor({
+ // parent-level graph inputs
  runId,
- tools,
  signal,
- toolMap,
- provider,
- streamBuffer,
- instructions,
- reasoningKey,
- clientOptions,
- toolEnd = false,
- additional_instructions = '',
+ agents,
+ tokenCounter,
+ indexTokenCountMap,
  }: t.StandardGraphInput) {
  super();
  this.runId = runId;
- this.tools = tools;
  this.signal = signal;
- this.toolEnd = toolEnd;
- this.toolMap = toolMap;
- this.provider = provider;
- this.streamBuffer = streamBuffer;
- this.clientOptions = clientOptions;
- this.graphState = this.createGraphState();
- this.boundModel = this.initializeModel();
- if (reasoningKey) {
- this.reasoningKey = reasoningKey;
- }
 
- let finalInstructions: string | BaseMessageFields | undefined =
- instructions;
- if (additional_instructions) {
- finalInstructions =
- finalInstructions != null && finalInstructions
- ? `${finalInstructions}\n\n${additional_instructions}`
- : additional_instructions;
+ if (agents.length === 0) {
+ throw new Error('At least one agent configuration is required');
  }
 
- if (
- finalInstructions != null &&
- finalInstructions &&
- provider === Providers.ANTHROPIC &&
- ((
- clientOptions as t.AnthropicClientOptions
- ).clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
- 'prompt-caching'
- ) ??
- false)
- ) {
- finalInstructions = {
- content: [
- {
- type: 'text',
- text: instructions,
- cache_control: { type: 'ephemeral' },
- },
- ],
- };
- }
+ for (const agentConfig of agents) {
+ const agentContext = AgentContext.fromConfig(
+ agentConfig,
+ tokenCounter,
+ indexTokenCountMap
+ );
 
- if (finalInstructions != null && finalInstructions !== '') {
- this.systemMessage = new SystemMessage(finalInstructions);
+ this.agentContexts.set(agentConfig.agentId, agentContext);
  }
+
+ this.defaultAgentId = agents[0].agentId;
  }
 
  /* Init */
@@ -222,23 +189,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  new Map()
  );
  this.messageStepHasToolCalls = resetIfNotEmpty(
- this.prelimMessageIdsByStepKey,
+ this.messageStepHasToolCalls,
  new Map()
  );
  this.prelimMessageIdsByStepKey = resetIfNotEmpty(
  this.prelimMessageIdsByStepKey,
  new Map()
  );
- this.currentTokenType = resetIfNotEmpty(
- this.currentTokenType,
- ContentTypes.TEXT
- );
- this.lastToken = resetIfNotEmpty(this.lastToken, undefined);
- this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);
- this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});
- this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);
- this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);
- this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);
+ this.invokedToolIds = resetIfNotEmpty(this.invokedToolIds, undefined);
+ for (const context of this.agentContexts.values()) {
+ context.reset();
+ }
  }
 
  /* Run Step Processing */
@@ -251,6 +212,33 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  return undefined;
  }
 
+ getAgentContext(metadata: Record<string, unknown> | undefined): AgentContext {
+ if (!metadata) {
+ throw new Error('No metadata provided to retrieve agent context');
+ }
+
+ const currentNode = metadata.langgraph_node as string;
+ if (!currentNode) {
+ throw new Error(
+ 'No langgraph_node in metadata to retrieve agent context'
+ );
+ }
+
+ let agentId: string | undefined;
+ if (currentNode.startsWith(AGENT)) {
+ agentId = currentNode.substring(AGENT.length);
+ } else if (currentNode.startsWith(TOOLS)) {
+ agentId = currentNode.substring(TOOLS.length);
+ }
+
+ const agentContext = this.agentContexts.get(agentId ?? '');
+ if (!agentContext) {
+ throw new Error(`No agent context found for agent ID ${agentId}`);
+ }
+
+ return agentContext;
+ }
+
  getStepKey(metadata: Record<string, unknown> | undefined): string {
  if (!metadata) return '';
 
@@ -304,13 +292,19 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  metadata.langgraph_step as number,
  metadata.checkpoint_ns as string,
  ];
+
+ const agentContext = this.getAgentContext(metadata);
  if (
- this.currentTokenType === ContentTypes.THINK ||
- this.currentTokenType === 'think_and_text'
+ agentContext.currentTokenType === ContentTypes.THINK ||
+ agentContext.currentTokenType === 'think_and_text'
  ) {
  keyList.push('reasoning');
  }
 
+ if (this.invokedToolIds != null && this.invokedToolIds.size > 0) {
+ keyList.push(this.invokedToolIds.size + '');
+ }
+
  return keyList;
  }
 
@@ -330,87 +324,127 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
  /* Graph */
 
- createGraphState(): t.GraphStateChannels<t.BaseGraphState> {
- return {
- messages: {
- value: (x: BaseMessage[], y: BaseMessage[]): BaseMessage[] => {
- if (!x.length) {
- if (this.systemMessage) {
- x.push(this.systemMessage);
- }
+ createSystemRunnable({
+ provider,
+ clientOptions,
+ instructions,
+ additional_instructions,
+ }: {
+ provider?: Providers;
+ clientOptions?: t.ClientOptions;
+ instructions?: string;
+ additional_instructions?: string;
+ }): t.SystemRunnable | undefined {
+ let finalInstructions: string | BaseMessageFields | undefined =
+ instructions;
+ if (additional_instructions != null && additional_instructions !== '') {
+ finalInstructions =
+ finalInstructions != null && finalInstructions
+ ? `${finalInstructions}\n\n${additional_instructions}`
+ : additional_instructions;
+ }
 
- this.startIndex = x.length + y.length;
- }
- const current = x.concat(y);
- this.messages = current;
- return current;
- },
- default: () => [],
- },
- };
+ if (
+ finalInstructions != null &&
+ finalInstructions &&
+ provider === Providers.ANTHROPIC &&
+ ((
+ (clientOptions as t.AnthropicClientOptions).clientOptions
+ ?.defaultHeaders as Record<string, string> | undefined
+ )?.['anthropic-beta']?.includes('prompt-caching') ??
+ false)
+ ) {
+ finalInstructions = {
+ content: [
+ {
+ type: 'text',
+ text: instructions,
+ cache_control: { type: 'ephemeral' },
+ },
+ ],
+ };
+ }
+
+ if (finalInstructions != null && finalInstructions !== '') {
+ const systemMessage = new SystemMessage(finalInstructions);
+ return RunnableLambda.from((messages: BaseMessage[]) => {
+ return [systemMessage, ...messages];
+ }).withConfig({ runName: 'prompt' });
+ }
  }
 
- initializeTools():
- | CustomToolNode<t.BaseGraphState>
- | ToolNode<t.BaseGraphState> {
+ initializeTools({
+ currentTools,
+ currentToolMap,
+ }: {
+ currentTools?: t.GraphTools;
+ currentToolMap?: t.ToolMap;
+ }): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {
  // return new ToolNode<t.BaseGraphState>(this.tools);
  return new CustomToolNode<t.BaseGraphState>({
- tools: this.tools || [],
- toolMap: this.toolMap,
+ tools: (currentTools as t.GenericTool[] | undefined) ?? [],
+ toolMap: currentToolMap,
  toolCallStepIds: this.toolCallStepIds,
  errorHandler: (data, metadata) =>
  StandardGraph.handleToolCallErrorStatic(this, data, metadata),
  });
  }
 
- initializeModel(): Runnable {
- const ChatModelClass = getChatModelClass(this.provider);
- const model = new ChatModelClass(this.clientOptions);
+ initializeModel({
+ provider,
+ tools,
+ clientOptions,
+ }: {
+ provider: Providers;
+ tools?: t.GraphTools;
+ clientOptions?: t.ClientOptions;
+ }): Runnable {
+ const ChatModelClass = getChatModelClass(provider);
+ const model = new ChatModelClass(clientOptions ?? {});
 
  if (
- isOpenAILike(this.provider) &&
+ isOpenAILike(provider) &&
  (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)
  ) {
- model.temperature = (this.clientOptions as t.OpenAIClientOptions)
+ model.temperature = (clientOptions as t.OpenAIClientOptions)
  .temperature as number;
- model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;
- model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions)
+ model.topP = (clientOptions as t.OpenAIClientOptions).topP as number;
+ model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions)
  .frequencyPenalty as number;
- model.presencePenalty = (this.clientOptions as t.OpenAIClientOptions)
+ model.presencePenalty = (clientOptions as t.OpenAIClientOptions)
  .presencePenalty as number;
- model.n = (this.clientOptions as t.OpenAIClientOptions).n as number;
+ model.n = (clientOptions as t.OpenAIClientOptions).n as number;
  } else if (
- this.provider === Providers.VERTEXAI &&
+ provider === Providers.VERTEXAI &&
  model instanceof ChatVertexAI
  ) {
- model.temperature = (this.clientOptions as t.VertexAIClientOptions)
+ model.temperature = (clientOptions as t.VertexAIClientOptions)
  .temperature as number;
- model.topP = (this.clientOptions as t.VertexAIClientOptions)
- .topP as number;
- model.topK = (this.clientOptions as t.VertexAIClientOptions)
- .topK as number;
- model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions)
+ model.topP = (clientOptions as t.VertexAIClientOptions).topP as number;
+ model.topK = (clientOptions as t.VertexAIClientOptions).topK as number;
+ model.topLogprobs = (clientOptions as t.VertexAIClientOptions)
  .topLogprobs as number;
- model.frequencyPenalty = (this.clientOptions as t.VertexAIClientOptions)
+ model.frequencyPenalty = (clientOptions as t.VertexAIClientOptions)
  .frequencyPenalty as number;
- model.presencePenalty = (this.clientOptions as t.VertexAIClientOptions)
+ model.presencePenalty = (clientOptions as t.VertexAIClientOptions)
  .presencePenalty as number;
- model.maxOutputTokens = (this.clientOptions as t.VertexAIClientOptions)
+ model.maxOutputTokens = (clientOptions as t.VertexAIClientOptions)
  .maxOutputTokens as number;
  }
 
- if (!this.tools || this.tools.length === 0) {
+ if (!tools || tools.length === 0) {
  return model as unknown as Runnable;
  }
 
- return (model as t.ModelWithTools).bindTools(this.tools);
+ return (model as t.ModelWithTools).bindTools(tools);
  }
+
  overrideTestModel(
  responses: string[],
  sleep?: number,
  toolCalls?: ToolCall[]
  ): void {
- this.boundModel = createFakeStreamingLLM({
+ this.overrideModel = createFakeStreamingLLM({
  responses,
  sleep,
  toolCalls,
@@ -418,42 +452,84 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  }
 
  getNewModel({
- clientOptions = {},
- omitOriginalOptions,
+ provider,
+ clientOptions,
  }: {
+ provider: Providers;
  clientOptions?: t.ClientOptions;
- omitOriginalOptions?: Set<string>;
  }): t.ChatModelInstance {
- const ChatModelClass = getChatModelClass(this.provider);
- const _options = omitOriginalOptions
- ? Object.fromEntries(
- Object.entries(this.clientOptions).filter(
- ([key]) => !omitOriginalOptions.has(key)
- )
- )
- : this.clientOptions;
- const options = Object.assign(_options, clientOptions);
- return new ChatModelClass(options);
+ const ChatModelClass = getChatModelClass(provider);
+ return new ChatModelClass(clientOptions ?? {});
  }
 
- storeUsageMetadata(finalMessage?: BaseMessage): void {
+ getUsageMetadata(
+ finalMessage?: BaseMessage
+ ): Partial<UsageMetadata> | undefined {
  if (
  finalMessage &&
  'usage_metadata' in finalMessage &&
  finalMessage.usage_metadata != null
  ) {
- this.currentUsage = finalMessage.usage_metadata as Partial<UsageMetadata>;
+ return finalMessage.usage_metadata as Partial<UsageMetadata>;
  }
  }
 
- cleanupSignalListener(): void {
+ /** Execute model invocation with streaming support */
+ private async attemptInvoke(
+ {
+ currentModel,
+ finalMessages,
+ provider,
+ tools,
+ }: {
+ currentModel?: t.ChatModel;
+ finalMessages: BaseMessage[];
+ provider: Providers;
+ tools?: t.GraphTools;
+ },
+ config?: RunnableConfig
+ ): Promise<Partial<t.BaseGraphState>> {
+ const model = this.overrideModel ?? currentModel;
+ if (!model) {
+ throw new Error('No model found');
+ }
+
+ if ((tools?.length ?? 0) > 0 && manualToolStreamProviders.has(provider)) {
+ if (!model.stream) {
+ throw new Error('Model does not support stream');
+ }
+ const stream = await model.stream(finalMessages, config);
+ let finalChunk: AIMessageChunk | undefined;
+ for await (const chunk of stream) {
+ await safeDispatchCustomEvent(
+ GraphEvents.CHAT_MODEL_STREAM,
+ { chunk, emitted: true },
+ config
+ );
+ finalChunk = finalChunk ? concat(finalChunk, chunk) : chunk;
+ }
+ finalChunk = modifyDeltaProperties(provider, finalChunk);
+ return { messages: [finalChunk as AIMessageChunk] };
+ } else {
+ const finalMessage = await model.invoke(finalMessages, config);
+ if ((finalMessage.tool_calls?.length ?? 0) > 0) {
+ finalMessage.tool_calls = finalMessage.tool_calls?.filter(
+ (tool_call: ToolCall) => !!tool_call.name
+ );
+ }
+ return { messages: [finalMessage] };
+ }
+ }
+
+ cleanupSignalListener(currentModel?: t.ChatModel): void {
  if (!this.signal) {
  return;
  }
- if (!this.boundModel) {
+ const model = this.overrideModel ?? currentModel;
+ if (!model) {
  return;
  }
- const client = (this.boundModel as ChatOpenAI | undefined)?.exposedClient;
+ const client = (model as ChatOpenAI | undefined)?.exposedClient;
  if (!client?.abortHandler) {
  return;
  }
@@ -461,18 +537,30 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  client.abortHandler = undefined;
  }
 
- createCallModel() {
+ createCallModel(agentId = 'default', currentModel?: t.ChatModel) {
  return async (
  state: t.BaseGraphState,
  config?: RunnableConfig
  ): Promise<Partial<t.BaseGraphState>> => {
- const { provider = '' } =
- (config?.configurable as t.GraphConfig | undefined) ?? {};
- if (this.boundModel == null) {
+ /**
+ * Get agent context - it must exist by this point
+ */
+ const agentContext = this.agentContexts.get(agentId);
+ if (!agentContext) {
+ throw new Error(`Agent context not found for agentId: ${agentId}`);
+ }
+
+ const model = this.overrideModel ?? currentModel;
+ if (!model) {
  throw new Error('No Graph model found');
  }
- if (!config || !provider) {
- throw new Error(`No ${config ? 'provider' : 'config'} provided`);
+ if (!config) {
+ throw new Error('No config provided');
+ }
+
+ // Ensure token calculations are complete before proceeding
+ if (agentContext.tokenCalculationPromise) {
+ await agentContext.tokenCalculationPromise;
  }
  if (!config.signal) {
  config.signal = this.signal;
@@ -482,35 +570,35 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
  let messagesToUse = messages;
  if (
- !this.pruneMessages &&
- this.tokenCounter &&
- this.maxContextTokens != null &&
- this.indexTokenCountMap[0] != null
+ !agentContext.pruneMessages &&
+ agentContext.tokenCounter &&
+ agentContext.maxContextTokens != null &&
+ agentContext.indexTokenCountMap[0] != null
  ) {
  const isAnthropicWithThinking =
- (this.provider === Providers.ANTHROPIC &&
- (this.clientOptions as t.AnthropicClientOptions).thinking !=
+ (agentContext.provider === Providers.ANTHROPIC &&
+ (agentContext.clientOptions as t.AnthropicClientOptions).thinking !=
  null) ||
- (this.provider === Providers.BEDROCK &&
- (this.clientOptions as t.BedrockAnthropicInput)
+ (agentContext.provider === Providers.BEDROCK &&
+ (agentContext.clientOptions as t.BedrockAnthropicInput)
  .additionalModelRequestFields?.['thinking'] != null);
 
- this.pruneMessages = createPruneMessages({
- provider: this.provider,
- indexTokenCountMap: this.indexTokenCountMap,
- maxTokens: this.maxContextTokens,
- tokenCounter: this.tokenCounter,
+ agentContext.pruneMessages = createPruneMessages({
  startIndex: this.startIndex,
+ provider: agentContext.provider,
+ tokenCounter: agentContext.tokenCounter,
+ maxTokens: agentContext.maxContextTokens,
  thinkingEnabled: isAnthropicWithThinking,
+ indexTokenCountMap: agentContext.indexTokenCountMap,
  });
  }
- if (this.pruneMessages) {
- const { context, indexTokenCountMap } = this.pruneMessages({
+ if (agentContext.pruneMessages) {
+ const { context, indexTokenCountMap } = agentContext.pruneMessages({
  messages,
- usageMetadata: this.currentUsage,
+ usageMetadata: agentContext.currentUsage,
  // startOnMessageType: 'human',
  });
- this.indexTokenCountMap = indexTokenCountMap;
+ agentContext.indexTokenCountMap = indexTokenCountMap;
  messagesToUse = context;
  }
 
@@ -525,7 +613,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  : null;
 
  if (
- provider === Providers.BEDROCK &&
+ agentContext.provider === Providers.BEDROCK &&
  lastMessageX instanceof AIMessageChunk &&
  lastMessageY instanceof ToolMessage &&
  typeof lastMessageX.content === 'string'
@@ -535,92 +623,165 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
  const isLatestToolMessage = lastMessageY instanceof ToolMessage;
 
- if (isLatestToolMessage && provider === Providers.ANTHROPIC) {
+ if (
+ isLatestToolMessage &&
+ agentContext.provider === Providers.ANTHROPIC
+ ) {
  formatAnthropicArtifactContent(finalMessages);
  } else if (
  isLatestToolMessage &&
- (isOpenAILike(provider) || isGoogleLike(provider))
+ (isOpenAILike(agentContext.provider) ||
+ isGoogleLike(agentContext.provider))
  ) {
  formatArtifactPayload(finalMessages);
  }
 
- if (this.lastStreamCall != null && this.streamBuffer != null) {
- const timeSinceLastCall = Date.now() - this.lastStreamCall;
- if (timeSinceLastCall < this.streamBuffer) {
+ if (
+ agentContext.lastStreamCall != null &&
+ agentContext.streamBuffer != null
+ ) {
+ const timeSinceLastCall = Date.now() - agentContext.lastStreamCall;
+ if (timeSinceLastCall < agentContext.streamBuffer) {
  const timeToWait =
- Math.ceil((this.streamBuffer - timeSinceLastCall) / 1000) * 1000;
+ Math.ceil((agentContext.streamBuffer - timeSinceLastCall) / 1000) *
+ 1000;
  await sleep(timeToWait);
  }
  }
 
- this.lastStreamCall = Date.now();
+ agentContext.lastStreamCall = Date.now();
 
- let result: Partial<t.BaseGraphState>;
- if (
- (this.tools?.length ?? 0) > 0 &&
- manualToolStreamProviders.has(provider)
- ) {
- const stream = await this.boundModel.stream(finalMessages, config);
- let finalChunk: AIMessageChunk | undefined;
- for await (const chunk of stream) {
- dispatchCustomEvent(GraphEvents.CHAT_MODEL_STREAM, { chunk }, config);
- if (!finalChunk) {
- finalChunk = chunk;
- } else {
- finalChunk = concat(finalChunk, chunk);
+ let result: Partial<t.BaseGraphState> | undefined;
+ const fallbacks =
+ (agentContext.clientOptions as t.LLMConfig | undefined)?.fallbacks ??
+ [];
+ try {
+ result = await this.attemptInvoke(
+ {
+ currentModel: model,
+ finalMessages,
+ provider: agentContext.provider,
+ tools: agentContext.tools,
+ },
+ config
+ );
+ } catch (primaryError) {
+ let lastError: unknown = primaryError;
+ for (const fb of fallbacks) {
+ try {
+ let model = this.getNewModel({
+ provider: fb.provider,
+ clientOptions: fb.clientOptions,
+ });
+ const bindableTools = agentContext.tools;
+ model = (
+ !bindableTools || bindableTools.length === 0
+ ? model
+ : model.bindTools(bindableTools)
+ ) as t.ChatModelInstance;
+ result = await this.attemptInvoke(
+ {
+ currentModel: model,
+ finalMessages,
+ provider: fb.provider,
+ tools: agentContext.tools,
+ },
+ config
+ );
+ lastError = undefined;
+ break;
+ } catch (e) {
+ lastError = e;
+ continue;
  }
  }
-
- finalChunk = modifyDeltaProperties(this.provider, finalChunk);
- result = { messages: [finalChunk as AIMessageChunk] };
- } else {
- const finalMessage = (await this.boundModel.invoke(
- finalMessages,
- config
- )) as AIMessageChunk;
- if ((finalMessage.tool_calls?.length ?? 0) > 0) {
- finalMessage.tool_calls = finalMessage.tool_calls?.filter(
- (tool_call) => {
- if (!tool_call.name) {
- return false;
- }
- return true;
- }
- );
+ if (lastError !== undefined) {
+ throw lastError;
  }
- result = { messages: [finalMessage] };
  }
 
- this.storeUsageMetadata(result.messages?.[0]);
+ if (!result) {
+ throw new Error('No result after model invocation');
+ }
+ agentContext.currentUsage = this.getUsageMetadata(result.messages?.[0]);
  this.cleanupSignalListener();
  return result;
  };
  }
 
- createWorkflow(): t.CompiledWorkflow<t.BaseGraphState> {
+ createAgentNode(agentId: string): t.CompiledAgentWorfklow {
+ const agentContext = this.agentContexts.get(agentId);
+ if (!agentContext) {
+ throw new Error(`Agent context not found for agentId: ${agentId}`);
+ }
+
+ let currentModel = this.initializeModel({
+ tools: agentContext.tools,
+ provider: agentContext.provider,
+ clientOptions: agentContext.clientOptions,
+ });
+
+ if (agentContext.systemRunnable) {
+ currentModel = agentContext.systemRunnable.pipe(currentModel);
+ }
+
+ const agentNode = `${AGENT}${agentId}` as const;
+ const toolNode = `${TOOLS}${agentId}` as const;
+
  const routeMessage = (
  state: t.BaseGraphState,
  config?: RunnableConfig
  ): string => {
  this.config = config;
- // const lastMessage = state.messages[state.messages.length - 1] as AIMessage;
- // if (!lastMessage?.tool_calls?.length) {
- // return END;
- // }
- // return TOOLS;
- return toolsCondition(state);
+ return toolsCondition(state, toolNode, this.invokedToolIds);
  };
 
- const workflow = new StateGraph<t.BaseGraphState>({
- channels: this.graphState,
- })
- .addNode(AGENT, this.createCallModel())
- .addNode(TOOLS, this.initializeTools())
- .addEdge(START, AGENT)
- .addConditionalEdges(AGENT, routeMessage)
- .addEdge(TOOLS, this.toolEnd ? END : AGENT);
+ const StateAnnotation = Annotation.Root({
+ messages: Annotation<BaseMessage[]>({
+ reducer: messagesStateReducer,
+ default: () => [],
+ }),
+ });
+
+ const workflow = new StateGraph(StateAnnotation)
+ .addNode(agentNode, this.createCallModel(agentId, currentModel))
+ .addNode(
+ toolNode,
+ this.initializeTools({
+ currentTools: agentContext.tools,
+ currentToolMap: agentContext.toolMap,
+ })
+ )
+ .addEdge(START, agentNode)
+ .addConditionalEdges(agentNode, routeMessage)
+ .addEdge(toolNode, agentContext.toolEnd ? END : agentNode);
+
+ // Cast to unknown to avoid tight coupling to external types; options are opt-in
+ return workflow.compile(this.compileOptions as unknown as never);
+ }
 
- return workflow.compile();
+ createWorkflow(): t.CompiledStateWorkflow {
+ /** Use the default (first) agent for now */
+ const agentNode = this.createAgentNode(this.defaultAgentId);
+ const StateAnnotation = Annotation.Root({
+ messages: Annotation<BaseMessage[]>({
+ reducer: (a, b) => {
+ if (!a.length) {
+ this.startIndex = a.length + b.length;
+ }
+ const result = messagesStateReducer(a, b);
+ this.messages = result;
+ return result;
+ },
+ default: () => [],
+ }),
+ });
+ const workflow = new StateGraph(StateAnnotation)
+ .addNode(this.defaultAgentId, agentNode, { ends: [END] })
+ .addEdge(START, this.defaultAgentId)
+ .compile();
+
+ return workflow;
  }
 
  /* Dispatchers */
@@ -628,7 +789,10 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  /**
  * Dispatches a run step to the client, returns the step ID
  */
- dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string {
+ async dispatchRunStep(
+ stepKey: string,
+ stepDetails: t.StepDetails
+ ): Promise<string> {
  if (!this.config) {
  throw new Error('No config provided');
  }
@@ -660,14 +824,19 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
  this.contentData.push(runStep);
  this.contentIndexMap.set(stepId, runStep.index);
- dispatchCustomEvent(GraphEvents.ON_RUN_STEP, runStep, this.config);
+ await safeDispatchCustomEvent(
+ GraphEvents.ON_RUN_STEP,
+ runStep,
+ this.config
+ );
  return stepId;
  }
 
- handleToolCallCompleted(
+ async handleToolCallCompleted(
  data: t.ToolEndData,
- metadata?: Record<string, unknown>
- ): void {
+ metadata?: Record<string, unknown>,
+ omitOutput?: boolean
+ ): Promise<void> {
  if (!this.config) {
  throw new Error('No config provided');
  }
@@ -676,7 +845,11 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  return;
  }
 
- const { input, output } = data;
+ const { input, output: _output } = data;
+ if ((_output as Command | undefined)?.lg_name === 'Command') {
+ return;
+ }
+ const output = _output as ToolMessage;
  const { tool_call_id } = output;
  const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';
  if (!stepId) {
@@ -688,41 +861,45 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  throw new Error(`No run step found for stepId ${stepId}`);
  }
 
+ const dispatchedOutput =
+ typeof output.content === 'string'
+ ? output.content
+ : JSON.stringify(output.content);
+
  const args = typeof input === 'string' ? input : input.input;
  const tool_call = {
  args: typeof args === 'string' ? args : JSON.stringify(args),
  name: output.name ?? '',
  id: output.tool_call_id,
- output:
- typeof output.content === 'string'
- ? output.content
- : JSON.stringify(output.content),
+ output: omitOutput === true ? '' : dispatchedOutput,
  progress: 1,
  };
 
- this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(
- GraphEvents.ON_RUN_STEP_COMPLETED,
- {
- result: {
- id: stepId,
- index: runStep.index,
- type: 'tool_call',
- tool_call,
- } as t.ToolCompleteEvent,
- },
- metadata,
- this
- );
+ await this.handlerRegistry
+ ?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)
+ ?.handle(
+ GraphEvents.ON_RUN_STEP_COMPLETED,
+ {
+ result: {
+ id: stepId,
+ index: runStep.index,
+ type: 'tool_call',
+ tool_call,
+ } as t.ToolCompleteEvent,
+ },
+ metadata,
+ this
+ );
  }
  /**
  * Static version of handleToolCallError to avoid creating strong references
  * that prevent garbage collection
  */
- static handleToolCallErrorStatic(
+ static async handleToolCallErrorStatic(
  graph: StandardGraph,
  data: t.ToolErrorData,
  metadata?: Record<string, unknown>
- ): void {
+ ): Promise<void> {
  if (!graph.config) {
  throw new Error('No config provided');
  }
@@ -752,7 +929,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  progress: 1,
  };
 
- graph.handlerRegistry
+ await graph.handlerRegistry
  ?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)
  ?.handle(
  GraphEvents.ON_RUN_STEP_COMPLETED,
@@ -773,14 +950,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  * Instance method that delegates to the static method
  * Kept for backward compatibility
  */
- handleToolCallError(
+ async handleToolCallError(
  data: t.ToolErrorData,
  metadata?: Record<string, unknown>
- ): void {
- StandardGraph.handleToolCallErrorStatic(this, data, metadata);
+ ): Promise<void> {
+ await StandardGraph.handleToolCallErrorStatic(this, data, metadata);
  }
 
- dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void {
+ async dispatchRunStepDelta(
+ id: string,
+ delta: t.ToolCallDelta
+ ): Promise<void> {
  if (!this.config) {
  throw new Error('No config provided');
  } else if (!id) {
@@ -790,14 +970,14 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  id,
  delta,
  };
- dispatchCustomEvent(
+ await safeDispatchCustomEvent(
  GraphEvents.ON_RUN_STEP_DELTA,
  runStepDelta,
  this.config
  );
  }
 
- dispatchMessageDelta(id: string, delta: t.MessageDelta): void {
+ async dispatchMessageDelta(id: string, delta: t.MessageDelta): Promise<void> {
  if (!this.config) {
  throw new Error('No config provided');
  }
@@ -805,14 +985,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  id,
  delta,
  };
- dispatchCustomEvent(
+ await safeDispatchCustomEvent(
  GraphEvents.ON_MESSAGE_DELTA,
  messageDelta,
  this.config
  );
  }
 
- dispatchReasoningDelta = (stepId: string, delta: t.ReasoningDelta): void => {
+ dispatchReasoningDelta = async (
+ stepId: string,
+ delta: t.ReasoningDelta
+ ): Promise<void> => {
  if (!this.config) {
  throw new Error('No config provided');
  }
@@ -820,7 +1003,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  id: stepId,
  delta,
  };
- dispatchCustomEvent(
+ await safeDispatchCustomEvent(
  GraphEvents.ON_REASONING_DELTA,
  reasoningDelta,
  this.config