illuma-agents 1.0.7 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,17 +1,24 @@
- import { isCommand, isGraphInterrupt, END } from '@langchain/langgraph';
- import { isBaseMessage, ToolMessage } from '@langchain/core/messages';
+ import { isBaseMessage, ToolMessage, isAIMessage } from '@langchain/core/messages';
+ import { isCommand, isGraphInterrupt, Command, END, Send } from '@langchain/langgraph';
  import '../common/enum.mjs';
  import 'nanoid';
  import '../messages/core.mjs';
  import { RunnableCallable } from '../utils/run.mjs';
  import 'js-tiktoken/lite';

+ /**
+ * Helper to check if a value is a Send object
+ */
+ function isSend(value) {
+ return value instanceof Send;
+ }
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  class ToolNode extends RunnableCallable {
  tools;
  toolMap;
  loadRuntimeTools;
  handleToolErrors = true;
+ trace = false;
  toolCallStepIds;
  errorHandler;
  toolUsageCount;
@@ -32,74 +39,166 @@ class ToolNode extends RunnableCallable {
  getToolUsageCounts() {
  return new Map(this.toolUsageCount); // Return a copy
  }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async run(input, config) {
- const message = Array.isArray(input)
- ? input[input.length - 1]
- : input.messages[input.messages.length - 1];
- if (message._getType() !== 'ai') {
- throw new Error('ToolNode only accepts AIMessages as input.');
- }
- if (this.loadRuntimeTools) {
- const { tools, toolMap } = this.loadRuntimeTools(message.tool_calls ?? []);
- this.tools = tools;
- this.toolMap = toolMap ?? new Map(tools.map((tool) => [tool.name, tool]));
+ /**
+ * Runs a single tool call with error handling
+ */
+ async runTool(call, config) {
+ const tool = this.toolMap.get(call.name);
+ try {
+ if (tool === undefined) {
+ throw new Error(`Tool "${call.name}" not found.`);
+ }
+ const turn = this.toolUsageCount.get(call.name) ?? 0;
+ this.toolUsageCount.set(call.name, turn + 1);
+ const args = call.args;
+ const stepId = this.toolCallStepIds?.get(call.id);
+ const output = await tool.invoke({ ...call, args, type: 'tool_call', stepId, turn }, config);
+ if ((isBaseMessage(output) && output._getType() === 'tool') ||
+ isCommand(output)) {
+ return output;
+ }
+ else {
+ return new ToolMessage({
+ status: 'success',
+ name: tool.name,
+ content: typeof output === 'string' ? output : JSON.stringify(output),
+ tool_call_id: call.id,
+ });
+ }
  }
- const outputs = await Promise.all(message.tool_calls?.map(async (call) => {
- const tool = this.toolMap.get(call.name);
- try {
- if (tool === undefined) {
- throw new Error(`Tool "${call.name}" not found.`);
- }
- const turn = this.toolUsageCount.get(call.name) ?? 0;
- this.toolUsageCount.set(call.name, turn + 1);
- const args = call.args;
- const stepId = this.toolCallStepIds?.get(call.id);
- const output = await tool.invoke({ ...call, args, type: 'tool_call', stepId, turn }, config);
- if ((isBaseMessage(output) && output._getType() === 'tool') ||
- isCommand(output)) {
- return output;
+ catch (_e) {
+ const e = _e;
+ if (!this.handleToolErrors) {
+ throw e;
+ }
+ if (isGraphInterrupt(e)) {
+ throw e;
+ }
+ if (this.errorHandler) {
+ try {
+ await this.errorHandler({
+ error: e,
+ id: call.id,
+ name: call.name,
+ input: call.args,
+ }, config.metadata);
  }
- else {
- return new ToolMessage({
- name: tool.name,
- content: typeof output === 'string' ? output : JSON.stringify(output),
- tool_call_id: call.id,
+ catch (handlerError) {
+ // eslint-disable-next-line no-console
+ console.error('Error in errorHandler:', {
+ toolName: call.name,
+ toolCallId: call.id,
+ toolArgs: call.args,
+ stepId: this.toolCallStepIds?.get(call.id),
+ turn: this.toolUsageCount.get(call.name),
+ originalError: {
+ message: e.message,
+ stack: e.stack ?? undefined,
+ },
+ handlerError: handlerError instanceof Error
+ ? {
+ message: handlerError.message,
+ stack: handlerError.stack ?? undefined,
+ }
+ : {
+ message: String(handlerError),
+ stack: undefined,
+ },
  });
  }
  }
- catch (_e) {
- const e = _e;
- if (!this.handleToolErrors) {
- throw e;
- }
- if (isGraphInterrupt(e)) {
- throw e;
+ return new ToolMessage({
+ status: 'error',
+ content: `Error: ${e.message}\n Please fix your mistakes.`,
+ name: call.name,
+ tool_call_id: call.id ?? '',
+ });
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ async run(input, config) {
+ let outputs;
+ if (this.isSendInput(input)) {
+ outputs = [await this.runTool(input.lg_tool_call, config)];
+ }
+ else {
+ let messages;
+ if (Array.isArray(input)) {
+ messages = input;
+ }
+ else if (this.isMessagesState(input)) {
+ messages = input.messages;
+ }
+ else {
+ throw new Error('ToolNode only accepts BaseMessage[] or { messages: BaseMessage[] } as input.');
+ }
+ const toolMessageIds = new Set(messages
+ .filter((msg) => msg._getType() === 'tool')
+ .map((msg) => msg.tool_call_id));
+ let aiMessage;
+ for (let i = messages.length - 1; i >= 0; i--) {
+ const message = messages[i];
+ if (isAIMessage(message)) {
+ aiMessage = message;
+ break;
  }
- this.errorHandler?.({
- error: e,
- id: call.id,
- name: call.name,
- input: call.args,
- }, config.metadata);
- return new ToolMessage({
- content: `Error: ${e.message}\n Please fix your mistakes.`,
- name: call.name,
- tool_call_id: call.id ?? '',
- });
  }
- }) ?? []);
+ if (aiMessage == null || !isAIMessage(aiMessage)) {
+ throw new Error('ToolNode only accepts AIMessages as input.');
+ }
+ if (this.loadRuntimeTools) {
+ const { tools, toolMap } = this.loadRuntimeTools(aiMessage.tool_calls ?? []);
+ this.tools = tools;
+ this.toolMap =
+ toolMap ?? new Map(tools.map((tool) => [tool.name, tool]));
+ }
+ outputs = await Promise.all(aiMessage.tool_calls
+ ?.filter((call) => call.id == null || !toolMessageIds.has(call.id))
+ .map((call) => this.runTool(call, config)) ?? []);
+ }
  if (!outputs.some(isCommand)) {
  return (Array.isArray(input) ? outputs : { messages: outputs });
  }
- const combinedOutputs = outputs.map((output) => {
+ const combinedOutputs = [];
+ let parentCommand = null;
+ for (const output of outputs) {
  if (isCommand(output)) {
- return output;
+ if (output.graph === Command.PARENT &&
+ Array.isArray(output.goto) &&
+ output.goto.every((send) => isSend(send))) {
+ if (parentCommand) {
+ parentCommand.goto.push(...output.goto);
+ }
+ else {
+ parentCommand = new Command({
+ graph: Command.PARENT,
+ goto: output.goto,
+ });
+ }
+ }
+ else {
+ combinedOutputs.push(output);
+ }
+ }
+ else {
+ combinedOutputs.push(Array.isArray(input) ? [output] : { messages: [output] });
  }
- return Array.isArray(input) ? [output] : { messages: [output] };
- });
+ }
+ if (parentCommand) {
+ combinedOutputs.push(parentCommand);
+ }
  return combinedOutputs;
  }
+ isSendInput(input) {
+ return (typeof input === 'object' && input != null && 'lg_tool_call' in input);
+ }
+ isMessagesState(input) {
+ return (typeof input === 'object' &&
+ input != null &&
+ 'messages' in input &&
+ Array.isArray(input.messages) &&
+ input.messages.every(isBaseMessage));
+ }
  }
  function areToolCallsInvoked(message, invokedToolIds) {
  if (!invokedToolIds || invokedToolIds.size === 0)
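Note on the rewritten run() above: tool outputs that are Commands targeting the parent graph (graph: Command.PARENT with goto made up entirely of Send objects) are now folded into a single parent-bound Command instead of being returned individually. A minimal sketch of a tool that emits such a Command, using the public Command/Send APIs from @langchain/langgraph; the "worker" node name and the task payload are illustrative assumptions, not part of this package:

import { z } from 'zod';
import { tool } from '@langchain/core/tools';
import { Command, Send } from '@langchain/langgraph';

// Hypothetical fan-out tool: each task becomes a Send to a "worker" node in the
// parent graph. When several such tool calls run in parallel, the ToolNode above
// merges their goto entries into one Command({ graph: Command.PARENT, ... }).
const delegateTool = tool(
  async ({ tasks }: { tasks: string[] }) =>
    new Command({
      graph: Command.PARENT,
      goto: tasks.map((task) => new Send('worker', { task })),
    }),
  {
    name: 'delegate',
    description: 'Fan tasks out to worker nodes in the parent graph',
    schema: z.object({ tasks: z.array(z.string()) }),
  }
);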
Source map for ToolNode.mjs regenerated to match the rebuilt module (inline source-map content omitted).
@@ -7,8 +7,18 @@ import type { GeminiGenerationConfig } from '@langchain/google-common';
  import type { GoogleClientOptions } from '@/types';
  export declare class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];
+ /**
+ * Override to add gemini-3 model support for multimodal and function calling thought signatures
+ */
+ get _isMultimodalModel(): boolean;
  constructor(fields: GoogleClientOptions);
  static lc_name(): 'IllumaGoogleGenerativeAI';
+ /**
+ * Helper function to convert Gemini API usage metadata to LangChain format
+ * Includes support for cached tokens and tier-based tracking for gemini-3-pro-preview
+ */
+ private _convertToUsageMetadata;
  invocationParams(options?: this['ParsedCallOptions']): Omit<GenerateContentRequest, 'contents'>;
+ _generate(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): Promise<import('@langchain/core/outputs').ChatResult>;
  _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
  }
@@ -1,6 +1,10 @@
  import { CodeExecutionTool, FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool, GoogleSearchRetrievalTool } from '@google/generative-ai';
  import { BindToolsInput } from '@langchain/core/language_models/chat_models';
- export type GoogleGenerativeAIToolType = BindToolsInput | GoogleGenerativeAIFunctionDeclarationsTool | CodeExecutionTool | GoogleSearchRetrievalTool;
+ /** New GoogleSearch tool for Gemini 2.0+ models */
+ export interface GoogleSearchTool {
+ googleSearch: Record<string, never>;
+ }
+ export type GoogleGenerativeAIToolType = BindToolsInput | GoogleGenerativeAIFunctionDeclarationsTool | CodeExecutionTool | GoogleSearchRetrievalTool | GoogleSearchTool;
  /** Enum for content modality types */
  declare enum Modality {
  MODALITY_UNSPECIFIED = "MODALITY_UNSPECIFIED",
@@ -15,6 +19,12 @@ interface ModalityTokenCount {
  modality: Modality;
  tokenCount: number;
  }
+ /** Interface for input token details with cache and tier tracking */
+ export interface InputTokenDetails {
+ cache_read?: number;
+ over_200k?: number;
+ cache_read_over_200k?: number;
+ }
  /** Main interface for Gemini API usage metadata */
  export interface GeminiApiUsageMetadata {
  promptTokenCount?: number;
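The new GoogleSearchTool variant added above is a plain object with an empty googleSearch config, matching the built-in search tool shape for Gemini 2.0+ models. A hedged sketch of a value that satisfies the widened GoogleGenerativeAIToolType union (whether the type is re-exported from the package root is not shown in this diff):

// Built-in Google Search tool for Gemini 2.0+ models: an empty `googleSearch` config.
const googleSearchTool = { googleSearch: {} };

// Passed wherever GoogleGenerativeAIToolType[] is accepted, e.g. alongside
// function-declaration tools when binding tools to the chat model.
const tools = [googleSearchTool];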
@@ -1,7 +1,16 @@
  import { POSSIBLE_ROLES, type Part, type Content, type EnhancedGenerateContentResponse, type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool } from '@google/generative-ai';
  import { BaseMessage, UsageMetadata } from '@langchain/core/messages';
  import { ChatGenerationChunk } from '@langchain/core/outputs';
+ import type { ChatResult } from '@langchain/core/outputs';
  import { GoogleGenerativeAIToolType } from '../types';
+ export declare const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = "__gemini_function_call_thought_signatures__";
+ /**
+ * Executes a function immediately and returns its result.
+ * Functional utility similar to an Immediately Invoked Function Expression (IIFE).
+ * @param fn The function to execute.
+ * @returns The result of invoking fn.
+ */
+ export declare const iife: <T>(fn: () => T) => T;
  export declare function getMessageAuthor(message: BaseMessage): string;
  /**
  * Maps a message type to a Google Generative AI chat author.
@@ -10,10 +19,16 @@ export declare function getMessageAuthor(message: BaseMessage): string;
  * @returns The message type mapped to a Google Generative AI chat author.
  */
  export declare function convertAuthorToRole(author: string): (typeof POSSIBLE_ROLES)[number];
- export declare function convertMessageContentToParts(message: BaseMessage, isMultimodalModel: boolean, previousMessages: BaseMessage[]): Part[];
- export declare function convertBaseMessagesToContent(messages: BaseMessage[], isMultimodalModel: boolean, convertSystemMessageToHumanContent?: boolean): Content[] | undefined;
+ export declare function convertMessageContentToParts(message: BaseMessage, isMultimodalModel: boolean, previousMessages: BaseMessage[], model?: string): Part[];
+ export declare function convertBaseMessagesToContent(messages: BaseMessage[], isMultimodalModel: boolean, convertSystemMessageToHumanContent?: boolean, model?: string): Content[] | undefined;
  export declare function convertResponseContentToChatGenerationChunk(response: EnhancedGenerateContentResponse, extra: {
  usageMetadata?: UsageMetadata | undefined;
  index: number;
  }): ChatGenerationChunk | null;
+ /**
+ * Maps a Google GenerateContentResult to a LangChain ChatResult
+ */
+ export declare function mapGenerateContentResultToChatResult(response: EnhancedGenerateContentResponse, extra?: {
+ usageMetadata: UsageMetadata | undefined;
+ }): ChatResult;
  export declare function convertToGenerativeAITools(tools: GoogleGenerativeAIToolType[]): GoogleGenerativeAIFunctionDeclarationsTool[];
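The iife helper declared above simply invokes its callback and returns the result, which lets multi-statement initialization sit in expression position. A minimal stand-in matching the declared signature; the package's actual implementation in utils/common is not shown in this diff:

// Stand-in with the same shape as the declared export.
const iife = <T>(fn: () => T): T => fn();

// Example: derive a value with intermediate statements, inline.
const label = iife(() => {
  const parts = ['gemini', '3', 'pro'];
  return parts.join('-');
});
// label === 'gemini-3-pro'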
@@ -1,4 +1,5 @@
- import { END, MessagesAnnotation } from '@langchain/langgraph';
+ import { ToolCall } from '@langchain/core/messages/tool';
+ import { END, Command, MessagesAnnotation } from '@langchain/langgraph';
  import type { RunnableConfig } from '@langchain/core/runnables';
  import type { BaseMessage } from '@langchain/core/messages';
  import type * as t from '@/types';
@@ -8,6 +9,7 @@ export declare class ToolNode<T = any> extends RunnableCallable<T, T> {
  private toolMap;
  private loadRuntimeTools?;
  handleToolErrors: boolean;
+ trace: boolean;
  toolCallStepIds?: Map<string, string>;
  errorHandler?: t.ToolNodeConstructorParams['errorHandler'];
  private toolUsageCount;
@@ -17,6 +19,12 @@ export declare class ToolNode<T = any> extends RunnableCallable<T, T> {
  * @returns A ReadonlyMap where keys are tool names and values are their usage counts.
  */
  getToolUsageCounts(): ReadonlyMap<string, number>;
+ /**
+ * Runs a single tool call with error handling
+ */
+ protected runTool(call: ToolCall, config: RunnableConfig): Promise<BaseMessage | Command>;
  protected run(input: any, config: RunnableConfig): Promise<T>;
+ private isSendInput;
+ private isMessagesState;
  }
  export declare function toolsCondition<T extends string>(state: BaseMessage[] | typeof MessagesAnnotation.State, toolNode: T, invokedToolIds?: Set<string>): T | typeof END;
@@ -24,7 +24,7 @@ export type ToolNodeOptions = {
  handleToolErrors?: boolean;
  loadRuntimeTools?: ToolRefGenerator;
  toolCallStepIds?: Map<string, string>;
- errorHandler?: (data: ToolErrorData, metadata?: Record<string, unknown>) => void;
+ errorHandler?: (data: ToolErrorData, metadata?: Record<string, unknown>) => Promise<void>;
  };
  export type ToolNodeConstructorParams = ToolRefs & ToolNodeOptions;
  export type ToolEndEvent = {
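errorHandler is now awaited inside ToolNode.runTool and typed to return Promise<void>; if the handler itself throws, the failure is logged and the tool error still becomes an error ToolMessage. A sketch of a conforming handler; the data fields here mirror the runTool call site ({ error, id, name, input }), since the real ToolErrorData definition is not shown in this diff:

// Async error handler matching the updated ToolNodeOptions signature.
const errorHandler = async (
  data: { error: Error; id: string; name: string; input: unknown },
  metadata?: Record<string, unknown>
): Promise<void> => {
  // Hypothetical async sink (telemetry, audit log, etc.).
  await Promise.resolve();
  console.warn(`tool "${data.name}" failed: ${data.error.message}`, metadata);
};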
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "illuma-agents",
- "version": "1.0.7",
+ "version": "1.0.8",
  "main": "./dist/cjs/main.cjs",
  "module": "./dist/esm/main.mjs",
  "types": "./dist/types/index.d.ts",
Binary file
@@ -11,15 +11,30 @@ import type {
  import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
  import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';
  import type { GeminiGenerationConfig } from '@langchain/google-common';
- import type { GeminiApiUsageMetadata } from './types';
+ import type { GeminiApiUsageMetadata, InputTokenDetails } from './types';
  import type { GoogleClientOptions } from '@/types';
  import {
  convertResponseContentToChatGenerationChunk,
  convertBaseMessagesToContent,
+ mapGenerateContentResultToChatResult,
  } from './utils/common';

  export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];
+
+ /**
+ * Override to add gemini-3 model support for multimodal and function calling thought signatures
+ */
+ get _isMultimodalModel(): boolean {
+ return (
+ this.model.startsWith('gemini-1.5') ||
+ this.model.startsWith('gemini-2') ||
+ (this.model.startsWith('gemma-3-') &&
+ !this.model.startsWith('gemma-3-1b')) ||
+ this.model.startsWith('gemini-3')
+ );
+ }
+
  constructor(fields: GoogleClientOptions) {
  super(fields);
@@ -111,6 +126,59 @@ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  return 'IllumaGoogleGenerativeAI';
  }

+ /**
+ * Helper function to convert Gemini API usage metadata to LangChain format
+ * Includes support for cached tokens and tier-based tracking for gemini-3-pro-preview
+ */
+ private _convertToUsageMetadata(
+ usageMetadata: GeminiApiUsageMetadata | undefined,
+ model: string
+ ): UsageMetadata | undefined {
+ if (!usageMetadata) {
+ return undefined;
+ }
+
+ const output: UsageMetadata = {
+ input_tokens: usageMetadata.promptTokenCount ?? 0,
+ output_tokens:
+ (usageMetadata.candidatesTokenCount ?? 0) +
+ (usageMetadata.thoughtsTokenCount ?? 0),
+ total_tokens: usageMetadata.totalTokenCount ?? 0,
+ };
+
+ if (usageMetadata.cachedContentTokenCount) {
+ output.input_token_details ??= {};
+ output.input_token_details.cache_read =
+ usageMetadata.cachedContentTokenCount;
+ }
+
+ // gemini-3-pro-preview has bracket based tracking of tokens per request
+ if (model === 'gemini-3-pro-preview') {
+ const over200k = Math.max(
+ 0,
+ (usageMetadata.promptTokenCount ?? 0) - 200000
+ );
+ const cachedOver200k = Math.max(
+ 0,
+ (usageMetadata.cachedContentTokenCount ?? 0) - 200000
+ );
+ if (over200k) {
+ output.input_token_details = {
+ ...output.input_token_details,
+ over_200k: over200k,
+ } as InputTokenDetails;
+ }
+ if (cachedOver200k) {
+ output.input_token_details = {
+ ...output.input_token_details,
+ cache_read_over_200k: cachedOver200k,
+ } as InputTokenDetails;
+ }
+ }
+
+ return output;
+ }
+
  invocationParams(
  options?: this['ParsedCallOptions']
  ): Omit<GenerateContentRequest, 'contents'> {
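A small worked example of the >200k bracket split that _convertToUsageMetadata performs for gemini-3-pro-preview; the token counts are illustrative only, not taken from a real response:

// Illustrative Gemini usage block for a large request.
const usageMetadata = {
  promptTokenCount: 250_000,
  cachedContentTokenCount: 210_000,
  candidatesTokenCount: 1_200,
  thoughtsTokenCount: 300,
  totalTokenCount: 251_500,
};

// Mirrors the arithmetic above for model === 'gemini-3-pro-preview'.
const over200k = Math.max(0, usageMetadata.promptTokenCount - 200_000);               // 50_000
const cachedOver200k = Math.max(0, usageMetadata.cachedContentTokenCount - 200_000);  // 10_000

// Resulting LangChain usage metadata:
// input_tokens: 250_000, output_tokens: 1_500, total_tokens: 251_500,
// input_token_details: { cache_read: 210_000, over_200k: 50_000, cache_read_over_200k: 10_000 }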
@@ -127,6 +195,60 @@ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  return params;
  }

+ async _generate(
+ messages: BaseMessage[],
+ options: this['ParsedCallOptions'],
+ runManager?: CallbackManagerForLLMRun
+ ): Promise<import('@langchain/core/outputs').ChatResult> {
+ const prompt = convertBaseMessagesToContent(
+ messages,
+ this._isMultimodalModel,
+ this.useSystemInstruction,
+ this.model
+ );
+ let actualPrompt = prompt;
+ if (prompt?.[0].role === 'system') {
+ const [systemInstruction] = prompt;
+ /** @ts-ignore */
+ this.client.systemInstruction = systemInstruction;
+ actualPrompt = prompt.slice(1);
+ }
+ const parameters = this.invocationParams(options);
+ const request = {
+ ...parameters,
+ contents: actualPrompt,
+ };
+
+ const res = await this.caller.callWithOptions(
+ { signal: options.signal },
+ async () =>
+ /** @ts-ignore */
+ this.client.generateContent(request)
+ );
+
+ const response = res.response;
+ const usageMetadata = this._convertToUsageMetadata(
+ /** @ts-ignore */
+ response.usageMetadata,
+ this.model
+ );
+
+ /** @ts-ignore */
+ const generationResult = mapGenerateContentResultToChatResult(response, {
+ usageMetadata,
+ });
+
+ await runManager?.handleLLMNewToken(
+ generationResult.generations[0].text || '',
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined
+ );
+ return generationResult;
+ }
+
  async *_streamResponseChunks(
  messages: BaseMessage[],
  options: this['ParsedCallOptions'],
@@ -135,7 +257,8 @@ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  const prompt = convertBaseMessagesToContent(
  messages,
  this._isMultimodalModel,
- this.useSystemInstruction
+ this.useSystemInstruction,
+ this.model
  );
  let actualPrompt = prompt;
  if (prompt?.[0].role === 'system') {
@@ -166,18 +289,10 @@ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
  this.streamUsage !== false &&
  options.streamUsage !== false
  ) {
- const genAIUsageMetadata = response.usageMetadata as
- | GeminiApiUsageMetadata
- | undefined;
-
- const output_tokens =
- (genAIUsageMetadata?.candidatesTokenCount ?? 0) +
- (genAIUsageMetadata?.thoughtsTokenCount ?? 0);
- lastUsageMetadata = {
- input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,
- output_tokens,
- total_tokens: genAIUsageMetadata?.totalTokenCount ?? 0,
- };
+ lastUsageMetadata = this._convertToUsageMetadata(
+ response.usageMetadata as GeminiApiUsageMetadata | undefined,
+ this.model
+ );
  }

  const chunk = convertResponseContentToChatGenerationChunk(response, {