@librechat/agents 1.8.8 → 1.8.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/dist/cjs/common/enum.cjs +1 -0
  2. package/dist/cjs/common/enum.cjs.map +1 -1
  3. package/dist/cjs/events.cjs +8 -1
  4. package/dist/cjs/events.cjs.map +1 -1
  5. package/dist/cjs/llm/anthropic/llm.cjs +117 -0
  6. package/dist/cjs/llm/anthropic/llm.cjs.map +1 -0
  7. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +277 -0
  8. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -0
  9. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +135 -0
  10. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -0
  11. package/dist/cjs/llm/providers.cjs +5 -4
  12. package/dist/cjs/llm/providers.cjs.map +1 -1
  13. package/dist/cjs/llm/text.cjs +58 -0
  14. package/dist/cjs/llm/text.cjs.map +1 -0
  15. package/dist/cjs/main.cjs +3 -0
  16. package/dist/cjs/main.cjs.map +1 -1
  17. package/dist/cjs/messages.cjs +4 -4
  18. package/dist/cjs/messages.cjs.map +1 -1
  19. package/dist/cjs/stream.cjs +63 -48
  20. package/dist/cjs/stream.cjs.map +1 -1
  21. package/dist/cjs/tools/ToolNode.cjs +20 -5
  22. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  23. package/dist/cjs/utils/misc.cjs +49 -0
  24. package/dist/cjs/utils/misc.cjs.map +1 -0
  25. package/dist/esm/common/enum.mjs +1 -0
  26. package/dist/esm/common/enum.mjs.map +1 -1
  27. package/dist/esm/events.mjs +8 -1
  28. package/dist/esm/events.mjs.map +1 -1
  29. package/dist/esm/llm/anthropic/llm.mjs +115 -0
  30. package/dist/esm/llm/anthropic/llm.mjs.map +1 -0
  31. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +274 -0
  32. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -0
  33. package/dist/esm/llm/anthropic/utils/message_outputs.mjs +133 -0
  34. package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -0
  35. package/dist/esm/llm/providers.mjs +5 -4
  36. package/dist/esm/llm/providers.mjs.map +1 -1
  37. package/dist/esm/llm/text.mjs +56 -0
  38. package/dist/esm/llm/text.mjs.map +1 -0
  39. package/dist/esm/main.mjs +2 -1
  40. package/dist/esm/main.mjs.map +1 -1
  41. package/dist/esm/messages.mjs +4 -4
  42. package/dist/esm/messages.mjs.map +1 -1
  43. package/dist/esm/stream.mjs +63 -49
  44. package/dist/esm/stream.mjs.map +1 -1
  45. package/dist/esm/tools/ToolNode.mjs +22 -7
  46. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  47. package/dist/esm/utils/misc.mjs +47 -0
  48. package/dist/esm/utils/misc.mjs.map +1 -0
  49. package/dist/types/common/enum.d.ts +2 -1
  50. package/dist/types/llm/anthropic/types.d.ts +4 -0
  51. package/dist/types/llm/anthropic/utils/message_inputs.d.ts +1 -1
  52. package/dist/types/llm/text.d.ts +6 -6
  53. package/dist/types/stream.d.ts +2 -0
  54. package/dist/types/types/llm.d.ts +6 -1
  55. package/dist/types/utils/index.d.ts +1 -0
  56. package/dist/types/utils/misc.d.ts +6 -0
  57. package/package.json +7 -6
  58. package/src/common/enum.ts +1 -0
  59. package/src/events.ts +9 -1
  60. package/src/llm/anthropic/llm.ts +1 -1
  61. package/src/llm/anthropic/types.ts +7 -1
  62. package/src/llm/anthropic/utils/message_inputs.ts +86 -8
  63. package/src/llm/providers.ts +6 -4
  64. package/src/llm/text.ts +30 -45
  65. package/src/messages.ts +4 -4
  66. package/src/scripts/args.ts +1 -1
  67. package/src/scripts/code_exec.ts +4 -0
  68. package/src/scripts/simple.ts +4 -0
  69. package/src/stream.ts +68 -50
  70. package/src/tools/ToolNode.ts +25 -9
  71. package/src/types/llm.ts +6 -1
  72. package/src/utils/index.ts +1 -0
  73. package/src/utils/llmConfig.ts +6 -0
  74. package/src/utils/misc.ts +45 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "1.8.8",
+  "version": "1.8.9",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -42,10 +42,10 @@
     "start:cli": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
     "content": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/content.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "stream": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/stream.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
-    "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
+    "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
     "image": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/image.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
-    "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
+    "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
     "tool-test": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/tools.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "abort": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/abort.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
@@ -73,9 +73,10 @@
     "@langchain/anthropic": "^0.3.11",
     "@langchain/aws": "^0.1.2",
     "@langchain/community": "^0.3.14",
-    "@langchain/core": "^0.3.18",
-    "@langchain/google-vertexai": "^0.1.2",
-    "@langchain/langgraph": "^0.2.19",
+    "@langchain/core": "^0.3.26",
+    "@langchain/google-genai": "^0.1.6",
+    "@langchain/google-vertexai": "^0.1.5",
+    "@langchain/langgraph": "^0.2.34",
     "@langchain/mistralai": "^0.0.26",
     "@langchain/ollama": "^0.1.1",
     "@langchain/openai": "^0.3.14",
package/src/common/enum.ts CHANGED
@@ -74,6 +74,7 @@ export enum Providers {
   ANTHROPIC = 'anthropic',
   MISTRALAI = 'mistralai',
   OLLAMA = 'ollama',
+  GOOGLE = 'google',
 }
 
 export enum GraphNodeKeys {
package/src/events.ts CHANGED
@@ -2,6 +2,8 @@
 // src/events.ts
 import type { Graph } from '@/graphs';
 import type * as t from '@/types';
+import { handleToolCalls } from '@/stream';
+import { Providers } from '@/common';
 
 export class HandlerRegistry {
   private handlers: Map<string, t.EventHandler> = new Map();
@@ -28,6 +30,12 @@ export class ModelEndHandler implements t.EventHandler {
     console.dir({
       usage,
     }, { depth: null });
+
+    if (metadata.provider !== Providers.GOOGLE) {
+      return;
+    }
+
+    handleToolCalls(data?.output?.tool_calls, metadata, graph);
   }
 }
 
@@ -58,7 +66,7 @@ export class TestLLMStreamHandler implements t.EventHandler {
   handle(event: string, data: t.StreamEventData | undefined): void {
     const chunk = data?.chunk;
     const isMessageChunk = !!(chunk && 'message' in chunk);
-    const msg = isMessageChunk && chunk.message;
+    const msg = isMessageChunk ? chunk.message : undefined;
     if (msg && msg.tool_call_chunks && msg.tool_call_chunks.length > 0) {
       console.log(msg.tool_call_chunks);
     } else if (msg && msg.content) {
package/src/llm/anthropic/llm.ts CHANGED
@@ -84,7 +84,7 @@ export class CustomAnthropic extends ChatAnthropicMessages {
     );
 
     for await (const data of stream) {
-      if (options.signal?.aborted) {
+      if (options.signal?.aborted === true) {
         stream.controller.abort();
         throw new Error('AbortError: User aborted the request.');
       }
package/src/llm/anthropic/types.ts CHANGED
@@ -10,6 +10,7 @@ export type AnthropicToolResponse = {
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   input: Record<string, any>;
 };
+
 export type AnthropicMessageParam = Anthropic.MessageParam;
 export type AnthropicMessageResponse =
   | Anthropic.ContentBlock
@@ -29,4 +30,9 @@ export type AnthropicToolChoice =
   | 'auto'
   | 'none'
   | string;
-export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
+export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
+export type AnthropicTextBlockParam = Anthropic.Messages.TextBlockParam;
+export type AnthropicImageBlockParam = Anthropic.Messages.ImageBlockParam;
+export type AnthropicToolUseBlockParam = Anthropic.Messages.ToolUseBlockParam;
+export type AnthropicToolResultBlockParam =
+  Anthropic.Messages.ToolResultBlockParam;
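These aliases re-export the Anthropic SDK's content-block shapes so the merge logic below can be typed. A minimal sketch of typing a hand-built tool_result block with one of them; it assumes the aliases are reachable from the package's type exports (they may be internal-only), and the ids and values are hypothetical:

import type { AnthropicToolResultBlockParam } from '@librechat/agents'; // assumption: re-exported from the entry point

// A tool_result block answering a prior tool_use block with id 'toolu_01A2B3'.
const toolResult: AnthropicToolResultBlockParam = {
  type: 'tool_result',
  tool_use_id: 'toolu_01A2B3',
  content: [{ type: 'text', text: '42' }],
};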
package/src/llm/anthropic/utils/message_inputs.ts CHANGED
@@ -1,19 +1,25 @@
+/* eslint-disable no-console */
 /**
  * This util file contains functions for converting LangChain messages to Anthropic messages.
  */
 import {
-  BaseMessage,
-  SystemMessage,
-  HumanMessage,
   AIMessage,
+  BaseMessage,
   ToolMessage,
-  MessageContent,
   isAIMessage,
+  HumanMessage,
+  SystemMessage,
+  MessageContent,
 } from '@langchain/core/messages';
 import { ToolCall } from '@langchain/core/messages/tool';
 import type {
-  AnthropicMessageCreateParams,
   AnthropicToolResponse,
+  AnthropicMessageParam,
+  AnthropicTextBlockParam,
+  AnthropicImageBlockParam,
+  AnthropicToolUseBlockParam,
+  AnthropicMessageCreateParams,
+  AnthropicToolResultBlockParam,
 } from '@/llm/anthropic/types';
 
 function _formatImage(imageUrl: string): { type: string; media_type: string; data: string } {
@@ -130,7 +136,7 @@ function _formatContent(content: MessageContent): string | Record<string, any>[]
       ...(cacheControl ? { cache_control: cacheControl } : {}),
     };
   } else if (
-    textTypes.find((t) => t === contentPart.type) &&
+    textTypes.find((t) => t === contentPart.type) != null &&
     'text' in contentPart
   ) {
     // Assuming contentPart is of type MessageContentText here
@@ -139,7 +145,7 @@ function _formatContent(content: MessageContent): string | Record<string, any>[]
       text: contentPart.text,
       ...(cacheControl ? { cache_control: cacheControl } : {}),
     };
-  } else if (toolTypes.find((t) => t === contentPart.type)) {
+  } else if (toolTypes.find((t) => t === contentPart.type) != null) {
     const contentPartCopy = { ...contentPart };
     if ('index' in contentPartCopy) {
       // Anthropic does not support passing the index field here, so we remove it.
@@ -252,7 +258,79 @@ export function _convertMessagesToAnthropicPayload(
     }
   });
   return {
-    messages: formattedMessages,
+    messages: mergeMessages(formattedMessages as AnthropicMessageCreateParams['messages']),
     system,
   } as AnthropicMessageCreateParams;
+}
+
+function mergeMessages(messages?: AnthropicMessageCreateParams['messages']): AnthropicMessageParam[] {
+  if (!messages || messages.length <= 1) {
+    return messages ?? [];
+  }
+
+  const result: AnthropicMessageCreateParams['messages'] = [];
+  let currentMessage = messages[0];
+
+  const normalizeContent = (
+    content:
+      | string
+      | Array<
+          | AnthropicTextBlockParam
+          | AnthropicImageBlockParam
+          | AnthropicToolUseBlockParam
+          | AnthropicToolResultBlockParam
+        >
+  ): Array<
+    | AnthropicTextBlockParam
+    | AnthropicImageBlockParam
+    | AnthropicToolUseBlockParam
+    | AnthropicToolResultBlockParam
+  > => {
+    if (typeof content === 'string') {
+      return [
+        {
+          type: 'text',
+          text: content,
+        },
+      ];
+    }
+    return content;
+  };
+
+  const isToolResultMessage = (msg: (typeof messages)[0]): boolean => {
+    if (msg.role !== 'user') return false;
+
+    if (typeof msg.content === 'string') {
+      return false;
+    }
+
+    return (
+      Array.isArray(msg.content) &&
+      msg.content.every((item) => item.type === 'tool_result')
+    );
+  };
+
+  for (let i = 1; i < messages.length; i += 1) {
+    const nextMessage = messages[i];
+
+    if (
+      isToolResultMessage(currentMessage) &&
+      isToolResultMessage(nextMessage)
+    ) {
+      // Merge the messages by combining their content arrays
+      currentMessage = {
+        ...currentMessage,
+        content: [
+          ...normalizeContent(currentMessage.content),
+          ...normalizeContent(nextMessage.content),
+        ],
+      };
+    } else {
+      result.push(currentMessage);
+      currentMessage = nextMessage;
+    }
+  }
+
+  result.push(currentMessage);
+  return result;
 }
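mergeMessages is internal to this file, but its effect is easy to illustrate from the code above: consecutive user messages that consist only of tool_result blocks are collapsed into a single user turn, which is the shape the Anthropic Messages API expects when several tools return in parallel. A sketch of the before/after payload, with hypothetical ids and values:

// Payload shape before merging (two separate tool_result user turns):
const before = [
  {
    role: 'assistant',
    content: [
      { type: 'tool_use', id: 'toolu_a', name: 'add', input: { a: 1, b: 2 } },
      { type: 'tool_use', id: 'toolu_b', name: 'add', input: { a: 3, b: 4 } },
    ],
  },
  { role: 'user', content: [{ type: 'tool_result', tool_use_id: 'toolu_a', content: '3' }] },
  { role: 'user', content: [{ type: 'tool_result', tool_use_id: 'toolu_b', content: '7' }] },
];

// After mergeMessages, the tool_result turns are combined into one user message:
const after = [
  before[0],
  {
    role: 'user',
    content: [
      { type: 'tool_result', tool_use_id: 'toolu_a', content: '3' },
      { type: 'tool_result', tool_use_id: 'toolu_b', content: '7' },
    ],
  },
];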
package/src/llm/providers.ts CHANGED
@@ -2,13 +2,14 @@
 import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatBedrockConverse } from '@langchain/aws';
-import { ChatAnthropic } from '@langchain/anthropic';
+// import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatVertexAI } from '@langchain/google-vertexai';
+import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
 import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';
 import { Providers } from '@/common';
-// import { CustomAnthropic } from '@/llm/anthropic/llm';
+import { CustomAnthropic } from '@/llm/anthropic/llm';
 
 export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.OPENAI]: ChatOpenAI,
@@ -17,8 +18,9 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.BEDROCK_LEGACY]: BedrockChat,
   [Providers.MISTRALAI]: ChatMistralAI,
   [Providers.BEDROCK]: ChatBedrockConverse,
-  // [Providers.ANTHROPIC]: CustomAnthropic,
-  [Providers.ANTHROPIC]: ChatAnthropic,
+  [Providers.ANTHROPIC]: CustomAnthropic,
+  // [Providers.ANTHROPIC]: ChatAnthropic,
+  [Providers.GOOGLE]: ChatGoogleGenerativeAI,
 };
 
 export const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
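With the constructor map extended, picking up the new provider is a map lookup. A minimal sketch, assuming Providers and llmProviders are reachable from the package's public exports and that a Google Generative AI API key is available (model name and env var are illustrative):

import { Providers, llmProviders } from '@librechat/agents'; // assumption: re-exported from the entry point

// Resolve the chat model class registered for the Google provider and instantiate it.
const ChatModelClass = llmProviders[Providers.GOOGLE];
if (ChatModelClass != null) {
  const model = new ChatModelClass({
    model: 'gemini-1.5-pro',              // hypothetical model id
    apiKey: process.env.GOOGLE_API_KEY,   // credential expected by @langchain/google-genai
  });
  // model.invoke([...]) behaves like any other LangChain chat model from the map.
}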
package/src/llm/text.ts CHANGED
@@ -1,73 +1,52 @@
 /* eslint-disable no-console */
-import { Readable } from 'stream';
-import type { ReadableOptions } from 'stream';
-export interface TextStreamOptions extends ReadableOptions {
+export interface TextStreamOptions {
   minChunkSize?: number;
   maxChunkSize?: number;
   delay?: number;
+  firstWordChunk?: boolean;
 }
 
 export type ProgressCallback = (chunk: string) => void;
 export type PostChunkCallback = (chunk: string) => void;
 
-export class TextStream extends Readable {
+export class TextStream {
   private text: string;
   private currentIndex: number;
   private minChunkSize: number;
   private maxChunkSize: number;
   private delay: number;
+  private firstWordChunk: boolean;
 
   constructor(text: string, options: TextStreamOptions = {}) {
-    super(options);
     this.text = text;
     this.currentIndex = 0;
-    this.minChunkSize = options.minChunkSize ?? 2;
-    this.maxChunkSize = options.maxChunkSize ?? 4;
-    this.delay = options.delay ?? 20; // Time in milliseconds
-  }
-
-  _read(): void {
-    const { delay, minChunkSize, maxChunkSize } = this;
-
-    if (this.currentIndex < this.text.length) {
-      setTimeout(() => {
-        const remainingChars = this.text.length - this.currentIndex;
-        const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);
-
-        const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
-        this.push(chunk);
-        this.currentIndex += chunkSize;
-      }, delay);
-    } else {
-      this.push(null); // signal end of data
-    }
+    this.minChunkSize = options.minChunkSize ?? 4;
+    this.maxChunkSize = options.maxChunkSize ?? 8;
+    this.delay = options.delay ?? 20;
+    this.firstWordChunk = options.firstWordChunk ?? true;
   }
 
   private randomInt(min: number, max: number): number {
     return Math.floor(Math.random() * (max - min)) + min;
   }
 
-  async processTextStream(progressCallback: ProgressCallback): Promise<void> {
-    const streamPromise = new Promise<void>((resolve, reject) => {
-      this.on('data', (chunk) => {
-        progressCallback(chunk.toString());
-      });
+  private static readonly BOUNDARIES = new Set([' ', '.', ',', '!', '?', ';', ':']);
 
-      this.on('end', () => {
-        resolve();
-      });
+  private findFirstWordBoundary(text: string, minSize: number): number {
+    if (minSize >= text.length) return text.length;
 
-      this.on('error', (err) => {
-        reject(err);
-      });
-    });
+    // Ensure we meet the minimum size first
+    let pos = minSize;
 
-    try {
-      await streamPromise;
-    } catch (err) {
-      console.error('[processTextStream] Error in text stream:', err);
-      // Handle the error appropriately, e.g., return an error message or throw an error
+    // Look forward until we find a boundary
+    while (pos < text.length) {
+      if (TextStream.BOUNDARIES.has(text[pos])) {
+        return pos + 1; // Include the boundary character
+      }
+      pos++;
    }
+
+    return text.length; // If no boundary found, return entire remaining text
  }
 
  async *generateText(progressCallback?: ProgressCallback): AsyncGenerator<string, void, unknown> {
@@ -76,11 +55,17 @@ export class TextStream extends Readable {
     while (this.currentIndex < this.text.length) {
       await new Promise(resolve => setTimeout(resolve, delay));
 
-      const remainingChars = this.text.length - this.currentIndex;
-      const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);
+      const remainingText = this.text.slice(this.currentIndex);
+      let chunkSize: number;
 
-      const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
+      if (this.firstWordChunk) {
+        chunkSize = this.findFirstWordBoundary(remainingText, minChunkSize);
+      } else {
+        const remainingChars = remainingText.length;
+        chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);
+      }
 
+      const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
       progressCallback?.(chunk);
 
       yield chunk;
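The rewritten TextStream drops the Node Readable base class in favor of a plain async generator; with firstWordChunk enabled (now the default), each chunk is extended to the next space or punctuation boundary instead of cutting words mid-stream. A small usage sketch, assuming TextStream is re-exported from the package entry point:

import { TextStream } from '@librechat/agents'; // assumption: re-exported from the entry point

async function demo(): Promise<void> {
  const stream = new TextStream('The quick brown fox jumps over the lazy dog.', {
    minChunkSize: 4,
    delay: 10,
    firstWordChunk: true, // chunks end at the next space or punctuation mark
  });

  // generateText yields chunks with an optional per-chunk callback.
  for await (const chunk of stream.generateText()) {
    process.stdout.write(chunk);
  }
}

demo().catch(console.error);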
package/src/messages.ts CHANGED
@@ -33,7 +33,7 @@ User: ${userMessage[1]}
 
 const modifyContent = (messageType: string, content: t.ExtendedMessageContent[]): t.ExtendedMessageContent[] => {
   return content.map(item => {
-    if (item && typeof item === 'object' && 'type' in item && item.type) {
+    if (item && typeof item === 'object' && 'type' in item && item.type != null && item.type) {
       let newType = item.type;
       if (newType.endsWith('_delta')) {
         newType = newType.replace('_delta', '');
@@ -80,9 +80,9 @@ export function formatAnthropicMessage(message: AIMessageChunk): AIMessage {
     formattedContent = message.content.reduce<t.ExtendedMessageContent[]>((acc, item) => {
       if (typeof item === 'object' && item !== null) {
         const extendedItem = item as t.ExtendedMessageContent;
-        if (extendedItem.type === 'text' && extendedItem.text) {
+        if (extendedItem.type === 'text' && extendedItem.text != null && extendedItem.text) {
           acc.push({ type: 'text', text: extendedItem.text });
-        } else if (extendedItem.type === 'tool_use' && extendedItem.id) {
+        } else if (extendedItem.type === 'tool_use' && extendedItem.id != null && extendedItem.id) {
           const toolCall = toolCallMap.get(extendedItem.id);
           if (toolCall) {
             acc.push({
@@ -92,7 +92,7 @@ export function formatAnthropicMessage(message: AIMessageChunk): AIMessage {
               input: toolCall.args as unknown as string
             });
           }
-        } else if ('input' in extendedItem && extendedItem.input) {
+        } else if ('input' in extendedItem && extendedItem.input != null && extendedItem.input) {
           try {
             const parsedInput = JSON.parse(extendedItem.input);
             const toolCall = message.tool_calls?.find(tc => tc.args.input === parsedInput.input);
package/src/scripts/args.ts CHANGED
@@ -20,7 +20,7 @@ export async function getArgs(): Promise<{ userName: string; location: string; p
       alias: 'p',
       type: 'string',
       description: 'LLM provider',
-      choices: ['openAI', 'anthropic', 'mistralai', 'vertexai', 'bedrock', 'ollama'],
+      choices: ['openAI', 'anthropic', 'mistralai', 'vertexai', 'bedrock', 'ollama', 'google'],
       default: 'openAI'
     })
     .help()
package/src/scripts/code_exec.ts CHANGED
@@ -171,6 +171,10 @@ process.on('unhandledRejection', (reason, promise) => {
   process.exit(1);
 });
 
+process.on('uncaughtException', (err) => {
+  console.error('Uncaught Exception:', err);
+});
+
 testCodeExecution().catch((err) => {
   console.error(err);
   console.log('Conversation history:');
package/src/scripts/simple.ts CHANGED
@@ -117,6 +117,10 @@ process.on('unhandledRejection', (reason, promise) => {
   process.exit(1);
 });
 
+process.on('uncaughtException', (err) => {
+  console.error('Uncaught Exception:', err);
+});
+
 testStandardStreaming().catch((err) => {
   console.error(err);
   console.log('Conversation history:');
package/src/stream.ts CHANGED
@@ -33,6 +33,73 @@ const getMessageId = (stepKey: string, graph: Graph<t.BaseGraphState>, returnExi
   return message_id;
 };
 
+export const handleToolCalls = (toolCalls?: ToolCall[], metadata?: Record<string, unknown>, graph?: Graph): void => {
+  if (!graph || !metadata) {
+    console.warn(`Graph or metadata not found in ${event} event`);
+    return;
+  }
+
+  if (!toolCalls) {
+    return;
+  }
+
+  if (toolCalls.length === 0) {
+    return;
+  }
+
+  const tool_calls: ToolCall[] = [];
+  const tool_call_ids: string[] = [];
+  for (const tool_call of toolCalls) {
+    const toolCallId = tool_call.id ?? `toolu_${nanoid()}`;
+    tool_call.id = toolCallId;
+    if (!toolCallId || graph.toolCallStepIds.has(toolCallId)) {
+      continue;
+    }
+
+    tool_calls.push(tool_call);
+    tool_call_ids.push(toolCallId);
+  }
+
+  const stepKey = graph.getStepKey(metadata);
+
+  let prevStepId = '';
+  let prevRunStep: t.RunStep | undefined;
+  try {
+    prevStepId = graph.getStepIdByKey(stepKey, graph.contentData.length - 1);
+    prevRunStep = graph.getRunStep(prevStepId);
+  } catch (e) {
+    // no previous step
+  }
+
+  const dispatchToolCallIds = (lastMessageStepId: string): void => {
+    graph.dispatchMessageDelta(lastMessageStepId, {
+      content: [{
+        type: 'text',
+        text: '',
+        tool_call_ids,
+      }],
+    });
+  };
+  /* If the previous step exists and is a message creation */
+  if (prevStepId && prevRunStep && prevRunStep.type === StepTypes.MESSAGE_CREATION) {
+    dispatchToolCallIds(prevStepId);
+    /* If the previous step doesn't exist or is not a message creation */
+  } else if (!prevRunStep || prevRunStep.type !== StepTypes.MESSAGE_CREATION) {
+    const messageId = getMessageId(stepKey, graph, true) ?? '';
+    const stepId = graph.dispatchRunStep(stepKey, {
+      type: StepTypes.MESSAGE_CREATION,
+      message_creation: {
+        message_id: messageId,
+      },
+    });
+    dispatchToolCallIds(stepId);
+  }
+  graph.dispatchRunStep(stepKey, {
+    type: StepTypes.TOOL_CALLS,
+    tool_calls,
+  });
+};
+
 export class ChatModelStreamHandler implements t.EventHandler {
   handle(event: string, data: t.StreamEventData, metadata?: Record<string, unknown>, graph?: Graph): void {
     if (!graph) {
@@ -56,56 +123,7 @@ export class ChatModelStreamHandler implements t.EventHandler {
 
     if (chunk.tool_calls && chunk.tool_calls.length > 0 && chunk.tool_calls.every((tc) => tc.id)) {
       hasToolCalls = true;
-      const tool_calls: ToolCall[] = [];
-      const tool_call_ids: string[] = [];
-      for (const tool_call of chunk.tool_calls) {
-        const toolCallId = tool_call.id ?? '';
-        if (!toolCallId || graph.toolCallStepIds.has(toolCallId)) {
-          continue;
-        }
-
-        tool_calls.push(tool_call);
-        tool_call_ids.push(toolCallId);
-      }
-
-      const stepKey = graph.getStepKey(metadata);
-
-      let prevStepId = '';
-      let prevRunStep: t.RunStep | undefined;
-      try {
-        prevStepId = graph.getStepIdByKey(stepKey, graph.contentData.length - 1);
-        prevRunStep = graph.getRunStep(prevStepId);
-      } catch (e) {
-        // no previous step
-      }
-
-      const dispatchToolCallIds = (lastMessageStepId: string): void => {
-        graph.dispatchMessageDelta(lastMessageStepId, {
-          content: [{
-            type: 'text',
-            text: '',
-            tool_call_ids,
-          }],
-        });
-      };
-      /* If the previous step exists and is a message creation */
-      if (prevStepId && prevRunStep && prevRunStep.type === StepTypes.MESSAGE_CREATION) {
-        dispatchToolCallIds(prevStepId);
-        /* If the previous step doesn't exist or is not a message creation */
-      } else if (!prevRunStep || prevRunStep.type !== StepTypes.MESSAGE_CREATION) {
-        const messageId = getMessageId(stepKey, graph, true) ?? '';
-        const stepId = graph.dispatchRunStep(stepKey, {
-          type: StepTypes.MESSAGE_CREATION,
-          message_creation: {
-            message_id: messageId,
-          },
-        });
-        dispatchToolCallIds(stepId);
-      }
-      graph.dispatchRunStep(stepKey, {
-        type: StepTypes.TOOL_CALLS,
-        tool_calls,
-      });
+      handleToolCalls(chunk.tool_calls, metadata, graph);
     }
 
     const isEmptyContent = typeof content === 'undefined' || !content.length || typeof content === 'string' && !content;
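Because handleToolCalls is now a standalone export, the same run-step bookkeeping can be reused outside ChatModelStreamHandler; events.ts above calls it from ModelEndHandler for the Google provider, whose tool calls arrive on the final message rather than as streamed chunks. A rough sketch of calling it from a custom on_chat_model_end handler, assuming the handler is given the same metadata and graph objects the built-in handlers receive and that the function is exposed from the package entry point:

import { handleToolCalls } from '@librechat/agents'; // assumption: re-exported from the entry point

// Hypothetical custom handler: forward tool calls from the final message to the
// graph's run-step dispatcher, mirroring what ModelEndHandler now does for Google.
function onChatModelEnd(
  data: { output?: { tool_calls?: any[] } } | undefined,
  metadata: Record<string, unknown> | undefined,
  graph: any
): void {
  handleToolCalls(data?.output?.tool_calls, metadata, graph);
}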
package/src/tools/ToolNode.ts CHANGED
@@ -1,11 +1,11 @@
-import { END, MessagesAnnotation } from '@langchain/langgraph';
+import { END, MessagesAnnotation, isCommand, isGraphInterrupt } from '@langchain/langgraph';
 import { ToolMessage, isBaseMessage } from '@langchain/core/messages';
 import type { RunnableConfig, RunnableToolLike } from '@langchain/core/runnables';
 import type { BaseMessage, AIMessage } from '@langchain/core/messages';
 import type { StructuredToolInterface } from '@langchain/core/tools';
 import type * as t from '@/types';
-import{ RunnableCallable } from '@/utils';
-import { GraphNodeKeys } from '@/common';
+import{ RunnableCallable, unescapeObject } from '@/utils';
+import { GraphNodeKeys, Providers } from '@/common';
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export class ToolNode<T = any> extends RunnableCallable<T, T> {
@@ -46,7 +46,6 @@ export class ToolNode<T = any> extends RunnableCallable<T, T> {
     this.tools = tools;
     this.toolMap = toolMap ?? new Map(tools.map(tool => [tool.name, tool]));
   }
-
    const outputs = await Promise.all(
      (message as AIMessage).tool_calls?.map(async (call) => {
        const tool = this.toolMap.get(call.name);
@@ -54,11 +53,15 @@ export class ToolNode<T = any> extends RunnableCallable<T, T> {
          if (tool === undefined) {
            throw new Error(`Tool "${call.name}" not found.`);
          }
+          const args = config.metadata?.provider === Providers.GOOGLE ? unescapeObject(call.args) : call.args;
          const output = await tool.invoke(
-            { ...call, type: 'tool_call' },
+            { ...call, args, type: 'tool_call' },
            config
          );
-          if (isBaseMessage(output) && output._getType() === 'tool') {
+          if (
+            (isBaseMessage(output) && output._getType() === 'tool') ||
+            isCommand(output)
+          ) {
            return output;
          } else {
            return new ToolMessage({
@@ -68,11 +71,14 @@ export class ToolNode<T = any> extends RunnableCallable<T, T> {
              tool_call_id: call.id!,
            });
          }
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        } catch (e: any) {
+        } catch (_e: unknown) {
+          const e = _e as Error;
          if (!this.handleToolErrors) {
            throw e;
          }
+          if (isGraphInterrupt(e)) {
+            throw e;
+          }
          return new ToolMessage({
            content: `Error: ${e.message}\n Please fix your mistakes.`,
            name: call.name,
@@ -82,7 +88,17 @@ export class ToolNode<T = any> extends RunnableCallable<T, T> {
      }) ?? []
    );
 
-    return (Array.isArray(input) ? outputs : { messages: outputs }) as T;
+    if (!outputs.some(isCommand)) {
+      return (Array.isArray(input) ? outputs : { messages: outputs }) as T;
+    }
+
+    const combinedOutputs = outputs.map((output) => {
+      if (isCommand(output)) {
+        return output;
+      }
+      return Array.isArray(input) ? [output] : { messages: [output] };
+    });
+    return combinedOutputs as T;
  }
 }
 
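Two behavioral changes are visible here: tool-call arguments from the Google provider are passed through unescapeObject before invocation, and tools may now return a LangGraph Command (or throw a GraphInterrupt) and have it forwarded instead of being coerced into a ToolMessage. A minimal sketch of a tool whose output ToolNode would now pass through unchanged, using the public @langchain/core and @langchain/langgraph APIs (the tool name and state update are hypothetical):

import { tool } from '@langchain/core/tools';
import { Command } from '@langchain/langgraph';
import { z } from 'zod';

// A tool that returns a Command; isCommand(output) is true, so ToolNode
// returns it as-is rather than wrapping it in a ToolMessage.
const handoff = tool(
  async () => {
    return new Command({
      update: { messages: [] }, // hypothetical state update
    });
  },
  {
    name: 'handoff',
    description: 'Hand control back to the graph with a state update.',
    schema: z.object({}),
  }
);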