@langchain/langgraph 0.2.20-rc.0 → 0.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -185,7 +185,7 @@ Is there anything else you'd like to know about the weather in New York or any o
185
185
  <summary>Initialize the model and tools.</summary>
186
186
 
187
187
  - We use `ChatAnthropic` as our LLM. **NOTE:** We need to make sure the model knows that it has these tools available to call. We can do this by converting the LangChain tools into the format for Anthropic tool calling using the `.bindTools()` method.
188
- - We define the tools we want to use -- a weather tool in our case. See the documentation [here](https://js.langchain.com/docs/modules/agents/tools/dynamic) on how to create your own tools.
188
+ - We define the tools we want to use -- a weather tool in our case. See the documentation [here](https://js.langchain.com/docs/how_to/custom_tools/) on how to create your own tools.
189
189
  </details>
190
190
 
191
191
  2. <details>
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = void 0;
3
+ exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_NOSTREAM = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = void 0;
4
4
  exports.INPUT = "__input__";
5
5
  exports.ERROR = "__error__";
6
6
  exports.CONFIG_KEY_SEND = "__pregel_send";
@@ -15,6 +15,7 @@ exports.INTERRUPT = "__interrupt__";
15
15
  exports.RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
16
16
  exports.RECURSION_LIMIT_DEFAULT = 25;
17
17
  exports.TAG_HIDDEN = "langsmith:hidden";
18
+ exports.TAG_NOSTREAM = "langsmith:nostream";
18
19
  exports.TASKS = "__pregel_tasks";
19
20
  exports.PUSH = "__pregel_push";
20
21
  exports.PULL = "__pregel_pull";
@@ -11,6 +11,7 @@ export declare const INTERRUPT = "__interrupt__";
11
11
  export declare const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
12
12
  export declare const RECURSION_LIMIT_DEFAULT = 25;
13
13
  export declare const TAG_HIDDEN = "langsmith:hidden";
14
+ export declare const TAG_NOSTREAM = "langsmith:nostream";
14
15
  export declare const TASKS = "__pregel_tasks";
15
16
  export declare const PUSH = "__pregel_push";
16
17
  export declare const PULL = "__pregel_pull";
package/dist/constants.js CHANGED
@@ -12,6 +12,7 @@ export const INTERRUPT = "__interrupt__";
12
12
  export const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
13
13
  export const RECURSION_LIMIT_DEFAULT = 25;
14
14
  export const TAG_HIDDEN = "langsmith:hidden";
15
+ export const TAG_NOSTREAM = "langsmith:nostream";
15
16
  export const TASKS = "__pregel_tasks";
16
17
  export const PUSH = "__pregel_push";
17
18
  export const PULL = "__pregel_pull";
@@ -377,7 +377,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
377
377
  config: (0, runnables_1.patchConfig)((0, runnables_1.mergeConfigs)(config, {
378
378
  metadata,
379
379
  tags: proc.tags,
380
- store: extra.store ?? config?.store,
380
+ store: extra.store ?? config.store,
381
381
  }), {
382
382
  runName: name,
383
383
  callbacks: manager?.getChild(`graph:step:${step}`),
@@ -368,7 +368,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
368
368
  config: patchConfig(mergeConfigs(config, {
369
369
  metadata,
370
370
  tags: proc.tags,
371
- store: extra.store ?? config?.store,
371
+ store: extra.store ?? config.store,
372
372
  }), {
373
373
  runName: name,
374
374
  callbacks: manager?.getChild(`graph:step:${step}`),
@@ -20,6 +20,7 @@ const retry_js_1 = require("./retry.cjs");
20
20
  const base_js_2 = require("../managed/base.cjs");
21
21
  const utils_js_1 = require("../utils.cjs");
22
22
  const config_js_1 = require("./utils/config.cjs");
23
+ const messages_js_1 = require("./messages.cjs");
23
24
  function isString(value) {
24
25
  return typeof value === "string";
25
26
  }
@@ -575,6 +576,7 @@ class Pregel extends runnables_1.Runnable {
575
576
  }
576
577
  _defaults(config) {
577
578
  const { debug, streamMode, inputKeys, outputKeys, interruptAfter, interruptBefore, ...rest } = config;
579
+ let streamModeSingle = true;
578
580
  const defaultDebug = debug !== undefined ? debug : this.debug;
579
581
  let defaultOutputKeys = outputKeys;
580
582
  if (defaultOutputKeys === undefined) {
@@ -595,9 +597,11 @@ class Pregel extends runnables_1.Runnable {
595
597
  let defaultStreamMode;
596
598
  if (streamMode !== undefined) {
597
599
  defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode];
600
+ streamModeSingle = typeof streamMode === "string";
598
601
  }
599
602
  else {
600
603
  defaultStreamMode = this.streamMode;
604
+ streamModeSingle = true;
601
605
  }
602
606
  // if being called as a node in another graph, always use values mode
603
607
  if (config.configurable?.[constants_js_1.CONFIG_KEY_TASK_ID] !== undefined) {
@@ -625,6 +629,7 @@ class Pregel extends runnables_1.Runnable {
625
629
  defaultInterruptAfter,
626
630
  defaultCheckpointer,
627
631
  defaultStore,
632
+ streamModeSingle,
628
633
  ];
629
634
  }
630
635
  /**
@@ -643,7 +648,16 @@ class Pregel extends runnables_1.Runnable {
643
648
  * @param options.debug Whether to print debug information during execution.
644
649
  */
645
650
  async stream(input, options) {
646
- return super.stream(input, options);
651
+ // The ensureConfig method called internally defaults recursionLimit to 25 if not
652
+ // passed directly in `options`.
653
+ // There is currently no way in _streamIterator to determine whether this was
654
+ // set by ensureConfig or manually by the user, so we specify the bound value here
655
+ // and override if it is passed as an explicit param in `options`.
656
+ const config = {
657
+ recursionLimit: this.config?.recursionLimit,
658
+ ...options,
659
+ };
660
+ return super.stream(input, config);
647
661
  }
648
662
  async prepareSpecs(config, options) {
649
663
  const configForManaged = {
@@ -701,15 +715,35 @@ class Pregel extends runnables_1.Runnable {
701
715
  inputConfig.configurable === undefined) {
702
716
  throw new Error(`Checkpointer requires one or more of the following "configurable" keys: "thread_id", "checkpoint_ns", "checkpoint_id"`);
703
717
  }
704
- const callbackManager = await (0, runnables_1.getCallbackManagerForConfig)(inputConfig);
705
- const runManager = await callbackManager?.handleChainStart(this.toJSON(), (0, index_js_1._coerceToDict)(input, "input"), inputConfig.runId, undefined, undefined, undefined, inputConfig?.runName ?? this.getName());
706
- delete inputConfig.runId;
718
+ const { runId, ...restConfig } = inputConfig;
707
719
  // assign defaults
708
- const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store,] = this._defaults(inputConfig);
709
- const { channelSpecs, managed } = await this.prepareSpecs(config);
720
+ const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store, streamModeSingle,] = this._defaults(restConfig);
710
721
  const stream = new loop_js_1.IterableReadableWritableStream({
711
722
  modes: new Set(streamMode),
712
723
  });
724
+ // set up messages stream mode
725
+ if (streamMode.includes("messages")) {
726
+ const messageStreamer = new messages_js_1.StreamMessagesHandler((chunk) => stream.push(chunk));
727
+ const { callbacks } = config;
728
+ if (callbacks === undefined) {
729
+ config.callbacks = [messageStreamer];
730
+ }
731
+ else if (Array.isArray(callbacks)) {
732
+ config.callbacks = callbacks.concat(messageStreamer);
733
+ }
734
+ else {
735
+ const copiedCallbacks = callbacks.copy();
736
+ copiedCallbacks.addHandler(messageStreamer, true);
737
+ config.callbacks = copiedCallbacks;
738
+ }
739
+ }
740
+ // set up custom stream mode
741
+ if (streamMode.includes("custom")) {
742
+ config.writer = (chunk) => stream.push([[], "custom", chunk]);
743
+ }
744
+ const callbackManager = await (0, runnables_1.getCallbackManagerForConfig)(config);
745
+ const runManager = await callbackManager?.handleChainStart(this.toJSON(), (0, index_js_1._coerceToDict)(input, "input"), runId, undefined, undefined, undefined, config?.runName ?? this.getName());
746
+ const { channelSpecs, managed } = await this.prepareSpecs(config);
713
747
  let loop;
714
748
  let loopError;
715
749
  const runLoop = async () => {
@@ -810,11 +844,16 @@ class Pregel extends runnables_1.Runnable {
810
844
  loopError = loopError ?? e;
811
845
  }
812
846
  if (loopError) {
813
- // Will throw an error outside of this method
847
+ // "Causes any future interactions with the associated stream to error".
848
+ // Wraps ReadableStreamDefaultController#error:
849
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/error
814
850
  stream.error(loopError);
815
851
  }
816
852
  else {
817
- // Will end the iterator outside of this method
853
+ // Will end the iterator outside of this method,
854
+ // keeping previously enqueued chunks.
855
+ // Wraps ReadableStreamDefaultController#close:
856
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/close
818
857
  stream.close();
819
858
  }
820
859
  }
@@ -827,10 +866,10 @@ class Pregel extends runnables_1.Runnable {
827
866
  }
828
867
  const [namespace, mode, payload] = chunk;
829
868
  if (streamMode.includes(mode)) {
830
- if (streamSubgraphs && streamMode.length > 1) {
869
+ if (streamSubgraphs && !streamModeSingle) {
831
870
  yield [namespace, mode, payload];
832
871
  }
833
- else if (streamMode.length > 1) {
872
+ else if (!streamModeSingle) {
834
873
  yield [mode, payload];
835
874
  }
836
875
  else if (streamSubgraphs) {
@@ -77,14 +77,15 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
77
77
  string | string[],
78
78
  // input keys
79
79
  string | string[],
80
- RunnableConfig,
80
+ LangGraphRunnableConfig,
81
81
  // config without pregel keys
82
82
  All | string[],
83
83
  // interrupt before
84
84
  All | string[],
85
85
  // interrupt after
86
86
  BaseCheckpointSaver | undefined,
87
- BaseStore | undefined
87
+ BaseStore | undefined,
88
+ boolean
88
89
  ];
89
90
  /**
90
91
  * Stream graph steps for a single input.
@@ -17,6 +17,7 @@ import { executeTasksWithRetry } from "./retry.js";
17
17
  import { ChannelKeyPlaceholder, isConfiguredManagedValue, ManagedValueMapping, NoopManagedValue, } from "../managed/base.js";
18
18
  import { gatherIterator, patchConfigurable } from "../utils.js";
19
19
  import { ensureLangGraphConfig } from "./utils/config.js";
20
+ import { StreamMessagesHandler } from "./messages.js";
20
21
  function isString(value) {
21
22
  return typeof value === "string";
22
23
  }
@@ -571,6 +572,7 @@ export class Pregel extends Runnable {
571
572
  }
572
573
  _defaults(config) {
573
574
  const { debug, streamMode, inputKeys, outputKeys, interruptAfter, interruptBefore, ...rest } = config;
575
+ let streamModeSingle = true;
574
576
  const defaultDebug = debug !== undefined ? debug : this.debug;
575
577
  let defaultOutputKeys = outputKeys;
576
578
  if (defaultOutputKeys === undefined) {
@@ -591,9 +593,11 @@ export class Pregel extends Runnable {
591
593
  let defaultStreamMode;
592
594
  if (streamMode !== undefined) {
593
595
  defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode];
596
+ streamModeSingle = typeof streamMode === "string";
594
597
  }
595
598
  else {
596
599
  defaultStreamMode = this.streamMode;
600
+ streamModeSingle = true;
597
601
  }
598
602
  // if being called as a node in another graph, always use values mode
599
603
  if (config.configurable?.[CONFIG_KEY_TASK_ID] !== undefined) {
@@ -621,6 +625,7 @@ export class Pregel extends Runnable {
621
625
  defaultInterruptAfter,
622
626
  defaultCheckpointer,
623
627
  defaultStore,
628
+ streamModeSingle,
624
629
  ];
625
630
  }
626
631
  /**
@@ -639,7 +644,16 @@ export class Pregel extends Runnable {
639
644
  * @param options.debug Whether to print debug information during execution.
640
645
  */
641
646
  async stream(input, options) {
642
- return super.stream(input, options);
647
+ // The ensureConfig method called internally defaults recursionLimit to 25 if not
648
+ // passed directly in `options`.
649
+ // There is currently no way in _streamIterator to determine whether this was
650
+ // set by ensureConfig or manually by the user, so we specify the bound value here
651
+ // and override if it is passed as an explicit param in `options`.
652
+ const config = {
653
+ recursionLimit: this.config?.recursionLimit,
654
+ ...options,
655
+ };
656
+ return super.stream(input, config);
643
657
  }
644
658
  async prepareSpecs(config, options) {
645
659
  const configForManaged = {
@@ -697,15 +711,35 @@ export class Pregel extends Runnable {
697
711
  inputConfig.configurable === undefined) {
698
712
  throw new Error(`Checkpointer requires one or more of the following "configurable" keys: "thread_id", "checkpoint_ns", "checkpoint_id"`);
699
713
  }
700
- const callbackManager = await getCallbackManagerForConfig(inputConfig);
701
- const runManager = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), inputConfig.runId, undefined, undefined, undefined, inputConfig?.runName ?? this.getName());
702
- delete inputConfig.runId;
714
+ const { runId, ...restConfig } = inputConfig;
703
715
  // assign defaults
704
- const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store,] = this._defaults(inputConfig);
705
- const { channelSpecs, managed } = await this.prepareSpecs(config);
716
+ const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store, streamModeSingle,] = this._defaults(restConfig);
706
717
  const stream = new IterableReadableWritableStream({
707
718
  modes: new Set(streamMode),
708
719
  });
720
+ // set up messages stream mode
721
+ if (streamMode.includes("messages")) {
722
+ const messageStreamer = new StreamMessagesHandler((chunk) => stream.push(chunk));
723
+ const { callbacks } = config;
724
+ if (callbacks === undefined) {
725
+ config.callbacks = [messageStreamer];
726
+ }
727
+ else if (Array.isArray(callbacks)) {
728
+ config.callbacks = callbacks.concat(messageStreamer);
729
+ }
730
+ else {
731
+ const copiedCallbacks = callbacks.copy();
732
+ copiedCallbacks.addHandler(messageStreamer, true);
733
+ config.callbacks = copiedCallbacks;
734
+ }
735
+ }
736
+ // set up custom stream mode
737
+ if (streamMode.includes("custom")) {
738
+ config.writer = (chunk) => stream.push([[], "custom", chunk]);
739
+ }
740
+ const callbackManager = await getCallbackManagerForConfig(config);
741
+ const runManager = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, config?.runName ?? this.getName());
742
+ const { channelSpecs, managed } = await this.prepareSpecs(config);
709
743
  let loop;
710
744
  let loopError;
711
745
  const runLoop = async () => {
@@ -806,11 +840,16 @@ export class Pregel extends Runnable {
806
840
  loopError = loopError ?? e;
807
841
  }
808
842
  if (loopError) {
809
- // Will throw an error outside of this method
843
+ // "Causes any future interactions with the associated stream to error".
844
+ // Wraps ReadableStreamDefaultController#error:
845
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/error
810
846
  stream.error(loopError);
811
847
  }
812
848
  else {
813
- // Will end the iterator outside of this method
849
+ // Will end the iterator outside of this method,
850
+ // keeping previously enqueued chunks.
851
+ // Wraps ReadableStreamDefaultController#close:
852
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/close
814
853
  stream.close();
815
854
  }
816
855
  }
@@ -823,10 +862,10 @@ export class Pregel extends Runnable {
823
862
  }
824
863
  const [namespace, mode, payload] = chunk;
825
864
  if (streamMode.includes(mode)) {
826
- if (streamSubgraphs && streamMode.length > 1) {
865
+ if (streamSubgraphs && !streamModeSingle) {
827
866
  yield [namespace, mode, payload];
828
867
  }
829
- else if (streamMode.length > 1) {
868
+ else if (!streamModeSingle) {
830
869
  yield [mode, payload];
831
870
  }
832
871
  else if (streamSubgraphs) {
@@ -18,7 +18,7 @@ const SPECIAL_CHANNELS = [constants_js_1.ERROR, constants_js_1.INTERRUPT];
18
18
  class IterableReadableWritableStream extends stream_1.IterableReadableStream {
19
19
  constructor(params) {
20
20
  let streamControllerPromiseResolver;
21
- let streamControllerPromise = new Promise((resolve) => {
21
+ const streamControllerPromise = new Promise((resolve) => {
22
22
  streamControllerPromiseResolver = resolve;
23
23
  });
24
24
  super({
@@ -46,7 +46,7 @@ class IterableReadableWritableStream extends stream_1.IterableReadableStream {
46
46
  });
47
47
  // .start() will always be called before the stream can be interacted
48
48
  // with anyway
49
- streamControllerPromise.then((controller) => {
49
+ void streamControllerPromise.then((controller) => {
50
50
  this.controller = controller;
51
51
  });
52
52
  this.passthroughFn = params.passthroughFn;
@@ -15,7 +15,7 @@ const SPECIAL_CHANNELS = [ERROR, INTERRUPT];
15
15
  export class IterableReadableWritableStream extends IterableReadableStream {
16
16
  constructor(params) {
17
17
  let streamControllerPromiseResolver;
18
- let streamControllerPromise = new Promise((resolve) => {
18
+ const streamControllerPromise = new Promise((resolve) => {
19
19
  streamControllerPromiseResolver = resolve;
20
20
  });
21
21
  super({
@@ -43,7 +43,7 @@ export class IterableReadableWritableStream extends IterableReadableStream {
43
43
  });
44
44
  // .start() will always be called before the stream can be interacted
45
45
  // with anyway
46
- streamControllerPromise.then((controller) => {
46
+ void streamControllerPromise.then((controller) => {
47
47
  this.controller = controller;
48
48
  });
49
49
  this.passthroughFn = params.passthroughFn;
@@ -0,0 +1,148 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.StreamMessagesHandler = void 0;
4
+ const uuid_1 = require("uuid");
5
+ const base_1 = require("@langchain/core/callbacks/base");
6
+ const messages_1 = require("@langchain/core/messages");
7
+ const constants_js_1 = require("../constants.cjs");
8
+ function isChatGenerationChunk(x) {
9
+ return (0, messages_1.isBaseMessage)(x?.message);
10
+ }
11
+ /**
12
+ * A callback handler that implements stream_mode=messages.
13
+ * Collects messages from (1) chat model stream events and (2) node outputs.
14
+ */
15
+ class StreamMessagesHandler extends base_1.BaseCallbackHandler {
16
+ constructor(streamFn) {
17
+ super();
18
+ Object.defineProperty(this, "name", {
19
+ enumerable: true,
20
+ configurable: true,
21
+ writable: true,
22
+ value: "StreamMessagesHandler"
23
+ });
24
+ Object.defineProperty(this, "streamFn", {
25
+ enumerable: true,
26
+ configurable: true,
27
+ writable: true,
28
+ value: void 0
29
+ });
30
+ Object.defineProperty(this, "metadatas", {
31
+ enumerable: true,
32
+ configurable: true,
33
+ writable: true,
34
+ value: {}
35
+ });
36
+ Object.defineProperty(this, "seen", {
37
+ enumerable: true,
38
+ configurable: true,
39
+ writable: true,
40
+ value: {}
41
+ });
42
+ Object.defineProperty(this, "emittedChatModelRunIds", {
43
+ enumerable: true,
44
+ configurable: true,
45
+ writable: true,
46
+ value: {}
47
+ });
48
+ this.streamFn = streamFn;
49
+ }
50
+ _emit(meta, message, dedupe = false) {
51
+ if (dedupe &&
52
+ message.id !== undefined &&
53
+ this.seen[message.id] !== undefined) {
54
+ return;
55
+ }
56
+ if (message.id === undefined) {
57
+ const id = (0, uuid_1.v4)();
58
+ // eslint-disable-next-line no-param-reassign
59
+ message.id = id;
60
+ // eslint-disable-next-line no-param-reassign
61
+ message.lc_kwargs.id = id;
62
+ }
63
+ this.seen[message.id] = message;
64
+ this.streamFn([meta[0], "messages", [message, meta[1]]]);
65
+ }
66
+ handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
67
+ if (metadata &&
68
+ // Include legacy LangGraph SDK tag
69
+ (!tags || !(tags.includes(constants_js_1.TAG_NOSTREAM) && tags.includes("nostream")))) {
70
+ this.metadatas[runId] = [
71
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
72
+ { tags, name, ...metadata },
73
+ ];
74
+ }
75
+ }
76
+ handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
77
+ const chunk = fields?.chunk;
78
+ this.emittedChatModelRunIds[runId] = true;
79
+ if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
80
+ this._emit(this.metadatas[runId], chunk.message);
81
+ }
82
+ else {
83
+ this._emit(this.metadatas[runId], new messages_1.AIMessageChunk({
84
+ content: token,
85
+ }));
86
+ }
87
+ }
88
+ handleLLMEnd(output, runId) {
89
+ // In JS, non-streaming runs do not call handleLLMNewToken at the model level
90
+ if (!this.emittedChatModelRunIds[runId]) {
91
+ const chatGeneration = output.generations?.[0]?.[0];
92
+ if ((0, messages_1.isBaseMessage)(chatGeneration?.message)) {
93
+ this._emit(this.metadatas[runId], chatGeneration?.message, true);
94
+ }
95
+ delete this.emittedChatModelRunIds[runId];
96
+ }
97
+ delete this.metadatas[runId];
98
+ }
99
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
100
+ handleLLMError(_err, runId) {
101
+ delete this.metadatas[runId];
102
+ }
103
+ handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
104
+ if (metadata !== undefined &&
105
+ name === metadata.langgraph_node &&
106
+ (tags === undefined || !tags.includes(constants_js_1.TAG_HIDDEN))) {
107
+ this.metadatas[runId] = [
108
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
109
+ { tags, name, ...metadata },
110
+ ];
111
+ }
112
+ }
113
+ handleChainEnd(outputs, runId) {
114
+ const metadata = this.metadatas[runId];
115
+ delete this.metadatas[runId];
116
+ if (metadata !== undefined) {
117
+ if ((0, messages_1.isBaseMessage)(outputs)) {
118
+ this._emit(metadata, outputs, true);
119
+ }
120
+ else if (Array.isArray(outputs)) {
121
+ for (const value of outputs) {
122
+ if ((0, messages_1.isBaseMessage)(value)) {
123
+ this._emit(metadata, value, true);
124
+ }
125
+ }
126
+ }
127
+ else if (outputs != null && typeof outputs === "object") {
128
+ for (const value of Object.values(outputs)) {
129
+ if ((0, messages_1.isBaseMessage)(value)) {
130
+ this._emit(metadata, value, true);
131
+ }
132
+ else if (Array.isArray(value)) {
133
+ for (const item of value) {
134
+ if ((0, messages_1.isBaseMessage)(item)) {
135
+ this._emit(metadata, item, true);
136
+ }
137
+ }
138
+ }
139
+ }
140
+ }
141
+ }
142
+ }
143
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
144
+ handleChainError(_err, runId) {
145
+ delete this.metadatas[runId];
146
+ }
147
+ }
148
+ exports.StreamMessagesHandler = StreamMessagesHandler;
@@ -0,0 +1,28 @@
1
+ import { BaseCallbackHandler, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "@langchain/core/callbacks/base";
2
+ import { BaseMessage } from "@langchain/core/messages";
3
+ import { Serialized } from "@langchain/core/load/serializable";
4
+ import { LLMResult } from "@langchain/core/outputs";
5
+ import { ChainValues } from "@langchain/core/utils/types";
6
+ import { StreamChunk } from "./loop.js";
7
+ type Meta = [string[], Record<string, any>];
8
+ /**
9
+ * A callback handler that implements stream_mode=messages.
10
+ * Collects messages from (1) chat model stream events and (2) node outputs.
11
+ */
12
+ export declare class StreamMessagesHandler extends BaseCallbackHandler {
13
+ name: string;
14
+ streamFn: (streamChunk: StreamChunk) => void;
15
+ metadatas: Record<string, Meta>;
16
+ seen: Record<string, BaseMessage>;
17
+ emittedChatModelRunIds: Record<string, boolean>;
18
+ constructor(streamFn: (streamChunk: StreamChunk) => void);
19
+ _emit(meta: Meta, message: BaseMessage, dedupe?: boolean): void;
20
+ handleChatModelStart(_llm: Serialized, _messages: BaseMessage[][], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
21
+ handleLLMNewToken(token: string, _idx: NewTokenIndices, runId: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): void;
22
+ handleLLMEnd(output: LLMResult, runId: string): void;
23
+ handleLLMError(_err: any, runId: string): void;
24
+ handleChainStart(_chain: Serialized, _inputs: ChainValues, runId: string, _parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, _runType?: string, name?: string): void;
25
+ handleChainEnd(outputs: ChainValues, runId: string): void;
26
+ handleChainError(_err: any, runId: string): void;
27
+ }
28
+ export {};
@@ -0,0 +1,144 @@
1
+ import { v4 } from "uuid";
2
+ import { BaseCallbackHandler, } from "@langchain/core/callbacks/base";
3
+ import { AIMessageChunk, isBaseMessage, } from "@langchain/core/messages";
4
+ import { TAG_HIDDEN, TAG_NOSTREAM } from "../constants.js";
5
+ function isChatGenerationChunk(x) {
6
+ return isBaseMessage(x?.message);
7
+ }
8
+ /**
9
+ * A callback handler that implements stream_mode=messages.
10
+ * Collects messages from (1) chat model stream events and (2) node outputs.
11
+ */
12
+ export class StreamMessagesHandler extends BaseCallbackHandler {
13
+ constructor(streamFn) {
14
+ super();
15
+ Object.defineProperty(this, "name", {
16
+ enumerable: true,
17
+ configurable: true,
18
+ writable: true,
19
+ value: "StreamMessagesHandler"
20
+ });
21
+ Object.defineProperty(this, "streamFn", {
22
+ enumerable: true,
23
+ configurable: true,
24
+ writable: true,
25
+ value: void 0
26
+ });
27
+ Object.defineProperty(this, "metadatas", {
28
+ enumerable: true,
29
+ configurable: true,
30
+ writable: true,
31
+ value: {}
32
+ });
33
+ Object.defineProperty(this, "seen", {
34
+ enumerable: true,
35
+ configurable: true,
36
+ writable: true,
37
+ value: {}
38
+ });
39
+ Object.defineProperty(this, "emittedChatModelRunIds", {
40
+ enumerable: true,
41
+ configurable: true,
42
+ writable: true,
43
+ value: {}
44
+ });
45
+ this.streamFn = streamFn;
46
+ }
47
+ _emit(meta, message, dedupe = false) {
48
+ if (dedupe &&
49
+ message.id !== undefined &&
50
+ this.seen[message.id] !== undefined) {
51
+ return;
52
+ }
53
+ if (message.id === undefined) {
54
+ const id = v4();
55
+ // eslint-disable-next-line no-param-reassign
56
+ message.id = id;
57
+ // eslint-disable-next-line no-param-reassign
58
+ message.lc_kwargs.id = id;
59
+ }
60
+ this.seen[message.id] = message;
61
+ this.streamFn([meta[0], "messages", [message, meta[1]]]);
62
+ }
63
+ handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
64
+ if (metadata &&
65
+ // Include legacy LangGraph SDK tag
66
+ (!tags || !(tags.includes(TAG_NOSTREAM) && tags.includes("nostream")))) {
67
+ this.metadatas[runId] = [
68
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
69
+ { tags, name, ...metadata },
70
+ ];
71
+ }
72
+ }
73
+ handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
74
+ const chunk = fields?.chunk;
75
+ this.emittedChatModelRunIds[runId] = true;
76
+ if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
77
+ this._emit(this.metadatas[runId], chunk.message);
78
+ }
79
+ else {
80
+ this._emit(this.metadatas[runId], new AIMessageChunk({
81
+ content: token,
82
+ }));
83
+ }
84
+ }
85
+ handleLLMEnd(output, runId) {
86
+ // In JS, non-streaming runs do not call handleLLMNewToken at the model level
87
+ if (!this.emittedChatModelRunIds[runId]) {
88
+ const chatGeneration = output.generations?.[0]?.[0];
89
+ if (isBaseMessage(chatGeneration?.message)) {
90
+ this._emit(this.metadatas[runId], chatGeneration?.message, true);
91
+ }
92
+ delete this.emittedChatModelRunIds[runId];
93
+ }
94
+ delete this.metadatas[runId];
95
+ }
96
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
97
+ handleLLMError(_err, runId) {
98
+ delete this.metadatas[runId];
99
+ }
100
+ handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
101
+ if (metadata !== undefined &&
102
+ name === metadata.langgraph_node &&
103
+ (tags === undefined || !tags.includes(TAG_HIDDEN))) {
104
+ this.metadatas[runId] = [
105
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
106
+ { tags, name, ...metadata },
107
+ ];
108
+ }
109
+ }
110
+ handleChainEnd(outputs, runId) {
111
+ const metadata = this.metadatas[runId];
112
+ delete this.metadatas[runId];
113
+ if (metadata !== undefined) {
114
+ if (isBaseMessage(outputs)) {
115
+ this._emit(metadata, outputs, true);
116
+ }
117
+ else if (Array.isArray(outputs)) {
118
+ for (const value of outputs) {
119
+ if (isBaseMessage(value)) {
120
+ this._emit(metadata, value, true);
121
+ }
122
+ }
123
+ }
124
+ else if (outputs != null && typeof outputs === "object") {
125
+ for (const value of Object.values(outputs)) {
126
+ if (isBaseMessage(value)) {
127
+ this._emit(metadata, value, true);
128
+ }
129
+ else if (Array.isArray(value)) {
130
+ for (const item of value) {
131
+ if (isBaseMessage(item)) {
132
+ this._emit(metadata, item, true);
133
+ }
134
+ }
135
+ }
136
+ }
137
+ }
138
+ }
139
+ }
140
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
141
+ handleChainError(_err, runId) {
142
+ delete this.metadatas[runId];
143
+ }
144
+ }
@@ -420,7 +420,6 @@ class RemoteGraph extends runnables_1.Runnable {
420
420
  xray: config?.xray,
421
421
  });
422
422
  return new graph_1.Graph({
423
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
424
423
  nodes: this._getDrawableNodes(graph.nodes),
425
424
  edges: graph.edges,
426
425
  });
@@ -417,7 +417,6 @@ export class RemoteGraph extends Runnable {
417
417
  xray: config?.xray,
418
418
  });
419
419
  return new DrawableGraph({
420
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
421
420
  nodes: this._getDrawableNodes(graph.nodes),
422
421
  edges: graph.edges,
423
422
  });
@@ -2,4 +2,5 @@ import { RunnableConfig } from "@langchain/core/runnables";
2
2
  import { BaseStore } from "@langchain/langgraph-checkpoint";
3
3
  export interface LangGraphRunnableConfig<ConfigurableType extends Record<string, any> = Record<string, any>> extends RunnableConfig<ConfigurableType> {
4
4
  store?: BaseStore;
5
+ writer?: (chunk: unknown) => void;
5
6
  }
@@ -8,7 +8,7 @@ import { RetryPolicy } from "./utils/index.js";
8
8
  import { Interrupt } from "../constants.js";
9
9
  import { type ManagedValueSpec } from "../managed/base.js";
10
10
  import { LangGraphRunnableConfig } from "./runnable_types.js";
11
- export type StreamMode = "values" | "updates" | "debug";
11
+ export type StreamMode = "values" | "updates" | "debug" | "messages" | "custom";
12
12
  export type PregelInputType = any;
13
13
  export type PregelOutputType = any;
14
14
  /**
@@ -103,7 +103,7 @@ export interface PregelTaskDescription {
103
103
  export interface PregelExecutableTask<N extends PropertyKey, C extends PropertyKey> {
104
104
  readonly name: N;
105
105
  readonly input: unknown;
106
- readonly proc: Runnable;
106
+ readonly proc: Runnable<any, any, LangGraphRunnableConfig>;
107
107
  readonly writes: PendingWrite<C>[];
108
108
  readonly config?: LangGraphRunnableConfig;
109
109
  readonly triggers: Array<string>;
@@ -15,6 +15,7 @@ const CONFIG_KEYS = [
15
15
  "outputKeys",
16
16
  "streamMode",
17
17
  "store",
18
+ "writer",
18
19
  ];
19
20
  const DEFAULT_RECURSION_LIMIT = 25;
20
21
  function ensureLangGraphConfig(...configs) {
@@ -12,6 +12,7 @@ const CONFIG_KEYS = [
12
12
  "outputKeys",
13
13
  "streamMode",
14
14
  "store",
15
+ "writer",
15
16
  ];
16
17
  const DEFAULT_RECURSION_LIMIT = 25;
17
18
  export function ensureLangGraphConfig(...configs) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/langgraph",
3
- "version": "0.2.20-rc.0",
3
+ "version": "0.2.21",
4
4
  "description": "LangGraph",
5
5
  "type": "module",
6
6
  "engines": {
@@ -32,7 +32,7 @@
32
32
  "license": "MIT",
33
33
  "dependencies": {
34
34
  "@langchain/langgraph-checkpoint": "~0.0.10",
35
- "@langchain/langgraph-sdk": "~0.0.20",
35
+ "@langchain/langgraph-sdk": "~0.0.21",
36
36
  "uuid": "^10.0.0",
37
37
  "zod": "^3.23.8"
38
38
  },