@langchain/langgraph 0.2.20-rc.0 → 0.2.20

This diff compares the publicly published contents of the two package versions as they appear in their registry, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = void 0;
+ exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_NOSTREAM = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = void 0;
  exports.INPUT = "__input__";
  exports.ERROR = "__error__";
  exports.CONFIG_KEY_SEND = "__pregel_send";
@@ -15,6 +15,7 @@ exports.INTERRUPT = "__interrupt__";
  exports.RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
  exports.RECURSION_LIMIT_DEFAULT = 25;
  exports.TAG_HIDDEN = "langsmith:hidden";
+ exports.TAG_NOSTREAM = "langsmith:nostream";
  exports.TASKS = "__pregel_tasks";
  exports.PUSH = "__pregel_push";
  exports.PULL = "__pregel_pull";
@@ -11,6 +11,7 @@ export declare const INTERRUPT = "__interrupt__";
  export declare const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
  export declare const RECURSION_LIMIT_DEFAULT = 25;
  export declare const TAG_HIDDEN = "langsmith:hidden";
+ export declare const TAG_NOSTREAM = "langsmith:nostream";
  export declare const TASKS = "__pregel_tasks";
  export declare const PUSH = "__pregel_push";
  export declare const PULL = "__pregel_pull";
package/dist/constants.js CHANGED
@@ -12,6 +12,7 @@ export const INTERRUPT = "__interrupt__";
  export const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
  export const RECURSION_LIMIT_DEFAULT = 25;
  export const TAG_HIDDEN = "langsmith:hidden";
+ export const TAG_NOSTREAM = "langsmith:nostream";
  export const TASKS = "__pregel_tasks";
  export const PUSH = "__pregel_push";
  export const PULL = "__pregel_pull";
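
The TAG_NOSTREAM constant added above ("langsmith:nostream") is checked by the new StreamMessagesHandler so that tagged chat model runs are excluded from "messages"-mode streaming. A minimal sketch of how a caller might apply the tag; only the tag value comes from this diff, while the ChatOpenAI model and withConfig wiring are illustrative assumptions:

    import { ChatOpenAI } from "@langchain/openai";

    // Runs carrying the "langsmith:nostream" tag are skipped by
    // StreamMessagesHandler, so this model's tokens never reach
    // streamMode: "messages" consumers.
    const summarizer = new ChatOpenAI({ model: "gpt-4o-mini" }).withConfig({
      tags: ["langsmith:nostream"],
    });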
@@ -377,7 +377,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
  config: (0, runnables_1.patchConfig)((0, runnables_1.mergeConfigs)(config, {
  metadata,
  tags: proc.tags,
- store: extra.store ?? config?.store,
+ store: extra.store ?? config.store,
  }), {
  runName: name,
  callbacks: manager?.getChild(`graph:step:${step}`),
@@ -368,7 +368,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
  config: patchConfig(mergeConfigs(config, {
  metadata,
  tags: proc.tags,
- store: extra.store ?? config?.store,
+ store: extra.store ?? config.store,
  }), {
  runName: name,
  callbacks: manager?.getChild(`graph:step:${step}`),
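
The change from `config?.store` to `config.store` above assumes `ensureLangGraphConfig` always yields a config object, and it is this `store` field that each task's node receives at runtime. A rough sketch of a node reading it; the node name, state shape, and namespace are invented for illustration:

    import type { LangGraphRunnableConfig } from "@langchain/langgraph";

    // Hypothetical node: persists part of its state via the store that
    // _prepareSingleTask threads into the per-task config.
    const rememberName = async (
      state: { name: string },
      config: LangGraphRunnableConfig
    ) => {
      await config.store?.put(["users"], "current", { name: state.name });
      return {};
    };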
@@ -20,6 +20,7 @@ const retry_js_1 = require("./retry.cjs");
  const base_js_2 = require("../managed/base.cjs");
  const utils_js_1 = require("../utils.cjs");
  const config_js_1 = require("./utils/config.cjs");
+ const messages_js_1 = require("./messages.cjs");
  function isString(value) {
  return typeof value === "string";
  }
@@ -575,6 +576,7 @@ class Pregel extends runnables_1.Runnable {
  }
  _defaults(config) {
  const { debug, streamMode, inputKeys, outputKeys, interruptAfter, interruptBefore, ...rest } = config;
+ let streamModeSingle = true;
  const defaultDebug = debug !== undefined ? debug : this.debug;
  let defaultOutputKeys = outputKeys;
  if (defaultOutputKeys === undefined) {
@@ -595,9 +597,11 @@ class Pregel extends runnables_1.Runnable {
  let defaultStreamMode;
  if (streamMode !== undefined) {
  defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode];
+ streamModeSingle = typeof streamMode === "string";
  }
  else {
  defaultStreamMode = this.streamMode;
+ streamModeSingle = true;
  }
  // if being called as a node in another graph, always use values mode
  if (config.configurable?.[constants_js_1.CONFIG_KEY_TASK_ID] !== undefined) {
@@ -625,6 +629,7 @@ class Pregel extends runnables_1.Runnable {
  defaultInterruptAfter,
  defaultCheckpointer,
  defaultStore,
+ streamModeSingle,
  ];
  }
  /**
@@ -701,15 +706,35 @@ class Pregel extends runnables_1.Runnable {
  inputConfig.configurable === undefined) {
  throw new Error(`Checkpointer requires one or more of the following "configurable" keys: "thread_id", "checkpoint_ns", "checkpoint_id"`);
  }
- const callbackManager = await (0, runnables_1.getCallbackManagerForConfig)(inputConfig);
- const runManager = await callbackManager?.handleChainStart(this.toJSON(), (0, index_js_1._coerceToDict)(input, "input"), inputConfig.runId, undefined, undefined, undefined, inputConfig?.runName ?? this.getName());
- delete inputConfig.runId;
+ const { runId, ...restConfig } = inputConfig;
  // assign defaults
- const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store,] = this._defaults(inputConfig);
- const { channelSpecs, managed } = await this.prepareSpecs(config);
+ const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store, streamModeSingle,] = this._defaults(restConfig);
  const stream = new loop_js_1.IterableReadableWritableStream({
  modes: new Set(streamMode),
  });
+ // set up messages stream mode
+ if (streamMode.includes("messages")) {
+ const messageStreamer = new messages_js_1.StreamMessagesHandler((chunk) => stream.push(chunk));
+ const { callbacks } = config;
+ if (callbacks === undefined) {
+ config.callbacks = [messageStreamer];
+ }
+ else if (Array.isArray(callbacks)) {
+ config.callbacks = callbacks.concat(messageStreamer);
+ }
+ else {
+ const copiedCallbacks = callbacks.copy();
+ copiedCallbacks.addHandler(messageStreamer, true);
+ config.callbacks = copiedCallbacks;
+ }
+ }
+ // setup custom stream mode
+ if (streamMode.includes("custom")) {
+ config.writer = (chunk) => stream.push([[], "custom", chunk]);
+ }
+ const callbackManager = await (0, runnables_1.getCallbackManagerForConfig)(config);
+ const runManager = await callbackManager?.handleChainStart(this.toJSON(), (0, index_js_1._coerceToDict)(input, "input"), runId, undefined, undefined, undefined, config?.runName ?? this.getName());
+ const { channelSpecs, managed } = await this.prepareSpecs(config);
  let loop;
  let loopError;
  const runLoop = async () => {
@@ -810,11 +835,16 @@ class Pregel extends runnables_1.Runnable {
  loopError = loopError ?? e;
  }
  if (loopError) {
- // Will throw an error outside of this method
+ // "Causes any future interactions with the associated stream to error".
+ // Wraps ReadableStreamDefaultController#error:
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/error
  stream.error(loopError);
  }
  else {
- // Will end the iterator outside of this method
+ // Will end the iterator outside of this method,
+ // keeping previously enqueued chunks.
+ // Wraps ReadableStreamDefaultController#close:
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/close
  stream.close();
  }
  }
@@ -827,10 +857,10 @@ class Pregel extends runnables_1.Runnable {
  }
  const [namespace, mode, payload] = chunk;
  if (streamMode.includes(mode)) {
- if (streamSubgraphs && streamMode.length > 1) {
+ if (streamSubgraphs && !streamModeSingle) {
  yield [namespace, mode, payload];
  }
- else if (streamMode.length > 1) {
+ else if (!streamModeSingle) {
  yield [mode, payload];
  }
  else if (streamSubgraphs) {
@@ -77,14 +77,15 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
  string | string[],
  // input keys
  string | string[],
- RunnableConfig,
+ LangGraphRunnableConfig,
  // config without pregel keys
  All | string[],
  // interrupt before
  All | string[],
  // interrupt after
  BaseCheckpointSaver | undefined,
- BaseStore | undefined
+ BaseStore | undefined,
+ boolean
  ];
  /**
  * Stream graph steps for a single input.
@@ -17,6 +17,7 @@ import { executeTasksWithRetry } from "./retry.js";
  import { ChannelKeyPlaceholder, isConfiguredManagedValue, ManagedValueMapping, NoopManagedValue, } from "../managed/base.js";
  import { gatherIterator, patchConfigurable } from "../utils.js";
  import { ensureLangGraphConfig } from "./utils/config.js";
+ import { StreamMessagesHandler } from "./messages.js";
  function isString(value) {
  return typeof value === "string";
  }
@@ -571,6 +572,7 @@ export class Pregel extends Runnable {
  }
  _defaults(config) {
  const { debug, streamMode, inputKeys, outputKeys, interruptAfter, interruptBefore, ...rest } = config;
+ let streamModeSingle = true;
  const defaultDebug = debug !== undefined ? debug : this.debug;
  let defaultOutputKeys = outputKeys;
  if (defaultOutputKeys === undefined) {
@@ -591,9 +593,11 @@ export class Pregel extends Runnable {
  let defaultStreamMode;
  if (streamMode !== undefined) {
  defaultStreamMode = Array.isArray(streamMode) ? streamMode : [streamMode];
+ streamModeSingle = typeof streamMode === "string";
  }
  else {
  defaultStreamMode = this.streamMode;
+ streamModeSingle = true;
  }
  // if being called as a node in another graph, always use values mode
  if (config.configurable?.[CONFIG_KEY_TASK_ID] !== undefined) {
@@ -621,6 +625,7 @@ export class Pregel extends Runnable {
  defaultInterruptAfter,
  defaultCheckpointer,
  defaultStore,
+ streamModeSingle,
  ];
  }
  /**
@@ -697,15 +702,35 @@ export class Pregel extends Runnable {
  inputConfig.configurable === undefined) {
  throw new Error(`Checkpointer requires one or more of the following "configurable" keys: "thread_id", "checkpoint_ns", "checkpoint_id"`);
  }
- const callbackManager = await getCallbackManagerForConfig(inputConfig);
- const runManager = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), inputConfig.runId, undefined, undefined, undefined, inputConfig?.runName ?? this.getName());
- delete inputConfig.runId;
+ const { runId, ...restConfig } = inputConfig;
  // assign defaults
- const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store,] = this._defaults(inputConfig);
- const { channelSpecs, managed } = await this.prepareSpecs(config);
+ const [debug, streamMode, , outputKeys, config, interruptBefore, interruptAfter, checkpointer, store, streamModeSingle,] = this._defaults(restConfig);
  const stream = new IterableReadableWritableStream({
  modes: new Set(streamMode),
  });
+ // set up messages stream mode
+ if (streamMode.includes("messages")) {
+ const messageStreamer = new StreamMessagesHandler((chunk) => stream.push(chunk));
+ const { callbacks } = config;
+ if (callbacks === undefined) {
+ config.callbacks = [messageStreamer];
+ }
+ else if (Array.isArray(callbacks)) {
+ config.callbacks = callbacks.concat(messageStreamer);
+ }
+ else {
+ const copiedCallbacks = callbacks.copy();
+ copiedCallbacks.addHandler(messageStreamer, true);
+ config.callbacks = copiedCallbacks;
+ }
+ }
+ // setup custom stream mode
+ if (streamMode.includes("custom")) {
+ config.writer = (chunk) => stream.push([[], "custom", chunk]);
+ }
+ const callbackManager = await getCallbackManagerForConfig(config);
+ const runManager = await callbackManager?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, config?.runName ?? this.getName());
+ const { channelSpecs, managed } = await this.prepareSpecs(config);
  let loop;
  let loopError;
  const runLoop = async () => {
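
Taken together, the StreamMessagesHandler registration above ("set up messages stream mode") and the handler implementation later in this diff enable token-level streaming straight from .stream(). A hedged usage sketch, assuming a compiled graph (here called graph) whose nodes invoke a chat model and whose state has a messages key:

    // With a single string streamMode, each chunk is the bare payload:
    // for "messages" that is the [message, metadata] pair pushed by
    // StreamMessagesHandler.
    const stream = await graph.stream(
      { messages: [{ role: "user", content: "hi" }] },
      { streamMode: "messages" }
    );
    for await (const [message, metadata] of stream) {
      console.log(metadata.langgraph_node, message.content);
    }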
@@ -806,11 +831,16 @@ export class Pregel extends Runnable {
  loopError = loopError ?? e;
  }
  if (loopError) {
- // Will throw an error outside of this method
+ // "Causes any future interactions with the associated stream to error".
+ // Wraps ReadableStreamDefaultController#error:
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/error
  stream.error(loopError);
  }
  else {
- // Will end the iterator outside of this method
+ // Will end the iterator outside of this method,
+ // keeping previously enqueued chunks.
+ // Wraps ReadableStreamDefaultController#close:
+ // https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultController/close
  stream.close();
  }
  }
@@ -823,10 +853,10 @@ export class Pregel extends Runnable {
  }
  const [namespace, mode, payload] = chunk;
  if (streamMode.includes(mode)) {
- if (streamSubgraphs && streamMode.length > 1) {
+ if (streamSubgraphs && !streamModeSingle) {
  yield [namespace, mode, payload];
  }
- else if (streamMode.length > 1) {
+ else if (!streamModeSingle) {
  yield [mode, payload];
  }
  else if (streamSubgraphs) {
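
The switch from `streamMode.length > 1` to `!streamModeSingle` means the chunk shape now follows how the caller spelled streamMode rather than how many modes the array happens to contain. A sketch of the resulting difference, assuming the same compiled graph and a matching input value:

    // streamMode passed as a string: chunks are bare payloads.
    for await (const chunk of await graph.stream(input, { streamMode: "updates" })) {
      // chunk is e.g. { someNode: { ...partial state update } }
    }

    // streamMode passed as an array, even with a single entry:
    // chunks are [mode, payload] tuples.
    for await (const [mode, payload] of await graph.stream(input, { streamMode: ["updates"] })) {
      // mode === "updates"
    }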
@@ -18,7 +18,7 @@ const SPECIAL_CHANNELS = [constants_js_1.ERROR, constants_js_1.INTERRUPT];
  class IterableReadableWritableStream extends stream_1.IterableReadableStream {
  constructor(params) {
  let streamControllerPromiseResolver;
- let streamControllerPromise = new Promise((resolve) => {
+ const streamControllerPromise = new Promise((resolve) => {
  streamControllerPromiseResolver = resolve;
  });
  super({
@@ -46,7 +46,7 @@ class IterableReadableWritableStream extends stream_1.IterableReadableStream {
  });
  // .start() will always be called before the stream can be interacted
  // with anyway
- streamControllerPromise.then((controller) => {
+ void streamControllerPromise.then((controller) => {
  this.controller = controller;
  });
  this.passthroughFn = params.passthroughFn;
@@ -15,7 +15,7 @@ const SPECIAL_CHANNELS = [ERROR, INTERRUPT];
  export class IterableReadableWritableStream extends IterableReadableStream {
  constructor(params) {
  let streamControllerPromiseResolver;
- let streamControllerPromise = new Promise((resolve) => {
+ const streamControllerPromise = new Promise((resolve) => {
  streamControllerPromiseResolver = resolve;
  });
  super({
@@ -43,7 +43,7 @@ export class IterableReadableWritableStream extends IterableReadableStream {
  });
  // .start() will always be called before the stream can be interacted
  // with anyway
- streamControllerPromise.then((controller) => {
+ void streamControllerPromise.then((controller) => {
  this.controller = controller;
  });
  this.passthroughFn = params.passthroughFn;
@@ -0,0 +1,148 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.StreamMessagesHandler = void 0;
+ const uuid_1 = require("uuid");
+ const base_1 = require("@langchain/core/callbacks/base");
+ const messages_1 = require("@langchain/core/messages");
+ const constants_js_1 = require("../constants.cjs");
+ function isChatGenerationChunk(x) {
+ return (0, messages_1.isBaseMessage)(x?.message);
+ }
+ /**
+ * A callback handler that implements stream_mode=messages.
+ * Collects messages from (1) chat model stream events and (2) node outputs.
+ */
+ class StreamMessagesHandler extends base_1.BaseCallbackHandler {
+ constructor(streamFn) {
+ super();
+ Object.defineProperty(this, "name", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "StreamMessagesHandler"
+ });
+ Object.defineProperty(this, "streamFn", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "metadatas", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ Object.defineProperty(this, "seen", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ Object.defineProperty(this, "emittedChatModelRunIds", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ this.streamFn = streamFn;
+ }
+ _emit(meta, message, dedupe = false) {
+ if (dedupe &&
+ message.id !== undefined &&
+ this.seen[message.id] !== undefined) {
+ return;
+ }
+ if (message.id === undefined) {
+ const id = (0, uuid_1.v4)();
+ // eslint-disable-next-line no-param-reassign
+ message.id = id;
+ // eslint-disable-next-line no-param-reassign
+ message.lc_kwargs.id = id;
+ }
+ this.seen[message.id] = message;
+ this.streamFn([meta[0], "messages", [message, meta[1]]]);
+ }
+ handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
+ if (metadata &&
+ // Include legacy LangGraph SDK tag
+ (!tags || !(tags.includes(constants_js_1.TAG_NOSTREAM) && tags.includes("nostream")))) {
+ this.metadatas[runId] = [
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+ { tags, name, ...metadata },
+ ];
+ }
+ }
+ handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
+ const chunk = fields?.chunk;
+ this.emittedChatModelRunIds[runId] = true;
+ if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
+ this._emit(this.metadatas[runId], chunk.message);
+ }
+ else {
+ this._emit(this.metadatas[runId], new messages_1.AIMessageChunk({
+ content: token,
+ }));
+ }
+ }
+ handleLLMEnd(output, runId) {
+ // In JS, non-streaming runs do not call handleLLMNewToken at the model level
+ if (!this.emittedChatModelRunIds[runId]) {
+ const chatGeneration = output.generations?.[0]?.[0];
+ if ((0, messages_1.isBaseMessage)(chatGeneration?.message)) {
+ this._emit(this.metadatas[runId], chatGeneration?.message, true);
+ }
+ delete this.emittedChatModelRunIds[runId];
+ }
+ delete this.metadatas[runId];
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ handleLLMError(_err, runId) {
+ delete this.metadatas[runId];
+ }
+ handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
+ if (metadata !== undefined &&
+ name === metadata.langgraph_node &&
+ (tags === undefined || !tags.includes(constants_js_1.TAG_HIDDEN))) {
+ this.metadatas[runId] = [
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+ { tags, name, ...metadata },
+ ];
+ }
+ }
+ handleChainEnd(outputs, runId) {
+ const metadata = this.metadatas[runId];
+ delete this.metadatas[runId];
+ if (metadata !== undefined) {
+ if ((0, messages_1.isBaseMessage)(outputs)) {
+ this._emit(metadata, outputs, true);
+ }
+ else if (Array.isArray(outputs)) {
+ for (const value of outputs) {
+ if ((0, messages_1.isBaseMessage)(value)) {
+ this._emit(metadata, value, true);
+ }
+ }
+ }
+ else if (outputs != null && typeof outputs === "object") {
+ for (const value of Object.values(outputs)) {
+ if ((0, messages_1.isBaseMessage)(value)) {
+ this._emit(metadata, value, true);
+ }
+ else if (Array.isArray(value)) {
+ for (const item of value) {
+ if ((0, messages_1.isBaseMessage)(item)) {
+ this._emit(metadata, item, true);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ handleChainError(_err, runId) {
+ delete this.metadatas[runId];
+ }
+ }
+ exports.StreamMessagesHandler = StreamMessagesHandler;
@@ -0,0 +1,28 @@
+ import { BaseCallbackHandler, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "@langchain/core/callbacks/base";
+ import { BaseMessage } from "@langchain/core/messages";
+ import { Serialized } from "@langchain/core/load/serializable";
+ import { LLMResult } from "@langchain/core/outputs";
+ import { ChainValues } from "@langchain/core/utils/types";
+ import { StreamChunk } from "./loop.js";
+ type Meta = [string[], Record<string, any>];
+ /**
+ * A callback handler that implements stream_mode=messages.
+ * Collects messages from (1) chat model stream events and (2) node outputs.
+ */
+ export declare class StreamMessagesHandler extends BaseCallbackHandler {
+ name: string;
+ streamFn: (streamChunk: StreamChunk) => void;
+ metadatas: Record<string, Meta>;
+ seen: Record<string, BaseMessage>;
+ emittedChatModelRunIds: Record<string, boolean>;
+ constructor(streamFn: (streamChunk: StreamChunk) => void);
+ _emit(meta: Meta, message: BaseMessage, dedupe?: boolean): void;
+ handleChatModelStart(_llm: Serialized, _messages: BaseMessage[][], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
+ handleLLMNewToken(token: string, _idx: NewTokenIndices, runId: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): void;
+ handleLLMEnd(output: LLMResult, runId: string): void;
+ handleLLMError(_err: any, runId: string): void;
+ handleChainStart(_chain: Serialized, _inputs: ChainValues, runId: string, _parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, _runType?: string, name?: string): void;
+ handleChainEnd(outputs: ChainValues, runId: string): void;
+ handleChainError(_err: any, runId: string): void;
+ }
+ export {};
@@ -0,0 +1,144 @@
+ import { v4 } from "uuid";
+ import { BaseCallbackHandler, } from "@langchain/core/callbacks/base";
+ import { AIMessageChunk, isBaseMessage, } from "@langchain/core/messages";
+ import { TAG_HIDDEN, TAG_NOSTREAM } from "../constants.js";
+ function isChatGenerationChunk(x) {
+ return isBaseMessage(x?.message);
+ }
+ /**
+ * A callback handler that implements stream_mode=messages.
+ * Collects messages from (1) chat model stream events and (2) node outputs.
+ */
+ export class StreamMessagesHandler extends BaseCallbackHandler {
+ constructor(streamFn) {
+ super();
+ Object.defineProperty(this, "name", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "StreamMessagesHandler"
+ });
+ Object.defineProperty(this, "streamFn", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "metadatas", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ Object.defineProperty(this, "seen", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ Object.defineProperty(this, "emittedChatModelRunIds", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: {}
+ });
+ this.streamFn = streamFn;
+ }
+ _emit(meta, message, dedupe = false) {
+ if (dedupe &&
+ message.id !== undefined &&
+ this.seen[message.id] !== undefined) {
+ return;
+ }
+ if (message.id === undefined) {
+ const id = v4();
+ // eslint-disable-next-line no-param-reassign
+ message.id = id;
+ // eslint-disable-next-line no-param-reassign
+ message.lc_kwargs.id = id;
+ }
+ this.seen[message.id] = message;
+ this.streamFn([meta[0], "messages", [message, meta[1]]]);
+ }
+ handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
+ if (metadata &&
+ // Include legacy LangGraph SDK tag
+ (!tags || !(tags.includes(TAG_NOSTREAM) && tags.includes("nostream")))) {
+ this.metadatas[runId] = [
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+ { tags, name, ...metadata },
+ ];
+ }
+ }
+ handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
+ const chunk = fields?.chunk;
+ this.emittedChatModelRunIds[runId] = true;
+ if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
+ this._emit(this.metadatas[runId], chunk.message);
+ }
+ else {
+ this._emit(this.metadatas[runId], new AIMessageChunk({
+ content: token,
+ }));
+ }
+ }
+ handleLLMEnd(output, runId) {
+ // In JS, non-streaming runs do not call handleLLMNewToken at the model level
+ if (!this.emittedChatModelRunIds[runId]) {
+ const chatGeneration = output.generations?.[0]?.[0];
+ if (isBaseMessage(chatGeneration?.message)) {
+ this._emit(this.metadatas[runId], chatGeneration?.message, true);
+ }
+ delete this.emittedChatModelRunIds[runId];
+ }
+ delete this.metadatas[runId];
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ handleLLMError(_err, runId) {
+ delete this.metadatas[runId];
+ }
+ handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
+ if (metadata !== undefined &&
+ name === metadata.langgraph_node &&
+ (tags === undefined || !tags.includes(TAG_HIDDEN))) {
+ this.metadatas[runId] = [
+ metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+ { tags, name, ...metadata },
+ ];
+ }
+ }
+ handleChainEnd(outputs, runId) {
+ const metadata = this.metadatas[runId];
+ delete this.metadatas[runId];
+ if (metadata !== undefined) {
+ if (isBaseMessage(outputs)) {
+ this._emit(metadata, outputs, true);
+ }
+ else if (Array.isArray(outputs)) {
+ for (const value of outputs) {
+ if (isBaseMessage(value)) {
+ this._emit(metadata, value, true);
+ }
+ }
+ }
+ else if (outputs != null && typeof outputs === "object") {
+ for (const value of Object.values(outputs)) {
+ if (isBaseMessage(value)) {
+ this._emit(metadata, value, true);
+ }
+ else if (Array.isArray(value)) {
+ for (const item of value) {
+ if (isBaseMessage(item)) {
+ this._emit(metadata, item, true);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ handleChainError(_err, runId) {
+ delete this.metadatas[runId];
+ }
+ }
@@ -2,4 +2,5 @@ import { RunnableConfig } from "@langchain/core/runnables";
  import { BaseStore } from "@langchain/langgraph-checkpoint";
  export interface LangGraphRunnableConfig<ConfigurableType extends Record<string, any> = Record<string, any>> extends RunnableConfig<ConfigurableType> {
  store?: BaseStore;
+ writer?: (chunk: unknown) => void;
  }
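
This optional writer field is what the "custom" stream mode setup earlier in this diff injects into the config each node receives. A hedged sketch of a node pushing arbitrary progress chunks through it; the node, state shape, and payloads are invented, and the type import assumes the interface is re-exported from the package root:

    import type { LangGraphRunnableConfig } from "@langchain/langgraph";

    const fetchData = async (
      state: { url: string },
      config: LangGraphRunnableConfig
    ) => {
      // Anything handed to writer is pushed to the stream as a "custom" chunk.
      config.writer?.({ progress: "starting", url: state.url });
      // ... do the actual work here ...
      config.writer?.({ progress: "done" });
      return { result: "ok" };
    };

    // Consumed with: await graph.stream(input, { streamMode: "custom" })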
@@ -8,7 +8,7 @@ import { RetryPolicy } from "./utils/index.js";
  import { Interrupt } from "../constants.js";
  import { type ManagedValueSpec } from "../managed/base.js";
  import { LangGraphRunnableConfig } from "./runnable_types.js";
- export type StreamMode = "values" | "updates" | "debug";
+ export type StreamMode = "values" | "updates" | "debug" | "messages" | "custom";
  export type PregelInputType = any;
  export type PregelOutputType = any;
  /**
@@ -103,7 +103,7 @@ export interface PregelTaskDescription {
  export interface PregelExecutableTask<N extends PropertyKey, C extends PropertyKey> {
  readonly name: N;
  readonly input: unknown;
- readonly proc: Runnable;
+ readonly proc: Runnable<any, any, LangGraphRunnableConfig>;
  readonly writes: PendingWrite<C>[];
  readonly config?: LangGraphRunnableConfig;
  readonly triggers: Array<string>;
@@ -15,6 +15,7 @@ const CONFIG_KEYS = [
  "outputKeys",
  "streamMode",
  "store",
+ "writer",
  ];
  const DEFAULT_RECURSION_LIMIT = 25;
  function ensureLangGraphConfig(...configs) {
@@ -12,6 +12,7 @@ const CONFIG_KEYS = [
  "outputKeys",
  "streamMode",
  "store",
+ "writer",
  ];
  const DEFAULT_RECURSION_LIMIT = 25;
  export function ensureLangGraphConfig(...configs) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/langgraph",
- "version": "0.2.20-rc.0",
+ "version": "0.2.20",
  "description": "LangGraph",
  "type": "module",
  "engines": {