@langchain/langgraph 0.2.19 → 0.2.20

@@ -0,0 +1,148 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.StreamMessagesHandler = void 0;
+ const uuid_1 = require("uuid");
+ const base_1 = require("@langchain/core/callbacks/base");
+ const messages_1 = require("@langchain/core/messages");
+ const constants_js_1 = require("../constants.cjs");
+ function isChatGenerationChunk(x) {
+     return (0, messages_1.isBaseMessage)(x?.message);
+ }
+ /**
+  * A callback handler that implements stream_mode=messages.
+  * Collects messages from (1) chat model stream events and (2) node outputs.
+  */
+ class StreamMessagesHandler extends base_1.BaseCallbackHandler {
+     constructor(streamFn) {
+         super();
+         Object.defineProperty(this, "name", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: "StreamMessagesHandler"
+         });
+         Object.defineProperty(this, "streamFn", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "metadatas", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         Object.defineProperty(this, "seen", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         Object.defineProperty(this, "emittedChatModelRunIds", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         this.streamFn = streamFn;
+     }
+     _emit(meta, message, dedupe = false) {
+         if (dedupe &&
+             message.id !== undefined &&
+             this.seen[message.id] !== undefined) {
+             return;
+         }
+         if (message.id === undefined) {
+             const id = (0, uuid_1.v4)();
+             // eslint-disable-next-line no-param-reassign
+             message.id = id;
+             // eslint-disable-next-line no-param-reassign
+             message.lc_kwargs.id = id;
+         }
+         this.seen[message.id] = message;
+         this.streamFn([meta[0], "messages", [message, meta[1]]]);
+     }
+     handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
+         if (metadata &&
+             // Include legacy LangGraph SDK tag
+             (!tags || !(tags.includes(constants_js_1.TAG_NOSTREAM) && tags.includes("nostream")))) {
+             this.metadatas[runId] = [
+                 metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                 { tags, name, ...metadata },
+             ];
+         }
+     }
+     handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
+         const chunk = fields?.chunk;
+         this.emittedChatModelRunIds[runId] = true;
+         if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
+             this._emit(this.metadatas[runId], chunk.message);
+         }
+         else {
+             this._emit(this.metadatas[runId], new messages_1.AIMessageChunk({
+                 content: token,
+             }));
+         }
+     }
+     handleLLMEnd(output, runId) {
+         // In JS, non-streaming runs do not call handleLLMNewToken at the model level
+         if (!this.emittedChatModelRunIds[runId]) {
+             const chatGeneration = output.generations?.[0]?.[0];
+             if ((0, messages_1.isBaseMessage)(chatGeneration?.message)) {
+                 this._emit(this.metadatas[runId], chatGeneration?.message, true);
+             }
+             delete this.emittedChatModelRunIds[runId];
+         }
+         delete this.metadatas[runId];
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     handleLLMError(_err, runId) {
+         delete this.metadatas[runId];
+     }
+     handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
+         if (metadata !== undefined &&
+             name === metadata.langgraph_node &&
+             (tags === undefined || !tags.includes(constants_js_1.TAG_HIDDEN))) {
+             this.metadatas[runId] = [
+                 metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                 { tags, name, ...metadata },
+             ];
+         }
+     }
+     handleChainEnd(outputs, runId) {
+         const metadata = this.metadatas[runId];
+         delete this.metadatas[runId];
+         if (metadata !== undefined) {
+             if ((0, messages_1.isBaseMessage)(outputs)) {
+                 this._emit(metadata, outputs, true);
+             }
+             else if (Array.isArray(outputs)) {
+                 for (const value of outputs) {
+                     if ((0, messages_1.isBaseMessage)(value)) {
+                         this._emit(metadata, value, true);
+                     }
+                 }
+             }
+             else if (outputs != null && typeof outputs === "object") {
+                 for (const value of Object.values(outputs)) {
+                     if ((0, messages_1.isBaseMessage)(value)) {
+                         this._emit(metadata, value, true);
+                     }
+                     else if (Array.isArray(value)) {
+                         for (const item of value) {
+                             if ((0, messages_1.isBaseMessage)(item)) {
+                                 this._emit(metadata, item, true);
+                             }
+                         }
+                     }
+                 }
+             }
+         }
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     handleChainError(_err, runId) {
+         delete this.metadatas[runId];
+     }
+ }
+ exports.StreamMessagesHandler = StreamMessagesHandler;
@@ -0,0 +1,28 @@
+ import { BaseCallbackHandler, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "@langchain/core/callbacks/base";
+ import { BaseMessage } from "@langchain/core/messages";
+ import { Serialized } from "@langchain/core/load/serializable";
+ import { LLMResult } from "@langchain/core/outputs";
+ import { ChainValues } from "@langchain/core/utils/types";
+ import { StreamChunk } from "./loop.js";
+ type Meta = [string[], Record<string, any>];
+ /**
+  * A callback handler that implements stream_mode=messages.
+  * Collects messages from (1) chat model stream events and (2) node outputs.
+  */
+ export declare class StreamMessagesHandler extends BaseCallbackHandler {
+     name: string;
+     streamFn: (streamChunk: StreamChunk) => void;
+     metadatas: Record<string, Meta>;
+     seen: Record<string, BaseMessage>;
+     emittedChatModelRunIds: Record<string, boolean>;
+     constructor(streamFn: (streamChunk: StreamChunk) => void);
+     _emit(meta: Meta, message: BaseMessage, dedupe?: boolean): void;
+     handleChatModelStart(_llm: Serialized, _messages: BaseMessage[][], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
+     handleLLMNewToken(token: string, _idx: NewTokenIndices, runId: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): void;
+     handleLLMEnd(output: LLMResult, runId: string): void;
+     handleLLMError(_err: any, runId: string): void;
+     handleChainStart(_chain: Serialized, _inputs: ChainValues, runId: string, _parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, _runType?: string, name?: string): void;
+     handleChainEnd(outputs: ChainValues, runId: string): void;
+     handleChainError(_err: any, runId: string): void;
+ }
+ export {};
@@ -0,0 +1,144 @@
+ import { v4 } from "uuid";
+ import { BaseCallbackHandler, } from "@langchain/core/callbacks/base";
+ import { AIMessageChunk, isBaseMessage, } from "@langchain/core/messages";
+ import { TAG_HIDDEN, TAG_NOSTREAM } from "../constants.js";
+ function isChatGenerationChunk(x) {
+     return isBaseMessage(x?.message);
+ }
+ /**
+  * A callback handler that implements stream_mode=messages.
+  * Collects messages from (1) chat model stream events and (2) node outputs.
+  */
+ export class StreamMessagesHandler extends BaseCallbackHandler {
+     constructor(streamFn) {
+         super();
+         Object.defineProperty(this, "name", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: "StreamMessagesHandler"
+         });
+         Object.defineProperty(this, "streamFn", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "metadatas", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         Object.defineProperty(this, "seen", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         Object.defineProperty(this, "emittedChatModelRunIds", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: {}
+         });
+         this.streamFn = streamFn;
+     }
+     _emit(meta, message, dedupe = false) {
+         if (dedupe &&
+             message.id !== undefined &&
+             this.seen[message.id] !== undefined) {
+             return;
+         }
+         if (message.id === undefined) {
+             const id = v4();
+             // eslint-disable-next-line no-param-reassign
+             message.id = id;
+             // eslint-disable-next-line no-param-reassign
+             message.lc_kwargs.id = id;
+         }
+         this.seen[message.id] = message;
+         this.streamFn([meta[0], "messages", [message, meta[1]]]);
+     }
+     handleChatModelStart(_llm, _messages, runId, _parentRunId, _extraParams, tags, metadata, name) {
+         if (metadata &&
+             // Include legacy LangGraph SDK tag
+             (!tags || !(tags.includes(TAG_NOSTREAM) && tags.includes("nostream")))) {
+             this.metadatas[runId] = [
+                 metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                 { tags, name, ...metadata },
+             ];
+         }
+     }
+     handleLLMNewToken(token, _idx, runId, _parentRunId, _tags, fields) {
+         const chunk = fields?.chunk;
+         this.emittedChatModelRunIds[runId] = true;
+         if (isChatGenerationChunk(chunk) && this.metadatas[runId] !== undefined) {
+             this._emit(this.metadatas[runId], chunk.message);
+         }
+         else {
+             this._emit(this.metadatas[runId], new AIMessageChunk({
+                 content: token,
+             }));
+         }
+     }
+     handleLLMEnd(output, runId) {
+         // In JS, non-streaming runs do not call handleLLMNewToken at the model level
+         if (!this.emittedChatModelRunIds[runId]) {
+             const chatGeneration = output.generations?.[0]?.[0];
+             if (isBaseMessage(chatGeneration?.message)) {
+                 this._emit(this.metadatas[runId], chatGeneration?.message, true);
+             }
+             delete this.emittedChatModelRunIds[runId];
+         }
+         delete this.metadatas[runId];
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     handleLLMError(_err, runId) {
+         delete this.metadatas[runId];
+     }
+     handleChainStart(_chain, _inputs, runId, _parentRunId, tags, metadata, _runType, name) {
+         if (metadata !== undefined &&
+             name === metadata.langgraph_node &&
+             (tags === undefined || !tags.includes(TAG_HIDDEN))) {
+             this.metadatas[runId] = [
+                 metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                 { tags, name, ...metadata },
+             ];
+         }
+     }
+     handleChainEnd(outputs, runId) {
+         const metadata = this.metadatas[runId];
+         delete this.metadatas[runId];
+         if (metadata !== undefined) {
+             if (isBaseMessage(outputs)) {
+                 this._emit(metadata, outputs, true);
+             }
+             else if (Array.isArray(outputs)) {
+                 for (const value of outputs) {
+                     if (isBaseMessage(value)) {
+                         this._emit(metadata, value, true);
+                     }
+                 }
+             }
+             else if (outputs != null && typeof outputs === "object") {
+                 for (const value of Object.values(outputs)) {
+                     if (isBaseMessage(value)) {
+                         this._emit(metadata, value, true);
+                     }
+                     else if (Array.isArray(value)) {
+                         for (const item of value) {
+                             if (isBaseMessage(item)) {
+                                 this._emit(metadata, item, true);
+                             }
+                         }
+                     }
+                 }
+             }
+         }
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     handleChainError(_err, runId) {
+         delete this.metadatas[runId];
+     }
+ }
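
The three hunks above add the same handler in CJS, type-declaration, and ESM form. For orientation, here is a minimal sketch of how the resulting stream is typically consumed from application code; the graph, its input shape, and the [message, metadata] chunk shape are assumptions inferred from the streamFn payload built in _emit, not part of this diff:

// Sketch only: consuming streamMode "messages".
import { AIMessageChunk } from "@langchain/core/messages";

declare const graph: any; // assume a compiled StateGraph with a `messages` channel

const stream = await graph.stream(
  { messages: [{ role: "user", content: "hi" }] },
  { streamMode: "messages" }
);
for await (const [message, metadata] of stream) {
  // Each chunk is the [message, metadata] pair assembled in _emit() above.
  if (message instanceof AIMessageChunk) {
    process.stdout.write(String(message.content)); // token-by-token output
  }
}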
@@ -130,7 +130,7 @@ pregelTask, retryPolicy) {
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
          error.constructor.unminifiable_name ??
              error.constructor.name;
-         console.log(`Retrying task "${pregelTask.name}" after ${interval.toFixed(2)} seconds (attempt ${attempts}) after ${errorName}: ${error}`);
+         console.log(`Retrying task "${pregelTask.name}" after ${interval.toFixed(2)}ms (attempt ${attempts}) after ${errorName}: ${error}`);
      }
      finally {
          // Clear checkpoint_ns seen (for subgraph detection)
@@ -126,7 +126,7 @@ pregelTask, retryPolicy) {
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
          error.constructor.unminifiable_name ??
              error.constructor.name;
-         console.log(`Retrying task "${pregelTask.name}" after ${interval.toFixed(2)} seconds (attempt ${attempts}) after ${errorName}: ${error}`);
+         console.log(`Retrying task "${pregelTask.name}" after ${interval.toFixed(2)}ms (attempt ${attempts}) after ${errorName}: ${error}`);
      }
      finally {
          // Clear checkpoint_ns seen (for subgraph detection)
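
Both retry hunks change only the logged unit: the computed backoff interval was always in milliseconds, and the old message mislabeled it as seconds. A small sketch under that reading, using field names from this package's RetryPolicy type; the builder, node name, and node function are hypothetical:

// Sketch only: intervals in a RetryPolicy are milliseconds, matching the corrected log line.
import type { RetryPolicy } from "@langchain/langgraph";

const retryPolicy: RetryPolicy = {
  maxAttempts: 3,       // stop after the third failed attempt
  initialInterval: 500, // first backoff: 500ms, not 500 seconds
  backoffFactor: 2,     // then roughly 1000ms, 2000ms, ...
};

declare const builder: any; // assume a StateGraph under construction
builder.addNode("flaky", async () => ({}), { retryPolicy });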
@@ -2,4 +2,5 @@ import { RunnableConfig } from "@langchain/core/runnables";
  import { BaseStore } from "@langchain/langgraph-checkpoint";
  export interface LangGraphRunnableConfig<ConfigurableType extends Record<string, any> = Record<string, any>> extends RunnableConfig<ConfigurableType> {
      store?: BaseStore;
+     writer?: (chunk: unknown) => void;
  }
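
A sketch of what the new optional writer enables, assuming it is wired to the "custom" stream mode added below; the graph, node, and chunk payload are hypothetical, not part of this diff:

// Sketch only: a node pushes arbitrary chunks to the caller via config.writer,
// read back with streamMode "custom".
import type { LangGraphRunnableConfig } from "@langchain/langgraph";

declare const graph: any; // assume a compiled graph containing `progressNode`

const progressNode = async (
  _state: Record<string, unknown>,
  config: LangGraphRunnableConfig
) => {
  // `writer` is undefined unless the run streams in custom mode, hence `?.`
  config.writer?.({ progress: "halfway" });
  return {};
};

for await (const chunk of await graph.stream({}, { streamMode: "custom" })) {
  console.log(chunk); // e.g. { progress: "halfway" }
}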
@@ -8,7 +8,7 @@ import { RetryPolicy } from "./utils/index.js";
  import { Interrupt } from "../constants.js";
  import { type ManagedValueSpec } from "../managed/base.js";
  import { LangGraphRunnableConfig } from "./runnable_types.js";
- export type StreamMode = "values" | "updates" | "debug";
+ export type StreamMode = "values" | "updates" | "debug" | "messages" | "custom";
  export type PregelInputType = any;
  export type PregelOutputType = any;
  /**
@@ -103,7 +103,7 @@ export interface PregelTaskDescription {
  export interface PregelExecutableTask<N extends PropertyKey, C extends PropertyKey> {
      readonly name: N;
      readonly input: unknown;
-     readonly proc: Runnable;
+     readonly proc: Runnable<any, any, LangGraphRunnableConfig>;
      readonly writes: PendingWrite<C>[];
      readonly config?: LangGraphRunnableConfig;
      readonly triggers: Array<string>;
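
With StreamMode broadened to include "messages" and "custom", several modes can be requested in one call. A sketch assuming multi-mode chunks arrive as [mode, payload] pairs (the graph and input are the same assumptions as in the earlier sketches):

// Sketch only: combining stream modes in a single run.
declare const graph: any; // assume the same compiled graph as above

const multi = await graph.stream(
  { messages: [{ role: "user", content: "hi" }] },
  { streamMode: ["updates", "messages"] }
);
for await (const [mode, payload] of multi) {
  if (mode === "messages") {
    const [message] = payload; // the same pair emitted in single-mode "messages"
    console.log("token:", message.content);
  } else {
    console.log("update:", payload); // one state delta per node that ran
  }
}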
@@ -15,6 +15,7 @@ const CONFIG_KEYS = [
      "outputKeys",
      "streamMode",
      "store",
+     "writer",
  ];
  const DEFAULT_RECURSION_LIMIT = 25;
  function ensureLangGraphConfig(...configs) {
@@ -12,6 +12,7 @@ const CONFIG_KEYS = [
      "outputKeys",
      "streamMode",
      "store",
+     "writer",
  ];
  const DEFAULT_RECURSION_LIMIT = 25;
  export function ensureLangGraphConfig(...configs) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@langchain/langgraph",
-   "version": "0.2.19",
+   "version": "0.2.20",
    "description": "LangGraph",
    "type": "module",
    "engines": {
@@ -33,7 +33,6 @@
    "dependencies": {
      "@langchain/langgraph-checkpoint": "~0.0.10",
      "@langchain/langgraph-sdk": "~0.0.20",
-     "double-ended-queue": "^2.1.0-0",
      "uuid": "^10.0.0",
      "zod": "^3.23.8"
    },
@@ -44,7 +43,7 @@
      "@jest/globals": "^29.5.0",
      "@langchain/anthropic": "^0.3.5",
      "@langchain/community": "^0.3.9",
-     "@langchain/core": "^0.3.15",
+     "@langchain/core": "^0.3.16",
      "@langchain/langgraph-checkpoint-postgres": "workspace:*",
      "@langchain/langgraph-checkpoint-sqlite": "workspace:*",
      "@langchain/openai": "^0.3.11",
@@ -52,7 +51,6 @@
      "@swc/core": "^1.3.90",
      "@swc/jest": "^0.2.29",
      "@tsconfig/recommended": "^1.0.3",
-     "@types/double-ended-queue": "^2",
      "@types/pg": "^8",
      "@types/uuid": "^10",
      "@typescript-eslint/eslint-plugin": "^6.12.0",