@langchain/langgraph 0.2.22 → 0.2.23
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
- package/dist/constants.cjs +28 -1
- package/dist/constants.d.ts +12 -0
- package/dist/constants.js +25 -0
- package/dist/errors.cjs +12 -2
- package/dist/errors.d.ts +5 -1
- package/dist/errors.js +9 -1
- package/dist/interrupt.cjs +20 -0
- package/dist/interrupt.d.ts +1 -0
- package/dist/interrupt.js +16 -0
- package/dist/pregel/algo.cjs +24 -11
- package/dist/pregel/algo.d.ts +5 -5
- package/dist/pregel/algo.js +25 -12
- package/dist/pregel/index.cjs +4 -6
- package/dist/pregel/index.d.ts +4 -3
- package/dist/pregel/index.js +5 -7
- package/dist/pregel/io.cjs +18 -1
- package/dist/pregel/io.d.ts +2 -0
- package/dist/pregel/io.js +17 -1
- package/dist/pregel/loop.cjs +27 -26
- package/dist/pregel/loop.d.ts +4 -4
- package/dist/pregel/loop.js +29 -28
- package/dist/pregel/retry.cjs +1 -1
- package/dist/pregel/retry.js +2 -2
- package/dist/web.cjs +4 -1
- package/dist/web.d.ts +2 -1
- package/dist/web.js +2 -1
- package/package.json +2 -2
package/dist/constants.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_NOSTREAM = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = void 0;
+exports._isCommand = exports.Command = exports._isSend = exports.Send = exports._isSendInterface = exports.CHECKPOINT_NAMESPACE_END = exports.CHECKPOINT_NAMESPACE_SEPARATOR = exports.RESERVED = exports.NULL_TASK_ID = exports.TASK_NAMESPACE = exports.PULL = exports.PUSH = exports.TASKS = exports.TAG_NOSTREAM = exports.TAG_HIDDEN = exports.RECURSION_LIMIT_DEFAULT = exports.RUNTIME_PLACEHOLDER = exports.RESUME = exports.INTERRUPT = exports.CONFIG_KEY_CHECKPOINT_MAP = exports.CONFIG_KEY_RESUME_VALUE = exports.CONFIG_KEY_STREAM = exports.CONFIG_KEY_TASK_ID = exports.CONFIG_KEY_RESUMING = exports.CONFIG_KEY_CHECKPOINTER = exports.CONFIG_KEY_READ = exports.CONFIG_KEY_SEND = exports.ERROR = exports.INPUT = exports.MISSING = void 0;
+exports.MISSING = Symbol.for("__missing__");
 exports.INPUT = "__input__";
 exports.ERROR = "__error__";
 exports.CONFIG_KEY_SEND = "__pregel_send";
@@ -9,9 +10,11 @@ exports.CONFIG_KEY_CHECKPOINTER = "__pregel_checkpointer";
 exports.CONFIG_KEY_RESUMING = "__pregel_resuming";
 exports.CONFIG_KEY_TASK_ID = "__pregel_task_id";
 exports.CONFIG_KEY_STREAM = "__pregel_stream";
+exports.CONFIG_KEY_RESUME_VALUE = "__pregel_resume_value";
 // this one is part of public API
 exports.CONFIG_KEY_CHECKPOINT_MAP = "checkpoint_map";
 exports.INTERRUPT = "__interrupt__";
+exports.RESUME = "__resume__";
 exports.RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
 exports.RECURSION_LIMIT_DEFAULT = 25;
 exports.TAG_HIDDEN = "langsmith:hidden";
@@ -20,8 +23,10 @@ exports.TASKS = "__pregel_tasks";
 exports.PUSH = "__pregel_push";
 exports.PULL = "__pregel_pull";
 exports.TASK_NAMESPACE = "6ba7b831-9dad-11d1-80b4-00c04fd430c8";
+exports.NULL_TASK_ID = "00000000-0000-0000-0000-000000000000";
 exports.RESERVED = [
     exports.INTERRUPT,
+    exports.RESUME,
     exports.ERROR,
     exports.TASKS,
     exports.CONFIG_KEY_SEND,
@@ -115,3 +120,25 @@ function _isSend(x) {
     return operation.lg_name === "Send";
 }
 exports._isSend = _isSend;
+class Command {
+    constructor(args) {
+        Object.defineProperty(this, "lg_name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "Command"
+        });
+        Object.defineProperty(this, "resume", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.resume = args.resume;
+    }
+}
+exports.Command = Command;
+function _isCommand(x) {
+    return typeof x === "object" && !!x && x.lg_name === "Command";
+}
+exports._isCommand = _isCommand;
package/dist/constants.d.ts
CHANGED
@@ -1,3 +1,4 @@
+export declare const MISSING: unique symbol;
 export declare const INPUT = "__input__";
 export declare const ERROR = "__error__";
 export declare const CONFIG_KEY_SEND = "__pregel_send";
@@ -6,8 +7,10 @@ export declare const CONFIG_KEY_CHECKPOINTER = "__pregel_checkpointer";
 export declare const CONFIG_KEY_RESUMING = "__pregel_resuming";
 export declare const CONFIG_KEY_TASK_ID = "__pregel_task_id";
 export declare const CONFIG_KEY_STREAM = "__pregel_stream";
+export declare const CONFIG_KEY_RESUME_VALUE = "__pregel_resume_value";
 export declare const CONFIG_KEY_CHECKPOINT_MAP = "checkpoint_map";
 export declare const INTERRUPT = "__interrupt__";
+export declare const RESUME = "__resume__";
 export declare const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
 export declare const RECURSION_LIMIT_DEFAULT = 25;
 export declare const TAG_HIDDEN = "langsmith:hidden";
@@ -16,6 +19,7 @@ export declare const TASKS = "__pregel_tasks";
 export declare const PUSH = "__pregel_push";
 export declare const PULL = "__pregel_pull";
 export declare const TASK_NAMESPACE = "6ba7b831-9dad-11d1-80b4-00c04fd430c8";
+export declare const NULL_TASK_ID = "00000000-0000-0000-0000-000000000000";
 export declare const RESERVED: string[];
 export declare const CHECKPOINT_NAMESPACE_SEPARATOR = "|";
 export declare const CHECKPOINT_NAMESPACE_END = ":";
@@ -81,3 +85,11 @@ export type Interrupt = {
     value: any;
     when: "during";
 };
+export declare class Command<R = unknown> {
+    lg_name: string;
+    resume: R;
+    constructor(args: {
+        resume: R;
+    });
+}
+export declare function _isCommand(x: unknown): x is Command;
package/dist/constants.js
CHANGED
@@ -1,3 +1,4 @@
+export const MISSING = Symbol.for("__missing__");
 export const INPUT = "__input__";
 export const ERROR = "__error__";
 export const CONFIG_KEY_SEND = "__pregel_send";
@@ -6,9 +7,11 @@ export const CONFIG_KEY_CHECKPOINTER = "__pregel_checkpointer";
 export const CONFIG_KEY_RESUMING = "__pregel_resuming";
 export const CONFIG_KEY_TASK_ID = "__pregel_task_id";
 export const CONFIG_KEY_STREAM = "__pregel_stream";
+export const CONFIG_KEY_RESUME_VALUE = "__pregel_resume_value";
 // this one is part of public API
 export const CONFIG_KEY_CHECKPOINT_MAP = "checkpoint_map";
 export const INTERRUPT = "__interrupt__";
+export const RESUME = "__resume__";
 export const RUNTIME_PLACEHOLDER = "__pregel_runtime_placeholder__";
 export const RECURSION_LIMIT_DEFAULT = 25;
 export const TAG_HIDDEN = "langsmith:hidden";
@@ -17,8 +20,10 @@ export const TASKS = "__pregel_tasks";
 export const PUSH = "__pregel_push";
 export const PULL = "__pregel_pull";
 export const TASK_NAMESPACE = "6ba7b831-9dad-11d1-80b4-00c04fd430c8";
+export const NULL_TASK_ID = "00000000-0000-0000-0000-000000000000";
 export const RESERVED = [
     INTERRUPT,
+    RESUME,
     ERROR,
     TASKS,
     CONFIG_KEY_SEND,
@@ -109,3 +114,23 @@ export function _isSend(x) {
     const operation = x;
     return operation.lg_name === "Send";
 }
+export class Command {
+    constructor(args) {
+        Object.defineProperty(this, "lg_name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "Command"
+        });
+        Object.defineProperty(this, "resume", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.resume = args.resume;
+    }
+}
+export function _isCommand(x) {
+    return typeof x === "object" && !!x && x.lg_name === "Command";
+}
package/dist/errors.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getSubgraphsSeenSet = exports.RemoteException = exports.MultipleSubgraphsError = exports.InvalidUpdateError = exports.EmptyChannelError = exports.EmptyInputError = exports.isGraphInterrupt = exports.NodeInterrupt = exports.GraphInterrupt = exports.GraphValueError = exports.GraphRecursionError = exports.BaseLangGraphError = void 0;
+exports.getSubgraphsSeenSet = exports.RemoteException = exports.MultipleSubgraphsError = exports.InvalidUpdateError = exports.EmptyChannelError = exports.EmptyInputError = exports.isGraphInterrupt = exports.isGraphBubbleUp = exports.NodeInterrupt = exports.GraphInterrupt = exports.GraphValueError = exports.GraphRecursionError = exports.GraphBubbleUp = exports.BaseLangGraphError = void 0;
 // TODO: Merge with base LangChain error class when we drop support for core@0.2.0
 class BaseLangGraphError extends Error {
     constructor(message, fields) {
@@ -19,6 +19,12 @@ class BaseLangGraphError extends Error {
     }
 }
 exports.BaseLangGraphError = BaseLangGraphError;
+class GraphBubbleUp extends BaseLangGraphError {
+    get is_bubble_up() {
+        return true;
+    }
+}
+exports.GraphBubbleUp = GraphBubbleUp;
 class GraphRecursionError extends BaseLangGraphError {
     constructor(message, fields) {
         super(message, fields);
@@ -39,7 +45,7 @@ class GraphValueError extends BaseLangGraphError {
     }
 }
 exports.GraphValueError = GraphValueError;
-class GraphInterrupt extends BaseLangGraphError {
+class GraphInterrupt extends GraphBubbleUp {
     constructor(interrupts, fields) {
         super(JSON.stringify(interrupts, null, 2), fields);
         Object.defineProperty(this, "interrupts", {
@@ -72,6 +78,10 @@ class NodeInterrupt extends GraphInterrupt {
     }
 }
 exports.NodeInterrupt = NodeInterrupt;
+function isGraphBubbleUp(e) {
+    return e !== undefined && e.is_bubble_up === true;
+}
+exports.isGraphBubbleUp = isGraphBubbleUp;
 function isGraphInterrupt(e) {
     return (e !== undefined &&
         [
package/dist/errors.d.ts
CHANGED
@@ -6,6 +6,9 @@ export declare class BaseLangGraphError extends Error {
     lc_error_code?: string;
     constructor(message?: string, fields?: BaseLangGraphErrorFields);
 }
+export declare class GraphBubbleUp extends BaseLangGraphError {
+    get is_bubble_up(): boolean;
+}
 export declare class GraphRecursionError extends BaseLangGraphError {
     constructor(message?: string, fields?: BaseLangGraphErrorFields);
     static get unminifiable_name(): string;
@@ -14,7 +17,7 @@ export declare class GraphValueError extends BaseLangGraphError {
     constructor(message?: string, fields?: BaseLangGraphErrorFields);
     static get unminifiable_name(): string;
 }
-export declare class GraphInterrupt extends BaseLangGraphError {
+export declare class GraphInterrupt extends GraphBubbleUp {
     interrupts: Interrupt[];
     constructor(interrupts?: Interrupt[], fields?: BaseLangGraphErrorFields);
     static get unminifiable_name(): string;
@@ -24,6 +27,7 @@ export declare class NodeInterrupt extends GraphInterrupt {
     constructor(message: string, fields?: BaseLangGraphErrorFields);
     static get unminifiable_name(): string;
 }
+export declare function isGraphBubbleUp(e?: Error): e is GraphBubbleUp;
 export declare function isGraphInterrupt(e?: GraphInterrupt | Error): e is GraphInterrupt;
 export declare class EmptyInputError extends BaseLangGraphError {
     constructor(message?: string, fields?: BaseLangGraphErrorFields);
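
`GraphBubbleUp` gives the runtime a common base for exceptions that are control flow rather than failures (currently `GraphInterrupt` and its subclasses), and `isGraphBubbleUp` detects them through the `is_bubble_up` getter instead of class identity. A self-contained sketch of that guard, with local stand-ins for the classes declared above (not the package source):

```ts
// Local stand-ins mirroring the hierarchy declared above.
class GraphBubbleUp extends Error {
  get is_bubble_up(): boolean {
    return true;
  }
}
class GraphInterrupt extends GraphBubbleUp {}

// Duck-typed guard, as in errors.cjs/js above: any error exposing
// `is_bubble_up === true` is treated as graph control flow, not a failure.
function isGraphBubbleUp(e?: Error): e is GraphBubbleUp {
  return e !== undefined && (e as GraphBubbleUp).is_bubble_up === true;
}

console.log(isGraphBubbleUp(new GraphInterrupt()));   // -> true
console.log(isGraphBubbleUp(new TypeError("boom")));  // -> false
```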
package/dist/errors.js
CHANGED
@@ -15,6 +15,11 @@ export class BaseLangGraphError extends Error {
         this.lc_error_code = fields?.lc_error_code;
     }
 }
+export class GraphBubbleUp extends BaseLangGraphError {
+    get is_bubble_up() {
+        return true;
+    }
+}
 export class GraphRecursionError extends BaseLangGraphError {
     constructor(message, fields) {
         super(message, fields);
@@ -33,7 +38,7 @@ export class GraphValueError extends BaseLangGraphError {
         return "GraphValueError";
     }
 }
-export class GraphInterrupt extends BaseLangGraphError {
+export class GraphInterrupt extends GraphBubbleUp {
     constructor(interrupts, fields) {
         super(JSON.stringify(interrupts, null, 2), fields);
         Object.defineProperty(this, "interrupts", {
@@ -64,6 +69,9 @@ export class NodeInterrupt extends GraphInterrupt {
         return "NodeInterrupt";
     }
 }
+export function isGraphBubbleUp(e) {
+    return e !== undefined && e.is_bubble_up === true;
+}
 export function isGraphInterrupt(e) {
     return (e !== undefined &&
         [
package/dist/interrupt.cjs
ADDED
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.interrupt = void 0;
+const singletons_1 = require("@langchain/core/singletons");
+const errors_js_1 = require("./errors.cjs");
+const constants_js_1 = require("./constants.cjs");
+function interrupt(value) {
+    const config = singletons_1.AsyncLocalStorageProviderSingleton.getRunnableConfig();
+    if (!config) {
+        throw new Error("Called interrupt() outside the context of a graph.");
+    }
+    const resume = config.configurable?.[constants_js_1.CONFIG_KEY_RESUME_VALUE];
+    if (resume !== constants_js_1.MISSING) {
+        return resume;
+    }
+    else {
+        throw new errors_js_1.GraphInterrupt([{ value, when: "during" }]);
+    }
+}
+exports.interrupt = interrupt;
package/dist/interrupt.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function interrupt<I = unknown, R = unknown>(value: I): R;
package/dist/interrupt.js
ADDED
@@ -0,0 +1,16 @@
+import { AsyncLocalStorageProviderSingleton } from "@langchain/core/singletons";
+import { GraphInterrupt } from "./errors.js";
+import { CONFIG_KEY_RESUME_VALUE, MISSING } from "./constants.js";
+export function interrupt(value) {
+    const config = AsyncLocalStorageProviderSingleton.getRunnableConfig();
+    if (!config) {
+        throw new Error("Called interrupt() outside the context of a graph.");
+    }
+    const resume = config.configurable?.[CONFIG_KEY_RESUME_VALUE];
+    if (resume !== MISSING) {
+        return resume;
+    }
+    else {
+        throw new GraphInterrupt([{ value, when: "during" }]);
+    }
+}
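
Together, the new `interrupt()` helper and the `Command` input enable a pause-and-resume flow: the first time a node calls `interrupt(value)` there is no resume value in the config, so a `GraphInterrupt` is thrown and the run stops at the current checkpoint; re-invoking the same thread with a `Command({ resume })` records a `RESUME` pending write, which is injected as `CONFIG_KEY_RESUME_VALUE` so the same `interrupt()` call returns that value instead. A hypothetical usage sketch, assuming `interrupt` and `Command` are re-exported from the package entrypoint (only compiled dist files are visible in this diff) and that a checkpointer plus `thread_id` are configured:

```ts
// Hypothetical usage sketch; the import path is an assumption, not confirmed by this diff.
import { interrupt, Command } from "@langchain/langgraph";

const approvalNode = async (state: { question: string }) => {
  // First run: throws GraphInterrupt([{ value: state.question, when: "during" }])
  // and pauses the graph at the current checkpoint.
  // Resumed run: returns the value supplied via Command({ resume }).
  const approved = interrupt<string, boolean>(state.question);
  return { approved };
};

// First invocation pauses inside approvalNode:
//   await graph.invoke({ question: "Deploy to prod?" }, { configurable: { thread_id: "1" } });
// Resume the same thread, handing the answer back to interrupt():
//   await graph.invoke(new Command({ resume: true }), { configurable: { thread_id: "1" } });
```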
package/dist/pregel/algo.cjs
CHANGED
@@ -96,9 +96,13 @@ writes) {
     commit(writes);
 }
 exports._localWrite = _localWrite;
+const IGNORE = new Set([constants_js_1.PUSH, constants_js_1.RESUME, constants_js_1.INTERRUPT]);
 function _applyWrites(checkpoint, channels, tasks,
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 getNextVersion) {
+    // if no task has triggers this is applying writes from the null task only
+    // so we don't do anything other than update the channels written to
+    const bumpStep = tasks.some((task) => task.triggers.length > 0);
     // Filter out non instances of BaseChannel
     const onlyChannels = Object.fromEntries(Object.entries(channels).filter(([_, value]) => (0, base_js_1.isBaseChannel)(value)));
     // Update seen versions
@@ -130,7 +134,7 @@ getNextVersion) {
         }
     }
     // Clear pending sends
-    if (checkpoint.pending_sends) {
+    if (checkpoint.pending_sends?.length && bumpStep) {
         checkpoint.pending_sends = [];
     }
     // Group writes by channel
@@ -138,7 +142,10 @@ getNextVersion) {
     const pendingWritesByManaged = {};
     for (const task of tasks) {
         for (const [chan, val] of task.writes) {
-            if (chan === constants_js_1.TASKS) {
+            if (IGNORE.has(chan)) {
+                // do nothing
+            }
+            else if (chan === constants_js_1.TASKS) {
                 checkpoint.pending_sends.push({
                     node: val.node,
                     args: val.args,
@@ -193,11 +200,13 @@ getNextVersion) {
         }
     }
     // Channels that weren't updated in this step are notified of a new step
-    for (const chan of Object.keys(onlyChannels)) {
-        if (!updatedChannels.has(chan)) {
-            const updated = onlyChannels[chan].update([]);
-            if (updated && getNextVersion !== undefined) {
-                checkpoint.channel_versions[chan] = getNextVersion(maxVersion, onlyChannels[chan]);
+    if (bumpStep) {
+        for (const chan of Object.keys(onlyChannels)) {
+            if (!updatedChannels.has(chan)) {
+                const updated = onlyChannels[chan].update([]);
+                if (updated && getNextVersion !== undefined) {
+                    checkpoint.channel_versions[chan] = getNextVersion(maxVersion, onlyChannels[chan]);
+                }
             }
         }
     }
@@ -205,11 +214,11 @@ getNextVersion) {
     return pendingWritesByManaged;
 }
 exports._applyWrites = _applyWrites;
-function _prepareNextTasks(checkpoint, processes, channels, managed, config, forExecution, extra) {
+function _prepareNextTasks(checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra) {
     const tasks = {};
     // Consume pending packets
     for (let i = 0; i < checkpoint.pending_sends.length; i += 1) {
-        const task = _prepareSingleTask([constants_js_1.PUSH, i], checkpoint, processes, channels, managed, config, forExecution, extra);
+        const task = _prepareSingleTask([constants_js_1.PUSH, i], checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra);
         if (task !== undefined) {
             tasks[task.id] = task;
         }
@@ -217,7 +226,7 @@ function _prepareNextTasks(checkpoint, processes, channels, managed, config, for
     // Check if any processes should be run in next step
     // If so, prepare the values to be passed to them
     for (const name of Object.keys(processes)) {
-        const task = _prepareSingleTask([constants_js_1.PULL, name], checkpoint, processes, channels, managed, config, forExecution, extra);
+        const task = _prepareSingleTask([constants_js_1.PULL, name], checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra);
         if (task !== undefined) {
             tasks[task.id] = task;
         }
@@ -225,7 +234,7 @@ function _prepareNextTasks(checkpoint, processes, channels, managed, config, for
     return tasks;
 }
 exports._prepareNextTasks = _prepareNextTasks;
-function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed, config, forExecution, extra) {
+function _prepareSingleTask(taskPath, checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra) {
     const { step, checkpointer, manager } = extra;
     const configurable = config.configurable ?? {};
     const parentNamespace = configurable.checkpoint_ns ?? "";
@@ -271,6 +280,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
         metadata = { ...metadata, ...proc.metadata };
     }
     const writes = [];
+    const resume = pendingWrites?.find((w) => [taskId, constants_js_1.NULL_TASK_ID].includes(w[0]) && w[1] === constants_js_1.RESUME);
     return {
         name: packet.node,
         input: packet.args,
@@ -298,6 +308,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
                 ...configurable[constants_js_1.CONFIG_KEY_CHECKPOINT_MAP],
                 [parentNamespace]: checkpoint.id,
             },
+            [constants_js_1.CONFIG_KEY_RESUME_VALUE]: resume ? resume[2] : constants_js_1.MISSING,
             checkpoint_id: undefined,
             checkpoint_ns: taskCheckpointNamespace,
         },
@@ -367,6 +378,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
         metadata = { ...metadata, ...proc.metadata };
     }
     const writes = [];
+    const resume = pendingWrites?.find((w) => [taskId, constants_js_1.NULL_TASK_ID].includes(w[0]) && w[1] === constants_js_1.RESUME);
     const taskCheckpointNamespace = `${checkpointNamespace}${constants_js_1.CHECKPOINT_NAMESPACE_END}${taskId}`;
     return {
         name,
@@ -397,6 +409,7 @@ function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed,
                 ...configurable[constants_js_1.CONFIG_KEY_CHECKPOINT_MAP],
                 [parentNamespace]: checkpoint.id,
             },
+            [constants_js_1.CONFIG_KEY_RESUME_VALUE]: resume ? resume[2] : constants_js_1.MISSING,
             checkpoint_id: undefined,
             checkpoint_ns: taskCheckpointNamespace,
         },
package/dist/pregel/algo.d.ts
CHANGED
@@ -34,8 +34,8 @@ export type NextTaskExtraFieldsWithStore = NextTaskExtraFields & {
 export type NextTaskExtraFieldsWithoutStore = NextTaskExtraFields & {
     store?: never;
 };
-export declare function _prepareNextTasks<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(checkpoint: ReadonlyCheckpoint, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: false, extra: NextTaskExtraFieldsWithoutStore): Record<string, PregelTaskDescription>;
-export declare function _prepareNextTasks<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(checkpoint: ReadonlyCheckpoint, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: true, extra: NextTaskExtraFieldsWithStore): Record<string, PregelExecutableTask<keyof Nn, keyof Cc>>;
-export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: false, extra: NextTaskExtraFields): PregelTaskDescription | undefined;
-export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: true, extra: NextTaskExtraFields): PregelExecutableTask<keyof Nn, keyof Cc> | undefined;
-export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: boolean, extra: NextTaskExtraFieldsWithStore): PregelTaskDescription | PregelExecutableTask<keyof Nn, keyof Cc> | undefined;
+export declare function _prepareNextTasks<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(checkpoint: ReadonlyCheckpoint, pendingWrites: [string, string, unknown][] | undefined, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: false, extra: NextTaskExtraFieldsWithoutStore): Record<string, PregelTaskDescription>;
+export declare function _prepareNextTasks<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(checkpoint: ReadonlyCheckpoint, pendingWrites: [string, string, unknown][] | undefined, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: true, extra: NextTaskExtraFieldsWithStore): Record<string, PregelExecutableTask<keyof Nn, keyof Cc>>;
+export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, pendingWrites: [string, string, unknown][] | undefined, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: false, extra: NextTaskExtraFields): PregelTaskDescription | undefined;
+export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, pendingWrites: [string, string, unknown][] | undefined, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: true, extra: NextTaskExtraFields): PregelExecutableTask<keyof Nn, keyof Cc> | undefined;
+export declare function _prepareSingleTask<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>>(taskPath: [string, string | number], checkpoint: ReadonlyCheckpoint, pendingWrites: [string, string, unknown][] | undefined, processes: Nn, channels: Cc, managed: ManagedValueMapping, config: RunnableConfig, forExecution: boolean, extra: NextTaskExtraFieldsWithStore): PregelTaskDescription | PregelExecutableTask<keyof Nn, keyof Cc> | undefined;
package/dist/pregel/algo.js
CHANGED
@@ -3,7 +3,7 @@ import { mergeConfigs, patchConfig, } from "@langchain/core/runnables";
 import { copyCheckpoint, uuid5, maxChannelVersion, } from "@langchain/langgraph-checkpoint";
 import { createCheckpoint, emptyChannels, isBaseChannel, } from "../channels/base.js";
 import { readChannel, readChannels } from "./io.js";
-import { _isSend, _isSendInterface, CONFIG_KEY_CHECKPOINT_MAP, CHECKPOINT_NAMESPACE_SEPARATOR, CONFIG_KEY_CHECKPOINTER, CONFIG_KEY_READ, CONFIG_KEY_TASK_ID, CONFIG_KEY_SEND, INTERRUPT, RESERVED, TAG_HIDDEN, TASKS, CHECKPOINT_NAMESPACE_END, PUSH, PULL, } from "../constants.js";
+import { _isSend, _isSendInterface, CONFIG_KEY_CHECKPOINT_MAP, CHECKPOINT_NAMESPACE_SEPARATOR, CONFIG_KEY_CHECKPOINTER, CONFIG_KEY_READ, CONFIG_KEY_TASK_ID, CONFIG_KEY_SEND, INTERRUPT, RESERVED, TAG_HIDDEN, TASKS, CHECKPOINT_NAMESPACE_END, PUSH, PULL, RESUME, CONFIG_KEY_RESUME_VALUE, NULL_TASK_ID, MISSING, } from "../constants.js";
 import { EmptyChannelError, InvalidUpdateError } from "../errors.js";
 import { getNullChannelVersion } from "./utils/index.js";
 export const increment = (current) => {
@@ -89,9 +89,13 @@ writes) {
     }
     commit(writes);
 }
+const IGNORE = new Set([PUSH, RESUME, INTERRUPT]);
 export function _applyWrites(checkpoint, channels, tasks,
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 getNextVersion) {
+    // if no task has triggers this is applying writes from the null task only
+    // so we don't do anything other than update the channels written to
+    const bumpStep = tasks.some((task) => task.triggers.length > 0);
     // Filter out non instances of BaseChannel
     const onlyChannels = Object.fromEntries(Object.entries(channels).filter(([_, value]) => isBaseChannel(value)));
     // Update seen versions
@@ -123,7 +127,7 @@ getNextVersion) {
         }
     }
     // Clear pending sends
-    if (checkpoint.pending_sends) {
+    if (checkpoint.pending_sends?.length && bumpStep) {
         checkpoint.pending_sends = [];
     }
     // Group writes by channel
@@ -131,7 +135,10 @@ getNextVersion) {
     const pendingWritesByManaged = {};
     for (const task of tasks) {
         for (const [chan, val] of task.writes) {
-            if (chan === TASKS) {
+            if (IGNORE.has(chan)) {
+                // do nothing
+            }
+            else if (chan === TASKS) {
                 checkpoint.pending_sends.push({
                     node: val.node,
                     args: val.args,
@@ -186,22 +193,24 @@ getNextVersion) {
         }
     }
     // Channels that weren't updated in this step are notified of a new step
-    for (const chan of Object.keys(onlyChannels)) {
-        if (!updatedChannels.has(chan)) {
-            const updated = onlyChannels[chan].update([]);
-            if (updated && getNextVersion !== undefined) {
-                checkpoint.channel_versions[chan] = getNextVersion(maxVersion, onlyChannels[chan]);
+    if (bumpStep) {
+        for (const chan of Object.keys(onlyChannels)) {
+            if (!updatedChannels.has(chan)) {
+                const updated = onlyChannels[chan].update([]);
+                if (updated && getNextVersion !== undefined) {
+                    checkpoint.channel_versions[chan] = getNextVersion(maxVersion, onlyChannels[chan]);
+                }
             }
         }
     }
     // Return managed values writes to be applied externally
     return pendingWritesByManaged;
 }
-export function _prepareNextTasks(checkpoint, processes, channels, managed, config, forExecution, extra) {
+export function _prepareNextTasks(checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra) {
     const tasks = {};
     // Consume pending packets
     for (let i = 0; i < checkpoint.pending_sends.length; i += 1) {
-        const task = _prepareSingleTask([PUSH, i], checkpoint, processes, channels, managed, config, forExecution, extra);
+        const task = _prepareSingleTask([PUSH, i], checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra);
         if (task !== undefined) {
             tasks[task.id] = task;
         }
@@ -209,14 +218,14 @@ export function _prepareNextTasks(checkpoint, processes, channels, managed, conf
     // Check if any processes should be run in next step
     // If so, prepare the values to be passed to them
     for (const name of Object.keys(processes)) {
-        const task = _prepareSingleTask([PULL, name], checkpoint, processes, channels, managed, config, forExecution, extra);
+        const task = _prepareSingleTask([PULL, name], checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra);
         if (task !== undefined) {
             tasks[task.id] = task;
         }
     }
     return tasks;
 }
-export function _prepareSingleTask(taskPath, checkpoint, processes, channels, managed, config, forExecution, extra) {
+export function _prepareSingleTask(taskPath, checkpoint, pendingWrites, processes, channels, managed, config, forExecution, extra) {
     const { step, checkpointer, manager } = extra;
     const configurable = config.configurable ?? {};
     const parentNamespace = configurable.checkpoint_ns ?? "";
@@ -262,6 +271,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
         metadata = { ...metadata, ...proc.metadata };
     }
     const writes = [];
+    const resume = pendingWrites?.find((w) => [taskId, NULL_TASK_ID].includes(w[0]) && w[1] === RESUME);
     return {
         name: packet.node,
         input: packet.args,
@@ -289,6 +299,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
                 ...configurable[CONFIG_KEY_CHECKPOINT_MAP],
                 [parentNamespace]: checkpoint.id,
             },
+            [CONFIG_KEY_RESUME_VALUE]: resume ? resume[2] : MISSING,
             checkpoint_id: undefined,
             checkpoint_ns: taskCheckpointNamespace,
         },
@@ -358,6 +369,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
         metadata = { ...metadata, ...proc.metadata };
     }
     const writes = [];
+    const resume = pendingWrites?.find((w) => [taskId, NULL_TASK_ID].includes(w[0]) && w[1] === RESUME);
     const taskCheckpointNamespace = `${checkpointNamespace}${CHECKPOINT_NAMESPACE_END}${taskId}`;
     return {
         name,
@@ -388,6 +400,7 @@ export function _prepareSingleTask(taskPath, checkpoint, processes, channels, ma
                 ...configurable[CONFIG_KEY_CHECKPOINT_MAP],
                 [parentNamespace]: checkpoint.id,
             },
+            [CONFIG_KEY_RESUME_VALUE]: resume ? resume[2] : MISSING,
             checkpoint_id: undefined,
             checkpoint_ns: taskCheckpointNamespace,
         },
package/dist/pregel/index.cjs
CHANGED
@@ -307,7 +307,7 @@ class Pregel extends runnables_1.Runnable {
         // Pass `skipManaged: true` as managed values should not be returned in get state calls.
         const { managed } = await this.prepareSpecs(config, { skipManaged: true });
         const channels = (0, base_js_1.emptyChannels)(this.channels, saved.checkpoint);
-        const nextTasks = Object.values((0, algo_js_1._prepareNextTasks)(saved.checkpoint, this.nodes, channels, managed, saved.config, false, { step: (saved.metadata?.step ?? -1) + 1 }));
+        const nextTasks = Object.values((0, algo_js_1._prepareNextTasks)(saved.checkpoint, saved.pendingWrites, this.nodes, channels, managed, saved.config, false, { step: (saved.metadata?.step ?? -1) + 1 }));
         const subgraphs = await (0, utils_js_1.gatherIterator)(this.getSubgraphsAsync());
         const parentNamespace = saved.config.configurable?.checkpoint_ns ?? "";
         const taskStates = {};
@@ -798,11 +798,11 @@ class Pregel extends runnables_1.Runnable {
         // Timeouts will be thrown
         for await (const { task, error } of taskStream) {
             if (error !== undefined) {
-                if ((0, errors_js_1.isGraphInterrupt)(error)) {
+                if ((0, errors_js_1.isGraphBubbleUp)(error)) {
                     if (loop.isNested) {
                         throw error;
                     }
-                    if (error.interrupts.length) {
+                    if ((0, errors_js_1.isGraphInterrupt)(error) && error.interrupts.length) {
                         loop.putWrites(task.id, error.interrupts.map((interrupt) => [constants_js_1.INTERRUPT, interrupt]));
                     }
                 }
@@ -810,14 +810,12 @@ class Pregel extends runnables_1.Runnable {
                     loop.putWrites(task.id, [
                         [constants_js_1.ERROR, { message: error.message, name: error.name }],
                     ]);
+                    throw error;
                 }
             }
             else {
                 loop.putWrites(task.id, task.writes);
             }
-            if (error !== undefined && !(0, errors_js_1.isGraphInterrupt)(error)) {
-                throw error;
-            }
         }
         if (debug) {
             (0, debug_js_1.printStepWrites)(loop.step, Object.values(loop.tasks)
package/dist/pregel/index.d.ts
CHANGED
@@ -4,6 +4,7 @@ import { All, BaseCheckpointSaver, BaseStore, CheckpointListOptions, CheckpointT
 import { BaseChannel } from "../channels/base.js";
 import { PregelNode } from "./read.js";
 import { ChannelWrite } from "./write.js";
+import { Command } from "../constants.js";
 import { PregelInterface, PregelParams, StateSnapshot, StreamMode, PregelInputType, PregelOutputType, PregelOptions } from "./types.js";
 import { StrRecord } from "./algo.js";
 import { RetryPolicy } from "./utils/index.js";
@@ -102,14 +103,14 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
      * @param options.interruptAfter Nodes to interrupt after.
      * @param options.debug Whether to print debug information during execution.
      */
-    stream(input: PregelInputType, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<IterableReadableStream<PregelOutputType>>;
+    stream(input: PregelInputType | Command, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<IterableReadableStream<PregelOutputType>>;
     protected prepareSpecs(config: RunnableConfig, options?: {
         skipManaged?: boolean;
     }): Promise<{
         channelSpecs: Record<string, BaseChannel<unknown, unknown, unknown>>;
         managed: ManagedValueMapping;
     }>;
-    _streamIterator(input: PregelInputType, options?: Partial<PregelOptions<Nn, Cc>>): AsyncGenerator<PregelOutputType>;
+    _streamIterator(input: PregelInputType | Command, options?: Partial<PregelOptions<Nn, Cc>>): AsyncGenerator<PregelOutputType>;
     /**
      * Run the graph with a single input and config.
     * @param input The input to the graph.
@@ -125,5 +126,5 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
      * @param options.interruptAfter Nodes to interrupt after.
      * @param options.debug Whether to print debug information during execution.
      */
-    invoke(input: PregelInputType, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<PregelOutputType>;
+    invoke(input: PregelInputType | Command, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<PregelOutputType>;
 }
package/dist/pregel/index.js
CHANGED
@@ -8,7 +8,7 @@ import { readChannels } from "./io.js";
 import { printStepCheckpoint, printStepTasks, printStepWrites, tasksWithWrites, } from "./debug.js";
 import { ChannelWrite, PASSTHROUGH } from "./write.js";
 import { CONFIG_KEY_CHECKPOINTER, CONFIG_KEY_READ, CONFIG_KEY_SEND, ERROR, INTERRUPT, CHECKPOINT_NAMESPACE_SEPARATOR, CHECKPOINT_NAMESPACE_END, CONFIG_KEY_STREAM, CONFIG_KEY_TASK_ID, } from "../constants.js";
-import { GraphRecursionError, GraphValueError, InvalidUpdateError, isGraphInterrupt, } from "../errors.js";
+import { GraphRecursionError, GraphValueError, InvalidUpdateError, isGraphBubbleUp, isGraphInterrupt, } from "../errors.js";
 import { _prepareNextTasks, _localRead, _applyWrites, } from "./algo.js";
 import { _coerceToDict, getNewChannelVersions, patchCheckpointMap, } from "./utils/index.js";
 import { findSubgraphPregel } from "./utils/subgraph.js";
@@ -303,7 +303,7 @@ export class Pregel extends Runnable {
         // Pass `skipManaged: true` as managed values should not be returned in get state calls.
         const { managed } = await this.prepareSpecs(config, { skipManaged: true });
         const channels = emptyChannels(this.channels, saved.checkpoint);
-        const nextTasks = Object.values(_prepareNextTasks(saved.checkpoint, this.nodes, channels, managed, saved.config, false, { step: (saved.metadata?.step ?? -1) + 1 }));
+        const nextTasks = Object.values(_prepareNextTasks(saved.checkpoint, saved.pendingWrites, this.nodes, channels, managed, saved.config, false, { step: (saved.metadata?.step ?? -1) + 1 }));
         const subgraphs = await gatherIterator(this.getSubgraphsAsync());
         const parentNamespace = saved.config.configurable?.checkpoint_ns ?? "";
         const taskStates = {};
@@ -794,11 +794,11 @@ export class Pregel extends Runnable {
         // Timeouts will be thrown
         for await (const { task, error } of taskStream) {
             if (error !== undefined) {
-                if (isGraphInterrupt(error)) {
+                if (isGraphBubbleUp(error)) {
                     if (loop.isNested) {
                         throw error;
                     }
-                    if (error.interrupts.length) {
+                    if (isGraphInterrupt(error) && error.interrupts.length) {
                         loop.putWrites(task.id, error.interrupts.map((interrupt) => [INTERRUPT, interrupt]));
                     }
                 }
@@ -806,14 +806,12 @@ export class Pregel extends Runnable {
                     loop.putWrites(task.id, [
                         [ERROR, { message: error.message, name: error.name }],
                     ]);
+                    throw error;
                 }
             }
             else {
                 loop.putWrites(task.id, task.writes);
             }
-            if (error !== undefined && !isGraphInterrupt(error)) {
-                throw error;
-            }
         }
         if (debug) {
             printStepWrites(loop.step, Object.values(loop.tasks)
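
In both builds the run loop now branches on `isGraphBubbleUp` instead of `isGraphInterrupt`: bubble-up exceptions are rethrown when the graph is nested and only genuine interrupts persist `INTERRUPT` writes, while any other error records an `ERROR` write and is rethrown immediately rather than after the write-handling block. A compressed sketch of the new branch, using local stand-ins for the package helpers (an illustration, not the package source):

```ts
// Local stand-ins; the real helpers live in errors.js and constants.js above.
const INTERRUPT = "__interrupt__";
const ERROR = "__error__";

interface BubbleUp extends Error {
  is_bubble_up?: boolean;
  interrupts?: unknown[];
}
const isGraphBubbleUp = (e?: Error): e is BubbleUp =>
  e !== undefined && (e as BubbleUp).is_bubble_up === true;
const isGraphInterrupt = (e?: Error): e is BubbleUp =>
  isGraphBubbleUp(e) && Array.isArray((e as BubbleUp).interrupts);

function handleTaskError(
  loop: { isNested: boolean; putWrites: (id: string, w: [string, unknown][]) => void },
  taskId: string,
  error: Error
): void {
  if (isGraphBubbleUp(error)) {
    if (loop.isNested) throw error; // let the parent graph handle control flow
    if (isGraphInterrupt(error) && error.interrupts!.length) {
      loop.putWrites(taskId, error.interrupts!.map((i) => [INTERRUPT, i] as [string, unknown]));
    }
  } else {
    loop.putWrites(taskId, [[ERROR, { message: error.message, name: error.name }]]);
    throw error; // ordinary errors are now rethrown right after their write is recorded
  }
}
```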
package/dist/pregel/io.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.single = exports.mapOutputUpdates = exports.mapOutputValues = exports.mapInput = exports.readChannels = exports.readChannel = void 0;
+exports.single = exports.mapOutputUpdates = exports.mapOutputValues = exports.mapInput = exports.mapCommand = exports.readChannels = exports.readChannel = void 0;
+const uuid_1 = require("uuid");
 const constants_js_1 = require("../constants.cjs");
 const errors_js_1 = require("../errors.cjs");
 function readChannel(channels, chan, catchErrors = true, returnException = false) {
@@ -45,6 +46,22 @@ function readChannels(channels, select, skipEmpty = true
     }
 }
 exports.readChannels = readChannels;
+function* mapCommand(cmd) {
+    if (cmd.resume) {
+        if (typeof cmd.resume === "object" &&
+            !!cmd.resume &&
+            Object.keys(cmd.resume).length &&
+            Object.keys(cmd.resume).every(uuid_1.validate)) {
+            for (const [tid, resume] of Object.entries(cmd.resume)) {
+                yield [tid, constants_js_1.RESUME, resume];
+            }
+        }
+        else {
+            yield [constants_js_1.NULL_TASK_ID, constants_js_1.RESUME, cmd.resume];
+        }
+    }
+}
+exports.mapCommand = mapCommand;
 /**
  * Map input chunk to a sequence of pending writes in the form [channel, value].
  */
package/dist/pregel/io.d.ts
CHANGED
@@ -1,8 +1,10 @@
 import type { PendingWrite } from "@langchain/langgraph-checkpoint";
 import type { BaseChannel } from "../channels/base.js";
 import type { PregelExecutableTask } from "./types.js";
+import { Command } from "../constants.js";
 export declare function readChannel<C extends PropertyKey>(channels: Record<C, BaseChannel>, chan: C, catchErrors?: boolean, returnException?: boolean): unknown | null;
 export declare function readChannels<C extends PropertyKey>(channels: Record<C, BaseChannel>, select: C | Array<C>, skipEmpty?: boolean): Record<string, any> | any;
+export declare function mapCommand(cmd: Command): Generator<[string, string, unknown]>;
 /**
  * Map input chunk to a sequence of pending writes in the form [channel, value].
  */
package/dist/pregel/io.js
CHANGED
@@ -1,4 +1,5 @@
-import {
+import { validate } from "uuid";
+import { NULL_TASK_ID, RESUME, TAG_HIDDEN } from "../constants.js";
 import { EmptyChannelError } from "../errors.js";
 export function readChannel(channels, chan, catchErrors = true, returnException = false) {
     try {
@@ -40,6 +41,21 @@ export function readChannels(channels, select, skipEmpty = true
         return readChannel(channels, select);
     }
 }
+export function* mapCommand(cmd) {
+    if (cmd.resume) {
+        if (typeof cmd.resume === "object" &&
+            !!cmd.resume &&
+            Object.keys(cmd.resume).length &&
+            Object.keys(cmd.resume).every(validate)) {
+            for (const [tid, resume] of Object.entries(cmd.resume)) {
+                yield [tid, RESUME, resume];
+            }
+        }
+        else {
+            yield [NULL_TASK_ID, RESUME, cmd.resume];
+        }
+    }
+}
 /**
  * Map input chunk to a sequence of pending writes in the form [channel, value].
  */
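
`mapCommand` turns a `Command` into checkpoint write tuples: if `resume` is an object whose keys are all valid UUIDs, each entry is addressed to that task id; otherwise the single value is recorded against `NULL_TASK_ID`, meaning "whichever task is waiting". A sketch mirroring that logic with its expected output (local constants, not the package source):

```ts
// Sketch mirroring mapCommand above; `validate` comes from the same "uuid" dependency the dist code uses.
import { validate } from "uuid";

const NULL_TASK_ID = "00000000-0000-0000-0000-000000000000";
const RESUME = "__resume__";

function* mapCommandSketch(cmd: { resume?: unknown }): Generator<[string, string, unknown]> {
  if (cmd.resume) {
    const r = cmd.resume as Record<string, unknown>;
    if (typeof r === "object" && Object.keys(r).length && Object.keys(r).every(validate)) {
      // Resume values addressed to specific interrupted tasks, keyed by task id.
      for (const [tid, value] of Object.entries(r)) yield [tid, RESUME, value];
    } else {
      // A single resume value, recorded against the null task id.
      yield [NULL_TASK_ID, RESUME, cmd.resume];
    }
  }
}

console.log([...mapCommandSketch({ resume: "approved" })]);
// -> [["00000000-0000-0000-0000-000000000000", "__resume__", "approved"]]
```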
package/dist/pregel/loop.cjs
CHANGED
@@ -14,7 +14,6 @@ const debug_js_1 = require("./debug.cjs");
 const INPUT_DONE = Symbol.for("INPUT_DONE");
 const INPUT_RESUMING = Symbol.for("INPUT_RESUMING");
 const DEFAULT_LOOP_LIMIT = 25;
-const SPECIAL_CHANNELS = [constants_js_1.ERROR, constants_js_1.INTERRUPT];
 class IterableReadableWritableStream extends stream_1.IterableReadableStream {
     constructor(params) {
         let streamControllerPromiseResolver;
@@ -200,12 +199,6 @@ class PregelLoop {
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "taskWritesLeft", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: 0
-        });
         Object.defineProperty(this, "prevCheckpointConfig", {
             enumerable: true,
             configurable: true,
@@ -293,7 +286,9 @@ class PregelLoop {
             config.configurable?.[constants_js_1.CONFIG_KEY_STREAM] !== undefined) {
             stream = createDuplexStream(stream, config.configurable[constants_js_1.CONFIG_KEY_STREAM]);
         }
-        const skipDoneTasks = config.configurable
+        const skipDoneTasks = config.configurable
+            ? !("checkpoint_id" in config.configurable)
+            : true;
         const isNested = constants_js_1.CONFIG_KEY_READ in (config.configurable ?? {});
         if (!isNested &&
             config.configurable?.checkpoint_ns !== undefined &&
@@ -407,18 +402,6 @@ class PregelLoop {
         if (writes.length === 0) {
             return;
         }
-        // adjust taskWritesLeft
-        const firstChannel = writes[0][0];
-        const anyChannelIsSend = writes.find(([channel]) => channel === constants_js_1.TASKS);
-        const alwaysSave = anyChannelIsSend || SPECIAL_CHANNELS.includes(firstChannel);
-        if (!alwaysSave && !this.taskWritesLeft) {
-            return this._outputWrites(taskId, writes);
-        }
-        else if (firstChannel !== constants_js_1.INTERRUPT) {
-            // INTERRUPT makes us want to save the last task's writes
-            // so we don't decrement tasksWritesLeft in that case
-            this.taskWritesLeft -= 1;
-        }
         // save writes
         const pendingWrites = writes.map(([key, value]) => {
             return [taskId, key, value];
@@ -435,7 +418,9 @@ class PregelLoop {
         if (putWritePromise !== undefined) {
            this.checkpointerPromises.push(putWritePromise);
         }
-        this._outputWrites(taskId, writes);
+        if (this.tasks) {
+            this._outputWrites(taskId, writes);
+        }
     }
     _outputWrites(taskId, writes, cached = false) {
         const task = this.tasks[taskId];
@@ -506,7 +491,7 @@ class PregelLoop {
             this.status = "out_of_steps";
             return false;
         }
-        const nextTasks = (0, algo_js_1._prepareNextTasks)(this.checkpoint, this.nodes, this.channels, this.managed, this.config, true, {
+        const nextTasks = (0, algo_js_1._prepareNextTasks)(this.checkpoint, this.checkpointPendingWrites, this.nodes, this.channels, this.managed, this.config, true, {
            step: this.step,
            checkpointer: this.checkpointer,
            isResuming: this.input === INPUT_RESUMING,
@@ -514,7 +499,6 @@ class PregelLoop {
            store: this.store,
        });
        this.tasks = nextTasks;
-        this.taskWritesLeft = Object.values(this.tasks).length - 1;
        // Produce debug output
        if (this.checkpointer) {
            this._emit(await (0, utils_js_1.gatherIterator)((0, utils_js_1.prefixGenerator)((0, debug_js_1.mapDebugCheckpoint)(this.step - 1, // printing checkpoint for previous step
@@ -527,7 +511,7 @@ class PregelLoop {
        // if there are pending writes from a previous loop, apply them
        if (this.skipDoneTasks && this.checkpointPendingWrites.length > 0) {
            for (const [tid, k, v] of this.checkpointPendingWrites) {
-                if (k === constants_js_1.ERROR || k === constants_js_1.INTERRUPT) {
+                if (k === constants_js_1.ERROR || k === constants_js_1.INTERRUPT || k === constants_js_1.RESUME) {
                    continue;
                }
                const task = Object.values(this.tasks).find((t) => t.id === tid);
@@ -606,14 +590,31 @@ class PregelLoop {
            // produce values output
            const valuesOutput = await (0, utils_js_1.gatherIterator)((0, utils_js_1.prefixGenerator)((0, io_js_1.mapOutputValues)(this.outputKeys, true, this.channels), "values"));
            this._emit(valuesOutput);
-
+        }
+        else if ((0, constants_js_1._isCommand)(this.input)) {
+            const writes = {};
+            // group writes by task id
+            for (const [tid, key, value] of (0, io_js_1.mapCommand)(this.input)) {
+                if (writes[tid] === undefined) {
+                    writes[tid] = [];
+                }
+                writes[tid].push([key, value]);
+            }
+            if (Object.keys(writes).length === 0) {
+                throw new errors_js_1.EmptyInputError("Received empty Command input");
+            }
+            // save writes
+            for (const [tid, ws] of Object.entries(writes)) {
+                this.putWrites(tid, ws);
+            }
        }
        else {
+            // map inputs to channel updates
            const inputWrites = await (0, utils_js_1.gatherIterator)((0, io_js_1.mapInput)(inputKeys, this.input));
            if (inputWrites.length === 0) {
                throw new errors_js_1.EmptyInputError(`Received no input writes for ${JSON.stringify(inputKeys, null, 2)}`);
            }
-            const discardTasks = (0, algo_js_1._prepareNextTasks)(this.checkpoint, this.nodes, this.channels, this.managed, this.config, true, { step: this.step });
+            const discardTasks = (0, algo_js_1._prepareNextTasks)(this.checkpoint, this.checkpointPendingWrites, this.nodes, this.channels, this.managed, this.config, true, { step: this.step });
            (0, algo_js_1._applyWrites)(this.checkpoint, this.channels, Object.values(discardTasks).concat([
                {
                    name: constants_js_1.INPUT,
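
When the loop's input is a `Command`, it no longer maps the input to channel writes; instead the tuples yielded by `mapCommand` are grouped by task id and persisted with `putWrites`, so they show up as `checkpointPendingWrites` that the next `_prepareNextTasks` call can turn into resume values. A small sketch of just that grouping step (local types only, not the package source):

```ts
// Sketch of the grouping step in the new Command branch of PregelLoop.
type ResumeWrite = [taskId: string, channel: string, value: unknown];

function groupByTask(tuples: Iterable<ResumeWrite>): Record<string, [string, unknown][]> {
  const writes: Record<string, [string, unknown][]> = {};
  for (const [tid, key, value] of tuples) {
    (writes[tid] ??= []).push([key, value]);
  }
  return writes; // each entry is later persisted with putWrites(tid, writes[tid])
}

console.log(groupByTask([["task-a", "__resume__", 1], ["task-a", "__resume__", 2]]));
// -> { "task-a": [["__resume__", 1], ["__resume__", 2]] }
```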
package/dist/pregel/loop.d.ts
CHANGED
@@ -4,12 +4,13 @@ import { IterableReadableStream } from "@langchain/core/utils/stream";
 import { BaseCheckpointSaver, Checkpoint, PendingWrite, CheckpointPendingWrite, CheckpointMetadata, All, BaseStore, AsyncBatchedStore } from "@langchain/langgraph-checkpoint";
 import { BaseChannel } from "../channels/base.js";
 import { PregelExecutableTask, StreamMode } from "./types.js";
+import { Command } from "../constants.js";
 import { PregelNode } from "./read.js";
 import { ManagedValueMapping } from "../managed/base.js";
 import { LangGraphRunnableConfig } from "./runnable_types.js";
 export type StreamChunk = [string[], StreamMode, unknown];
 export type PregelLoopInitializeParams = {
-    input?: any;
+    input?: any | Command;
     config: RunnableConfig;
     checkpointer?: BaseCheckpointSaver;
     outputKeys: string | string[];
@@ -22,7 +23,7 @@ export type PregelLoopInitializeParams = {
     checkSubgraphs?: boolean;
 };
 type PregelLoopParams = {
-    input?: any;
+    input?: any | Command;
     config: RunnableConfig;
     checkpointer?: BaseCheckpointSaver;
     checkpoint: Checkpoint;
@@ -57,7 +58,7 @@ export declare class IterableReadableWritableStream extends IterableReadableStre
     error(e: any): void;
 }
 export declare class PregelLoop {
-    protected input?: any;
+    protected input?: any | Command;
     output: any;
     config: LangGraphRunnableConfig;
     protected checkpointer?: BaseCheckpointSaver;
@@ -76,7 +77,6 @@ export declare class PregelLoop {
     protected streamKeys: string | string[];
     protected nodes: Record<string, PregelNode>;
     protected skipDoneTasks: boolean;
-    protected taskWritesLeft: number;
     protected prevCheckpointConfig: RunnableConfig | undefined;
     status: "pending" | "done" | "interrupt_before" | "interrupt_after" | "out_of_steps";
     tasks: Record<string, PregelExecutableTask<any, any>>;
package/dist/pregel/loop.js
CHANGED
@@ -1,17 +1,16 @@
 import { IterableReadableStream } from "@langchain/core/utils/stream";
 import { copyCheckpoint, emptyCheckpoint, AsyncBatchedStore, } from "@langchain/langgraph-checkpoint";
 import { createCheckpoint, emptyChannels, } from "../channels/base.js";
-import { CHECKPOINT_NAMESPACE_SEPARATOR, CONFIG_KEY_CHECKPOINT_MAP, CONFIG_KEY_READ, CONFIG_KEY_RESUMING, CONFIG_KEY_STREAM, ERROR, INPUT, INTERRUPT,
+import { _isCommand, CHECKPOINT_NAMESPACE_SEPARATOR, CONFIG_KEY_CHECKPOINT_MAP, CONFIG_KEY_READ, CONFIG_KEY_RESUMING, CONFIG_KEY_STREAM, ERROR, INPUT, INTERRUPT, RESUME, TAG_HIDDEN, } from "../constants.js";
 import { _applyWrites, _prepareNextTasks, increment, shouldInterrupt, } from "./algo.js";
 import { gatherIterator, gatherIteratorSync, prefixGenerator, } from "../utils.js";
-import { mapInput, mapOutputUpdates, mapOutputValues, readChannels, } from "./io.js";
+import { mapCommand, mapInput, mapOutputUpdates, mapOutputValues, readChannels, } from "./io.js";
 import { getSubgraphsSeenSet, EmptyInputError, GraphInterrupt, isGraphInterrupt, MultipleSubgraphsError, } from "../errors.js";
 import { getNewChannelVersions, patchConfigurable } from "./utils/index.js";
 import { mapDebugTasks, mapDebugCheckpoint, mapDebugTaskResults, } from "./debug.js";
 const INPUT_DONE = Symbol.for("INPUT_DONE");
 const INPUT_RESUMING = Symbol.for("INPUT_RESUMING");
 const DEFAULT_LOOP_LIMIT = 25;
-const SPECIAL_CHANNELS = [ERROR, INTERRUPT];
 export class IterableReadableWritableStream extends IterableReadableStream {
     constructor(params) {
         let streamControllerPromiseResolver;
@@ -196,12 +195,6 @@ export class PregelLoop {
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "taskWritesLeft", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: 0
-        });
         Object.defineProperty(this, "prevCheckpointConfig", {
             enumerable: true,
             configurable: true,
@@ -289,7 +282,9 @@ export class PregelLoop {
             config.configurable?.[CONFIG_KEY_STREAM] !== undefined) {
             stream = createDuplexStream(stream, config.configurable[CONFIG_KEY_STREAM]);
         }
-        const skipDoneTasks = config.configurable
+        const skipDoneTasks = config.configurable
+            ? !("checkpoint_id" in config.configurable)
+            : true;
         const isNested = CONFIG_KEY_READ in (config.configurable ?? {});
         if (!isNested &&
             config.configurable?.checkpoint_ns !== undefined &&
@@ -403,18 +398,6 @@ export class PregelLoop {
         if (writes.length === 0) {
             return;
         }
-        // adjust taskWritesLeft
-        const firstChannel = writes[0][0];
-        const anyChannelIsSend = writes.find(([channel]) => channel === TASKS);
-        const alwaysSave = anyChannelIsSend || SPECIAL_CHANNELS.includes(firstChannel);
-        if (!alwaysSave && !this.taskWritesLeft) {
-            return this._outputWrites(taskId, writes);
-        }
-        else if (firstChannel !== INTERRUPT) {
-            // INTERRUPT makes us want to save the last task's writes
-            // so we don't decrement tasksWritesLeft in that case
-            this.taskWritesLeft -= 1;
-        }
         // save writes
         const pendingWrites = writes.map(([key, value]) => {
             return [taskId, key, value];
@@ -431,7 +414,9 @@ export class PregelLoop {
         if (putWritePromise !== undefined) {
             this.checkpointerPromises.push(putWritePromise);
         }
-        this.
+        if (this.tasks) {
+            this._outputWrites(taskId, writes);
+        }
     }
     _outputWrites(taskId, writes, cached = false) {
         const task = this.tasks[taskId];
@@ -502,7 +487,7 @@ export class PregelLoop {
             this.status = "out_of_steps";
             return false;
         }
-        const nextTasks = _prepareNextTasks(this.checkpoint, this.nodes, this.channels, this.managed, this.config, true, {
+        const nextTasks = _prepareNextTasks(this.checkpoint, this.checkpointPendingWrites, this.nodes, this.channels, this.managed, this.config, true, {
            step: this.step,
            checkpointer: this.checkpointer,
            isResuming: this.input === INPUT_RESUMING,
@@ -510,7 +495,6 @@ export class PregelLoop {
            store: this.store,
        });
        this.tasks = nextTasks;
-       this.taskWritesLeft = Object.values(this.tasks).length - 1;
        // Produce debug output
        if (this.checkpointer) {
            this._emit(await gatherIterator(prefixGenerator(mapDebugCheckpoint(this.step - 1, // printing checkpoint for previous step
@@ -523,7 +507,7 @@ export class PregelLoop {
        // if there are pending writes from a previous loop, apply them
        if (this.skipDoneTasks && this.checkpointPendingWrites.length > 0) {
            for (const [tid, k, v] of this.checkpointPendingWrites) {
-               if (k === ERROR || k === INTERRUPT) {
+               if (k === ERROR || k === INTERRUPT || k === RESUME) {
                    continue;
                }
                const task = Object.values(this.tasks).find((t) => t.id === tid);
@@ -602,14 +586,31 @@ export class PregelLoop {
            // produce values output
            const valuesOutput = await gatherIterator(prefixGenerator(mapOutputValues(this.outputKeys, true, this.channels), "values"));
            this._emit(valuesOutput);
-
+        }
+        else if (_isCommand(this.input)) {
+            const writes = {};
+            // group writes by task id
+            for (const [tid, key, value] of mapCommand(this.input)) {
+                if (writes[tid] === undefined) {
+                    writes[tid] = [];
+                }
+                writes[tid].push([key, value]);
+            }
+            if (Object.keys(writes).length === 0) {
+                throw new EmptyInputError("Received empty Command input");
+            }
+            // save writes
+            for (const [tid, ws] of Object.entries(writes)) {
+                this.putWrites(tid, ws);
+            }
         }
         else {
+            // map inputs to channel updates
            const inputWrites = await gatherIterator(mapInput(inputKeys, this.input));
            if (inputWrites.length === 0) {
                throw new EmptyInputError(`Received no input writes for ${JSON.stringify(inputKeys, null, 2)}`);
            }
-           const discardTasks = _prepareNextTasks(this.checkpoint, this.nodes, this.channels, this.managed, this.config, true, { step: this.step });
+           const discardTasks = _prepareNextTasks(this.checkpoint, this.checkpointPendingWrites, this.nodes, this.channels, this.managed, this.config, true, { step: this.step });
            _applyWrites(this.checkpoint, this.channels, Object.values(discardTasks).concat([
                {
                    name: INPUT,
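The new `_isCommand` branch above converts a `Command` input into `[taskId, channel, value]` triples via `mapCommand` and groups them by task before handing each group to `putWrites`. Below is a standalone sketch of just that grouping step; the triples are invented for illustration (in the library they come from `mapCommand(this.input)`):

// Sketch of the grouping step; task ids, channel names, and values are made up.
type PendingWrite = [channel: string, value: unknown];
type WriteTriple = [taskId: string, channel: string, value: unknown];

const triples: WriteTriple[] = [
  ["task-a", "__resume__", "approved"],
  ["task-b", "__resume__", "rejected"],
];

const writesByTask: Record<string, PendingWrite[]> = {};
for (const [taskId, channel, value] of triples) {
  // Group each triple under its originating task id.
  (writesByTask[taskId] ??= []).push([channel, value]);
}
if (Object.keys(writesByTask).length === 0) {
  throw new Error("Received empty Command input");
}
// Each entry would then be persisted via putWrites(taskId, writes).
console.log(writesByTask);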
package/dist/pregel/retry.cjs
CHANGED
@@ -103,7 +103,7 @@ pregelTask, retryPolicy) {
         catch (e) {
             error = e;
             error.pregelTaskId = pregelTask.id;
-            if ((0, errors_js_1.
+            if ((0, errors_js_1.isGraphBubbleUp)(error)) {
                 break;
             }
             if (resolvedRetryPolicy === undefined) {
package/dist/pregel/retry.js
CHANGED
@@ -1,4 +1,4 @@
-import { getSubgraphsSeenSet,
+import { getSubgraphsSeenSet, isGraphBubbleUp } from "../errors.js";
 export const DEFAULT_INITIAL_INTERVAL = 500;
 export const DEFAULT_BACKOFF_FACTOR = 2;
 export const DEFAULT_MAX_INTERVAL = 128000;
@@ -99,7 +99,7 @@ pregelTask, retryPolicy) {
         catch (e) {
             error = e;
             error.pregelTaskId = pregelTask.id;
-            if (
+            if (isGraphBubbleUp(error)) {
                 break;
             }
             if (resolvedRetryPolicy === undefined) {
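In both retry builds the guard inside the catch block is now `isGraphBubbleUp(error)`, so control-flow signals such as graph interrupts escape the retry loop immediately instead of being retried like ordinary failures. A generic sketch of that pattern; `ControlFlowSignal`, `isControlFlowSignal`, and `withRetry` are stand-ins invented for illustration, not part of this package's API:

// A marker error type standing in for the library's "bubble up" errors.
class ControlFlowSignal extends Error {}

const isControlFlowSignal = (e: unknown): e is ControlFlowSignal =>
  e instanceof ControlFlowSignal;

async function withRetry<T>(fn: () => Promise<T>, maxAttempts = 3): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= maxAttempts; attempt += 1) {
    try {
      return await fn();
    } catch (e) {
      lastError = e;
      // Control-flow signals are not failures: re-throw instead of retrying.
      if (isControlFlowSignal(e)) throw e;
    }
  }
  throw lastError;
}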
package/dist/web.cjs
CHANGED
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessagesAnnotation = exports.InMemoryStore = exports.AsyncBatchedStore = exports.BaseStore = exports.BaseCheckpointSaver = exports.emptyCheckpoint = exports.copyCheckpoint = exports.MemorySaver = exports.Send = exports.BinaryOperatorAggregate = exports.BaseChannel = exports.Annotation = exports.messagesStateReducer = exports.MessageGraph = exports.CompiledStateGraph = exports.StateGraph = exports.START = exports.Graph = exports.END = void 0;
+exports.MessagesAnnotation = exports.InMemoryStore = exports.AsyncBatchedStore = exports.BaseStore = exports.BaseCheckpointSaver = exports.emptyCheckpoint = exports.copyCheckpoint = exports.MemorySaver = exports.interrupt = exports.Command = exports.Send = exports.BinaryOperatorAggregate = exports.BaseChannel = exports.Annotation = exports.messagesStateReducer = exports.MessageGraph = exports.CompiledStateGraph = exports.StateGraph = exports.START = exports.Graph = exports.END = void 0;
 var index_js_1 = require("./graph/index.cjs");
 Object.defineProperty(exports, "END", { enumerable: true, get: function () { return index_js_1.END; } });
 Object.defineProperty(exports, "Graph", { enumerable: true, get: function () { return index_js_1.Graph; } });
@@ -30,6 +30,9 @@ Object.defineProperty(exports, "BaseChannel", { enumerable: true, get: function
 Object.defineProperty(exports, "BinaryOperatorAggregate", { enumerable: true, get: function () { return index_js_2.BinaryOperatorAggregate; } });
 var constants_js_1 = require("./constants.cjs");
 Object.defineProperty(exports, "Send", { enumerable: true, get: function () { return constants_js_1.Send; } });
+Object.defineProperty(exports, "Command", { enumerable: true, get: function () { return constants_js_1.Command; } });
+var interrupt_js_1 = require("./interrupt.cjs");
+Object.defineProperty(exports, "interrupt", { enumerable: true, get: function () { return interrupt_js_1.interrupt; } });
 var langgraph_checkpoint_1 = require("@langchain/langgraph-checkpoint");
 Object.defineProperty(exports, "MemorySaver", { enumerable: true, get: function () { return langgraph_checkpoint_1.MemorySaver; } });
 Object.defineProperty(exports, "copyCheckpoint", { enumerable: true, get: function () { return langgraph_checkpoint_1.copyCheckpoint; } });
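With the CJS build re-exporting `Command` and the new `interrupt` helper, CommonJS consumers of the web entrypoint can reach both with a plain require. A small sketch, assuming the package's exports map routes the `./web` subpath to `dist/web.cjs` for require (as it does for import):

// CommonJS sketch; run under Node with the package installed.
const { Command, interrupt } = require("@langchain/langgraph/web");

console.log(typeof Command);   // "function" (class constructor)
console.log(typeof interrupt); // "function"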
package/dist/web.d.ts
CHANGED
@@ -4,7 +4,8 @@ export * from "./errors.js";
 export { BaseChannel, type BinaryOperator, BinaryOperatorAggregate, type AnyValue, type WaitForNames, type DynamicBarrierValue, type LastValue, type NamedBarrierValue, type Topic, } from "./channels/index.js";
 export { type AnnotationRoot as _INTERNAL_ANNOTATION_ROOT } from "./graph/index.js";
 export { type RetryPolicy } from "./pregel/utils/index.js";
-export { Send } from "./constants.js";
+export { Send, Command, type Interrupt } from "./constants.js";
+export { interrupt } from "./interrupt.js";
 export { MemorySaver, type Checkpoint, type CheckpointMetadata, type CheckpointTuple, copyCheckpoint, emptyCheckpoint, BaseCheckpointSaver, type Item, type GetOperation, type SearchOperation, type PutOperation, type Operation, type OperationResults, BaseStore, AsyncBatchedStore, InMemoryStore, type NameSpacePath, type NamespaceMatchType, type MatchCondition, type ListNamespacesOperation, } from "@langchain/langgraph-checkpoint";
 export * from "./managed/index.js";
 export { MessagesAnnotation } from "./graph/messages_annotation.js";
package/dist/web.js
CHANGED
@@ -1,7 +1,8 @@
 export { END, Graph, START, StateGraph, CompiledStateGraph, MessageGraph, messagesStateReducer, Annotation, } from "./graph/index.js";
 export * from "./errors.js";
 export { BaseChannel, BinaryOperatorAggregate, } from "./channels/index.js";
-export { Send } from "./constants.js";
+export { Send, Command } from "./constants.js";
+export { interrupt } from "./interrupt.js";
 export { MemorySaver, copyCheckpoint, emptyCheckpoint, BaseCheckpointSaver, BaseStore, AsyncBatchedStore, InMemoryStore, } from "@langchain/langgraph-checkpoint";
 export * from "./managed/index.js";
 export { MessagesAnnotation } from "./graph/messages_annotation.js";
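Taken together, the newly exported `interrupt` helper and `Command` class point at a pause/resume flow: a node calls `interrupt(...)` to suspend the run at a checkpoint, and the caller resumes the same thread by invoking the graph again with `new Command({ resume: ... })`. A hedged end-to-end sketch, assuming the semantics match the human-in-the-loop pattern this library documents; the state shape, node name, and thread id are invented for illustration:

import {
  Annotation,
  Command,
  END,
  interrupt,
  MemorySaver,
  START,
  StateGraph,
} from "@langchain/langgraph/web";

const StateAnnotation = Annotation.Root({
  draft: Annotation<string>(),
  approved: Annotation<boolean>(),
});

const graph = new StateGraph(StateAnnotation)
  .addNode("review", (state) => {
    // interrupt() suspends the run; the value passed in is surfaced to the caller.
    const decision = interrupt({ question: `Approve "${state.draft}"?` });
    return { approved: decision === "yes" };
  })
  .addEdge(START, "review")
  .addEdge("review", END)
  .compile({ checkpointer: new MemorySaver() });

const config = { configurable: { thread_id: "review-1" } };
// First call runs until the interrupt and stops at the checkpoint.
await graph.invoke({ draft: "release notes" }, config);
// Resuming the same thread feeds the resume value back as interrupt()'s return value.
const final = await graph.invoke(new Command({ resume: "yes" }), config);
console.log(final);

Depending on the exact typings shipped in this version, the `Command` input may need a cast when passed to `invoke`.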
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph",
-  "version": "0.2.
+  "version": "0.2.23",
   "description": "LangGraph",
   "type": "module",
   "engines": {
@@ -72,7 +72,7 @@
     "pg": "^8.13.0",
     "prettier": "^2.8.3",
     "release-it": "^17.6.0",
-    "rollup": "^4.
+    "rollup": "^4.22.4",
     "ts-jest": "^29.1.0",
     "tsx": "^4.7.0",
     "typescript": "^4.9.5 || ^5.4.5",