@langchain/langgraph 0.2.24 → 0.2.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -84,6 +84,8 @@ export declare function _isSend(x: unknown): x is Send;
 export type Interrupt = {
     value: any;
     when: "during";
+    resumable?: boolean;
+    ns?: string[];
 };
 export declare class Command<R = unknown> {
     lg_name: string;
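The `Interrupt` type gains an optional `resumable` flag and an `ns` field carrying the checkpoint-namespace segments of the interrupted subgraph. A rough sketch of the new shape; the payload and namespace segments below are made-up examples, not output from the package:

// Sketch only: an object matching the Interrupt type declared above.
const exampleInterrupt = {
  value: { question: "Approve this plan?" },   // whatever was passed to interrupt()
  when: "during" as const,
  resumable: true,                             // new optional flag
  ns: ["parent_node:1234", "child_node:5678"], // checkpoint_ns split on "|"
};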
@@ -62,17 +62,17 @@ export declare class Graph<N extends string = typeof END, RunInput = any, RunOut
     }): CompiledGraph<N>;
     validate(interrupt?: string[]): void;
 }
-export declare class CompiledGraph<N extends string, RunInput = any, RunOutput = any, ConfigurableFieldType extends Record<string, any> = Record<string, any>> extends Pregel<Record<N | typeof START, PregelNode<RunInput, RunOutput>>, Record<N | typeof START | typeof END | string, BaseChannel>, ConfigurableFieldType & Record<string, any>> {
+export declare class CompiledGraph<N extends string, State = any, Update = any, ConfigurableFieldType extends Record<string, any> = Record<string, any>> extends Pregel<Record<N | typeof START, PregelNode<State, Update>>, Record<N | typeof START | typeof END | string, BaseChannel>, ConfigurableFieldType & Record<string, any>, Update, State> {
     NodeType: N;
-    RunInput: RunInput;
-    RunOutput: RunOutput;
-    builder: Graph<N, RunInput, RunOutput>;
+    RunInput: State;
+    RunOutput: Update;
+    builder: Graph<N, State, Update>;
     constructor({ builder, ...rest }: {
-        builder: Graph<N, RunInput, RunOutput>;
-    } & PregelParams<Record<N | typeof START, PregelNode<RunInput, RunOutput>>, Record<N | typeof START | typeof END | string, BaseChannel>>);
-    attachNode(key: N, node: NodeSpec<RunInput, RunOutput>): void;
+        builder: Graph<N, State, Update>;
+    } & PregelParams<Record<N | typeof START, PregelNode<State, Update>>, Record<N | typeof START | typeof END | string, BaseChannel>>);
+    attachNode(key: N, node: NodeSpec<State, Update>): void;
     attachEdge(start: N | typeof START, end: N | typeof END): void;
-    attachBranch(start: N | typeof START, name: string, branch: Branch<RunInput, N>): void;
+    attachBranch(start: N | typeof START, name: string, branch: Branch<State, N>): void;
     /**
      * Returns a drawable representation of the computation graph.
      */
@@ -14,7 +14,14 @@ function interrupt(value) {
         return resume;
     }
     else {
-        throw new errors_js_1.GraphInterrupt([{ value, when: "during" }]);
+        throw new errors_js_1.GraphInterrupt([
+            {
+                value,
+                when: "during",
+                resumable: true,
+                ns: config.configurable?.checkpoint_ns?.split("|"),
+            },
+        ]);
     }
 }
 exports.interrupt = interrupt;
package/dist/interrupt.js CHANGED
@@ -11,6 +11,13 @@ export function interrupt(value) {
         return resume;
     }
     else {
-        throw new GraphInterrupt([{ value, when: "during" }]);
+        throw new GraphInterrupt([
+            {
+                value,
+                when: "during",
+                resumable: true,
+                ns: config.configurable?.checkpoint_ns?.split("|"),
+            },
+        ]);
     }
 }
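The thrown interrupt now records `resumable: true` plus the `"|"`-split checkpoint namespace, which is what allows a paused run to be resumed in place. For context, a minimal sketch of the public pattern this supports, assuming the usual root exports (`interrupt`, `Command`, `Annotation`, `MemorySaver`); the state shape, node, and thread id are illustrative:

import { Annotation, Command, MemorySaver, StateGraph, START, interrupt } from "@langchain/langgraph";

// Hypothetical approval flow used only to illustrate interrupt/resume.
const State = Annotation.Root({
  plan: Annotation<string>,
  approved: Annotation<boolean>,
});

const review = (state: typeof State.State) => {
  // Pauses the run and surfaces the plan; on resume, returns the Command's resume value.
  const decision = interrupt({ plan: state.plan });
  return { approved: decision === "yes" };
};

const app = new StateGraph(State)
  .addNode("review", review)
  .addEdge(START, "review")
  .compile({ checkpointer: new MemorySaver() });

const config = { configurable: { thread_id: "example" } };
await app.invoke({ plan: "ship it" }, config);            // pauses at the interrupt
await app.invoke(new Command({ resume: "yes" }), config); // resumes and sets approved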
@@ -5,6 +5,7 @@ const messages_1 = require("@langchain/core/messages");
 const runnables_1 = require("@langchain/core/runnables");
 const prompts_1 = require("@langchain/core/prompts");
 const index_js_1 = require("../graph/index.cjs");
+const messages_annotation_js_1 = require("../graph/messages_annotation.cjs");
 const tool_node_js_1 = require("./tool_node.cjs");
 /**
  * Creates a StateGraph agent that relies on a chat model utilizing tool calling.
@@ -58,12 +59,6 @@ const tool_node_js_1 = require("./tool_node.cjs");
  */
 function createReactAgent(params) {
     const { llm, tools, messageModifier, checkpointSaver, interruptBefore, interruptAfter, } = params;
-    const schema = {
-        messages: {
-            value: index_js_1.messagesStateReducer,
-            default: () => [],
-        },
-    };
     let toolClasses;
     if (!Array.isArray(tools)) {
         toolClasses = tools.tools;
@@ -92,9 +87,7 @@ function createReactAgent(params) {
         // TODO: Auto-promote streaming.
         return { messages: [await modelRunnable.invoke(messages, config)] };
     };
-    const workflow = new index_js_1.StateGraph({
-        channels: schema,
-    })
+    const workflow = new index_js_1.StateGraph(messages_annotation_js_1.MessagesAnnotation)
         .addNode("agent", callModel)
         .addNode("tools", new tool_node_js_1.ToolNode(toolClasses))
         .addEdge(index_js_1.START, "agent")
@@ -69,4 +69,4 @@ export type CreateReactAgentParams = {
  * // Returns the messages in the state at each step of execution
  * ```
  */
-export declare function createReactAgent(params: CreateReactAgentParams): CompiledStateGraph<AgentState, Partial<AgentState>, typeof START | "agent" | "tools">;
+export declare function createReactAgent(params: CreateReactAgentParams): CompiledStateGraph<(typeof MessagesAnnotation)["State"], (typeof MessagesAnnotation)["Update"], typeof START | "agent" | "tools">;
@@ -1,7 +1,8 @@
 import { isAIMessage, SystemMessage, } from "@langchain/core/messages";
 import { Runnable, RunnableLambda, } from "@langchain/core/runnables";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
-import { END, messagesStateReducer, START, StateGraph, } from "../graph/index.js";
+import { END, START, StateGraph } from "../graph/index.js";
+import { MessagesAnnotation } from "../graph/messages_annotation.js";
 import { ToolNode } from "./tool_node.js";
 /**
  * Creates a StateGraph agent that relies on a chat model utilizing tool calling.
@@ -55,12 +56,6 @@ import { ToolNode } from "./tool_node.js";
  */
 export function createReactAgent(params) {
     const { llm, tools, messageModifier, checkpointSaver, interruptBefore, interruptAfter, } = params;
-    const schema = {
-        messages: {
-            value: messagesStateReducer,
-            default: () => [],
-        },
-    };
     let toolClasses;
     if (!Array.isArray(tools)) {
         toolClasses = tools.tools;
@@ -89,9 +84,7 @@ export function createReactAgent(params) {
         // TODO: Auto-promote streaming.
         return { messages: [await modelRunnable.invoke(messages, config)] };
     };
-    const workflow = new StateGraph({
-        channels: schema,
-    })
+    const workflow = new StateGraph(MessagesAnnotation)
         .addNode("agent", callModel)
         .addNode("tools", new ToolNode(toolClasses))
         .addEdge(START, "agent")
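createReactAgent now builds its graph on the shared `MessagesAnnotation` instead of a hand-rolled `messages` channel schema, so its state and update types derive from that annotation. A minimal sketch of the same pattern in user code, assuming `MessagesAnnotation` is re-exported from the package root; the node body is a placeholder, not the agent's real model call:

import { StateGraph, MessagesAnnotation, START } from "@langchain/langgraph";
import { AIMessage } from "@langchain/core/messages";

// Placeholder node; the real agent invokes the tool-calling model here.
const callModel = async (state: typeof MessagesAnnotation.State) => {
  return { messages: [new AIMessage("placeholder response")] };
};

const workflow = new StateGraph(MessagesAnnotation)
  .addNode("agent", callModel)
  .addEdge(START, "agent");

const app = workflow.compile();
// Input and output are now typed from MessagesAnnotation, i.e. { messages: BaseMessage[] }.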
@@ -94,9 +94,7 @@ function* mapDebugTaskResults(step, tasks, streamChannels) {
                     ? streamChannels.includes(channel)
                     : channel === streamChannels;
             }),
-            interrupts: writes.filter(([channel]) => {
-                return channel === constants_js_1.INTERRUPT;
-            }),
+            interrupts: writes.filter((w) => w[0] === constants_js_1.INTERRUPT).map((w) => w[1]),
         },
     };
 }
@@ -23,7 +23,7 @@ export declare function mapDebugTaskResults<N extends PropertyKey, C extends Pro
         id: string;
         name: N;
         result: PendingWrite<C>[];
-        interrupts: PendingWrite<C>[];
+        interrupts: unknown[];
     };
 }, void, unknown>;
 export declare function mapDebugCheckpoint<N extends PropertyKey, C extends PropertyKey>(step: number, config: RunnableConfig, channels: Record<string, BaseChannel>, streamChannels: string | string[], metadata: CheckpointMetadata, tasks: readonly PregelExecutableTask<N, C>[], pendingWrites: CheckpointPendingWrite[], parentConfig: RunnableConfig | undefined): Generator<{
@@ -89,9 +89,7 @@ export function* mapDebugTaskResults(step, tasks, streamChannels) {
                     ? streamChannels.includes(channel)
                     : channel === streamChannels;
             }),
-            interrupts: writes.filter(([channel]) => {
-                return channel === INTERRUPT;
-            }),
+            interrupts: writes.filter((w) => w[0] === INTERRUPT).map((w) => w[1]),
         },
     };
 }
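In the debug stream, task writes are `[channel, value]` pairs; the `interrupts` entry now surfaces just the interrupt payloads rather than the raw pairs. A small sketch of the transformation; the channel constant and payload are illustrative, not the package's own values:

// Assumed channel name for illustration; the package defines its own INTERRUPT constant.
const INTERRUPT = "__interrupt__";

const writes: [string, unknown][] = [
  ["messages", { messages: [] }],
  [INTERRUPT, { value: "pause here", when: "during" }],
];

// Before: interrupts kept the whole [channel, value] tuples.
// After: only the interrupt payloads are emitted.
const interrupts = writes.filter((w) => w[0] === INTERRUPT).map((w) => w[1]);
// -> [{ value: "pause here", when: "during" }]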
@@ -22,7 +22,7 @@ export declare class Channel {
     static writeTo(channels: string[], kwargs?: Record<string, WriteValue>): ChannelWrite;
 }
 export type { PregelInputType, PregelOutputType, PregelOptions };
-export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel | ManagedValueSpec>, ConfigurableFieldType extends Record<string, any> = StrRecord<string, any>> extends Runnable<PregelInputType, PregelOutputType, PregelOptions<Nn, Cc, ConfigurableFieldType>> implements PregelInterface<Nn, Cc, ConfigurableFieldType>, PregelParams<Nn, Cc> {
+export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel | ManagedValueSpec>, ConfigurableFieldType extends Record<string, any> = StrRecord<string, any>, InputType = PregelInputType, OutputType = PregelOutputType> extends Runnable<InputType | Command | null, OutputType, PregelOptions<Nn, Cc, ConfigurableFieldType>> implements PregelInterface<Nn, Cc, ConfigurableFieldType>, PregelParams<Nn, Cc> {
     static lc_name(): string;
     lc_namespace: string[];
     lg_is_pregel: boolean;
@@ -103,7 +103,7 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
      * @param options.interruptAfter Nodes to interrupt after.
      * @param options.debug Whether to print debug information during execution.
      */
-    stream(input: PregelInputType | Command, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<IterableReadableStream<PregelOutputType>>;
+    stream(input: InputType | Command | null, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<IterableReadableStream<PregelOutputType>>;
     protected prepareSpecs(config: RunnableConfig, options?: {
         skipManaged?: boolean;
     }): Promise<{
@@ -126,5 +126,5 @@ export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends
      * @param options.interruptAfter Nodes to interrupt after.
      * @param options.debug Whether to print debug information during execution.
      */
-    invoke(input: PregelInputType | Command, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<PregelOutputType>;
+    invoke(input: InputType | Command | null, options?: Partial<PregelOptions<Nn, Cc, ConfigurableFieldType>>): Promise<OutputType>;
 }
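Pregel now threads explicit `InputType`/`OutputType` generics through `invoke` and `stream`, and `CompiledGraph` passes its `Update`/`State` types into them (see the CompiledGraph change above). A small sketch of what that means at the call site; the state shape and values are illustrative:

import { Annotation, StateGraph, START } from "@langchain/langgraph";

const State = Annotation.Root({ count: Annotation<number> });

const app = new StateGraph(State)
  .addNode("bump", (s) => ({ count: (s.count ?? 0) + 1 }))
  .addEdge(START, "bump")
  .compile();

// invoke() now accepts the update type (or a Command, or null) and returns the
// state type, so `result.count` is a number without a cast.
const result = await app.invoke({ count: 1 });
console.log(result.count); // 2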
@@ -68,7 +68,7 @@ class StreamMessagesHandler extends base_1.BaseCallbackHandler {
             // Include legacy LangGraph SDK tag
             (!tags || !(tags.includes(constants_js_1.TAG_NOSTREAM) && tags.includes("nostream")))) {
             this.metadatas[runId] = [
-                metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                metadata.langgraph_checkpoint_ns.split("|"),
                 { tags, name, ...metadata },
             ];
         }
@@ -105,7 +105,7 @@ class StreamMessagesHandler extends base_1.BaseCallbackHandler {
             name === metadata.langgraph_node &&
             (tags === undefined || !tags.includes(constants_js_1.TAG_HIDDEN))) {
             this.metadatas[runId] = [
-                metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                metadata.langgraph_checkpoint_ns.split("|"),
                 { tags, name, ...metadata },
             ];
         }
@@ -65,7 +65,7 @@ export class StreamMessagesHandler extends BaseCallbackHandler {
             // Include legacy LangGraph SDK tag
             (!tags || !(tags.includes(TAG_NOSTREAM) && tags.includes("nostream")))) {
             this.metadatas[runId] = [
-                metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                metadata.langgraph_checkpoint_ns.split("|"),
                 { tags, name, ...metadata },
             ];
         }
@@ -102,7 +102,7 @@ export class StreamMessagesHandler extends BaseCallbackHandler {
             name === metadata.langgraph_node &&
             (tags === undefined || !tags.includes(TAG_HIDDEN))) {
             this.metadatas[runId] = [
-                metadata.langgraph_checkpoint_ns.split("NS_SEP"),
+                metadata.langgraph_checkpoint_ns.split("|"),
                 { tags, name, ...metadata },
             ];
         }
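The streaming callback handler previously split `langgraph_checkpoint_ns` on the literal string "NS_SEP", which never matched; namespaces are actually "|"-delimited, as the interrupt change above also relies on. A quick illustration with a made-up namespace value:

const checkpoint_ns = "parent_node:1234|child_node:5678"; // illustrative value

checkpoint_ns.split("NS_SEP"); // ["parent_node:1234|child_node:5678"]   (old: no split)
checkpoint_ns.split("|");      // ["parent_node:1234", "child_node:5678"] (fixed)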
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph",
-  "version": "0.2.24",
+  "version": "0.2.26",
   "description": "LangGraph",
   "type": "module",
   "engines": {
@@ -43,7 +43,7 @@
     "@jest/globals": "^29.5.0",
     "@langchain/anthropic": "^0.3.5",
     "@langchain/community": "^0.3.9",
-    "@langchain/core": "^0.3.16",
+    "@langchain/core": "^0.3.22",
     "@langchain/langgraph-checkpoint-postgres": "workspace:*",
     "@langchain/langgraph-checkpoint-sqlite": "workspace:*",
     "@langchain/openai": "^0.3.11",