@langchain/langgraph 0.0.30 → 0.0.32

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (44)
  1. package/README.md +75 -28
  2. package/dist/channels/base.cjs +14 -0
  3. package/dist/channels/base.d.ts +2 -0
  4. package/dist/channels/base.js +14 -0
  5. package/dist/graph/message.d.ts +1 -1
  6. package/dist/graph/state.cjs +36 -2
  7. package/dist/graph/state.d.ts +23 -9
  8. package/dist/graph/state.js +34 -1
  9. package/dist/index.cjs +2 -1
  10. package/dist/index.js +2 -1
  11. package/dist/prebuilt/agent_executor.d.ts +1 -1
  12. package/dist/pregel/index.cjs +26 -21
  13. package/dist/pregel/index.js +26 -21
  14. package/package.json +9 -16
  15. package/dist/tests/channels.test.d.ts +0 -1
  16. package/dist/tests/channels.test.js +0 -151
  17. package/dist/tests/chatbot.int.test.d.ts +0 -1
  18. package/dist/tests/chatbot.int.test.js +0 -66
  19. package/dist/tests/checkpoints.test.d.ts +0 -1
  20. package/dist/tests/checkpoints.test.js +0 -178
  21. package/dist/tests/diagrams.test.d.ts +0 -1
  22. package/dist/tests/diagrams.test.js +0 -25
  23. package/dist/tests/graph.test.d.ts +0 -1
  24. package/dist/tests/graph.test.js +0 -33
  25. package/dist/tests/prebuilt.int.test.d.ts +0 -1
  26. package/dist/tests/prebuilt.int.test.js +0 -207
  27. package/dist/tests/prebuilt.test.d.ts +0 -1
  28. package/dist/tests/prebuilt.test.js +0 -427
  29. package/dist/tests/pregel.io.test.d.ts +0 -1
  30. package/dist/tests/pregel.io.test.js +0 -332
  31. package/dist/tests/pregel.read.test.d.ts +0 -1
  32. package/dist/tests/pregel.read.test.js +0 -109
  33. package/dist/tests/pregel.test.d.ts +0 -1
  34. package/dist/tests/pregel.test.js +0 -1882
  35. package/dist/tests/pregel.validate.test.d.ts +0 -1
  36. package/dist/tests/pregel.validate.test.js +0 -198
  37. package/dist/tests/pregel.write.test.d.ts +0 -1
  38. package/dist/tests/pregel.write.test.js +0 -44
  39. package/dist/tests/tracing.int.test.d.ts +0 -1
  40. package/dist/tests/tracing.int.test.js +0 -450
  41. package/dist/tests/tracing.test.d.ts +0 -1
  42. package/dist/tests/tracing.test.js +0 -332
  43. package/dist/tests/utils.d.ts +0 -53
  44. package/dist/tests/utils.js +0 -167
package/dist/tests/tracing.test.js
@@ -1,332 +0,0 @@
- import { expect, it } from "@jest/globals";
- import { AIMessage, HumanMessage } from "@langchain/core/messages";
- import { FakeToolCallingChatModel } from "./utils.js";
- // Import from main `@langchain/langgraph` endpoint to turn on automatic config passing
- import { END, START, StateGraph } from "../index.js";
- it("should pass config through if importing from the primary entrypoint", async () => {
- const stateGraph = new StateGraph({
- channels: { messages: null },
- });
- const graph = stateGraph
- .addNode("testnode", async (_) => {
- const model = new FakeToolCallingChatModel({
- responses: [new AIMessage("hey!")],
- }).withConfig({ runName: "model_call" });
- // Don't explicitly pass config here
- const res = await model.invoke("hello!");
- return { messages: [res] };
- })
- .addEdge(START, "testnode")
- .addConditionalEdges("testnode", async (_state) => {
- const model = new FakeToolCallingChatModel({
- responses: [new AIMessage("hey!")],
- }).withConfig({ runName: "conditional_edge_call" });
- await model.invoke("testing but should be traced");
- return END;
- })
- .compile();
- const eventStream = graph.streamEvents({ messages: [] }, { version: "v2" });
- const events = [];
- for await (const event of eventStream) {
- events.push(event);
- }
- expect(events).toEqual([
- {
- event: "on_chain_start",
- data: {
- input: {
- messages: [],
- },
- },
- name: "LangGraph",
- tags: [],
- run_id: expect.any(String),
- metadata: {},
- },
- {
- event: "on_chain_start",
- data: {
- input: {
- messages: [],
- },
- },
- name: "__start__",
- tags: ["graph:step:0", "langsmith:hidden"],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "__start__",
- langgraph_step: 0,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: { output: undefined },
- input: {
- messages: [],
- },
- },
- run_id: expect.any(String),
- name: "__start__",
- tags: ["graph:step:0", "langsmith:hidden"],
- metadata: {
- langgraph_node: "__start__",
- langgraph_step: 0,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_start",
- data: {
- input: {
- messages: [],
- },
- },
- name: "testnode",
- tags: ["graph:step:1"],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_start",
- data: {
- input: {
- messages: [],
- },
- },
- name: "RunnableLambda",
- tags: ["seq:step:1"],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chat_model_start",
- data: {
- input: {
- messages: [[new HumanMessage("hello!")]],
- },
- },
- name: "model_call",
- tags: [],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- ls_model_type: "chat",
- ls_stop: undefined,
- },
- },
- {
- event: "on_chat_model_end",
- data: {
- output: new AIMessage("hey!"),
- input: {
- messages: [[new HumanMessage("hello!")]],
- },
- },
- run_id: expect.any(String),
- name: "model_call",
- tags: [],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- ls_model_type: "chat",
- ls_stop: undefined,
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: {
- messages: [new AIMessage("hey!")],
- },
- input: {
- messages: [],
- },
- },
- run_id: expect.any(String),
- name: "RunnableLambda",
- tags: ["seq:step:1"],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_start",
- data: {
- input: {
- messages: [new AIMessage("hey!")],
- },
- },
- name: "ChannelWrite<messages,testnode>",
- tags: ["seq:step:2", "langsmith:hidden"],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: { output: undefined },
- input: {
- messages: [new AIMessage("hey!")],
- },
- },
- run_id: expect.any(String),
- name: "ChannelWrite<messages,testnode>",
- tags: ["seq:step:2", "langsmith:hidden"],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_start",
- data: {
- input: {
- input: undefined,
- },
- },
- name: "func",
- tags: ["seq:step:3"],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chat_model_start",
- data: {
- input: {
- messages: [[new HumanMessage("testing but should be traced")]],
- },
- },
- name: "conditional_edge_call",
- tags: [],
- run_id: expect.any(String),
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- ls_model_type: "chat",
- ls_stop: undefined,
- },
- },
- {
- event: "on_chat_model_end",
- data: {
- output: new AIMessage("hey!"),
- input: {
- messages: [[new HumanMessage("testing but should be traced")]],
- },
- },
- run_id: expect.any(String),
- name: "conditional_edge_call",
- tags: [],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- ls_model_type: "chat",
- ls_stop: undefined,
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: {
- output: undefined,
- },
- input: {
- input: undefined,
- },
- },
- run_id: expect.any(String),
- name: "func",
- tags: ["seq:step:3"],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: { output: undefined },
- input: {
- messages: [],
- },
- },
- run_id: expect.any(String),
- name: "testnode",
- tags: ["graph:step:1"],
- metadata: {
- langgraph_node: "testnode",
- langgraph_step: 1,
- langgraph_task_idx: 0,
- langgraph_triggers: ["__pregel_tasks"],
- },
- },
- {
- event: "on_chain_stream",
- run_id: expect.any(String),
- name: "LangGraph",
- tags: [],
- metadata: {},
- data: {
- chunk: {
- testnode: {
- messages: [new AIMessage("hey!")],
- },
- },
- },
- },
- {
- event: "on_chain_end",
- data: {
- output: {
- testnode: {
- messages: [new AIMessage("hey!")],
- },
- },
- },
- run_id: expect.any(String),
- name: "LangGraph",
- tags: [],
- metadata: {},
- },
- ]);
- });
package/dist/tests/utils.d.ts
@@ -1,53 +0,0 @@
- import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
- import { BaseChatModel, BaseChatModelParams } from "@langchain/core/language_models/chat_models";
- import { BaseMessage } from "@langchain/core/messages";
- import { ChatResult } from "@langchain/core/outputs";
- import { RunnableConfig } from "@langchain/core/runnables";
- import { Tool } from "@langchain/core/tools";
- import { z } from "zod";
- import { MemorySaver } from "../checkpoint/memory.js";
- import { Checkpoint, CheckpointMetadata } from "../checkpoint/base.js";
- export interface FakeChatModelArgs extends BaseChatModelParams {
- responses: BaseMessage[];
- }
- export declare class FakeChatModel extends BaseChatModel {
- responses: BaseMessage[];
- constructor(fields: FakeChatModelArgs);
- _combineLLMOutput(): never[];
- _llmType(): string;
- _generate(messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
- }
- export declare class FakeToolCallingChatModel extends BaseChatModel {
- sleep?: number;
- responses?: BaseMessage[];
- thrownErrorString?: string;
- idx: number;
- constructor(fields: {
- sleep?: number;
- responses?: BaseMessage[];
- thrownErrorString?: string;
- } & BaseChatModelParams);
- _llmType(): string;
- _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
- bindTools(_: Tool[]): FakeToolCallingChatModel;
- }
- export declare class MemorySaverAssertImmutable extends MemorySaver {
- storageForCopies: Record<string, Record<string, string>>;
- constructor();
- put(config: RunnableConfig, checkpoint: Checkpoint, metadata: CheckpointMetadata): Promise<RunnableConfig>;
- }
- export declare class FakeSearchTool extends Tool {
- name: string;
- description: string;
- schema: z.ZodEffects<z.ZodObject<{
- input: z.ZodOptional<z.ZodString>;
- }, "strip", z.ZodTypeAny, {
- input?: string | undefined;
- }, {
- input?: string | undefined;
- }>, string | undefined, {
- input?: string | undefined;
- }>;
- constructor();
- _call(query: string): Promise<string>;
- }
package/dist/tests/utils.js
@@ -1,167 +0,0 @@
- import assert from "node:assert";
- import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
- import { AIMessage } from "@langchain/core/messages";
- import { Tool } from "@langchain/core/tools";
- import { z } from "zod";
- import { MemorySaver } from "../checkpoint/memory.js";
- export class FakeChatModel extends BaseChatModel {
- constructor(fields) {
- super(fields);
- Object.defineProperty(this, "responses", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- this.responses = fields.responses;
- }
- _combineLLMOutput() {
- return [];
- }
- _llmType() {
- return "fake";
- }
- async _generate(messages, options, runManager) {
- if (options?.stop?.length) {
- return {
- generations: [
- {
- message: new AIMessage(options.stop[0]),
- text: options.stop[0],
- },
- ],
- };
- }
- const response = this.responses.shift();
- const text = messages.map((m) => m.content).join("\n");
- await runManager?.handleLLMNewToken(text);
- return {
- generations: [
- {
- message: response ?? new AIMessage(text),
- text: response ? response.content : text,
- },
- ],
- llmOutput: {},
- };
- }
- }
- export class FakeToolCallingChatModel extends BaseChatModel {
- constructor(fields) {
- super(fields);
- Object.defineProperty(this, "sleep", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: 50
- });
- Object.defineProperty(this, "responses", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- Object.defineProperty(this, "thrownErrorString", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- Object.defineProperty(this, "idx", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
- this.sleep = fields.sleep ?? this.sleep;
- this.responses = fields.responses;
- this.thrownErrorString = fields.thrownErrorString;
- this.idx = 0;
- }
- _llmType() {
- return "fake";
- }
- async _generate(messages, _options, _runManager) {
- if (this.thrownErrorString) {
- throw new Error(this.thrownErrorString);
- }
- const msg = this.responses?.[this.idx] ?? messages[this.idx];
- const generation = {
- generations: [
- {
- text: "",
- message: msg,
- },
- ],
- };
- this.idx += 1;
- return generation;
- }
- bindTools(_) {
- return new FakeToolCallingChatModel({
- sleep: this.sleep,
- responses: this.responses,
- thrownErrorString: this.thrownErrorString,
- });
- }
- }
- export class MemorySaverAssertImmutable extends MemorySaver {
- constructor() {
- super();
- Object.defineProperty(this, "storageForCopies", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: {}
- });
- this.storageForCopies = {};
- }
- async put(config, checkpoint, metadata) {
- const thread_id = config.configurable?.thread_id;
- if (!this.storageForCopies[thread_id]) {
- this.storageForCopies[thread_id] = {};
- }
- // assert checkpoint hasn't been modified since last written
- const saved = await super.get(config);
- if (saved) {
- const savedId = saved.id;
- if (this.storageForCopies[thread_id][savedId]) {
- assert(JSON.stringify(saved) === this.storageForCopies[thread_id][savedId], "Checkpoint has been modified since last written");
- }
- }
- // save a copy of the checkpoint
- this.storageForCopies[thread_id][checkpoint.id] =
- this.serde.stringify(checkpoint);
- return super.put(config, checkpoint, metadata);
- }
- }
- export class FakeSearchTool extends Tool {
- constructor() {
- super();
- Object.defineProperty(this, "name", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: "search_api"
- });
- Object.defineProperty(this, "description", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: "A simple API that returns the input string."
- });
- Object.defineProperty(this, "schema", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: z
- .object({
- input: z.string().optional(),
- })
- .transform((data) => data.input)
- });
- }
- async _call(query) {
- return `result for ${query}`;
- }
- }