@langchain/langgraph 0.0.21 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/graph/graph.d.ts +1 -1
- package/dist/graph/index.d.ts +1 -1
- package/dist/graph/index.js +1 -1
- package/dist/graph/state.d.ts +2 -2
- package/dist/index.cjs +18 -18
- package/dist/index.d.ts +1 -4
- package/dist/index.js +4 -4
- package/dist/prebuilt/agent_executor.cjs +1 -1
- package/dist/prebuilt/agent_executor.js +1 -1
- package/dist/prebuilt/chat_agent_executor.cjs +1 -2
- package/dist/prebuilt/chat_agent_executor.d.ts +1 -1
- package/dist/prebuilt/chat_agent_executor.js +1 -2
- package/dist/prebuilt/react_agent_executor.cjs +7 -7
- package/dist/prebuilt/react_agent_executor.js +7 -7
- package/dist/prebuilt/tool_executor.cjs +1 -3
- package/dist/prebuilt/tool_executor.js +1 -3
- package/dist/pregel/index.cjs +0 -3
- package/dist/pregel/index.d.ts +1 -1
- package/dist/pregel/index.js +0 -3
- package/dist/tests/chatbot.int.test.js +6 -1
- package/dist/tests/prebuilt.int.test.js +7 -4
- package/dist/tests/prebuilt.test.d.ts +1 -20
- package/dist/tests/prebuilt.test.js +1 -60
- package/dist/tests/pregel.io.test.js +1 -1
- package/dist/tests/pregel.test.js +11 -11
- package/dist/tests/pregel.write.test.js +1 -1
- package/dist/tests/tracing.int.test.js +2 -1
- package/dist/tests/tracing.test.d.ts +1 -0
- package/dist/tests/tracing.test.js +202 -0
- package/dist/tests/utils.d.ts +15 -0
- package/dist/tests/utils.js +59 -0
- package/dist/web.cjs +20 -0
- package/dist/web.d.ts +4 -0
- package/dist/web.js +4 -0
- package/package.json +15 -2
- package/web.cjs +1 -0
- package/web.d.cts +1 -0
- package/web.d.ts +1 -0
- package/web.js +1 -0
package/dist/graph/graph.d.ts
CHANGED
@@ -19,7 +19,7 @@ export declare class Branch<IO, N extends string> {
     compile(writer: (dests: string[]) => Runnable | undefined, reader?: (config: RunnableConfig) => IO): RunnableCallable<unknown, unknown>;
     _route(input: IO, config: RunnableConfig, writer: (dests: string[]) => Runnable | undefined, reader?: (config: RunnableConfig) => IO): Promise<Runnable | undefined>;
 }
-export declare class Graph<
+export declare class Graph<N extends string = typeof END, RunInput = any, RunOutput = any> {
     nodes: Record<N, Runnable<RunInput, RunOutput>>;
     edges: Set<[N | typeof START, N | typeof END]>;
     branches: Record<string, Record<string, Branch<RunInput, N>>>;
package/dist/graph/index.d.ts
CHANGED
package/dist/graph/index.js
CHANGED
package/dist/graph/state.d.ts
CHANGED
@@ -24,7 +24,7 @@ export interface StateGraphArgs<Channels extends object | unknown> {
         __root__: Channels;
     }>;
 }
-export declare class StateGraph<
+export declare class StateGraph<State extends object | unknown, Update extends object | unknown = Partial<State>, N extends string = typeof START> extends Graph<N, State, Update> {
     channels: Record<string, BaseChannel>;
     waitingEdges: Set<[N[], N]>;
     constructor(fields: StateGraphArgs<State>);
@@ -37,7 +37,7 @@ export declare class StateGraph<const State extends object | unknown, const Upda
         interruptAfter?: N[] | All;
     }): CompiledStateGraph<State, Update, N>;
 }
-export declare class CompiledStateGraph<
+export declare class CompiledStateGraph<State extends object | unknown, Update extends object | unknown = Partial<State>, N extends string = typeof START> extends CompiledGraph<N, State, Update> {
     builder: StateGraph<State, Update, N>;
     attachNode(key: typeof START, node?: never): void;
     attachNode(key: N, node: Runnable<State, Update, RunnableConfig>): void;
package/dist/index.cjs
CHANGED
@@ -1,20 +1,20 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-
-
-
-
-Object.defineProperty(exports, "START", { enumerable: true, get: function () { return index_js_1.START; } });
-Object.defineProperty(exports, "StateGraph", { enumerable: true, get: function () { return index_js_1.StateGraph; } });
-Object.defineProperty(exports, "MessageGraph", { enumerable: true, get: function () { return index_js_1.MessageGraph; } });
-var memory_js_1 = require("./checkpoint/memory.cjs");
-Object.defineProperty(exports, "MemorySaver", { enumerable: true, get: function () { return memory_js_1.MemorySaver; } });
-var base_js_1 = require("./checkpoint/base.cjs");
-Object.defineProperty(exports, "copyCheckpoint", { enumerable: true, get: function () { return base_js_1.copyCheckpoint; } });
-Object.defineProperty(exports, "emptyCheckpoint", { enumerable: true, get: function () { return base_js_1.emptyCheckpoint; } });
-Object.defineProperty(exports, "BaseCheckpointSaver", { enumerable: true, get: function () { return base_js_1.BaseCheckpointSaver; } });
-var errors_js_1 = require("./errors.cjs");
-Object.defineProperty(exports, "GraphRecursionError", { enumerable: true, get: function () { return errors_js_1.GraphRecursionError; } });
-Object.defineProperty(exports, "GraphValueError", { enumerable: true, get: function () { return errors_js_1.GraphValueError; } });
-Object.defineProperty(exports, "InvalidUpdateError", { enumerable: true, get: function () { return errors_js_1.InvalidUpdateError; } });
-Object.defineProperty(exports, "EmptyChannelError", { enumerable: true, get: function () { return errors_js_1.EmptyChannelError; } });
+const async_local_storage_js_1 = require("./setup/async_local_storage.cjs");
+// Initialize global async local storage instance for tracing
+/* #__PURE__ */ (0, async_local_storage_js_1.initializeAsyncLocalStorageSingleton)();
+__exportStar(require("./web.cjs"), exports);
package/dist/index.d.ts
CHANGED
@@ -1,4 +1 @@
-export
-export { MemorySaver } from "./checkpoint/memory.js";
-export { type Checkpoint, type CheckpointMetadata, copyCheckpoint, emptyCheckpoint, BaseCheckpointSaver, } from "./checkpoint/base.js";
-export { GraphRecursionError, GraphValueError, InvalidUpdateError, EmptyChannelError, } from "./errors.js";
+export * from "./web.js";
package/dist/index.js
CHANGED
@@ -1,4 +1,4 @@
-
-
-
-export
+import { initializeAsyncLocalStorageSingleton } from "./setup/async_local_storage.js";
+// Initialize global async local storage instance for tracing
+/* #__PURE__ */ initializeAsyncLocalStorageSingleton();
+export * from "./web.js";
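Net effect of the entrypoint changes above: importing from the package root now runs initializeAsyncLocalStorageSingleton() as a side effect and re-exports the same public surface as ./web.js, while the Pregel constructor (further down in this diff) no longer performs that initialization itself. A minimal sketch of the two import styles, assuming the entrypoints published by this version; the only difference is whether config and callbacks propagate into nested runnables automatically:

// Root entrypoint: initializeAsyncLocalStorageSingleton() runs on import, so
// nested runnable calls inside graph nodes inherit config/callbacks automatically.
import { END, START, StateGraph } from "@langchain/langgraph";

// Web entrypoint (new "./web" export, see package.json below): no async_hooks
// side effect; pass each node's `config` to nested calls yourself.
// import { END, START, StateGraph } from "@langchain/langgraph/web";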
package/dist/prebuilt/agent_executor.cjs
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.createAgentExecutor = void 0;
 const tool_executor_js_1 = require("./tool_executor.cjs");
 const state_js_1 = require("../graph/state.cjs");
-const index_js_1 = require("../index.cjs");
+const index_js_1 = require("../graph/index.cjs");
 function createAgentExecutor({ agentRunnable, tools, }) {
     let toolExecutor;
     if (!Array.isArray(tools)) {
package/dist/prebuilt/agent_executor.js
CHANGED
@@ -1,6 +1,6 @@
 import { ToolExecutor } from "./tool_executor.js";
 import { StateGraph } from "../graph/state.js";
-import { END, START } from "../index.js";
+import { END, START } from "../graph/index.js";
 export function createAgentExecutor({ agentRunnable, tools, }) {
     let toolExecutor;
     if (!Array.isArray(tools)) {
package/dist/prebuilt/chat_agent_executor.cjs
CHANGED
@@ -6,7 +6,7 @@ const messages_1 = require("@langchain/core/messages");
 const runnables_1 = require("@langchain/core/runnables");
 const tool_executor_js_1 = require("./tool_executor.cjs");
 const state_js_1 = require("../graph/state.cjs");
-const index_js_1 = require("../index.cjs");
+const index_js_1 = require("../graph/index.cjs");
 function createFunctionCallingExecutor({ model, tools, }) {
     let toolExecutor;
     let toolClasses;
@@ -26,7 +26,6 @@ function createFunctionCallingExecutor({ model, tools, }) {
     const toolsAsOpenAIFunctions = toolClasses.map((tool) => (0, function_calling_1.convertToOpenAIFunction)(tool));
     const newModel = model.bind({
         functions: toolsAsOpenAIFunctions,
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
     });
     // Define the function that determines whether to continue or not
     const shouldContinue = (state) => {
package/dist/prebuilt/chat_agent_executor.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { StructuredTool } from "@langchain/core/tools";
 import { BaseMessage } from "@langchain/core/messages";
 import { ToolExecutor } from "./tool_executor.js";
 import { CompiledStateGraph } from "../graph/state.js";
-import { START } from "../index.js";
+import { START } from "../graph/index.js";
 export type FunctionCallingExecutorState = {
     messages: Array<BaseMessage>;
 };
package/dist/prebuilt/chat_agent_executor.js
CHANGED
@@ -3,7 +3,7 @@ import { FunctionMessage } from "@langchain/core/messages";
 import { RunnableLambda } from "@langchain/core/runnables";
 import { ToolExecutor } from "./tool_executor.js";
 import { StateGraph, } from "../graph/state.js";
-import { END, START } from "../index.js";
+import { END, START } from "../graph/index.js";
 export function createFunctionCallingExecutor({ model, tools, }) {
     let toolExecutor;
     let toolClasses;
@@ -23,7 +23,6 @@ export function createFunctionCallingExecutor({ model, tools, }) {
     const toolsAsOpenAIFunctions = toolClasses.map((tool) => convertToOpenAIFunction(tool));
     const newModel = model.bind({
         functions: toolsAsOpenAIFunctions,
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
     });
     // Define the function that determines whether to continue or not
     const shouldContinue = (state) => {
package/dist/prebuilt/react_agent_executor.cjs
CHANGED
@@ -48,15 +48,15 @@ function createReactAgent(props) {
            return "continue";
        }
    };
-    const callModel = async (state) => {
+    const callModel = async (state, config) => {
        const { messages } = state;
        // TODO: Auto-promote streaming.
-        return { messages: [await modelRunnable.invoke(messages)] };
+        return { messages: [await modelRunnable.invoke(messages, config)] };
    };
    const workflow = new index_js_1.StateGraph({
        channels: schema,
    })
-        .addNode("agent",
+        .addNode("agent", runnables_1.RunnableLambda.from(callModel).withConfig({ runName: "agent" }))
        .addNode("tools", new tool_node_js_1.ToolNode(toolClasses))
        .addEdge(index_js_1.START, "agent")
        .addConditionalEdges("agent", shouldContinue, {
@@ -75,9 +75,9 @@ function _createModelWrapper(modelWithTools, messageModifier) {
    if (!messageModifier) {
        return modelWithTools;
    }
-    const endict =
-
-    });
+    const endict = runnables_1.RunnableLambda.from((messages) => ({
+        messages,
+    }));
    if (typeof messageModifier === "string") {
        const systemMessage = new messages_1.SystemMessage(messageModifier);
        const prompt = prompts_1.ChatPromptTemplate.fromMessages([
@@ -87,7 +87,7 @@ function _createModelWrapper(modelWithTools, messageModifier) {
        return endict.pipe(prompt).pipe(modelWithTools);
    }
    if (typeof messageModifier === "function") {
-        const lambda =
+        const lambda = runnables_1.RunnableLambda.from(messageModifier).withConfig({
            runName: "message_modifier",
        });
        return lambda.pipe(modelWithTools);
package/dist/prebuilt/react_agent_executor.js
CHANGED
@@ -45,15 +45,15 @@ export function createReactAgent(props) {
            return "continue";
        }
    };
-    const callModel = async (state) => {
+    const callModel = async (state, config) => {
        const { messages } = state;
        // TODO: Auto-promote streaming.
-        return { messages: [await modelRunnable.invoke(messages)] };
+        return { messages: [await modelRunnable.invoke(messages, config)] };
    };
    const workflow = new StateGraph({
        channels: schema,
    })
-        .addNode("agent",
+        .addNode("agent", RunnableLambda.from(callModel).withConfig({ runName: "agent" }))
        .addNode("tools", new ToolNode(toolClasses))
        .addEdge(START, "agent")
        .addConditionalEdges("agent", shouldContinue, {
@@ -71,9 +71,9 @@ function _createModelWrapper(modelWithTools, messageModifier) {
    if (!messageModifier) {
        return modelWithTools;
    }
-    const endict =
-
-    });
+    const endict = RunnableLambda.from((messages) => ({
+        messages,
+    }));
    if (typeof messageModifier === "string") {
        const systemMessage = new SystemMessage(messageModifier);
        const prompt = ChatPromptTemplate.fromMessages([
@@ -83,7 +83,7 @@ function _createModelWrapper(modelWithTools, messageModifier) {
        return endict.pipe(prompt).pipe(modelWithTools);
    }
    if (typeof messageModifier === "function") {
-        const lambda =
+        const lambda = RunnableLambda.from(messageModifier).withConfig({
            runName: "message_modifier",
        });
        return lambda.pipe(modelWithTools);
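The createReactAgent change above is the behavioral core of this release: the agent node now accepts the RunnableConfig that LangGraph hands to each node and forwards it to modelRunnable.invoke, so callbacks, tags, and tracing attach to the model call even where AsyncLocalStorage is unavailable. A sketch of the same pattern in a user-defined node; the state shape and the ChatOpenAI model are illustrative assumptions, not part of this diff:

import type { RunnableConfig } from "@langchain/core/runnables";
import type { BaseMessage } from "@langchain/core/messages";
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({ temperature: 0 });

// Accept the per-node config and pass it to the nested runnable call so the
// model run is recorded under the surrounding graph run.
const callModel = async (
    state: { messages: BaseMessage[] },
    config?: RunnableConfig
) => {
    const response = await model.invoke(state.messages, config);
    return { messages: [response] };
};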
package/dist/prebuilt/tool_executor.cjs
CHANGED
@@ -9,9 +9,7 @@ class ToolExecutor extends runnables_1.RunnableBinding {
            invalidToolMsgTemplate: INVALID_TOOL_MSG_TEMPLATE,
            ...fields,
        };
-        const bound =
-            func: async (input, options) => this._execute(input, options?.config),
-        });
+        const bound = runnables_1.RunnableLambda.from(async (input, config) => this._execute(input, config));
        super({
            bound,
            config: {},
package/dist/prebuilt/tool_executor.js
CHANGED
@@ -6,9 +6,7 @@ export class ToolExecutor extends RunnableBinding {
            invalidToolMsgTemplate: INVALID_TOOL_MSG_TEMPLATE,
            ...fields,
        };
-        const bound =
-            func: async (input, options) => this._execute(input, options?.config),
-        });
+        const bound = RunnableLambda.from(async (input, config) => this._execute(input, config));
        super({
            bound,
            config: {},
package/dist/pregel/index.cjs
CHANGED
@@ -11,7 +11,6 @@ const validate_js_1 = require("./validate.cjs");
 const io_js_1 = require("./io.cjs");
 const write_js_1 = require("./write.cjs");
 const constants_js_1 = require("../constants.cjs");
-const async_local_storage_js_1 = require("../setup/async_local_storage.cjs");
 const errors_js_1 = require("../errors.cjs");
 const DEFAULT_LOOP_LIMIT = 25;
 function isString(value) {
@@ -157,8 +156,6 @@ class Pregel extends runnables_1.Runnable {
            writable: true,
            value: void 0
        });
-        // Initialize global async local storage instance for tracing
-        (0, async_local_storage_js_1.initializeAsyncLocalStorageSingleton)();
        this.nodes = fields.nodes;
        this.channels = fields.channels;
        this.autoValidate = fields.autoValidate ?? this.autoValidate;
package/dist/pregel/index.d.ts
CHANGED
@@ -66,7 +66,7 @@ export interface PregelOptions<Nn extends StrRecord<string, PregelNode>, Cc exte
 }
 export type PregelInputType = any;
 export type PregelOutputType = any;
-export declare class Pregel<
+export declare class Pregel<Nn extends StrRecord<string, PregelNode>, Cc extends StrRecord<string, BaseChannel>> extends Runnable<PregelInputType, PregelOutputType, PregelOptions<Nn, Cc>> implements PregelInterface<Nn, Cc> {
     static lc_name(): string;
     lc_namespace: string[];
     nodes: Nn;
package/dist/pregel/index.js
CHANGED
@@ -8,7 +8,6 @@ import { validateGraph, validateKeys } from "./validate.js";
 import { mapInput, mapOutputUpdates, mapOutputValues, readChannel, readChannels, single, } from "./io.js";
 import { ChannelWrite, PASSTHROUGH } from "./write.js";
 import { CONFIG_KEY_READ, CONFIG_KEY_SEND, INTERRUPT, TAG_HIDDEN, } from "../constants.js";
-import { initializeAsyncLocalStorageSingleton } from "../setup/async_local_storage.js";
 import { EmptyChannelError, GraphRecursionError, GraphValueError, InvalidUpdateError, } from "../errors.js";
 const DEFAULT_LOOP_LIMIT = 25;
 function isString(value) {
@@ -153,8 +152,6 @@ export class Pregel extends Runnable {
            writable: true,
            value: void 0
        });
-        // Initialize global async local storage instance for tracing
-        initializeAsyncLocalStorageSingleton();
        this.nodes = fields.nodes;
        this.channels = fields.channels;
        this.autoValidate = fields.autoValidate ?? this.autoValidate;
package/dist/tests/chatbot.int.test.js
CHANGED
@@ -1,10 +1,15 @@
-import { describe, it } from "@jest/globals";
+import { beforeAll, describe, it } from "@jest/globals";
 import { ChatOpenAI } from "@langchain/openai";
 import { HumanMessage, ToolMessage, } from "@langchain/core/messages";
 import { Calculator } from "@langchain/community/tools/calculator";
 import { convertToOpenAITool } from "@langchain/core/utils/function_calling";
 import { END, MessageGraph, START } from "../index.js";
+import { initializeAsyncLocalStorageSingleton } from "../setup/async_local_storage.js";
 describe("Chatbot", () => {
+    beforeAll(() => {
+        // Will occur naturally if user imports from main `@langchain/langgraph` endpoint.
+        initializeAsyncLocalStorageSingleton();
+    });
     it("Simple chat use-case", async () => {
         const model = new ChatOpenAI({ temperature: 0 });
         const graph = new MessageGraph()
package/dist/tests/prebuilt.int.test.js
CHANGED
@@ -4,12 +4,15 @@ import { Tool } from "@langchain/core/tools";
 import { ChatOpenAI } from "@langchain/openai";
 import { HumanMessage } from "@langchain/core/messages";
 import { createReactAgent, createFunctionCallingExecutor, } from "../prebuilt/index.js";
+import { initializeAsyncLocalStorageSingleton } from "../setup/async_local_storage.js";
 // Tracing slows down the tests
 beforeAll(() => {
-    process.env.LANGCHAIN_TRACING_V2 = "false";
-    process.env.LANGCHAIN_ENDPOINT = "";
-    process.env.LANGCHAIN_API_KEY = "";
-    process.env.LANGCHAIN_PROJECT = "";
+    // process.env.LANGCHAIN_TRACING_V2 = "false";
+    // process.env.LANGCHAIN_ENDPOINT = "";
+    // process.env.LANGCHAIN_API_KEY = "";
+    // process.env.LANGCHAIN_PROJECT = "";
+    // Will occur naturally if user imports from main `@langchain/langgraph` endpoint.
+    initializeAsyncLocalStorageSingleton();
 });
 describe("createFunctionCallingExecutor", () => {
     it("can call a function", async () => {
package/dist/tests/prebuilt.test.d.ts
CHANGED
@@ -1,20 +1 @@
-
-import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
-import { BaseChatModel } from "@langchain/core/language_models/chat_models";
-import { BaseLLMParams } from "@langchain/core/language_models/llms";
-import { BaseMessage } from "@langchain/core/messages";
-import { ChatResult } from "@langchain/core/outputs";
-export declare class FakeToolCallingChatModel extends BaseChatModel {
-    sleep?: number;
-    responses?: BaseMessage[];
-    thrownErrorString?: string;
-    idx: number;
-    constructor(fields: {
-        sleep?: number;
-        responses?: BaseMessage[];
-        thrownErrorString?: string;
-    } & BaseLLMParams);
-    _llmType(): string;
-    _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-    bindTools(_: Tool[]): FakeToolCallingChatModel;
-}
+export {};
package/dist/tests/prebuilt.test.js
CHANGED
@@ -3,10 +3,10 @@ import { beforeAll, describe, expect, it } from "@jest/globals";
 import { PromptTemplate } from "@langchain/core/prompts";
 import { StructuredTool, Tool } from "@langchain/core/tools";
 import { FakeStreamingLLM } from "@langchain/core/utils/testing";
-import { BaseChatModel } from "@langchain/core/language_models/chat_models";
 import { AIMessage, HumanMessage, SystemMessage, ToolMessage, } from "@langchain/core/messages";
 import { RunnableLambda } from "@langchain/core/runnables";
 import { z } from "zod";
+import { FakeToolCallingChatModel } from "./utils.js";
 import { createAgentExecutor, createReactAgent } from "../prebuilt/index.js";
 // Tracing slows down the tests
 beforeAll(() => {
@@ -197,65 +197,6 @@ describe("PreBuilt", () => {
        ]);
    });
 });
-export class FakeToolCallingChatModel extends BaseChatModel {
-    constructor(fields) {
-        super(fields);
-        Object.defineProperty(this, "sleep", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: 50
-        });
-        Object.defineProperty(this, "responses", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "thrownErrorString", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "idx", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.sleep = fields.sleep ?? this.sleep;
-        this.responses = fields.responses;
-        this.thrownErrorString = fields.thrownErrorString;
-        this.idx = 0;
-    }
-    _llmType() {
-        return "fake";
-    }
-    async _generate(messages, _options, _runManager) {
-        if (this.thrownErrorString) {
-            throw new Error(this.thrownErrorString);
-        }
-        const msg = this.responses?.[this.idx] ?? messages[this.idx];
-        const generation = {
-            generations: [
-                {
-                    text: "",
-                    message: msg,
-                },
-            ],
-        };
-        this.idx += 1;
-        return generation;
-    }
-    bindTools(_) {
-        return new FakeToolCallingChatModel({
-            sleep: this.sleep,
-            responses: this.responses,
-            thrownErrorString: this.thrownErrorString,
-        });
-    }
-}
 describe("createReactAgent", () => {
     const searchSchema = z.object({
         query: z.string().describe("The query to search for."),
package/dist/tests/pregel.io.test.js
CHANGED
@@ -269,7 +269,7 @@ describe("mapOutputUpdates", () => {
         input: null,
         proc: new RunnablePassthrough(),
         // @ts-expect-error invalid write
-        writes: [["someOutputChannelNameThatDoesntMatch", 3]],
+        writes: [["someOutputChannelNameThatDoesntMatch", 3]],
         config: undefined,
     },
 ];
package/dist/tests/pregel.test.js
CHANGED
@@ -155,21 +155,21 @@ describe("Pregel", () => {
    });
    // call method / assertions
    const expectedDefaults1 = [
-        false,
-        "values",
-        "outputKey",
-        ["inputKey", "outputKey", "channel3"],
+        false,
+        "values",
+        "outputKey",
+        ["inputKey", "outputKey", "channel3"],
        {},
-        ["one"],
+        ["one"],
        ["one"], // interrupt after
    ];
    const expectedDefaults2 = [
-        true,
-        "updates",
-        "inputKey",
-        "outputKey",
+        true,
+        "updates",
+        "inputKey",
+        "outputKey",
        { tags: ["hello"] },
-        "*",
+        "*",
        ["one"], // interrupt after
    ];
    expect(pregel._defaults(config1)).toEqual(expectedDefaults1);
@@ -290,7 +290,7 @@ describe("_shouldInterrupt", () => {
    // call method / assertions
    expect(_shouldInterrupt(checkpoint, interruptNodes, snapshotChannels, [
        {
-            name: "node2",
+            name: "node2",
            input: undefined,
            proc: new RunnablePassthrough(),
            writes: [],
package/dist/tests/pregel.write.test.js
CHANGED
@@ -34,7 +34,7 @@ describe("ChannelWrite", () => {
    const writeValues = await channelWrite._getWriteValues(input, config);
    const expectedWriteValues = {
        someChannel1: 1,
-        someChannel2: 2,
+        someChannel2: 2,
        // someChannel3 should be filtered out
        someChannel4: 2, // value is set to input value since PASSTHROUGH value was specified
    };
package/dist/tests/tracing.int.test.js
CHANGED
@@ -12,9 +12,10 @@ import { JsonOutputFunctionsParser, JsonOutputToolsParser, } from "langchain/out
 import { createOpenAIFnRunnable } from "langchain/chains/openai_functions";
 import { zodToJsonSchema } from "zod-to-json-schema";
 import { z } from "zod";
-import { StateGraph, END } from "../index.js";
 import { ToolExecutor } from "../prebuilt/tool_executor.js";
 import { createAgentExecutor } from "../prebuilt/agent_executor.js";
+// Import from main `@langchain/langgraph` endpoint to turn on automatic config passing
+import { StateGraph, END } from "../index.js";
 test.skip("Can invoke with tracing", async () => {
     const tools = [new TavilySearchResults({ maxResults: 1 })];
     // Get the prompt to use - you can modify this!
package/dist/tests/tracing.test.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/tests/tracing.test.js
ADDED
@@ -0,0 +1,202 @@
+import { expect, it } from "@jest/globals";
+import { AIMessage, HumanMessage } from "@langchain/core/messages";
+import { FakeToolCallingChatModel } from "./utils.js";
+// Import from main `@langchain/langgraph` endpoint to turn on automatic config passing
+import { END, START, StateGraph } from "../index.js";
+it("should pass config through if importing from the primary entrypoint", async () => {
+    const stateGraph = new StateGraph({
+        channels: { messages: null },
+    });
+    const graph = stateGraph
+        .addNode("testnode", async (_) => {
+            const model = new FakeToolCallingChatModel({
+                responses: [new AIMessage("hey!")],
+            }).withConfig({ runName: "model_call" });
+            // Don't explicitly pass config here
+            const res = await model.invoke("hello!");
+            return { messages: [res] };
+        })
+        .addEdge(START, "testnode")
+        .addEdge("testnode", END)
+        .compile();
+    const eventStream = graph.streamEvents({ messages: [] }, { version: "v2" });
+    const events = [];
+    for await (const event of eventStream) {
+        events.push(event);
+    }
+    expect(events).toEqual([
+        {
+            event: "on_chain_start",
+            data: {
+                input: {
+                    messages: [],
+                },
+            },
+            name: "LangGraph",
+            tags: [],
+            run_id: expect.any(String),
+            metadata: {},
+        },
+        {
+            event: "on_chain_start",
+            data: {
+                input: {
+                    messages: [],
+                },
+            },
+            name: "__start__",
+            tags: ["graph:step:0", "langsmith:hidden"],
+            run_id: expect.any(String),
+            metadata: {},
+        },
+        {
+            event: "on_chain_end",
+            data: {
+                output: { output: undefined },
+                input: {
+                    messages: [],
+                },
+            },
+            run_id: expect.any(String),
+            name: "__start__",
+            tags: ["graph:step:0", "langsmith:hidden"],
+            metadata: {},
+        },
+        {
+            event: "on_chain_start",
+            data: {
+                input: {
+                    messages: [],
+                },
+            },
+            name: "testnode",
+            tags: ["graph:step:1"],
+            run_id: expect.any(String),
+            metadata: {},
+        },
+        {
+            event: "on_chain_start",
+            data: {
+                input: {
+                    messages: [],
+                },
+            },
+            name: "RunnableLambda",
+            tags: ["seq:step:1"],
+            run_id: expect.any(String),
+            metadata: {},
+        },
+        {
+            event: "on_chat_model_start",
+            data: {
+                input: {
+                    messages: [[new HumanMessage("hello!")]],
+                },
+            },
+            name: "model_call",
+            tags: [],
+            run_id: expect.any(String),
+            metadata: {
+                ls_model_type: "chat",
+                ls_stop: undefined,
+            },
+        },
+        {
+            event: "on_chat_model_end",
+            data: {
+                output: new AIMessage("hey!"),
+                input: {
+                    messages: [[new HumanMessage("hello!")]],
+                },
+            },
+            run_id: expect.any(String),
+            name: "model_call",
+            tags: [],
+            metadata: {
+                ls_model_type: "chat",
+                ls_stop: undefined,
+            },
+        },
+        {
+            event: "on_chain_end",
+            data: {
+                output: {
+                    messages: [new AIMessage("hey!")],
+                },
+                input: {
+                    messages: [],
+                },
+            },
+            run_id: expect.any(String),
+            name: "RunnableLambda",
+            tags: ["seq:step:1"],
+            metadata: {},
+        },
+        {
+            event: "on_chain_start",
+            data: {
+                input: {
+                    messages: [new AIMessage("hey!")],
+                },
+            },
+            name: "ChannelWrite<messages,testnode>",
+            tags: ["seq:step:2", "langsmith:hidden"],
+            run_id: expect.any(String),
+            metadata: {},
+        },
+        {
+            event: "on_chain_end",
+            data: {
+                output: { output: undefined },
+                input: {
+                    messages: [new AIMessage("hey!")],
+                },
+            },
+            run_id: expect.any(String),
+            name: "ChannelWrite<messages,testnode>",
+            tags: ["seq:step:2", "langsmith:hidden"],
+            metadata: {},
+        },
+        {
+            event: "on_chain_end",
+            data: {
+                output: { output: undefined },
+                input: {
+                    messages: [],
+                },
+            },
+            run_id: expect.any(String),
+            name: "testnode",
+            tags: ["graph:step:1"],
+            metadata: {},
+        },
+        {
+            event: "on_chain_stream",
+            run_id: expect.any(String),
+            name: "LangGraph",
+            tags: [],
+            metadata: {},
+            data: {
+                chunk: {
+                    testnode: {
+                        messages: [new AIMessage("hey!")],
+                    },
+                },
+            },
+        },
+        {
+            event: "on_chain_end",
+            data: {
+                output: {
+                    testnode: {
+                        messages: [new AIMessage("hey!")],
+                    },
+                },
+            },
+            run_id: expect.any(String),
+            name: "LangGraph",
+            tags: [],
+            metadata: {},
+        },
+    ]);
+});
package/dist/tests/utils.d.ts
CHANGED
@@ -3,6 +3,7 @@ import { BaseChatModel, BaseChatModelParams } from "@langchain/core/language_mod
 import { BaseMessage } from "@langchain/core/messages";
 import { ChatResult } from "@langchain/core/outputs";
 import { RunnableConfig } from "@langchain/core/runnables";
+import { Tool } from "@langchain/core/tools";
 import { MemorySaver } from "../checkpoint/memory.js";
 import { Checkpoint, CheckpointMetadata } from "../checkpoint/base.js";
 export interface FakeChatModelArgs extends BaseChatModelParams {
@@ -15,6 +16,20 @@ export declare class FakeChatModel extends BaseChatModel {
     _llmType(): string;
     _generate(messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
 }
+export declare class FakeToolCallingChatModel extends BaseChatModel {
+    sleep?: number;
+    responses?: BaseMessage[];
+    thrownErrorString?: string;
+    idx: number;
+    constructor(fields: {
+        sleep?: number;
+        responses?: BaseMessage[];
+        thrownErrorString?: string;
+    } & BaseChatModelParams);
+    _llmType(): string;
+    _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
+    bindTools(_: Tool[]): FakeToolCallingChatModel;
+}
 export declare class MemorySaverAssertImmutable extends MemorySaver {
     storageForCopies: Record<string, Record<string, string>>;
     constructor();
package/dist/tests/utils.js
CHANGED
@@ -44,6 +44,65 @@ export class FakeChatModel extends BaseChatModel {
        };
    }
 }
+export class FakeToolCallingChatModel extends BaseChatModel {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "sleep", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        });
+        Object.defineProperty(this, "responses", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "thrownErrorString", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "idx", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.sleep = fields.sleep ?? this.sleep;
+        this.responses = fields.responses;
+        this.thrownErrorString = fields.thrownErrorString;
+        this.idx = 0;
+    }
+    _llmType() {
+        return "fake";
+    }
+    async _generate(messages, _options, _runManager) {
+        if (this.thrownErrorString) {
+            throw new Error(this.thrownErrorString);
+        }
+        const msg = this.responses?.[this.idx] ?? messages[this.idx];
+        const generation = {
+            generations: [
+                {
+                    text: "",
+                    message: msg,
+                },
+            ],
+        };
+        this.idx += 1;
+        return generation;
+    }
+    bindTools(_) {
+        return new FakeToolCallingChatModel({
+            sleep: this.sleep,
+            responses: this.responses,
+            thrownErrorString: this.thrownErrorString,
+        });
+    }
+}
 export class MemorySaverAssertImmutable extends MemorySaver {
    constructor() {
        super();
package/dist/web.cjs
ADDED
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EmptyChannelError = exports.InvalidUpdateError = exports.GraphValueError = exports.GraphRecursionError = exports.BaseCheckpointSaver = exports.emptyCheckpoint = exports.copyCheckpoint = exports.MemorySaver = exports.MessageGraph = exports.StateGraph = exports.START = exports.Graph = exports.END = void 0;
+var index_js_1 = require("./graph/index.cjs");
+Object.defineProperty(exports, "END", { enumerable: true, get: function () { return index_js_1.END; } });
+Object.defineProperty(exports, "Graph", { enumerable: true, get: function () { return index_js_1.Graph; } });
+Object.defineProperty(exports, "START", { enumerable: true, get: function () { return index_js_1.START; } });
+Object.defineProperty(exports, "StateGraph", { enumerable: true, get: function () { return index_js_1.StateGraph; } });
+Object.defineProperty(exports, "MessageGraph", { enumerable: true, get: function () { return index_js_1.MessageGraph; } });
+var memory_js_1 = require("./checkpoint/memory.cjs");
+Object.defineProperty(exports, "MemorySaver", { enumerable: true, get: function () { return memory_js_1.MemorySaver; } });
+var base_js_1 = require("./checkpoint/base.cjs");
+Object.defineProperty(exports, "copyCheckpoint", { enumerable: true, get: function () { return base_js_1.copyCheckpoint; } });
+Object.defineProperty(exports, "emptyCheckpoint", { enumerable: true, get: function () { return base_js_1.emptyCheckpoint; } });
+Object.defineProperty(exports, "BaseCheckpointSaver", { enumerable: true, get: function () { return base_js_1.BaseCheckpointSaver; } });
+var errors_js_1 = require("./errors.cjs");
+Object.defineProperty(exports, "GraphRecursionError", { enumerable: true, get: function () { return errors_js_1.GraphRecursionError; } });
+Object.defineProperty(exports, "GraphValueError", { enumerable: true, get: function () { return errors_js_1.GraphValueError; } });
+Object.defineProperty(exports, "InvalidUpdateError", { enumerable: true, get: function () { return errors_js_1.InvalidUpdateError; } });
+Object.defineProperty(exports, "EmptyChannelError", { enumerable: true, get: function () { return errors_js_1.EmptyChannelError; } });
package/dist/web.d.ts
ADDED
@@ -0,0 +1,4 @@
+export { END, Graph, type StateGraphArgs, START, StateGraph, type CompiledStateGraph, MessageGraph, } from "./graph/index.js";
+export { MemorySaver } from "./checkpoint/memory.js";
+export { type Checkpoint, type CheckpointMetadata, copyCheckpoint, emptyCheckpoint, BaseCheckpointSaver, } from "./checkpoint/base.js";
+export { GraphRecursionError, GraphValueError, InvalidUpdateError, EmptyChannelError, } from "./errors.js";
package/dist/web.js
ADDED
@@ -0,0 +1,4 @@
+export { END, Graph, START, StateGraph, MessageGraph, } from "./graph/index.js";
+export { MemorySaver } from "./checkpoint/memory.js";
+export { copyCheckpoint, emptyCheckpoint, BaseCheckpointSaver, } from "./checkpoint/base.js";
+export { GraphRecursionError, GraphValueError, InvalidUpdateError, EmptyChannelError, } from "./errors.js";
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph",
-  "version": "0.0.
+  "version": "0.0.23",
   "description": "LangGraph",
   "type": "module",
   "engines": {
@@ -70,7 +70,7 @@
     "rollup": "^4.5.2",
     "ts-jest": "^29.1.0",
     "tsx": "^4.7.0",
-    "typescript": "^5.4.5",
+    "typescript": "^4.9.5 || ^5.4.5",
     "zod": "^3.22.4",
     "zod-to-json-schema": "^3.22.4"
   },
@@ -96,6 +96,15 @@
       "import": "./index.js",
       "require": "./index.cjs"
     },
+    "./web": {
+      "types": {
+        "import": "./web.d.ts",
+        "require": "./web.d.cts",
+        "default": "./web.d.ts"
+      },
+      "import": "./web.js",
+      "require": "./web.cjs"
+    },
     "./pregel": {
       "types": {
         "import": "./pregel.d.ts",
@@ -131,6 +140,10 @@
     "index.js",
     "index.d.ts",
     "index.d.cts",
+    "web.cjs",
+    "web.js",
+    "web.d.ts",
+    "web.d.cts",
     "pregel.cjs",
     "pregel.js",
     "pregel.d.ts",
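The package.json changes above publish the web build as a `./web` subpath export with matching ESM/CJS type files, and widen the supported TypeScript range. A rough usage sketch of the new subpath, modeled on the tracing test added earlier in this diff; the node body and message contents are illustrative assumptions, and because this entrypoint skips the AsyncLocalStorage setup, config must be forwarded manually if nested runnables should be traced:

import { AIMessage } from "@langchain/core/messages";
// New "./web" subpath: same graph API as the root export, without the
// node:async_hooks side effect, for edge/browser-style runtimes.
import { END, START, StateGraph } from "@langchain/langgraph/web";

const graph = new StateGraph({ channels: { messages: null } })
    .addNode("agent", async (_state, config) => {
        // No AsyncLocalStorage here: pass `config` to any nested runnable calls yourself.
        return { messages: [new AIMessage("hey!")] };
    })
    .addEdge(START, "agent")
    .addEdge("agent", END)
    .compile();

const result = await graph.invoke({ messages: [] });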
package/web.cjs
ADDED
@@ -0,0 +1 @@
+module.exports = require('./dist/web.cjs');
package/web.d.cts
ADDED
@@ -0,0 +1 @@
+export * from './dist/web.js'
package/web.d.ts
ADDED
@@ -0,0 +1 @@
+export * from './dist/web.js'
package/web.js
ADDED
@@ -0,0 +1 @@
+export * from './dist/web.js'