@langchain/langgraph-api 1.1.8 → 1.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/README.md +3 -3
  2. package/dist/api/assistants.d.mts +3 -0
  3. package/dist/api/assistants.mjs +193 -0
  4. package/dist/api/meta.d.mts +3 -0
  5. package/dist/api/meta.mjs +65 -0
  6. package/dist/api/runs.d.mts +3 -0
  7. package/dist/api/runs.mjs +324 -0
  8. package/dist/api/store.d.mts +3 -0
  9. package/dist/api/store.mjs +111 -0
  10. package/dist/api/threads.d.mts +3 -0
  11. package/dist/api/threads.mjs +143 -0
  12. package/dist/auth/custom.d.mts +9 -0
  13. package/dist/auth/custom.mjs +32 -0
  14. package/dist/auth/index.d.mts +43 -0
  15. package/dist/auth/index.mjs +163 -0
  16. package/dist/cli/entrypoint.d.mts +1 -0
  17. package/dist/cli/entrypoint.mjs +41 -0
  18. package/dist/cli/spawn.d.mts +42 -0
  19. package/dist/cli/spawn.mjs +47 -0
  20. package/dist/cli/utils/ipc/client.d.mts +5 -0
  21. package/dist/cli/utils/ipc/client.mjs +47 -0
  22. package/dist/cli/utils/ipc/utils/get-pipe-path.d.mts +1 -0
  23. package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
  24. package/dist/cli/utils/ipc/utils/temporary-directory.d.mts +5 -0
  25. package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
  26. package/dist/command.d.mts +11 -0
  27. package/dist/command.mjs +15 -0
  28. package/dist/experimental/embed.d.mts +42 -0
  29. package/dist/experimental/embed.mjs +299 -0
  30. package/dist/graph/api.d.mts +1 -0
  31. package/dist/graph/api.mjs +2 -0
  32. package/dist/graph/load.d.mts +19 -0
  33. package/dist/graph/load.hooks.d.mts +2 -0
  34. package/dist/graph/load.hooks.mjs +52 -0
  35. package/dist/graph/load.mjs +96 -0
  36. package/dist/graph/load.utils.d.mts +22 -0
  37. package/dist/graph/load.utils.mjs +49 -0
  38. package/dist/graph/parser/index.d.mts +23 -0
  39. package/dist/graph/parser/index.mjs +58 -0
  40. package/dist/graph/parser/parser.d.mts +77 -0
  41. package/dist/graph/parser/parser.mjs +429 -0
  42. package/dist/graph/parser/parser.worker.d.mts +1 -0
  43. package/dist/graph/parser/parser.worker.mjs +7 -0
  44. package/dist/graph/parser/schema/types.d.mts +154 -0
  45. package/dist/graph/parser/schema/types.mjs +1496 -0
  46. package/dist/graph/parser/schema/types.template.d.mts +1 -0
  47. package/dist/graph/parser/schema/types.template.mts +92 -0
  48. package/dist/http/custom.d.mts +6 -0
  49. package/dist/http/custom.mjs +10 -0
  50. package/dist/http/middleware.d.mts +11 -0
  51. package/dist/http/middleware.mjs +57 -0
  52. package/dist/logging.d.mts +10 -0
  53. package/dist/logging.mjs +115 -0
  54. package/dist/loopback.d.mts +4 -0
  55. package/dist/loopback.mjs +10 -0
  56. package/dist/preload.d.mts +1 -0
  57. package/dist/preload.mjs +29 -0
  58. package/dist/queue.d.mts +2 -0
  59. package/dist/queue.mjs +119 -0
  60. package/dist/schemas.d.mts +1552 -0
  61. package/dist/schemas.mjs +492 -0
  62. package/dist/semver/index.d.mts +15 -0
  63. package/dist/semver/index.mjs +46 -0
  64. package/dist/server.d.mts +175 -0
  65. package/dist/server.mjs +181 -0
  66. package/dist/state.d.mts +3 -0
  67. package/dist/state.mjs +30 -0
  68. package/dist/storage/checkpoint.d.mts +19 -0
  69. package/dist/storage/checkpoint.mjs +127 -0
  70. package/dist/storage/context.d.mts +3 -0
  71. package/dist/storage/context.mjs +11 -0
  72. package/dist/storage/importMap.d.mts +55 -0
  73. package/dist/storage/importMap.mjs +55 -0
  74. package/dist/storage/ops.d.mts +169 -0
  75. package/dist/storage/ops.mjs +1262 -0
  76. package/dist/storage/persist.d.mts +18 -0
  77. package/dist/storage/persist.mjs +81 -0
  78. package/dist/storage/store.d.mts +17 -0
  79. package/dist/storage/store.mjs +41 -0
  80. package/dist/storage/types.d.mts +301 -0
  81. package/dist/storage/types.mjs +1 -0
  82. package/dist/stream.d.mts +43 -0
  83. package/dist/stream.mjs +235 -0
  84. package/dist/ui/load.d.mts +8 -0
  85. package/dist/ui/load.mjs +53 -0
  86. package/dist/utils/abort.d.mts +1 -0
  87. package/dist/utils/abort.mjs +8 -0
  88. package/dist/utils/hono.d.mts +5 -0
  89. package/dist/utils/hono.mjs +24 -0
  90. package/dist/utils/importMap.d.mts +55 -0
  91. package/dist/utils/importMap.mjs +55 -0
  92. package/dist/utils/runnableConfig.d.mts +3 -0
  93. package/dist/utils/runnableConfig.mjs +45 -0
  94. package/dist/utils/serde.d.mts +5 -0
  95. package/dist/utils/serde.mjs +20 -0
  96. package/dist/vitest.config.d.ts +2 -0
  97. package/dist/vitest.config.js +11 -0
  98. package/dist/webhook.d.mts +11 -0
  99. package/dist/webhook.mjs +30 -0
  100. package/package.json +19 -19
package/dist/stream.mjs ADDED
@@ -0,0 +1,235 @@
+ import { isBaseMessage } from "@langchain/core/messages";
+ import { LangChainTracer } from "@langchain/core/tracers/tracer_langchain";
+ import { Client as LangSmithClient, getDefaultProjectName } from "langsmith";
+ import { getLangGraphCommand } from "./command.mjs";
+ import { checkLangGraphSemver } from "./semver/index.mjs";
+ import { runnableConfigToCheckpoint, taskRunnableConfigToCheckpoint, } from "./utils/runnableConfig.mjs";
+ const isRunnableConfig = (config) => {
+ if (typeof config !== "object" || config == null)
+ return false;
+ return ("configurable" in config &&
+ typeof config.configurable === "object" &&
+ config.configurable != null);
+ };
+ function preprocessDebugCheckpointTask(task) {
+ if (!isRunnableConfig(task.state) ||
+ !taskRunnableConfigToCheckpoint(task.state)) {
+ return task;
+ }
+ const cloneTask = { ...task };
+ cloneTask.checkpoint = taskRunnableConfigToCheckpoint(task.state);
+ delete cloneTask.state;
+ return cloneTask;
+ }
+ const isConfigurablePresent = (config) => typeof config === "object" &&
+ config != null &&
+ "configurable" in config &&
+ typeof config.configurable === "object" &&
+ config.configurable != null;
+ const deleteInternalConfigurableFields = (config) => {
+ if (isConfigurablePresent(config)) {
+ const newConfig = {
+ ...config,
+ configurable: Object.fromEntries(Object.entries(config.configurable).filter(([key]) => !key.startsWith("__"))),
+ };
+ delete newConfig.callbacks;
+ return newConfig;
+ }
+ return config;
+ };
+ function preprocessDebugCheckpoint(payload) {
+ const result = {
+ ...payload,
+ checkpoint: runnableConfigToCheckpoint(payload["config"]),
+ parent_checkpoint: runnableConfigToCheckpoint(payload["parentConfig"]),
+ tasks: payload["tasks"].map(preprocessDebugCheckpointTask),
+ };
+ // Handle LangGraph JS pascalCase vs snake_case
+ // TODO: use stream to LangGraph.JS
+ result.parent_config = payload["parentConfig"];
+ delete result.parentConfig;
+ result.config = deleteInternalConfigurableFields(result.config);
+ result.parent_config = deleteInternalConfigurableFields(result.parent_config);
+ return result;
+ }
+ let LANGGRAPH_VERSION;
+ export async function* streamState(run, options) {
+ const kwargs = run.kwargs;
+ const graphId = kwargs.config?.configurable?.graph_id;
+ if (!graphId || typeof graphId !== "string") {
+ throw new Error("Invalid or missing graph_id");
+ }
+ const graph = await options.getGraph(graphId, kwargs.config, {
+ checkpointer: kwargs.temporary ? null : undefined,
+ });
+ const userStreamMode = kwargs.stream_mode ?? [];
+ const libStreamMode = new Set(userStreamMode.filter((mode) => mode !== "events" && mode !== "messages-tuple") ?? []);
+ if (userStreamMode.includes("messages-tuple")) {
+ libStreamMode.add("messages");
+ }
+ if (userStreamMode.includes("messages")) {
+ libStreamMode.add("values");
+ }
+ if (!libStreamMode.has("debug"))
+ libStreamMode.add("debug");
+ yield {
+ event: "metadata",
+ data: { run_id: run.run_id, attempt: options.attempt },
+ };
+ if (!LANGGRAPH_VERSION) {
+ const version = await checkLangGraphSemver();
+ LANGGRAPH_VERSION = version.find((v) => v.name === "@langchain/langgraph");
+ }
+ const metadata = {
+ ...kwargs.config?.metadata,
+ run_attempt: options.attempt,
+ langgraph_version: LANGGRAPH_VERSION?.version ?? "0.0.0",
+ langgraph_plan: "developer",
+ langgraph_host: "self-hosted",
+ langgraph_api_url: process.env.LANGGRAPH_API_URL ?? undefined,
+ };
+ const tracer = run.kwargs?.config?.configurable?.langsmith_project
+ ? new LangChainTracer({
+ replicas: [
+ [
+ run.kwargs?.config?.configurable?.langsmith_project,
+ {
+ reference_example_id: run.kwargs?.config?.configurable?.langsmith_example_id,
+ },
+ ],
+ [getDefaultProjectName(), undefined],
+ ],
+ })
+ : undefined;
+ const events = graph.streamEvents(kwargs.command != null
+ ? getLangGraphCommand(kwargs.command)
+ : kwargs.input ?? null, {
+ version: "v2",
+ interruptAfter: kwargs.interrupt_after,
+ interruptBefore: kwargs.interrupt_before,
+ tags: kwargs.config?.tags,
+ context: kwargs.context,
+ configurable: kwargs.config?.configurable,
+ recursionLimit: kwargs.config?.recursion_limit,
+ subgraphs: kwargs.subgraphs,
+ metadata,
+ runId: run.run_id,
+ streamMode: [...libStreamMode],
+ signal: options?.signal,
+ ...(tracer && { callbacks: [tracer] }),
+ });
+ const messages = {};
+ const completedIds = new Set();
+ for await (const event of events) {
+ if (event.tags?.includes("langsmith:hidden"))
+ continue;
+ if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
+ const [ns, mode, chunk] = (kwargs.subgraphs ? event.data.chunk : [null, ...event.data.chunk]);
+ // Listen for debug events and capture checkpoint
+ let data = chunk;
+ if (mode === "debug") {
+ const debugChunk = chunk;
+ if (debugChunk.type === "checkpoint") {
+ const debugCheckpoint = preprocessDebugCheckpoint(debugChunk.payload);
+ options?.onCheckpoint?.(debugCheckpoint);
+ data = { ...debugChunk, payload: debugCheckpoint };
+ }
+ else if (debugChunk.type === "task_result") {
+ const debugResult = preprocessDebugCheckpointTask(debugChunk.payload);
+ options?.onTaskResult?.(debugResult);
+ data = { ...debugChunk, payload: debugResult };
+ }
+ }
+ else if (mode === "checkpoints") {
+ const debugCheckpoint = preprocessDebugCheckpoint(chunk);
+ options?.onCheckpoint?.(debugCheckpoint);
+ data = debugCheckpoint;
+ }
+ else if (mode === "tasks") {
+ const debugTask = preprocessDebugCheckpointTask(chunk);
+ if ("result" in debugTask || "error" in debugTask) {
+ options?.onTaskResult?.(debugTask);
+ }
+ data = debugTask;
+ }
+ if (mode === "messages") {
+ if (userStreamMode.includes("messages-tuple")) {
+ if (kwargs.subgraphs && ns?.length) {
+ yield { event: `messages|${ns.join("|")}`, data };
+ }
+ else {
+ yield { event: "messages", data };
+ }
+ }
+ }
+ else if (userStreamMode.includes(mode)) {
+ if (kwargs.subgraphs && ns?.length) {
+ yield { event: `${mode}|${ns.join("|")}`, data };
+ }
+ else {
+ yield { event: mode, data };
+ }
+ }
+ }
+ else if (userStreamMode.includes("events")) {
+ yield { event: "events", data: event };
+ }
+ // TODO: we still rely on old messages mode based of streamMode=values
+ // In order to fully switch to library messages mode, we need to do ensure that
+ // `StreamMessagesHandler` sends the final message, which requires the following:
+ // - handleLLMEnd does not send the final message b/c handleLLMNewToken sets the this.emittedChatModelRunIds[runId] flag. Python does not do that
+ // - handleLLMEnd receives the final message as BaseMessageChunk rather than BaseMessage, which from the outside will become indistinguishable.
+ // - handleLLMEnd should not dedupe the message
+ // - Don't think there's an utility that would convert a BaseMessageChunk to a BaseMessage?
+ if (userStreamMode.includes("messages")) {
+ if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
+ const newMessages = [];
+ const [_, chunk] = event.data.chunk;
+ let chunkMessages = [];
+ if (typeof chunk === "object" &&
+ chunk != null &&
+ "messages" in chunk &&
+ !isBaseMessage(chunk)) {
+ chunkMessages = chunk?.messages;
+ }
+ if (!Array.isArray(chunkMessages)) {
+ chunkMessages = [chunkMessages];
+ }
+ for (const message of chunkMessages) {
+ if (!message.id || completedIds.has(message.id))
+ continue;
+ completedIds.add(message.id);
+ newMessages.push(message);
+ }
+ if (newMessages.length > 0) {
+ yield { event: "messages/complete", data: newMessages };
+ }
+ }
+ else if (event.event === "on_chat_model_stream" &&
+ !event.tags?.includes("nostream")) {
+ const message = event.data.chunk;
+ if (!message.id)
+ continue;
+ if (messages[message.id] == null) {
+ messages[message.id] = message;
+ yield {
+ event: "messages/metadata",
+ data: { [message.id]: { metadata: event.metadata } },
+ };
+ }
+ else {
+ messages[message.id] = messages[message.id].concat(message);
+ }
+ yield { event: "messages/partial", data: [messages[message.id]] };
+ }
+ }
+ }
+ if (kwargs.feedback_keys) {
+ const client = new LangSmithClient();
+ const data = Object.fromEntries(await Promise.all(kwargs.feedback_keys.map(async (feedback) => {
+ const { url } = await client.createPresignedFeedbackToken(run.run_id, feedback);
+ return [feedback, url];
+ })));
+ yield { event: "feedback", data };
+ }
+ }
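For orientation only (not part of the diff): a minimal TypeScript sketch of how an async generator shaped like `streamState` can be consumed. The `run` and `getGraph` values are invented stand-ins; the option names simply mirror the fields the implementation above reads (`attempt`, `getGraph`, `signal`, `onCheckpoint`, `onTaskResult`).

```ts
// Sketch only: streamState is declared rather than imported, since the package's
// public export path for it is not shown in this diff.
declare function streamState(
  run: { run_id: string; kwargs: Record<string, unknown> },
  options: {
    attempt: number;
    getGraph: (graphId: string, config: unknown, opts: { checkpointer?: null }) => Promise<unknown>;
    signal?: AbortSignal;
    onCheckpoint?: (checkpoint: unknown) => void;
    onTaskResult?: (task: unknown) => void;
  }
): AsyncGenerator<{ event: string; data: unknown }>;

async function consume(
  run: { run_id: string; kwargs: Record<string, unknown> },
  getGraph: (graphId: string, config: unknown, opts: { checkpointer?: null }) => Promise<unknown>
) {
  const controller = new AbortController();
  for await (const { event, data } of streamState(run, {
    attempt: 1,
    getGraph,
    signal: controller.signal,
    onCheckpoint: (cp) => console.log("checkpoint", cp),
    onTaskResult: (task) => console.log("task result", task),
  })) {
    // The first yielded event is "metadata"; later events carry the requested stream
    // modes ("values", "messages/partial", "debug", ...), namespaced as "<mode>|<ns>"
    // when subgraph streaming is enabled.
    console.log(event, data);
  }
}
```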
package/dist/ui/load.d.mts ADDED
@@ -0,0 +1,8 @@
+ import { Hono } from "hono";
+ export declare function registerGraphUi(defs: Record<string, string>, options: {
+ cwd: string;
+ config?: {
+ shared?: string[];
+ };
+ }): Promise<void>;
+ export declare const api: Hono<import("hono/types").BlankEnv, import("hono/types").BlankSchema, "/">;
package/dist/ui/load.mjs ADDED
@@ -0,0 +1,53 @@
+ import { z } from "zod/v3";
+ import { Hono } from "hono";
+ import { getMimeType } from "hono/utils/mime";
+ import { zValidator } from "@hono/zod-validator";
+ import { watch } from "@langchain/langgraph-ui";
+ import * as path from "node:path";
+ const GRAPH_UI = {};
+ export async function registerGraphUi(defs, options) {
+ await watch({
+ defs,
+ cwd: options.cwd,
+ config: options.config,
+ onOutput: (graphId, files) => (GRAPH_UI[graphId] = files),
+ });
+ }
+ export const api = new Hono();
+ api.post("/ui/:agent", zValidator("json", z.object({ name: z.string() })), async (c) => {
+ const agent = c.req.param("agent");
+ const host = c.req.header("host");
+ const message = await c.req.valid("json");
+ const isHost = (needle) => {
+ if (!host)
+ return false;
+ return host.startsWith(needle + ":") || host === needle;
+ };
+ const protocol = isHost("localhost") || isHost("127.0.0.1") ? "http:" : "";
+ const files = GRAPH_UI[agent];
+ if (!files?.length)
+ return c.text(`UI not found for agent "${agent}"`, 404);
+ const messageName = JSON.stringify(message.name);
+ const result = [];
+ for (const css of files.filter((i) => path.extname(i.basename) === ".css")) {
+ result.push(`<link rel="stylesheet" href="${protocol}//${host}/ui/${agent}/${css.basename}" />`);
+ }
+ const stableName = agent.replace(/[^a-zA-Z0-9]/g, "_");
+ const js = files.find((i) => path.extname(i.basename) === ".js");
+ if (js) {
+ result.push(`<script src="${protocol}//${host}/ui/${agent}/${js.basename}" onload='__LGUI_${stableName}.render(${messageName}, "{{shadowRootId}}")'></script>`);
+ }
+ return c.text(result.join("\n"), {
+ headers: { "Content-Type": "text/html" },
+ });
+ });
+ api.get("/ui/:agent/:basename", async (c) => {
+ const agent = c.req.param("agent");
+ const basename = c.req.param("basename");
+ const file = GRAPH_UI[agent]?.find((item) => item.basename === basename);
+ if (!file)
+ return c.text("File not found", 404);
+ return c.body(file.contents, {
+ headers: { "Content-Type": getMimeType(file.basename) ?? "text/plain" },
+ });
+ });
package/dist/utils/abort.d.mts ADDED
@@ -0,0 +1 @@
+ export declare const combineAbortSignals: (...input: (AbortSignal | undefined | null)[]) => AbortSignal;
package/dist/utils/abort.mjs ADDED
@@ -0,0 +1,8 @@
+ export const combineAbortSignals = (...input) => {
+ const signals = input.filter((item) => item != null);
+ if ("any" in AbortSignal)
+ return AbortSignal.any(signals);
+ const abortController = new AbortController();
+ signals.forEach((signal) => signal.addEventListener("abort", () => abortController.abort()));
+ return abortController.signal;
+ };
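As a usage note (not part of the diff), a small TypeScript sketch of how a combined signal from a helper like this is typically consumed, e.g. merging a caller-supplied signal with a timeout:

```ts
// Sketch only: combineAbortSignals is declared here rather than imported, since the
// package's public export path for this helper is not shown in the diff.
declare const combineAbortSignals: (
  ...input: (AbortSignal | undefined | null)[]
) => AbortSignal;

// Merge an optional caller-supplied signal with a 30-second timeout; null/undefined
// inputs are filtered out by the helper, and AbortSignal.any is used when the runtime
// provides it, with an AbortController-based fallback otherwise.
async function fetchWithTimeout(url: string, callerSignal?: AbortSignal) {
  const signal = combineAbortSignals(callerSignal, AbortSignal.timeout(30_000));
  return fetch(url, { signal });
}
```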
package/dist/utils/hono.d.mts ADDED
@@ -0,0 +1,5 @@
+ import type { Context } from "hono";
+ import { StreamingApi } from "hono/utils/stream";
+ export declare function jsonExtra<T>(c: Context, object: T): Response & import("hono").TypedResponse<string, import("hono/utils/http-status").ContentfulStatusCode, "body">;
+ export declare function waitKeepAlive(c: Context, promise: Promise<unknown>): Response;
+ export declare const getDisconnectAbortSignal: (c: Context, stream: StreamingApi) => AbortSignal;
package/dist/utils/hono.mjs ADDED
@@ -0,0 +1,24 @@
+ import { serialiseAsDict } from "./serde.mjs";
+ import { stream } from "hono/streaming";
+ export function jsonExtra(c, object) {
+ c.header("Content-Type", "application/json");
+ return c.body(serialiseAsDict(object));
+ }
+ export function waitKeepAlive(c, promise) {
+ return stream(c, async (stream) => {
+ // keep sending newlines until we resolved the chunk
+ let keepAlive = Promise.resolve();
+ const timer = setInterval(() => {
+ keepAlive = keepAlive.then(() => stream.write("\n"));
+ }, 1000);
+ const result = await promise;
+ clearInterval(timer);
+ await keepAlive;
+ await stream.write(serialiseAsDict(result));
+ });
+ }
+ export const getDisconnectAbortSignal = (c, stream) => {
+ // https://github.com/honojs/hono/issues/1770
+ stream.onAbort(() => { });
+ return c.req.raw.signal;
+ };
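An illustrative TypeScript sketch (not part of the diff; the route path and `doLongRunningWork` are invented) of how `waitKeepAlive` is wired into a Hono route: it streams newline heartbeats while a long-running promise settles, then writes the serialised result.

```ts
import { Hono } from "hono";
import type { Context } from "hono";

// Declarations stand in for the helpers above; import paths are not asserted here.
declare function waitKeepAlive(c: Context, promise: Promise<unknown>): Response;
declare function doLongRunningWork(): Promise<{ ok: boolean }>;

const app = new Hono();

// A newline heartbeat is written every second until the promise resolves, keeping
// proxies and clients from timing out, then the result is written as the final chunk.
app.post("/wait-demo", (c) => waitKeepAlive(c, doLongRunningWork()));
```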
package/dist/utils/importMap.d.mts ADDED
@@ -0,0 +1,55 @@
+ import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate } from "@langchain/core/prompts";
+ import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk } from "@langchain/core/messages";
+ import { StringPromptValue } from "@langchain/core/prompt_values";
+ export declare const prompts__prompt: {
+ PromptTemplate: typeof PromptTemplate;
+ };
+ export declare const schema__messages: {
+ AIMessage: typeof AIMessage;
+ AIMessageChunk: typeof AIMessageChunk;
+ BaseMessage: typeof BaseMessage;
+ BaseMessageChunk: typeof BaseMessageChunk;
+ ChatMessage: typeof ChatMessage;
+ ChatMessageChunk: typeof ChatMessageChunk;
+ FunctionMessage: typeof FunctionMessage;
+ FunctionMessageChunk: typeof FunctionMessageChunk;
+ HumanMessage: typeof HumanMessage;
+ HumanMessageChunk: typeof HumanMessageChunk;
+ SystemMessage: typeof SystemMessage;
+ SystemMessageChunk: typeof SystemMessageChunk;
+ ToolMessage: typeof ToolMessage;
+ ToolMessageChunk: typeof ToolMessageChunk;
+ };
+ export declare const schema: {
+ AIMessage: typeof AIMessage;
+ AIMessageChunk: typeof AIMessageChunk;
+ BaseMessage: typeof BaseMessage;
+ BaseMessageChunk: typeof BaseMessageChunk;
+ ChatMessage: typeof ChatMessage;
+ ChatMessageChunk: typeof ChatMessageChunk;
+ FunctionMessage: typeof FunctionMessage;
+ FunctionMessageChunk: typeof FunctionMessageChunk;
+ HumanMessage: typeof HumanMessage;
+ HumanMessageChunk: typeof HumanMessageChunk;
+ SystemMessage: typeof SystemMessage;
+ SystemMessageChunk: typeof SystemMessageChunk;
+ ToolMessage: typeof ToolMessage;
+ ToolMessageChunk: typeof ToolMessageChunk;
+ };
+ export declare const prompts__chat: {
+ AIMessagePromptTemplate: typeof AIMessagePromptTemplate;
+ ChatMessagePromptTemplate: typeof ChatMessagePromptTemplate;
+ ChatPromptTemplate: typeof ChatPromptTemplate;
+ HumanMessagePromptTemplate: typeof HumanMessagePromptTemplate;
+ MessagesPlaceholder: typeof MessagesPlaceholder;
+ SystemMessagePromptTemplate: typeof SystemMessagePromptTemplate;
+ };
+ export declare const prompts__image: {
+ ImagePromptTemplate: typeof ImagePromptTemplate;
+ };
+ export declare const prompts__pipeline: {
+ PipelinePromptTemplate: typeof PipelinePromptTemplate;
+ };
+ export declare const prompts__base: {
+ StringPromptValue: typeof StringPromptValue;
+ };
package/dist/utils/importMap.mjs ADDED
@@ -0,0 +1,55 @@
+ import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate, } from "@langchain/core/prompts";
+ import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk, } from "@langchain/core/messages";
+ import { StringPromptValue } from "@langchain/core/prompt_values";
+ export const prompts__prompt = {
+ PromptTemplate,
+ };
+ export const schema__messages = {
+ AIMessage,
+ AIMessageChunk,
+ BaseMessage,
+ BaseMessageChunk,
+ ChatMessage,
+ ChatMessageChunk,
+ FunctionMessage,
+ FunctionMessageChunk,
+ HumanMessage,
+ HumanMessageChunk,
+ SystemMessage,
+ SystemMessageChunk,
+ ToolMessage,
+ ToolMessageChunk,
+ };
+ export const schema = {
+ AIMessage,
+ AIMessageChunk,
+ BaseMessage,
+ BaseMessageChunk,
+ ChatMessage,
+ ChatMessageChunk,
+ FunctionMessage,
+ FunctionMessageChunk,
+ HumanMessage,
+ HumanMessageChunk,
+ SystemMessage,
+ SystemMessageChunk,
+ ToolMessage,
+ ToolMessageChunk,
+ };
+ export const prompts__chat = {
+ AIMessagePromptTemplate,
+ ChatMessagePromptTemplate,
+ ChatPromptTemplate,
+ HumanMessagePromptTemplate,
+ MessagesPlaceholder,
+ SystemMessagePromptTemplate,
+ };
+ export const prompts__image = {
+ ImagePromptTemplate,
+ };
+ export const prompts__pipeline = {
+ PipelinePromptTemplate,
+ };
+ export const prompts__base = {
+ StringPromptValue,
+ };
package/dist/utils/runnableConfig.d.mts ADDED
@@ -0,0 +1,3 @@
+ import type { Checkpoint, RunnableConfig } from "../storage/types.mjs";
+ export declare const runnableConfigToCheckpoint: (config: RunnableConfig | null | undefined) => Checkpoint | null;
+ export declare const taskRunnableConfigToCheckpoint: (config: RunnableConfig | null | undefined) => Partial<Checkpoint> | null;
package/dist/utils/runnableConfig.mjs ADDED
@@ -0,0 +1,45 @@
+ import { z } from "zod/v3";
+ const ConfigSchema = z.object({
+ configurable: z.object({
+ thread_id: z.string(),
+ checkpoint_id: z.string(),
+ checkpoint_ns: z.string().nullish(),
+ checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
+ }),
+ });
+ export const runnableConfigToCheckpoint = (config) => {
+ if (!config || !config.configurable || !config.configurable.thread_id) {
+ return null;
+ }
+ const parsed = ConfigSchema.safeParse(config);
+ if (!parsed.success)
+ return null;
+ return {
+ thread_id: parsed.data.configurable.thread_id,
+ checkpoint_id: parsed.data.configurable.checkpoint_id,
+ checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
+ checkpoint_map: parsed.data.configurable.checkpoint_map || null,
+ };
+ };
+ const TaskConfigSchema = z.object({
+ configurable: z.object({
+ thread_id: z.string(),
+ checkpoint_id: z.string().nullish(),
+ checkpoint_ns: z.string().nullish(),
+ checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
+ }),
+ });
+ export const taskRunnableConfigToCheckpoint = (config) => {
+ if (!config || !config.configurable || !config.configurable.thread_id) {
+ return null;
+ }
+ const parsed = TaskConfigSchema.safeParse(config);
+ if (!parsed.success)
+ return null;
+ return {
+ thread_id: parsed.data.configurable.thread_id,
+ checkpoint_id: parsed.data.configurable.checkpoint_id || null,
+ checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
+ checkpoint_map: parsed.data.configurable.checkpoint_map || null,
+ };
+ };
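To make the mapping concrete, a short TypeScript sketch (not part of the diff; IDs are invented example values, and the declared return type simply restates the object literal built above):

```ts
// Sketch only: declared rather than imported, since the export path is not asserted here.
declare const runnableConfigToCheckpoint: (config: unknown) => {
  thread_id: string;
  checkpoint_id: string;
  checkpoint_ns: string;
  checkpoint_map: Record<string, unknown> | null;
} | null;

// A config carrying both thread_id and checkpoint_id maps onto a checkpoint reference,
// with a missing/nullish checkpoint_ns normalised to "" and checkpoint_map to null:
const checkpoint = runnableConfigToCheckpoint({
  configurable: { thread_id: "thread-123", checkpoint_id: "ckpt-456" },
});
// => { thread_id: "thread-123", checkpoint_id: "ckpt-456", checkpoint_ns: "", checkpoint_map: null }

// Without checkpoint_id the strict schema fails and null is returned;
// taskRunnableConfigToCheckpoint accepts that case because checkpoint_id is nullish there.
const missing = runnableConfigToCheckpoint({
  configurable: { thread_id: "thread-123" },
});
// => null
```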
package/dist/utils/serde.d.mts ADDED
@@ -0,0 +1,5 @@
+ export declare const serialiseAsDict: (obj: unknown) => string;
+ export declare const serializeError: (error: unknown) => {
+ error: string;
+ message: string;
+ };
package/dist/utils/serde.mjs ADDED
@@ -0,0 +1,20 @@
+ export const serialiseAsDict = (obj) => {
+ return JSON.stringify(obj, function (key, value) {
+ const rawValue = this[key];
+ if (rawValue != null &&
+ typeof rawValue === "object" &&
+ "toDict" in rawValue &&
+ typeof rawValue.toDict === "function") {
+ // TODO: we need to upstream this to LangChainJS
+ const { type, data } = rawValue.toDict();
+ return { ...data, type };
+ }
+ return value;
+ }, 2);
+ };
+ export const serializeError = (error) => {
+ if (error instanceof Error) {
+ return { error: error.name, message: error.message };
+ }
+ return { error: "Error", message: JSON.stringify(error) };
+ };
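For reference, a short TypeScript sketch (not part of the diff; values invented) of the two helpers' behaviour: values exposing a `toDict()` method are flattened to `{ ...data, type }`, and errors are reduced to `{ error, message }`.

```ts
// Sketch only: declared rather than imported, since the export path is not asserted here.
declare const serialiseAsDict: (obj: unknown) => string;
declare const serializeError: (error: unknown) => { error: string; message: string };

// A value with a toDict() method is serialised via its dict form (pretty-printed, 2 spaces).
const json = serialiseAsDict({
  message: {
    // minimal invented stand-in for a LangChain message's toDict()
    toDict: () => ({ type: "human", data: { content: "hi" } }),
  },
});
// json contains: { "message": { "content": "hi", "type": "human" } }

serializeError(new TypeError("boom")); // => { error: "TypeError", message: "boom" }
serializeError({ status: 500 });       // => { error: "Error", message: "{\"status\":500}" }
```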
package/dist/vitest.config.d.ts ADDED
@@ -0,0 +1,2 @@
+ declare const _default: import("vitest/config.js").UserConfigFnObject;
+ export default _default;
package/dist/vitest.config.js ADDED
@@ -0,0 +1,11 @@
+ import { defineConfig } from "vitest/config";
+ export default defineConfig(() => {
+ /** @type {import("vitest/config").UserConfigExport} */
+ return {
+ test: {
+ hideSkippedTests: true,
+ testTimeout: 30_000,
+ fileParallelism: false,
+ },
+ };
+ });
package/dist/webhook.d.mts ADDED
@@ -0,0 +1,11 @@
+ import type { Run } from "./storage/types.mjs";
+ import type { StreamCheckpoint } from "./stream.mjs";
+ export declare function callWebhook(result: {
+ checkpoint: StreamCheckpoint | undefined;
+ status: string | undefined;
+ exception: Error | undefined;
+ run: Run;
+ webhook: string;
+ run_started_at: Date;
+ run_ended_at: Date | undefined;
+ }): Promise<void>;
package/dist/webhook.mjs ADDED
@@ -0,0 +1,30 @@
+ import { serializeError } from "./utils/serde.mjs";
+ import { getLoopbackFetch } from "./loopback.mjs";
+ export async function callWebhook(result) {
+ const payload = {
+ ...result.run,
+ status: result.status,
+ run_started_at: result.run_started_at.toISOString(),
+ run_ended_at: result.run_ended_at?.toISOString(),
+ webhook_sent_at: new Date().toISOString(),
+ values: result.checkpoint?.values,
+ ...(result.exception
+ ? { error: serializeError(result.exception).message }
+ : undefined),
+ };
+ if (result.webhook.startsWith("/")) {
+ const fetch = getLoopbackFetch();
+ if (!fetch)
+ throw new Error("Loopback fetch is not bound");
+ await fetch(result.webhook, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ }
+ else {
+ await fetch(result.webhook, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ }
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/langgraph-api",
- "version": "1.1.8",
+ "version": "1.1.10",
  "type": "module",
  "engines": {
  "node": "^18.19.0 || >=20.16.0"
@@ -50,22 +50,10 @@
  "url": "git+ssh://git@github.com/langchain-ai/langgraphjs.git",
  "directory": "libs/langgraph-api"
  },
- "scripts": {
- "clean": "rm -rf dist/ .turbo/ ./tests/graphs/.langgraph_api/",
- "build": "yarn turbo:command build:internal --filter=@langchain/langgraph-api",
- "build:internal": "yarn clean && node scripts/build.mjs",
- "dev": "tsx ./tests/utils.server.mts --dev",
- "prepublish": "yarn build",
- "typecheck": "tsc --noEmit",
- "test": "vitest run",
- "format": "prettier --write .",
- "format:check": "prettier --check ."
- },
  "dependencies": {
  "@babel/code-frame": "^7.26.2",
  "@hono/node-server": "^1.12.0",
  "@hono/zod-validator": "^0.7.6",
- "@langchain/langgraph-ui": "workspace:*",
  "@types/json-schema": "^7.0.15",
  "@typescript/vfs": "^1.6.0",
  "dedent": "^1.5.3",
@@ -81,7 +69,8 @@
  "uuid": "^10.0.0",
  "winston": "^3.17.0",
  "winston-console-format": "^1.0.8",
- "zod": "^3.25.76 || ^4"
+ "zod": "^3.25.76 || ^4",
+ "@langchain/langgraph-ui": "1.1.10"
  },
  "peerDependencies": {
  "@langchain/core": "^0.3.59 || ^1.0.1",
@@ -97,9 +86,6 @@
  },
  "devDependencies": {
  "@langchain/core": "^1.0.0",
- "@langchain/langgraph": "workspace:*",
- "@langchain/langgraph-checkpoint": "workspace:*",
- "@langchain/langgraph-sdk": "workspace:*",
  "@types/babel__code-frame": "^7.0.6",
  "@types/node": "^18.15.11",
  "@types/react": "^19.0.8",
@@ -111,6 +97,20 @@
  "prettier": "^2.8.3",
  "typescript": "^4.9.5 || ^5.4.5",
  "vitest": "^3.2.4",
- "wait-port": "^1.1.0"
+ "wait-port": "^1.1.0",
+ "@langchain/langgraph": "1.0.14",
+ "@langchain/langgraph-checkpoint": "1.0.0",
+ "@langchain/langgraph-sdk": "1.4.6"
+ },
+ "scripts": {
+ "clean": "rm -rf dist/ .turbo/ ./tests/graphs/.langgraph_api/",
+ "build": "pnpm turbo build:internal --filter=@langchain/langgraph-api",
+ "build:internal": "pnpm clean && node scripts/build.mjs",
+ "dev": "tsx ./tests/utils.server.mts --dev",
+ "prepublish": "pnpm build",
+ "typecheck": "tsc --noEmit",
+ "test": "vitest run",
+ "format": "prettier --write .",
+ "format:check": "prettier --check ."
  }
- }
+ }