@langchain/langgraph-api 0.0.10
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- package/LICENSE +21 -0
- package/README.md +3 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +143 -0
- package/dist/cli/entrypoint.mjs +42 -0
- package/dist/cli/spawn.d.mts +14 -0
- package/dist/cli/spawn.mjs +34 -0
- package/dist/cli/utils/ipc/client.mjs +47 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +72 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +309 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +83 -0
- package/dist/logging.mjs +100 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +93 -0
- package/dist/schemas.mjs +407 -0
- package/dist/server.mjs +74 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +127 -0
- package/dist/storage/importMap.mjs +55 -0
- package/dist/storage/ops.mjs +792 -0
- package/dist/storage/persist.mjs +78 -0
- package/dist/storage/store.mjs +41 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +56 -0
package/dist/storage/persist.mjs
ADDED
@@ -0,0 +1,78 @@
import * as path from "node:path";
import * as fs from "node:fs/promises";
import * as superjson from "superjson";
import * as importMap from "./importMap.mjs";
import { load } from "@langchain/core/load";
// Add custom transformers for Uint8Array
superjson.registerCustom({
    isApplicable: (v) => v instanceof Uint8Array,
    serialize: (v) => Buffer.from(v).toString("base64"),
    deserialize: (v) => new Uint8Array(Buffer.from(v, "base64")),
}, "Uint8Array");
export function serialize(data) {
    return superjson.stringify(data);
}
export async function deserialize(input) {
    const result = await load(input, { importMap });
    return superjson.deserialize(result);
}
export class FileSystemPersistence {
    filepath = null;
    data = null;
    defaultSchema;
    name;
    flushTimeout = undefined;
    constructor(name, defaultSchema) {
        this.name = name;
        this.defaultSchema = defaultSchema;
    }
    async initialize(cwd) {
        this.filepath = path.resolve(cwd, ".langgraph_api", `${this.name}`);
        try {
            this.data = await deserialize(await fs.readFile(this.filepath, "utf-8"));
        }
        catch {
            this.data = this.defaultSchema();
        }
        await fs
            .mkdir(path.dirname(this.filepath), { recursive: true })
            .catch(() => void 0);
        return this;
    }
    async persist() {
        if (this.data == null || this.filepath == null)
            return;
        clearTimeout(this.flushTimeout);
        await fs.writeFile(this.filepath, serialize(this.data), "utf-8");
    }
    schedulePersist() {
        clearTimeout(this.flushTimeout);
        this.flushTimeout = setTimeout(() => this.persist(), 3000);
    }
    async flush() {
        await this.persist();
    }
    async with(fn) {
        if (this.filepath == null || this.data == null) {
            throw new Error(`${this.name} not initialized`);
        }
        try {
            return await fn(this.data);
        }
        finally {
            this.schedulePersist();
        }
    }
    async *withGenerator(fn) {
        if (this.filepath == null || this.data == null) {
            throw new Error(`${this.name} not initialized`);
        }
        try {
            const gen = typeof fn === "function" ? fn(this.data) : fn;
            yield* gen;
        }
        finally {
            this.schedulePersist();
        }
    }
}
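For illustration, a minimal usage sketch (not part of the package, and assuming a deep import into dist/ is acceptable since the package defines no exports map): construct FileSystemPersistence with a file name and a default-schema factory, initialize it against a working directory, mutate state inside with(), and flush() before shutdown.

// Hypothetical consumer of FileSystemPersistence (assumed import path).
import { FileSystemPersistence } from "@langchain/langgraph-api/dist/storage/persist.mjs";

const persistence = new FileSystemPersistence(
    ".langgraphjs_api.example.json",   // hypothetical file, stored under <cwd>/.langgraph_api/
    () => ({ entries: new Map() })     // default schema used when no file exists yet
);

await persistence.initialize(process.cwd());

// Mutations go through with(); a debounced persist (3s) is scheduled afterwards.
await persistence.with((data) => {
    data.entries.set("thread-1", { status: "idle" });
});

// Force any pending write before exiting.
await persistence.flush();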
package/dist/storage/store.mjs
ADDED
@@ -0,0 +1,41 @@
import { InMemoryStore as BaseMemoryStore, } from "@langchain/langgraph";
import { FileSystemPersistence } from "./persist.mjs";
const conn = new FileSystemPersistence(".langgraphjs_api.store.json", () => ({
    data: new Map(),
    vectors: new Map(),
}));
class InMemoryStore extends BaseMemoryStore {
    async initialize(cwd) {
        await conn.initialize(cwd);
        await conn.with(({ data, vectors }) => {
            Object.assign(this, { data, vectors });
        });
        return conn;
    }
    async clear() {
        await conn.with(({ data, vectors }) => {
            data.clear();
            vectors.clear();
        });
    }
    async batch(operations) {
        return await conn.with(() => super.batch(operations));
    }
    async get(...args) {
        return await conn.with(() => super.get(...args));
    }
    async search(...args) {
        return await conn.with(() => super.search(...args));
    }
    async put(...args) {
        return await conn.with(() => super.put(...args));
    }
    async listNamespaces(...args) {
        return await conn.with(() => super.listNamespaces(...args));
    }
    toJSON() {
        // Prevent serialization of internal state
        return "[InMemoryStore]";
    }
}
export const store = new InMemoryStore();
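A usage sketch (not from the package): the exported store singleton behaves like LangGraph's InMemoryStore but wraps every operation in conn.with(), so writes are periodically persisted to .langgraph_api/ once initialized. The put/get signatures below follow the upstream BaseStore API as I understand it; the import path is an assumption.

import { store } from "@langchain/langgraph-api/dist/storage/store.mjs";

await store.initialize(process.cwd());

// Namespaced key/value writes are delegated to the base InMemoryStore,
// with a debounced file persist scheduled after each call.
await store.put(["assistants", "my-assistant"], "prefs", { theme: "dark" });
const item = await store.get(["assistants", "my-assistant"], "prefs");
console.log(item?.value); // { theme: "dark" }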
package/dist/stream.mjs
ADDED
@@ -0,0 +1,215 @@
import { getGraph } from "./graph/load.mjs";
import { Client as LangSmithClient } from "langsmith";
import { Command, Send, } from "@langchain/langgraph";
import { runnableConfigToCheckpoint, taskRunnableConfigToCheckpoint, } from "./utils/runnableConfig.mjs";
import { BaseMessageChunk, isBaseMessage } from "@langchain/core/messages";
import { logger } from "./logging.mjs";
const getLangGraphCommand = (command) => {
    let goto = command.goto != null && !Array.isArray(command.goto)
        ? [command.goto]
        : command.goto;
    return new Command({
        goto: goto?.map((item) => {
            if (typeof item !== "string")
                return new Send(item.node, item.input);
            return item;
        }),
        update: command.update,
        resume: command.resume,
    });
};
const isRunnableConfig = (config) => {
    if (typeof config !== "object" || config == null)
        return false;
    return ("configurable" in config &&
        typeof config.configurable === "object" &&
        config.configurable != null);
};
function preprocessDebugCheckpointTask(task) {
    if (!isRunnableConfig(task.state) ||
        !taskRunnableConfigToCheckpoint(task.state)) {
        return task;
    }
    const cloneTask = { ...task };
    cloneTask.checkpoint = taskRunnableConfigToCheckpoint(task.state);
    delete cloneTask.state;
    return cloneTask;
}
const isConfigurablePresent = (config) => typeof config === "object" &&
    config != null &&
    "configurable" in config &&
    typeof config.configurable === "object" &&
    config.configurable != null;
const deleteInternalConfigurableFields = (config) => {
    if (isConfigurablePresent(config)) {
        const newConfig = {
            ...config,
            configurable: Object.fromEntries(Object.entries(config.configurable).filter(([key]) => !key.startsWith("__"))),
        };
        delete newConfig.callbacks;
        return newConfig;
    }
    return config;
};
function preprocessDebugCheckpoint(payload) {
    const result = {
        ...payload,
        checkpoint: runnableConfigToCheckpoint(payload["config"]),
        parent_checkpoint: runnableConfigToCheckpoint(payload["parentConfig"]),
        tasks: payload["tasks"].map(preprocessDebugCheckpointTask),
    };
    // Handle LangGraph JS pascalCase vs snake_case
    // TODO: use stream to LangGraph.JS
    result.parent_config = payload["parentConfig"];
    delete result.parentConfig;
    result.config = deleteInternalConfigurableFields(result.config);
    result.parent_config = deleteInternalConfigurableFields(result.parent_config);
    return result;
}
export async function* streamState(run, attempt = 1, options) {
    const kwargs = run.kwargs;
    const graphId = kwargs.config?.configurable?.graph_id;
    if (!graphId || typeof graphId !== "string") {
        throw new Error("Invalid or missing graph_id");
    }
    const graph = getGraph(graphId, {
        checkpointer: kwargs.temporary ? null : undefined,
    });
    const userStreamMode = kwargs.stream_mode ?? [];
    const libStreamMode = new Set(userStreamMode.filter((mode) => mode !== "events" && mode !== "messages-tuple") ?? []);
    if (userStreamMode.includes("messages-tuple")) {
        libStreamMode.add("messages");
    }
    if (userStreamMode.includes("messages")) {
        libStreamMode.add("values");
    }
    if (!libStreamMode.has("debug"))
        libStreamMode.add("debug");
    yield {
        event: "metadata",
        data: { run_id: run.run_id, attempt },
    };
    const metadata = {
        ...kwargs.config?.metadata,
        run_attempt: attempt,
        // TODO: get langgraph version from NPM / load.hooks.mjs
        langgraph_version: "0.2.35",
        langgraph_plan: "developer",
        langgraph_host: "self-hosted",
    };
    const events = graph.streamEvents(kwargs.command != null
        ? getLangGraphCommand(kwargs.command)
        : (kwargs.input ?? null), {
        version: "v2",
        interruptAfter: kwargs.interrupt_after,
        interruptBefore: kwargs.interrupt_before,
        tags: kwargs.config?.tags,
        configurable: kwargs.config?.configurable,
        recursionLimit: kwargs.config?.recursion_limit,
        subgraphs: kwargs.subgraphs,
        metadata,
        runId: run.run_id,
        streamMode: [...libStreamMode],
        signal: options?.signal,
    });
    const messages = {};
    const completedIds = new Set();
    for await (const event of events) {
        if (event.tags?.includes("langsmith:hidden"))
            continue;
        if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
            const [ns, mode, chunk] = (kwargs.subgraphs ? event.data.chunk : [null, ...event.data.chunk]);
            // Listen for debug events and capture checkpoint
            let data = chunk;
            if (mode === "debug") {
                const debugChunk = chunk;
                if (debugChunk.type === "checkpoint") {
                    const debugCheckpoint = preprocessDebugCheckpoint(debugChunk.payload);
                    options?.onCheckpoint?.(debugCheckpoint);
                    data = { ...debugChunk, payload: debugCheckpoint };
                }
                else if (debugChunk.type === "task_result") {
                    const debugResult = preprocessDebugCheckpointTask(debugChunk.payload);
                    options?.onTaskResult?.(debugResult);
                    data = { ...debugChunk, payload: debugResult };
                }
            }
            if (mode === "messages") {
                if (userStreamMode.includes("messages-tuple")) {
                    yield { event: "messages", data };
                }
            }
            else if (mode === "custom") {
                logger.warn("unhandled custom mode", { mode, chunk });
            }
            else if (userStreamMode.includes(mode)) {
                if (kwargs.subgraphs && ns?.length) {
                    yield { event: `${mode}|${ns.join("|")}`, data };
                }
                else {
                    yield { event: mode, data };
                }
            }
        }
        else if (userStreamMode.includes("events")) {
            yield { event: "events", data: event };
        }
        // TODO: we still rely on old messages mode based of streamMode=values
        // In order to fully switch to library messages mode, we need to do ensure that
        // `StreamMessagesHandler` sends the final message, which requires the following:
        // - handleLLMEnd does not send the final message b/c handleLLMNewToken sets the this.emittedChatModelRunIds[runId] flag. Python does not do that
        // - handleLLMEnd receives the final message as BaseMessageChunk rather than BaseMessage, which from the outside will become indistinguishable.
        // - handleLLMEnd should not dedupe the message
        // - Don't think there's an utility that would convert a BaseMessageChunk to a BaseMessage?
        if (userStreamMode.includes("messages")) {
            if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
                const newMessages = [];
                const [_, chunk] = event.data.chunk;
                let chunkMessages = [];
                if (typeof chunk === "object" &&
                    chunk != null &&
                    "messages" in chunk &&
                    !isBaseMessage(chunk)) {
                    chunkMessages = chunk?.messages;
                }
                if (!Array.isArray(chunkMessages)) {
                    chunkMessages = [chunkMessages];
                }
                for (const message of chunkMessages) {
                    if (!message.id || completedIds.has(message.id))
                        continue;
                    completedIds.add(message.id);
                    newMessages.push(message);
                }
                if (newMessages.length > 0) {
                    yield { event: "messages/complete", data: newMessages };
                }
            }
            else if (event.event === "on_chat_model_stream" &&
                !event.tags?.includes("nostream")) {
                const message = event.data.chunk;
                if (!message.id)
                    continue;
                if (messages[message.id] == null) {
                    messages[message.id] = message;
                    yield {
                        event: "messages/metadata",
                        data: { [message.id]: { metadata: event.metadata } },
                    };
                }
                else {
                    messages[message.id] = messages[message.id].concat(message);
                }
                yield { event: "messages/partial", data: [messages[message.id]] };
            }
        }
    }
    if (kwargs.feedback_keys) {
        const client = new LangSmithClient();
        const data = Object.fromEntries(await Promise.all(kwargs.feedback_keys.map(async (feedback) => {
            const { url } = await client.createPresignedFeedbackToken(run.run_id, feedback);
            return [feedback, url];
        })));
        yield { event: "feedback", data };
    }
}
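An illustrative sketch (not from the package) of consuming streamState: it is an async generator of { event, data } pairs, so a caller such as the runs queue can forward each pair as a server-sent event. The run object shape below is an assumption based on the fields the function reads (run_id, kwargs.config, kwargs.input, kwargs.stream_mode), it presumes a graph with id "agent" has already been registered by the graph loader, and the import path is assumed.

import { streamState } from "@langchain/langgraph-api/dist/stream.mjs";

// Hypothetical run object; only the fields read by streamState are filled in.
const run = {
    run_id: "00000000-0000-0000-0000-000000000000",
    kwargs: {
        input: { messages: [{ role: "user", content: "hi" }] },
        stream_mode: ["values", "messages-tuple"],
        config: { configurable: { graph_id: "agent", thread_id: "thread-1" } },
        subgraphs: false,
    },
};

const controller = new AbortController();
for await (const { event, data } of streamState(run, 1, {
    signal: controller.signal,
    onCheckpoint: (checkpoint) => console.log("checkpoint", checkpoint.checkpoint),
})) {
    // e.g. forward as SSE: `event: values` / `data: {...}`
    console.log(event, JSON.stringify(data)?.slice(0, 80));
}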
package/dist/utils/abort.mjs
ADDED
@@ -0,0 +1,8 @@
export const combineAbortSignals = (...input) => {
    const signals = input.filter((item) => item != null);
    if ("any" in AbortSignal)
        return AbortSignal.any(signals);
    const abortController = new AbortController();
    signals.forEach((signal) => signal.addEventListener("abort", () => abortController.abort()));
    return abortController.signal;
};
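A short sketch (not from the package): combining a client-disconnect signal with a cancellation signal. Null and undefined entries are filtered out, and on runtimes without AbortSignal.any the helper falls back to manual abort listeners. The import path is assumed.

import { combineAbortSignals } from "@langchain/langgraph-api/dist/utils/abort.mjs";

const disconnect = new AbortController(); // e.g. fired when the HTTP client goes away
const cancel = new AbortController();     // e.g. fired by a cancel endpoint

const signal = combineAbortSignals(disconnect.signal, cancel.signal, undefined);
signal.addEventListener("abort", () => console.log("run aborted"));

cancel.abort(); // either source aborts the combined signal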
package/dist/utils/hono.mjs
ADDED
@@ -0,0 +1,27 @@
import { serialiseAsDict } from "./serde.mjs";
import { stream } from "hono/streaming";
import { StreamingApi } from "hono/utils/stream";
export function jsonExtra(c, object) {
    return new Response(serialiseAsDict(object), {
        ...c.res,
        headers: { ...c.res.headers, "Content-Type": "application/json" },
    });
}
export function waitKeepAlive(c, promise) {
    return stream(c, async (stream) => {
        // keep sending newlines until we resolved the chunk
        let keepAlive = Promise.resolve();
        const timer = setInterval(() => {
            keepAlive = keepAlive.then(() => stream.write("\n"));
        }, 1000);
        const result = await promise;
        clearInterval(timer);
        await keepAlive;
        await stream.write(serialiseAsDict(result));
    });
}
export const getDisconnectAbortSignal = (c, stream) => {
    // https://github.com/honojs/hono/issues/1770
    stream.onAbort(() => { });
    return c.req.raw.signal;
};
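An illustrative set of routes (not from the package) showing how these helpers might be wired into a Hono app: jsonExtra serializes LangChain objects in a JSON response, waitKeepAlive keeps a connection open with newline heartbeats while a long-running promise resolves, and getDisconnectAbortSignal ties a streaming handler to client disconnects. Route paths, doLongRunningWork, and the import path are assumptions.

import { Hono } from "hono";
import { streamSSE } from "hono/streaming";
import { jsonExtra, waitKeepAlive, getDisconnectAbortSignal } from "@langchain/langgraph-api/dist/utils/hono.mjs";

// Hypothetical placeholder for some slow operation.
async function doLongRunningWork(id) {
    return { thread_id: id, status: "done" };
}

const app = new Hono();

// Plain JSON response with LangChain-aware serialization.
app.get("/state", (c) => jsonExtra(c, { messages: [] }));

// Hold the connection open (newline keep-alives) until the work finishes.
app.post("/threads/:id/wait", (c) => waitKeepAlive(c, doLongRunningWork(c.req.param("id"))));

// Streaming endpoint that can abort work when the client disconnects.
app.get("/stream", (c) => streamSSE(c, async (stream) => {
    const signal = getDisconnectAbortSignal(c, stream);
    await stream.writeSSE({ event: "metadata", data: JSON.stringify({ ok: true }) });
    // ... pass `signal` into streamState(...) and forward its events here
}));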
package/dist/utils/importMap.mjs
ADDED
@@ -0,0 +1,55 @@
import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate, } from "@langchain/core/prompts";
import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk, } from "@langchain/core/messages";
import { StringPromptValue } from "@langchain/core/prompt_values";
export const prompts__prompt = {
    PromptTemplate,
};
export const schema__messages = {
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    BaseMessageChunk,
    ChatMessage,
    ChatMessageChunk,
    FunctionMessage,
    FunctionMessageChunk,
    HumanMessage,
    HumanMessageChunk,
    SystemMessage,
    SystemMessageChunk,
    ToolMessage,
    ToolMessageChunk,
};
export const schema = {
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    BaseMessageChunk,
    ChatMessage,
    ChatMessageChunk,
    FunctionMessage,
    FunctionMessageChunk,
    HumanMessage,
    HumanMessageChunk,
    SystemMessage,
    SystemMessageChunk,
    ToolMessage,
    ToolMessageChunk,
};
export const prompts__chat = {
    AIMessagePromptTemplate,
    ChatMessagePromptTemplate,
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
};
export const prompts__image = {
    ImagePromptTemplate,
};
export const prompts__pipeline = {
    PipelinePromptTemplate,
};
export const prompts__base = {
    StringPromptValue,
};
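A short sketch (not from the package) of how this module is consumed: persist.mjs passes it as the importMap option to load from @langchain/core/load, so serialized LangChain constructors (messages, prompt templates) can be revived when persisted state is read back from disk. The serialized payload below is illustrative of the general { lc, type, id, kwargs } format rather than an exact captured value, and the import path is assumed.

import { load } from "@langchain/core/load";
import * as importMap from "@langchain/langgraph-api/dist/utils/importMap.mjs";

// Illustrative serialized AIMessage in @langchain/core's serialization format.
const serialized = JSON.stringify({
    lc: 1,
    type: "constructor",
    id: ["langchain_core", "messages", "AIMessage"],
    kwargs: { content: "Hello!" },
});

const message = await load(serialized, { importMap });
console.log(message.content); // "Hello!"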
package/dist/utils/runnableConfig.mjs
ADDED
@@ -0,0 +1,45 @@
import { z } from "zod";
const ConfigSchema = z.object({
    configurable: z.object({
        thread_id: z.string(),
        checkpoint_id: z.string(),
        checkpoint_ns: z.string().nullish(),
        checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
    }),
});
export const runnableConfigToCheckpoint = (config) => {
    if (!config || !config.configurable || !config.configurable.thread_id) {
        return null;
    }
    const parsed = ConfigSchema.safeParse(config);
    if (!parsed.success)
        return null;
    return {
        thread_id: parsed.data.configurable.thread_id,
        checkpoint_id: parsed.data.configurable.checkpoint_id,
        checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
        checkpoint_map: parsed.data.configurable.checkpoint_map || null,
    };
};
const TaskConfigSchema = z.object({
    configurable: z.object({
        thread_id: z.string(),
        checkpoint_id: z.string().nullish(),
        checkpoint_ns: z.string().nullish(),
        checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
    }),
});
export const taskRunnableConfigToCheckpoint = (config) => {
    if (!config || !config.configurable || !config.configurable.thread_id) {
        return null;
    }
    const parsed = TaskConfigSchema.safeParse(config);
    if (!parsed.success)
        return null;
    return {
        thread_id: parsed.data.configurable.thread_id,
        checkpoint_id: parsed.data.configurable.checkpoint_id || null,
        checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
        checkpoint_map: parsed.data.configurable.checkpoint_map || null,
    };
};
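For illustration (not from the package), the mapping these helpers perform: a RunnableConfig carrying checkpoint coordinates in configurable is flattened into a checkpoint descriptor, and anything that fails the zod schema comes back as null. The checkpoint id and import path below are hypothetical.

import { runnableConfigToCheckpoint, taskRunnableConfigToCheckpoint } from "@langchain/langgraph-api/dist/utils/runnableConfig.mjs";

const config = {
    configurable: {
        thread_id: "thread-1",
        checkpoint_id: "checkpoint-123", // hypothetical checkpoint id
    },
};

console.log(runnableConfigToCheckpoint(config));
// { thread_id: "thread-1", checkpoint_id: "checkpoint-123", checkpoint_ns: "", checkpoint_map: null }

// The task variant tolerates a missing checkpoint_id, which the stricter schema rejects:
console.log(runnableConfigToCheckpoint({ configurable: { thread_id: "thread-1" } })); // null
console.log(taskRunnableConfigToCheckpoint({ configurable: { thread_id: "thread-1" } }));
// { thread_id: "thread-1", checkpoint_id: null, checkpoint_ns: "", checkpoint_map: null }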
package/dist/utils/serde.mjs
ADDED
@@ -0,0 +1,20 @@
export const serialiseAsDict = (obj) => {
    return JSON.stringify(obj, function (key, value) {
        const rawValue = this[key];
        if (rawValue != null &&
            typeof rawValue === "object" &&
            "toDict" in rawValue &&
            typeof rawValue.toDict === "function") {
            // TODO: we need to upstream this to LangChainJS
            const { type, data } = rawValue.toDict();
            return { ...data, type };
        }
        return value;
    }, 2);
};
export const serializeError = (error) => {
    if (error instanceof Error) {
        return { error: error.name, message: error.message };
    }
    return { error: "Error", message: JSON.stringify(error) };
};
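A small sketch (not from the package) of what the custom replacer does: LangChain messages expose toDict(), so serialiseAsDict flattens them into plain { ...data, type } objects instead of their default JSON shape, and serializeError normalizes thrown values. The import path is assumed.

import { AIMessage } from "@langchain/core/messages";
import { serialiseAsDict, serializeError } from "@langchain/langgraph-api/dist/utils/serde.mjs";

console.log(serialiseAsDict({ messages: [new AIMessage("Hello!")] }));
// messages[0] is emitted roughly as { content: "Hello!", ..., type: "ai" }

console.log(serializeError(new TypeError("bad input")));
// { error: "TypeError", message: "bad input" }

console.log(serializeError("something broke"));
// { error: "Error", message: "\"something broke\"" }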
package/package.json
ADDED
@@ -0,0 +1,56 @@
{
  "name": "@langchain/langgraph-api",
  "version": "0.0.10",
  "type": "module",
  "engines": {
    "node": ">=18"
  },
  "license": "MIT",
  "main": "./dist/cli/spawn.mjs",
  "files": [
    "dist/"
  ],
  "dependencies": {
    "@babel/code-frame": "^7.26.2",
    "@hono/node-server": "^1.12.0",
    "@hono/zod-validator": "^0.2.2",
    "@types/json-schema": "^7.0.15",
    "@typescript/vfs": "^1.6.0",
    "dedent": "^1.5.3",
    "dotenv": "^16.4.7",
    "exit-hook": "^4.0.0",
    "hono": "^4.5.4",
    "langsmith": "^0.2.15",
    "open": "^10.1.0",
    "stacktrace-parser": "^0.1.10",
    "superjson": "^2.2.2",
    "tsx": "^4.19.2",
    "uuid": "^10.0.0",
    "winston": "^3.17.0",
    "winston-console-format": "^1.0.8",
    "zod": "^3.23.8"
  },
  "peerDependencies": {
    "@langchain/core": "^0.3.36",
    "@langchain/langgraph": "^0.2.43",
    "@langchain/langgraph-checkpoint": "^0.0.14",
    "typescript": "^5.5.4"
  },
  "devDependencies": {
    "@langchain/langgraph-sdk": "^0.0.33",
    "@types/babel__code-frame": "^7.0.6",
    "@types/node": "^22.2.0",
    "@types/uuid": "^10.0.0",
    "postgres": "^3.4.5",
    "prettier": "^3.3.3",
    "vitest": "^3.0.5"
  },
  "scripts": {
    "build": "npx -y bun scripts/build.mjs",
    "dev": "tsx ./tests/utils.server.mts",
    "typecheck": "tsc --noEmit",
    "test": "vitest",
    "test:parser": "vitest run ./tests/parser.test.mts --testTimeout 15000",
    "test:api": "npx -y bun scripts/test.mjs"
  }
}