@langchain/langgraph-api 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,74 @@
+ import { serve } from "@hono/node-server";
+ import { Hono } from "hono";
+ import { cors } from "hono/cors";
+ import { registerFromEnv } from "./graph/load.mjs";
+ import runs from "./api/runs.mjs";
+ import threads from "./api/threads.mjs";
+ import assistants from "./api/assistants.mjs";
+ import store from "./api/store.mjs";
+ import { truncate, conn as opsConn } from "./storage/ops.mjs";
+ import { zValidator } from "@hono/zod-validator";
+ import { z } from "zod";
+ import { queue } from "./queue.mjs";
+ import { logger, requestLogger } from "./logging.mjs";
+ import { checkpointer } from "./storage/checkpoint.mjs";
+ import { store as graphStore } from "./storage/store.mjs";
+ const app = new Hono();
+ // This is used to match the behavior of the original LangGraph API
+ // where the content-type is not being validated. Might be nice
+ // to warn about this in the future and throw an error instead.
+ app.use(async (c, next) => {
+     if (c.req.header("content-type")?.startsWith("text/plain") &&
+         c.req.method !== "GET" &&
+         c.req.method !== "OPTIONS") {
+         c.req.raw.headers.set("content-type", "application/json");
+     }
+     await next();
+ });
+ app.use(cors());
+ app.use(requestLogger());
+ app.route("/", assistants);
+ app.route("/", runs);
+ app.route("/", threads);
+ app.route("/", store);
+ app.get("/info", (c) => c.json({ flags: { assistants: true, crons: false } }));
+ app.post("/internal/truncate", zValidator("json", z.object({
+     runs: z.boolean().optional(),
+     threads: z.boolean().optional(),
+     assistants: z.boolean().optional(),
+     checkpointer: z.boolean().optional(),
+     store: z.boolean().optional(),
+ })), (c) => {
+     const { runs, threads, assistants, checkpointer, store } = c.req.valid("json");
+     truncate({ runs, threads, assistants, checkpointer, store });
+     return c.json({ ok: true });
+ });
+ export const StartServerSchema = z.object({
+     port: z.number(),
+     nWorkers: z.number(),
+     host: z.string(),
+     cwd: z.string(),
+     graphs: z.record(z.string()),
+ });
+ export async function startServer(options) {
+     logger.info(`Initializing storage...`);
+     const callbacks = await Promise.all([
+         opsConn.initialize(options.cwd),
+         checkpointer.initialize(options.cwd),
+         graphStore.initialize(options.cwd),
+     ]);
+     const cleanup = async () => {
+         logger.info(`Flushing to persistent storage, exiting...`);
+         await Promise.all(callbacks.map((c) => c.flush()));
+     };
+     logger.info(`Registering graphs from ${options.cwd}`);
+     await registerFromEnv(options.graphs, { cwd: options.cwd });
+     logger.info(`Starting ${options.nWorkers} workers`);
+     for (let i = 0; i < options.nWorkers; i++)
+         queue();
+     return new Promise((resolve) => {
+         serve({ fetch: app.fetch, port: options.port, hostname: options.host }, (c) => {
+             resolve({ host: `${c.address}:${c.port}`, cleanup });
+         });
+     });
+ }
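
For orientation, a minimal sketch of how this entry point could be driven, assuming it is imported as a local module. The import path is a placeholder (the file name is truncated in this hunk header), the example values are invented, and the graphs spec format is an assumption modeled on langgraph.json-style "file:export" entries; the options shape follows StartServerSchema above.

import { startServer, StartServerSchema } from "./server.mjs"; // placeholder path; the actual file name is not shown in this diff

const options = StartServerSchema.parse({
    port: 9123,
    nWorkers: 1,
    host: "localhost",
    cwd: process.cwd(),
    // graph id -> "<file>:<export>" spec consumed by registerFromEnv (assumed format)
    graphs: { agent: "./src/agent.mts:graph" },
});

const { host, cleanup } = await startServer(options);
console.log(`Listening on ${host}`);

// Flush the in-memory stores to their JSON files before exiting.
process.on("SIGINT", async () => {
    await cleanup();
    process.exit(0);
});
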
package/dist/state.mjs ADDED
@@ -0,0 +1,32 @@
+ import { runnableConfigToCheckpoint } from "./utils/runnableConfig.mjs";
+ import { serializeError } from "./utils/serde.mjs";
+ const isStateSnapshot = (state) => {
+     return "values" in state && "next" in state;
+ };
+ export const stateSnapshotToThreadState = (state) => {
+     return {
+         values: state.values,
+         next: state.next,
+         tasks: state.tasks.map((task) => ({
+             id: task.id,
+             name: task.name,
+             error: task.error != null ? serializeError(task.error).message : null,
+             interrupts: task.interrupts,
+             path: task.path,
+             // TODO: too many type assertions, check if this is actually correct
+             checkpoint: task.state != null && "configurable" in task.state
+                 ? (task.state.configurable ?? null)
+                 : null,
+             state: task.state != null && isStateSnapshot(task.state)
+                 ? stateSnapshotToThreadState(task.state)
+                 : null,
+             // TODO: add missing result to the library
+             // @ts-expect-error
+             result: task.result ?? null,
+         })),
+         metadata: state.metadata,
+         created_at: state.createdAt ? new Date(state.createdAt) : null,
+         checkpoint: runnableConfigToCheckpoint(state.config),
+         parent_checkpoint: runnableConfigToCheckpoint(state.parentConfig),
+     };
+ };
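
A hedged illustration of the mapping stateSnapshotToThreadState performs: a hand-built, snapshot-shaped object (the values are illustrative, not produced by a real run) is converted so the camelCase snapshot fields become the snake_case thread-state fields and the configs are reduced to checkpoints via runnableConfigToCheckpoint.

import { stateSnapshotToThreadState } from "./state.mjs";

// Minimal object shaped like a LangGraph StateSnapshot (illustrative values only).
const snapshot = {
    values: { messages: [] },
    next: ["agent"],
    tasks: [],
    metadata: { source: "loop", step: 1 },
    createdAt: "2024-01-01T00:00:00.000Z",
    config: { configurable: { thread_id: "thread-1", checkpoint_ns: "", checkpoint_id: "ckpt-1" } },
    parentConfig: undefined,
};

const threadState = stateSnapshotToThreadState(snapshot);
// threadState.created_at is a Date, threadState.checkpoint comes from the config's
// configurable, and each task (none here) would be remapped field by field.
console.log(threadState);
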
@@ -0,0 +1,127 @@
+ import { MemorySaver, } from "@langchain/langgraph";
+ import { FileSystemPersistence } from "./persist.mjs";
+ const EXCLUDED_KEYS = ["checkpoint_ns", "checkpoint_id", "run_id", "thread_id"];
+ const textDecoder = new TextDecoder();
+ const textEncoder = new TextEncoder();
+ const WriteKey = {
+     serialize: (key) => {
+         return JSON.stringify(key);
+     },
+     deserialize: (key) => {
+         const [threadId, checkpointNamespace, checkpointId] = JSON.parse(key);
+         return [threadId, checkpointNamespace, checkpointId];
+     },
+ };
+ const conn = new FileSystemPersistence(".langgraphjs_api.checkpointer.json", () => ({
+     storage: {},
+     writes: {},
+ }));
+ class InMemorySaver extends MemorySaver {
+     async initialize(cwd) {
+         await conn.initialize(cwd);
+         await conn.with(({ storage, writes }) => {
+             this.storage = storage;
+             this.writes = writes;
+         });
+         return conn;
+     }
+     clear() {
+         // { [threadId: string]: { [checkpointNs: string]: { [checkpointId]: [checkpoint, metadata, parentId] } }}
+         this.storage = {};
+         // { [WriteKey]: CheckpointPendingWrite[] }
+         this.writes = {};
+     }
+     // Patch every method that has access to this.storage or this.writes
+     // to also persist the changes to the filesystem in a non-blocking manner
+     async getTuple(...args) {
+         return await conn.with(() => super.getTuple(...args));
+     }
+     async *list(...args) {
+         yield* conn.withGenerator(super.list(...args));
+     }
+     async putWrites(...args) {
+         return await conn.with(() => super.putWrites(...args));
+     }
+     async put(config, checkpoint, metadata) {
+         return await conn.with(() => super.put(config, checkpoint, {
+             ...Object.fromEntries(Object.entries(config.configurable ?? {}).filter(([key]) => !key.startsWith("__") && !EXCLUDED_KEYS.includes(key))),
+             ...config.metadata,
+             ...metadata,
+         }));
+     }
+     async delete(threadId, runId) {
+         if (this.storage[threadId] == null)
+             return;
+         return await conn.with(() => {
+             if (runId != null) {
+                 const writeKeysToDelete = [];
+                 for (const ns of Object.keys(this.storage[threadId])) {
+                     for (const id of Object.keys(this.storage[threadId][ns])) {
+                         const [_checkpoint, metadata, _parentId] = this.storage[threadId][ns][id];
+                         const jsonMetadata = JSON.parse(textDecoder.decode(metadata));
+                         if (jsonMetadata.run_id === runId) {
+                             delete this.storage[threadId][ns][id];
+                             writeKeysToDelete.push(WriteKey.serialize([threadId, ns, id]));
+                             if (Object.keys(this.storage[threadId][ns]).length === 0) {
+                                 delete this.storage[threadId][ns];
+                             }
+                         }
+                     }
+                 }
+                 for (const key of writeKeysToDelete) {
+                     delete this.writes[key];
+                 }
+             }
+             else {
+                 delete this.storage[threadId];
+                 // delete all writes for this thread
+                 const writeKeys = Object.keys(this.writes);
+                 for (const key of writeKeys) {
+                     const [writeThreadId] = WriteKey.deserialize(key);
+                     if (writeThreadId === threadId)
+                         delete this.writes[key];
+                 }
+             }
+         });
+     }
+     async copy(threadId, newThreadId) {
+         return await conn.with(() => {
+             // copy storage over
+             const newThreadCheckpoints = {};
+             for (const oldNs of Object.keys(this.storage[threadId] ?? {})) {
+                 const newNs = oldNs.replace(threadId, newThreadId);
+                 for (const oldId of Object.keys(this.storage[threadId][oldNs])) {
+                     const newId = oldId.replace(threadId, newThreadId);
+                     const [checkpoint, metadata, oldParentId] = this.storage[threadId][oldNs][oldId];
+                     const newParentId = oldParentId?.replace(threadId, newThreadId);
+                     const rawMetadata = textDecoder
+                         .decode(metadata)
+                         .replaceAll(threadId, newThreadId);
+                     newThreadCheckpoints[newNs] ??= {};
+                     newThreadCheckpoints[newNs][newId] = [
+                         checkpoint,
+                         textEncoder.encode(rawMetadata),
+                         newParentId,
+                     ];
+                 }
+             }
+             this.storage[newThreadId] = newThreadCheckpoints;
+             // copy writes over (if any)
+             const outerKeys = [];
+             for (const keyJson of Object.keys(this.writes)) {
+                 const key = WriteKey.deserialize(keyJson);
+                 if (key[0] === threadId)
+                     outerKeys.push(keyJson);
+             }
+             for (const keyJson of outerKeys) {
+                 const [_threadId, checkpointNamespace, checkpointId] = WriteKey.deserialize(keyJson);
+                 this.writes[WriteKey.serialize([newThreadId, checkpointNamespace, checkpointId])] = structuredClone(this.writes[keyJson]);
+             }
+         });
+     }
+     toJSON() {
+         // Prevent serialization of internal state
+         return "[InMemorySaver]";
+     }
+ }
+ export const checkpointer = new InMemorySaver();
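
Since checkpointer is exported as a singleton, a hedged sketch of driving it directly, e.g. from a maintenance script; the thread ids and the working directory below are placeholders, and every call used here appears in the file above or in the server entry point.

import { checkpointer } from "./storage/checkpoint.mjs"; // path as imported by the server entry point

// Load (or create) .langgraphjs_api.checkpointer.json under the given cwd.
const conn = await checkpointer.initialize(process.cwd());

// Duplicate every checkpoint and pending write from one thread to another;
// thread ids embedded in namespaces and metadata are rewritten along the way.
await checkpointer.copy("thread-a", "thread-b");

// Drop all checkpoints for a thread, or only those tagged with a given run_id.
await checkpointer.delete("thread-a", undefined);

// Flush pending changes to the backing JSON file, as startServer's cleanup does.
await conn.flush();
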
@@ -0,0 +1,55 @@
+ import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate, } from "@langchain/core/prompts";
+ import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk, } from "@langchain/core/messages";
+ import { StringPromptValue } from "@langchain/core/prompt_values";
+ export const prompts__prompt = {
+     PromptTemplate,
+ };
+ export const schema__messages = {
+     AIMessage,
+     AIMessageChunk,
+     BaseMessage,
+     BaseMessageChunk,
+     ChatMessage,
+     ChatMessageChunk,
+     FunctionMessage,
+     FunctionMessageChunk,
+     HumanMessage,
+     HumanMessageChunk,
+     SystemMessage,
+     SystemMessageChunk,
+     ToolMessage,
+     ToolMessageChunk,
+ };
+ export const schema = {
+     AIMessage,
+     AIMessageChunk,
+     BaseMessage,
+     BaseMessageChunk,
+     ChatMessage,
+     ChatMessageChunk,
+     FunctionMessage,
+     FunctionMessageChunk,
+     HumanMessage,
+     HumanMessageChunk,
+     SystemMessage,
+     SystemMessageChunk,
+     ToolMessage,
+     ToolMessageChunk,
+ };
+ export const prompts__chat = {
+     AIMessagePromptTemplate,
+     ChatMessagePromptTemplate,
+     ChatPromptTemplate,
+     HumanMessagePromptTemplate,
+     MessagesPlaceholder,
+     SystemMessagePromptTemplate,
+ };
+ export const prompts__image = {
+     ImagePromptTemplate,
+ };
+ export const prompts__pipeline = {
+     PipelinePromptTemplate,
+ };
+ export const prompts__base = {
+     StringPromptValue,
+ };
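
These maps group @langchain/core constructors by module path (schema__messages mirroring langchain_core.messages, prompts__chat mirroring the chat prompt templates, and so on), which suggests they act as an import map for resolving classes by name; a hedged lookup sketch, with a placeholder import path:

import { schema__messages, prompts__chat } from "./importMap.mjs"; // placeholder path; the file name is not shown in this diff

// Resolve a constructor by its exported name, e.g. when mapping a serialized
// identifier such as ["langchain_core", "messages", "AIMessage"].
const MessageCtor = schema__messages["AIMessage"];
const msg = new MessageCtor({ content: "revived via the export map" });

const TemplateCtor = prompts__chat["ChatPromptTemplate"];
console.log(msg.content, TemplateCtor.name);
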