@langchain/langgraph-cli 0.0.0-preview.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +145 -0
- package/dist/cli/build.mjs +44 -0
- package/dist/cli/cli.mjs +7 -0
- package/dist/cli/dev.entrypoint.mjs +35 -0
- package/dist/cli/dev.mjs +133 -0
- package/dist/cli/dockerfile.mjs +35 -0
- package/dist/cli/utils/builder.mjs +16 -0
- package/dist/cli/utils/ipc/client.mjs +25 -0
- package/dist/cli/utils/ipc/server.mjs +71 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +7 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +18 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/dockerfile.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +71 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +308 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +81 -0
- package/dist/logging.mjs +50 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +91 -0
- package/dist/schemas.mjs +399 -0
- package/dist/server.mjs +63 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +123 -0
- package/dist/storage/ops.mjs +786 -0
- package/dist/storage/persist.mjs +69 -0
- package/dist/storage/store.mjs +37 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/config.mjs +35 -0
- package/dist/utils/error.mjs +1 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +62 -0
package/README.md
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# LangGraph.js API
|
|
2
|
+
|
|
3
|
+
This package implements the LangGraph API for rapid development and testing. Build and iterate on LangGraph.js agents with a tight feedback loop. The server is backed by a predominantly in-memory data store that is persisted to local disk.
|
|
4
|
+
|
|
5
|
+
For production use, see the various [deployment options](https://langchain-ai.github.io/langgraph/concepts/deployment_options/) for the LangGraph API, which are backed by a production-grade database.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
Install the `@langchain/langgraph-api` package via your package manager of choice.
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
npm install @langchain/langgraph-api
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
## Usage
|
|
16
|
+
|
|
17
|
+
Start the development server:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
npm run langgraph dev
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
Your agent's state (threads, runs, assistants, store) persists in memory while the server is running - perfect for development and testing. Each run's state is tracked and can be inspected, making it easy to debug and improve your agent's behavior.
|
|
24
|
+
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { zValidator } from "@hono/zod-validator";
|
|
3
|
+
import { v4 as uuid } from "uuid";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
import { getAssistantId, getGraph, getGraphSchema } from "../graph/load.mjs";
|
|
6
|
+
import { Assistants } from "../storage/ops.mjs";
|
|
7
|
+
import * as schemas from "../schemas.mjs";
|
|
8
|
+
import { HTTPException } from "hono/http-exception";
|
|
9
|
+
const api = new Hono();
|
|
10
|
+
api.post("/assistants", zValidator("json", schemas.AssistantCreate), async (c) => {
|
|
11
|
+
// Create Assistant
|
|
12
|
+
const payload = c.req.valid("json");
|
|
13
|
+
const assistant = await Assistants.put(payload.assistant_id ?? uuid(), {
|
|
14
|
+
config: payload.config ?? {},
|
|
15
|
+
graph_id: payload.graph_id,
|
|
16
|
+
metadata: payload.metadata ?? {},
|
|
17
|
+
if_exists: payload.if_exists ?? "raise",
|
|
18
|
+
name: payload.name ?? "Untitled",
|
|
19
|
+
});
|
|
20
|
+
return c.json(assistant);
|
|
21
|
+
});
|
|
22
|
+
api.post("/assistants/search", zValidator("json", schemas.AssistantSearchRequest), async (c) => {
|
|
23
|
+
// Search Assistants
|
|
24
|
+
const payload = c.req.valid("json");
|
|
25
|
+
const result = [];
|
|
26
|
+
for await (const item of Assistants.search({
|
|
27
|
+
graph_id: payload.graph_id,
|
|
28
|
+
metadata: payload.metadata,
|
|
29
|
+
limit: payload.limit ?? 10,
|
|
30
|
+
offset: payload.offset ?? 0,
|
|
31
|
+
})) {
|
|
32
|
+
result.push(item);
|
|
33
|
+
}
|
|
34
|
+
return c.json(result);
|
|
35
|
+
});
|
|
36
|
+
api.get("/assistants/:assistant_id", async (c) => {
|
|
37
|
+
// Get Assistant
|
|
38
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
39
|
+
return c.json(await Assistants.get(assistantId));
|
|
40
|
+
});
|
|
41
|
+
api.delete("/assistants/:assistant_id", async (c) => {
|
|
42
|
+
// Delete Assistant
|
|
43
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
44
|
+
return c.json(await Assistants.delete(assistantId));
|
|
45
|
+
});
|
|
46
|
+
api.patch("/assistants/:assistant_id", zValidator("json", schemas.AssistantPatch), async (c) => {
|
|
47
|
+
// Patch Assistant
|
|
48
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
49
|
+
const payload = c.req.valid("json");
|
|
50
|
+
return c.json(await Assistants.patch(assistantId, payload));
|
|
51
|
+
});
|
|
52
|
+
const RunnableConfigSchema = z.object({
|
|
53
|
+
tags: z.array(z.string()).optional(),
|
|
54
|
+
metadata: z.record(z.unknown()).optional(),
|
|
55
|
+
run_name: z.string().optional(),
|
|
56
|
+
max_concurrency: z.number().optional(),
|
|
57
|
+
recursion_limit: z.number().optional(),
|
|
58
|
+
configurable: z.record(z.unknown()).optional(),
|
|
59
|
+
run_id: z.string().uuid().optional(),
|
|
60
|
+
});
|
|
61
|
+
// Translate a user-facing (snake_case) run configuration into the camelCase
// shape expected by LangChain runnables. Returns an empty object when no
// config was supplied; missing fields pass through as `undefined`.
const getRunnableConfig = (userConfig) => {
  if (!userConfig) return {};
  const {
    configurable,
    tags,
    metadata,
    run_name: runName,
    max_concurrency: maxConcurrency,
    recursion_limit: recursionLimit,
    run_id: runId,
  } = userConfig;
  return {
    configurable,
    tags,
    metadata,
    runName,
    maxConcurrency,
    recursionLimit,
    runId,
  };
};
|
|
74
|
+
api.get("/assistants/:assistant_id/graph", zValidator("query", z.object({ xray: schemas.coercedBoolean.optional() })), async (c) => {
|
|
75
|
+
// Get Assistant Graph
|
|
76
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
77
|
+
const assistant = await Assistants.get(assistantId);
|
|
78
|
+
const { xray } = c.req.valid("query");
|
|
79
|
+
const graph = getGraph(assistant.graph_id);
|
|
80
|
+
return c.json(graph.getGraph({ ...getRunnableConfig(assistant.config), xray }).toJSON());
|
|
81
|
+
});
|
|
82
|
+
api.get("/assistants/:assistant_id/schemas", async (c) => {
|
|
83
|
+
// Get Assistant Schemas
|
|
84
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
85
|
+
const assistant = await Assistants.get(assistantId);
|
|
86
|
+
const graphSchema = await getGraphSchema(assistant.graph_id);
|
|
87
|
+
const rootGraphId = Object.keys(graphSchema).find((i) => !i.includes("|"));
|
|
88
|
+
if (!rootGraphId)
|
|
89
|
+
throw new Error("Failed to find root graph");
|
|
90
|
+
const rootGraphSchema = graphSchema[rootGraphId];
|
|
91
|
+
return c.json({
|
|
92
|
+
graph_id: assistant.graph_id,
|
|
93
|
+
input_schema: rootGraphSchema.input,
|
|
94
|
+
output_schema: rootGraphSchema.output,
|
|
95
|
+
state_schema: rootGraphSchema.state,
|
|
96
|
+
config_schema: rootGraphSchema.config,
|
|
97
|
+
});
|
|
98
|
+
});
|
|
99
|
+
api.get("/assistants/:assistant_id/subgraphs/:namespace?", zValidator("param", z.object({ assistant_id: z.string(), namespace: z.string().optional() })), zValidator("query", z.object({ recurse: schemas.coercedBoolean.optional() })), async (c) => {
|
|
100
|
+
// Get Assistant Subgraphs
|
|
101
|
+
const { assistant_id, namespace } = c.req.valid("param");
|
|
102
|
+
const { recurse } = c.req.valid("query");
|
|
103
|
+
const assistantId = getAssistantId(assistant_id);
|
|
104
|
+
const assistant = await Assistants.get(assistantId);
|
|
105
|
+
const graph = getGraph(assistant.graph_id);
|
|
106
|
+
const graphSchema = await getGraphSchema(assistant.graph_id);
|
|
107
|
+
const rootGraphId = Object.keys(graphSchema).find((i) => !i.includes("|"));
|
|
108
|
+
if (!rootGraphId) {
|
|
109
|
+
throw new HTTPException(404, { message: "Failed to find root graph" });
|
|
110
|
+
}
|
|
111
|
+
const result = [];
|
|
112
|
+
const subgraphsGenerator = "getSubgraphsAsync" in graph
|
|
113
|
+
? graph.getSubgraphsAsync.bind(graph)
|
|
114
|
+
: // @ts-expect-error older versions of langgraph don't have getSubgraphsAsync
|
|
115
|
+
graph.getSubgraphs.bind(graph);
|
|
116
|
+
for await (const [ns] of subgraphsGenerator(namespace, recurse)) {
|
|
117
|
+
result.push([
|
|
118
|
+
ns,
|
|
119
|
+
graphSchema[`${rootGraphId}|${ns}`] || graphSchema[rootGraphId],
|
|
120
|
+
]);
|
|
121
|
+
}
|
|
122
|
+
return c.json(Object.fromEntries(result));
|
|
123
|
+
});
|
|
124
|
+
api.post("/assistants/:assistant_id/latest", zValidator("json", schemas.AssistantLatestVersion), async (c) => {
|
|
125
|
+
// Set Latest Assistant Version
|
|
126
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
127
|
+
const { version } = c.req.valid("json");
|
|
128
|
+
return c.json(await Assistants.setLatest(assistantId, version));
|
|
129
|
+
});
|
|
130
|
+
api.post("/assistants/:assistant_id/versions", zValidator("json", z.object({
|
|
131
|
+
limit: z.number().min(1).max(1000).optional().default(10),
|
|
132
|
+
offset: z.number().min(0).optional().default(0),
|
|
133
|
+
metadata: z.record(z.unknown()).optional(),
|
|
134
|
+
})), async (c) => {
|
|
135
|
+
// Get Assistant Versions
|
|
136
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
137
|
+
const { limit, offset, metadata } = c.req.valid("json");
|
|
138
|
+
return c.json(await Assistants.getVersions(assistantId, {
|
|
139
|
+
limit,
|
|
140
|
+
offset,
|
|
141
|
+
metadata,
|
|
142
|
+
}));
|
|
143
|
+
});
|
|
144
|
+
export default api;
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { HTTPException } from "hono/http-exception";
|
|
3
|
+
import { streamSSE } from "hono/streaming";
|
|
4
|
+
import { getAssistantId } from "../graph/load.mjs";
|
|
5
|
+
import { zValidator } from "@hono/zod-validator";
|
|
6
|
+
import * as schemas from "../schemas.mjs";
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { Runs, Threads } from "../storage/ops.mjs";
|
|
9
|
+
import { serialiseAsDict } from "../utils/serde.mjs";
|
|
10
|
+
import { getDisconnectAbortSignal, jsonExtra, waitKeepAlive, } from "../utils/hono.mjs";
|
|
11
|
+
import { logger } from "../logging.mjs";
|
|
12
|
+
import { v4 as uuid4 } from "uuid";
|
|
13
|
+
const api = new Hono();
|
|
14
|
+
const createValidRun = async (threadId, payload) => {
|
|
15
|
+
const { assistant_id: assistantId, ...run } = payload;
|
|
16
|
+
const runId = uuid4();
|
|
17
|
+
const streamMode = Array.isArray(payload.stream_mode)
|
|
18
|
+
? payload.stream_mode
|
|
19
|
+
: payload.stream_mode != null
|
|
20
|
+
? [payload.stream_mode]
|
|
21
|
+
: [];
|
|
22
|
+
if (streamMode.length === 0)
|
|
23
|
+
streamMode.push("values");
|
|
24
|
+
const multitaskStrategy = payload.multitask_strategy ?? "reject";
|
|
25
|
+
const preventInsertInInflight = multitaskStrategy === "reject";
|
|
26
|
+
const config = { ...run.config };
|
|
27
|
+
if (run.checkpoint_id) {
|
|
28
|
+
config.configurable ??= {};
|
|
29
|
+
config.configurable.checkpoint_id = run.checkpoint_id;
|
|
30
|
+
}
|
|
31
|
+
if (run.checkpoint) {
|
|
32
|
+
config.configurable ??= {};
|
|
33
|
+
Object.assign(config.configurable, run.checkpoint);
|
|
34
|
+
}
|
|
35
|
+
let feedbackKeys = run.feedback_keys != null
|
|
36
|
+
? Array.isArray(run.feedback_keys)
|
|
37
|
+
? run.feedback_keys
|
|
38
|
+
: [run.feedback_keys]
|
|
39
|
+
: undefined;
|
|
40
|
+
if (!feedbackKeys?.length)
|
|
41
|
+
feedbackKeys = undefined;
|
|
42
|
+
const [first, ...inflight] = await Runs.put(runId, getAssistantId(assistantId), {
|
|
43
|
+
input: run.input,
|
|
44
|
+
command: run.command,
|
|
45
|
+
config,
|
|
46
|
+
stream_mode: streamMode,
|
|
47
|
+
interrupt_before: run.interrupt_before,
|
|
48
|
+
interrupt_after: run.interrupt_after,
|
|
49
|
+
webhook: run.webhook,
|
|
50
|
+
feedback_keys: feedbackKeys,
|
|
51
|
+
temporary: threadId == null && (run.on_completion ?? "delete") === "delete",
|
|
52
|
+
subgraphs: run.stream_subgraphs ?? false,
|
|
53
|
+
}, {
|
|
54
|
+
threadId,
|
|
55
|
+
metadata: run.metadata,
|
|
56
|
+
status: "pending",
|
|
57
|
+
multitaskStrategy,
|
|
58
|
+
preventInsertInInflight,
|
|
59
|
+
afterSeconds: payload.after_seconds,
|
|
60
|
+
ifNotExists: payload.if_not_exists,
|
|
61
|
+
});
|
|
62
|
+
if (first?.run_id === runId) {
|
|
63
|
+
logger.info("Created run", { run_id: runId, thread_id: threadId });
|
|
64
|
+
if ((multitaskStrategy === "interrupt" || multitaskStrategy === "rollback") &&
|
|
65
|
+
inflight.length > 0) {
|
|
66
|
+
try {
|
|
67
|
+
await Runs.cancel(threadId, inflight.map((run) => run.run_id), { action: multitaskStrategy });
|
|
68
|
+
}
|
|
69
|
+
catch (error) {
|
|
70
|
+
logger.warn("Failed to cancel inflight runs, might be already cancelled", {
|
|
71
|
+
error,
|
|
72
|
+
run_ids: inflight.map((run) => run.run_id),
|
|
73
|
+
thread_id: threadId,
|
|
74
|
+
});
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
return first;
|
|
78
|
+
}
|
|
79
|
+
else if (multitaskStrategy === "reject") {
|
|
80
|
+
throw new HTTPException(422, {
|
|
81
|
+
message: "Thread is already running a task. Wait for it to finish or choose a different multitask strategy.",
|
|
82
|
+
});
|
|
83
|
+
}
|
|
84
|
+
throw new HTTPException(500, {
|
|
85
|
+
message: "Unreachable state when creating run",
|
|
86
|
+
});
|
|
87
|
+
};
|
|
88
|
+
api.post("/runs/crons", zValidator("json", schemas.CronCreate), async () => {
|
|
89
|
+
// Create Thread Cron
|
|
90
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
91
|
+
});
|
|
92
|
+
api.post("/runs/crons/search", zValidator("json", schemas.CronSearch), async () => {
|
|
93
|
+
// Search Crons
|
|
94
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
95
|
+
});
|
|
96
|
+
api.delete("/runs/crons/:cron_id", zValidator("param", z.object({ cron_id: z.string().uuid() })), async () => {
|
|
97
|
+
// Delete Cron
|
|
98
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
99
|
+
});
|
|
100
|
+
api.post("/threads/:thread_id/runs/crons", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.CronCreate), async () => {
|
|
101
|
+
// Create Thread Cron
|
|
102
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
103
|
+
});
|
|
104
|
+
api.post("/runs/stream", zValidator("json", schemas.RunCreate), async (c) => {
|
|
105
|
+
// Stream Run
|
|
106
|
+
const payload = c.req.valid("json");
|
|
107
|
+
const run = await createValidRun(undefined, payload);
|
|
108
|
+
return streamSSE(c, async (stream) => {
|
|
109
|
+
const cancelOnDisconnect = payload.on_disconnect === "cancel"
|
|
110
|
+
? getDisconnectAbortSignal(c, stream)
|
|
111
|
+
: undefined;
|
|
112
|
+
try {
|
|
113
|
+
for await (const { event, data } of Runs.Stream.join(run.run_id, undefined, { cancelOnDisconnect })) {
|
|
114
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
catch (error) {
|
|
118
|
+
logger.error("Error streaming run", { error });
|
|
119
|
+
}
|
|
120
|
+
});
|
|
121
|
+
});
|
|
122
|
+
api.post("/runs/wait", zValidator("json", schemas.RunCreate), async (c) => {
|
|
123
|
+
// Wait Run
|
|
124
|
+
const payload = c.req.valid("json");
|
|
125
|
+
const run = await createValidRun(undefined, payload);
|
|
126
|
+
return waitKeepAlive(c, Runs.wait(run.run_id, undefined));
|
|
127
|
+
});
|
|
128
|
+
api.post("/runs", zValidator("json", schemas.RunCreate), async (c) => {
|
|
129
|
+
// Create Stateless Run
|
|
130
|
+
const payload = c.req.valid("json");
|
|
131
|
+
const run = await createValidRun(undefined, payload);
|
|
132
|
+
return jsonExtra(c, run);
|
|
133
|
+
});
|
|
134
|
+
api.post("/runs/batch", zValidator("json", schemas.RunBatchCreate), async (c) => {
|
|
135
|
+
// Batch Runs
|
|
136
|
+
const payload = c.req.valid("json");
|
|
137
|
+
const runs = await Promise.all(payload.map((run) => createValidRun(undefined, run)));
|
|
138
|
+
return jsonExtra(c, runs);
|
|
139
|
+
});
|
|
140
|
+
api.get("/threads/:thread_id/runs", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({
|
|
141
|
+
limit: z.coerce.number().nullish(),
|
|
142
|
+
offset: z.coerce.number().nullish(),
|
|
143
|
+
status: z.string().nullish(),
|
|
144
|
+
metadata: z.record(z.string(), z.unknown()).nullish(),
|
|
145
|
+
})), async (c) => {
|
|
146
|
+
// List runs
|
|
147
|
+
const { thread_id } = c.req.valid("param");
|
|
148
|
+
const { limit, offset, status, metadata } = c.req.valid("query");
|
|
149
|
+
const [runs] = await Promise.all([
|
|
150
|
+
Runs.search(thread_id, {
|
|
151
|
+
limit,
|
|
152
|
+
offset,
|
|
153
|
+
status,
|
|
154
|
+
metadata,
|
|
155
|
+
}),
|
|
156
|
+
Threads.get(thread_id),
|
|
157
|
+
]);
|
|
158
|
+
return jsonExtra(c, runs);
|
|
159
|
+
});
|
|
160
|
+
api.post("/threads/:thread_id/runs", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
161
|
+
// Create Run
|
|
162
|
+
const { thread_id } = c.req.valid("param");
|
|
163
|
+
const payload = c.req.valid("json");
|
|
164
|
+
const run = await createValidRun(thread_id, payload);
|
|
165
|
+
return jsonExtra(c, run);
|
|
166
|
+
});
|
|
167
|
+
api.post("/threads/:thread_id/runs/stream", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
168
|
+
// Stream Run
|
|
169
|
+
const { thread_id } = c.req.valid("param");
|
|
170
|
+
const payload = c.req.valid("json");
|
|
171
|
+
const run = await createValidRun(thread_id, payload);
|
|
172
|
+
return streamSSE(c, async (stream) => {
|
|
173
|
+
const cancelOnDisconnect = payload.on_disconnect === "cancel"
|
|
174
|
+
? getDisconnectAbortSignal(c, stream)
|
|
175
|
+
: undefined;
|
|
176
|
+
try {
|
|
177
|
+
for await (const { event, data } of Runs.Stream.join(run.run_id, thread_id, { cancelOnDisconnect })) {
|
|
178
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
catch (error) {
|
|
182
|
+
logger.error("Error streaming run", { error });
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
});
|
|
186
|
+
api.post("/threads/:thread_id/runs/wait", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
187
|
+
// Wait Run
|
|
188
|
+
const { thread_id } = c.req.valid("param");
|
|
189
|
+
const payload = c.req.valid("json");
|
|
190
|
+
const run = await createValidRun(thread_id, payload);
|
|
191
|
+
return waitKeepAlive(c, Runs.join(run.run_id, thread_id));
|
|
192
|
+
});
|
|
193
|
+
api.get("/threads/:thread_id/runs/:run_id", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
194
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
195
|
+
const [run] = await Promise.all([
|
|
196
|
+
Runs.get(run_id, thread_id),
|
|
197
|
+
Threads.get(thread_id),
|
|
198
|
+
]);
|
|
199
|
+
return jsonExtra(c, run);
|
|
200
|
+
});
|
|
201
|
+
api.delete("/threads/:thread_id/runs/:run_id", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
202
|
+
// Delete Run
|
|
203
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
204
|
+
await Runs.delete(run_id, thread_id);
|
|
205
|
+
return c.body(null, 204);
|
|
206
|
+
});
|
|
207
|
+
api.get("/threads/:thread_id/runs/:run_id/join", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
208
|
+
// Join Run Http
|
|
209
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
210
|
+
return jsonExtra(c, await Runs.join(run_id, thread_id));
|
|
211
|
+
});
|
|
212
|
+
api.get("/threads/:thread_id/runs/:run_id/stream", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), zValidator("query", z.object({ cancel_on_disconnect: schemas.coercedBoolean.optional() })), async (c) => {
|
|
213
|
+
// Stream Run Http
|
|
214
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
215
|
+
const { cancel_on_disconnect } = c.req.valid("query");
|
|
216
|
+
return streamSSE(c, async (stream) => {
|
|
217
|
+
const signal = cancel_on_disconnect
|
|
218
|
+
? getDisconnectAbortSignal(c, stream)
|
|
219
|
+
: undefined;
|
|
220
|
+
for await (const { event, data } of Runs.Stream.join(run_id, thread_id, {
|
|
221
|
+
cancelOnDisconnect: signal,
|
|
222
|
+
})) {
|
|
223
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
224
|
+
}
|
|
225
|
+
});
|
|
226
|
+
});
|
|
227
|
+
api.post("/threads/:thread_id/runs/:run_id/cancel", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), zValidator("query", z.object({
|
|
228
|
+
wait: z.coerce.boolean().optional().default(false),
|
|
229
|
+
action: z.enum(["interrupt", "rollback"]).optional().default("interrupt"),
|
|
230
|
+
})), async (c) => {
|
|
231
|
+
// Cancel Run Http
|
|
232
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
233
|
+
const { wait, action } = c.req.valid("query");
|
|
234
|
+
await Runs.cancel(thread_id, [run_id], { action });
|
|
235
|
+
if (wait)
|
|
236
|
+
await Runs.join(run_id, thread_id);
|
|
237
|
+
return c.body(null, wait ? 204 : 202);
|
|
238
|
+
});
|
|
239
|
+
export default api;
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { zValidator } from "@hono/zod-validator";
|
|
3
|
+
import * as schemas from "../schemas.mjs";
|
|
4
|
+
import { HTTPException } from "hono/http-exception";
|
|
5
|
+
import { store as storageStore } from "../storage/store.mjs";
|
|
6
|
+
const api = new Hono();
|
|
7
|
+
// Validate a store namespace (array of path labels).
// Throws 400 when the namespace is missing or empty, and 422 when any
// label is empty or contains a period (periods are the path separator).
const validateNamespace = (namespace) => {
  if (!namespace?.length) {
    throw new HTTPException(400, { message: "Namespace is required" });
  }
  const invalid = namespace.some((label) => !label || label.includes("."));
  if (invalid) {
    throw new HTTPException(422, {
      message:
        "Namespace labels cannot be empty or contain periods. Received: " +
        namespace.join("."),
    });
  }
};
|
|
20
|
+
// Convert an internal store item to the public API shape by renaming the
// camelCase timestamp fields to snake_case. Returns null for null/undefined
// input; never mutates the original item.
const mapItemsToApi = (item) => {
  if (item == null) return null;
  const { createdAt, updatedAt, ...rest } = item;
  return {
    ...rest,
    created_at: createdAt,
    updated_at: updatedAt,
  };
};
|
|
30
|
+
api.post("/store/namespaces", zValidator("json", schemas.StoreListNamespaces), async (c) => {
|
|
31
|
+
// List Namespaces
|
|
32
|
+
const payload = c.req.valid("json");
|
|
33
|
+
if (payload.prefix)
|
|
34
|
+
validateNamespace(payload.prefix);
|
|
35
|
+
if (payload.suffix)
|
|
36
|
+
validateNamespace(payload.suffix);
|
|
37
|
+
return c.json({
|
|
38
|
+
namespaces: await storageStore.listNamespaces({
|
|
39
|
+
limit: payload.limit ?? 100,
|
|
40
|
+
offset: payload.offset ?? 0,
|
|
41
|
+
prefix: payload.prefix,
|
|
42
|
+
suffix: payload.suffix,
|
|
43
|
+
maxDepth: payload.max_depth,
|
|
44
|
+
}),
|
|
45
|
+
});
|
|
46
|
+
});
|
|
47
|
+
api.post("/store/items/search", zValidator("json", schemas.StoreSearchItems), async (c) => {
|
|
48
|
+
// Search Items
|
|
49
|
+
const payload = c.req.valid("json");
|
|
50
|
+
if (payload.namespace_prefix)
|
|
51
|
+
validateNamespace(payload.namespace_prefix);
|
|
52
|
+
const items = await storageStore.search(payload.namespace_prefix, {
|
|
53
|
+
filter: payload.filter,
|
|
54
|
+
limit: payload.limit ?? 10,
|
|
55
|
+
offset: payload.offset ?? 0,
|
|
56
|
+
query: payload.query,
|
|
57
|
+
});
|
|
58
|
+
return c.json({ items: items.map(mapItemsToApi) });
|
|
59
|
+
});
|
|
60
|
+
api.put("/store/items", zValidator("json", schemas.StorePutItem), async (c) => {
|
|
61
|
+
// Put Item
|
|
62
|
+
const payload = c.req.valid("json");
|
|
63
|
+
if (payload.namespace)
|
|
64
|
+
validateNamespace(payload.namespace);
|
|
65
|
+
await storageStore.put(payload.namespace, payload.key, payload.value);
|
|
66
|
+
return c.body(null, 204);
|
|
67
|
+
});
|
|
68
|
+
api.delete("/store/items", zValidator("json", schemas.StoreDeleteItem), async (c) => {
|
|
69
|
+
// Delete Item
|
|
70
|
+
const payload = c.req.valid("json");
|
|
71
|
+
if (payload.namespace)
|
|
72
|
+
validateNamespace(payload.namespace);
|
|
73
|
+
await storageStore.delete(payload.namespace ?? [], payload.key);
|
|
74
|
+
return c.body(null, 204);
|
|
75
|
+
});
|
|
76
|
+
api.get("/store/items", zValidator("query", schemas.StoreGetItem), async (c) => {
|
|
77
|
+
// Get Item
|
|
78
|
+
const payload = c.req.valid("query");
|
|
79
|
+
const key = payload.key;
|
|
80
|
+
const namespace = payload.namespace;
|
|
81
|
+
return c.json(mapItemsToApi(await storageStore.get(namespace, key)));
|
|
82
|
+
});
|
|
83
|
+
export default api;
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import { zValidator } from "@hono/zod-validator";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { v4 as uuid4 } from "uuid";
|
|
4
|
+
import * as schemas from "../schemas.mjs";
|
|
5
|
+
import { Threads } from "../storage/ops.mjs";
|
|
6
|
+
import { z } from "zod";
|
|
7
|
+
import { stateSnapshotToThreadState } from "../state.mjs";
|
|
8
|
+
import { jsonExtra } from "../utils/hono.mjs";
|
|
9
|
+
const api = new Hono();
|
|
10
|
+
// Threads Routes
|
|
11
|
+
api.post("/threads", zValidator("json", schemas.ThreadCreate), async (c) => {
|
|
12
|
+
// Create Thread
|
|
13
|
+
const payload = c.req.valid("json");
|
|
14
|
+
const thread = await Threads.put(payload.thread_id || uuid4(), {
|
|
15
|
+
metadata: payload.metadata,
|
|
16
|
+
if_exists: payload.if_exists ?? "raise",
|
|
17
|
+
});
|
|
18
|
+
return jsonExtra(c, thread);
|
|
19
|
+
});
|
|
20
|
+
api.post("/threads/search", zValidator("json", schemas.ThreadSearchRequest), async (c) => {
|
|
21
|
+
// Search Threads
|
|
22
|
+
const payload = c.req.valid("json");
|
|
23
|
+
const result = [];
|
|
24
|
+
for await (const item of Threads.search({
|
|
25
|
+
status: payload.status,
|
|
26
|
+
values: payload.values,
|
|
27
|
+
metadata: payload.metadata,
|
|
28
|
+
limit: payload.limit ?? 10,
|
|
29
|
+
offset: payload.offset ?? 0,
|
|
30
|
+
})) {
|
|
31
|
+
result.push({
|
|
32
|
+
...item,
|
|
33
|
+
created_at: item.created_at.toISOString(),
|
|
34
|
+
updated_at: item.updated_at.toISOString(),
|
|
35
|
+
});
|
|
36
|
+
}
|
|
37
|
+
return jsonExtra(c, result);
|
|
38
|
+
});
|
|
39
|
+
api.get("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
|
|
40
|
+
// Get Latest Thread State
|
|
41
|
+
const { thread_id } = c.req.valid("param");
|
|
42
|
+
const { subgraphs } = c.req.valid("query");
|
|
43
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id } }, { subgraphs }));
|
|
44
|
+
return jsonExtra(c, state);
|
|
45
|
+
});
|
|
46
|
+
api.post("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
47
|
+
values: z.union([
|
|
48
|
+
z.record(z.string(), z.unknown()),
|
|
49
|
+
z.array(z.record(z.string(), z.unknown())),
|
|
50
|
+
]),
|
|
51
|
+
as_node: z.string().optional(),
|
|
52
|
+
checkpoint_id: z.string().optional(),
|
|
53
|
+
checkpoint: z.record(z.string(), z.unknown()).optional(),
|
|
54
|
+
})), async (c) => {
|
|
55
|
+
// Update Thread State
|
|
56
|
+
const { thread_id } = c.req.valid("param");
|
|
57
|
+
const payload = c.req.valid("json");
|
|
58
|
+
const config = { configurable: { thread_id } };
|
|
59
|
+
if (payload.checkpoint_id) {
|
|
60
|
+
config.configurable ??= {};
|
|
61
|
+
config.configurable.checkpoint_id = payload.checkpoint_id;
|
|
62
|
+
}
|
|
63
|
+
if (payload.checkpoint) {
|
|
64
|
+
config.configurable ??= {};
|
|
65
|
+
Object.assign(config.configurable, payload.checkpoint);
|
|
66
|
+
}
|
|
67
|
+
const inserted = await Threads.State.post(config, payload.values, payload.as_node);
|
|
68
|
+
return jsonExtra(c, inserted);
|
|
69
|
+
});
|
|
70
|
+
api.get("/threads/:thread_id/state/:checkpoint_id", zValidator("param", z.object({ thread_id: z.string().uuid(), checkpoint_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
|
|
71
|
+
// Get Thread State At Checkpoint
|
|
72
|
+
const { thread_id, checkpoint_id } = c.req.valid("param");
|
|
73
|
+
const { subgraphs } = c.req.valid("query");
|
|
74
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, checkpoint_id } }, { subgraphs }));
|
|
75
|
+
return jsonExtra(c, state);
|
|
76
|
+
});
|
|
77
|
+
api.post("/threads/:thread_id/state/checkpoint", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
78
|
+
subgraphs: schemas.coercedBoolean.optional(),
|
|
79
|
+
checkpoint: z.object({
|
|
80
|
+
checkpoint_id: z.string().uuid().optional(),
|
|
81
|
+
checkpoint_ns: z.string().optional(),
|
|
82
|
+
checkpoint_map: z.record(z.string(), z.unknown()).optional(),
|
|
83
|
+
}),
|
|
84
|
+
})), async (c) => {
|
|
85
|
+
// Get Thread State At Checkpoint Post
|
|
86
|
+
const { thread_id } = c.req.valid("param");
|
|
87
|
+
const { checkpoint, subgraphs } = c.req.valid("json");
|
|
88
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, ...checkpoint } }, { subgraphs }));
|
|
89
|
+
return jsonExtra(c, state);
|
|
90
|
+
});
|
|
91
|
+
api.get("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({
|
|
92
|
+
limit: z
|
|
93
|
+
.string()
|
|
94
|
+
.optional()
|
|
95
|
+
.default("10")
|
|
96
|
+
.transform((value) => parseInt(value, 10)),
|
|
97
|
+
before: z.string().optional(),
|
|
98
|
+
})), async (c) => {
|
|
99
|
+
// Get Thread History
|
|
100
|
+
const { thread_id } = c.req.valid("param");
|
|
101
|
+
const { limit, before } = c.req.valid("query");
|
|
102
|
+
const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "" } }, { limit, before });
|
|
103
|
+
return jsonExtra(c, states.map(stateSnapshotToThreadState));
|
|
104
|
+
});
|
|
105
|
+
api.post("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
106
|
+
limit: z.number().optional().default(10),
|
|
107
|
+
before: z.string().optional(),
|
|
108
|
+
metadata: z.record(z.string(), z.unknown()).optional(),
|
|
109
|
+
checkpoint: z
|
|
110
|
+
.object({
|
|
111
|
+
checkpoint_id: z.string().uuid().optional(),
|
|
112
|
+
checkpoint_ns: z.string().optional(),
|
|
113
|
+
checkpoint_map: z.record(z.string(), z.unknown()).optional(),
|
|
114
|
+
})
|
|
115
|
+
.optional(),
|
|
116
|
+
})), async (c) => {
|
|
117
|
+
// Get Thread History Post
|
|
118
|
+
const { thread_id } = c.req.valid("param");
|
|
119
|
+
const { limit, before, metadata, checkpoint } = c.req.valid("json");
|
|
120
|
+
const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "", ...checkpoint } }, { limit, before, metadata });
|
|
121
|
+
return jsonExtra(c, states.map(stateSnapshotToThreadState));
|
|
122
|
+
});
|
|
123
|
+
api.get("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
124
|
+
// Get Thread
|
|
125
|
+
const { thread_id } = c.req.valid("param");
|
|
126
|
+
return jsonExtra(c, await Threads.get(thread_id));
|
|
127
|
+
});
|
|
128
|
+
api.delete("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
129
|
+
// Delete Thread
|
|
130
|
+
const { thread_id } = c.req.valid("param");
|
|
131
|
+
await Threads.delete(thread_id);
|
|
132
|
+
return new Response(null, { status: 204 });
|
|
133
|
+
});
|
|
134
|
+
api.patch("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({ metadata: z.record(z.string(), z.unknown()) })), async (c) => {
|
|
135
|
+
// Patch Thread
|
|
136
|
+
const { thread_id } = c.req.valid("param");
|
|
137
|
+
const { metadata } = c.req.valid("json");
|
|
138
|
+
return jsonExtra(c, await Threads.patch(thread_id, { metadata }));
|
|
139
|
+
});
|
|
140
|
+
api.post("/threads/:thread_id/copy", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
141
|
+
// Copy Thread
|
|
142
|
+
const { thread_id } = c.req.valid("param");
|
|
143
|
+
return jsonExtra(c, await Threads.copy(thread_id));
|
|
144
|
+
});
|
|
145
|
+
export default api;
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { getDockerCapabilities } from "../docker/compose.mjs";
|
|
2
|
+
import { assembleLocalDeps, configToDocker, getBaseImage, } from "../docker/dockerfile.mjs";
|
|
3
|
+
import { getExecaOptions } from "../docker/shell.mjs";
|
|
4
|
+
import { ConfigSchema } from "../utils/config.mjs";
|
|
5
|
+
import { builder } from "./utils/builder.mjs";
|
|
6
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
7
|
+
import { $ } from "execa";
|
|
8
|
+
import * as path from "node:path";
|
|
9
|
+
import * as fs from "node:fs/promises";
|
|
10
|
+
import { logger } from "../logging.mjs";
|
|
11
|
+
/**
 * Logs the full command line of a spawned child process, then hands the
 * process back unchanged so callers can `await stream(exec`...`)` inline.
 */
const stream = (child) => {
    logger.info(`Running "${child.spawnargs.join(" ")}"`);
    return child;
};
|
|
15
|
+
// Register the `build` subcommand: renders a Dockerfile from the project
// config and builds a Docker image for the LangGraph API server.
builder
    .command("build")
    .description("Build LangGraph API server Docker image.")
    .requiredOption("-t, --tag <tag>", "Tag for the Docker image.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    .option("--no-pull", "Running the server with locally-built images. By default LangGraph will pull the latest images from the registry")
    .argument("[args...]")
    .passThroughOptions()
    .action(async (extraArgs, options) => {
        // Resolve the config file and verify Docker is usable before doing any work.
        const configFilePath = await getProjectPath(options.config);
        await getDockerCapabilities();
        const projectRoot = path.dirname(configFilePath);
        const parsedConfig = ConfigSchema.parse(JSON.parse(await fs.readFile(configFilePath, "utf-8")));
        const execaOptions = await getExecaOptions({
            cwd: projectRoot,
            stderr: "inherit",
            stdout: "inherit",
        });
        // Render the Dockerfile in memory; it is piped to `docker build` via stdin (`-f -`).
        const localDeps = await assembleLocalDeps(configFilePath, parsedConfig);
        const dockerfileInput = await configToDocker(configFilePath, parsedConfig, localDeps, {
            watch: false,
            dockerCommand: "build",
        });
        if (options.pull) {
            // `--no-pull` disables this; otherwise refresh the base image first.
            const runner = $({ ...execaOptions, input: dockerfileInput });
            await stream(runner `docker pull ${getBaseImage(parsedConfig)}`);
        }
        // Fresh execa instance so the Dockerfile stdin is available to the build.
        const runner = $({ ...execaOptions, input: dockerfileInput });
        await stream(runner `docker build -f - -t ${options.tag} ${projectRoot} ${extraArgs}`);
    });
|