@langchain/langgraph-api 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +3 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +143 -0
- package/dist/cli/entrypoint.mjs +42 -0
- package/dist/cli/spawn.d.mts +14 -0
- package/dist/cli/spawn.mjs +34 -0
- package/dist/cli/utils/ipc/client.mjs +47 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +72 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +309 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +83 -0
- package/dist/logging.mjs +100 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +93 -0
- package/dist/schemas.mjs +407 -0
- package/dist/server.mjs +74 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +127 -0
- package/dist/storage/importMap.mjs +55 -0
- package/dist/storage/ops.mjs +792 -0
- package/dist/storage/persist.mjs +78 -0
- package/dist/storage/store.mjs +41 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +56 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License Copyright (c) 2025 LangChain
|
|
2
|
+
|
|
3
|
+
Permission is hereby granted, free
|
|
4
|
+
of charge, to any person obtaining a copy of this software and associated
|
|
5
|
+
documentation files (the "Software"), to deal in the Software without
|
|
6
|
+
restriction, including without limitation the rights to use, copy, modify, merge,
|
|
7
|
+
publish, distribute, sublicense, and/or sell copies of the Software, and to
|
|
8
|
+
permit persons to whom the Software is furnished to do so, subject to the
|
|
9
|
+
following conditions:
|
|
10
|
+
|
|
11
|
+
The above copyright notice and this permission notice
|
|
12
|
+
(including the next paragraph) shall be included in all copies or substantial
|
|
13
|
+
portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
|
16
|
+
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
17
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
|
|
18
|
+
EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
|
19
|
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
20
|
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
21
|
+
THE SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { zValidator } from "@hono/zod-validator";
|
|
3
|
+
import { v4 as uuid } from "uuid";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
import { getAssistantId, getGraph, getGraphSchema } from "../graph/load.mjs";
|
|
6
|
+
import { Assistants } from "../storage/ops.mjs";
|
|
7
|
+
import * as schemas from "../schemas.mjs";
|
|
8
|
+
import { HTTPException } from "hono/http-exception";
|
|
9
|
+
const api = new Hono();
|
|
10
|
+
api.post("/assistants", zValidator("json", schemas.AssistantCreate), async (c) => {
|
|
11
|
+
// Create Assistant
|
|
12
|
+
const payload = c.req.valid("json");
|
|
13
|
+
const assistant = await Assistants.put(payload.assistant_id ?? uuid(), {
|
|
14
|
+
config: payload.config ?? {},
|
|
15
|
+
graph_id: payload.graph_id,
|
|
16
|
+
metadata: payload.metadata ?? {},
|
|
17
|
+
if_exists: payload.if_exists ?? "raise",
|
|
18
|
+
name: payload.name ?? "Untitled",
|
|
19
|
+
});
|
|
20
|
+
return c.json(assistant);
|
|
21
|
+
});
|
|
22
|
+
api.post("/assistants/search", zValidator("json", schemas.AssistantSearchRequest), async (c) => {
|
|
23
|
+
// Search Assistants
|
|
24
|
+
const payload = c.req.valid("json");
|
|
25
|
+
const result = [];
|
|
26
|
+
for await (const item of Assistants.search({
|
|
27
|
+
graph_id: payload.graph_id,
|
|
28
|
+
metadata: payload.metadata,
|
|
29
|
+
limit: payload.limit ?? 10,
|
|
30
|
+
offset: payload.offset ?? 0,
|
|
31
|
+
})) {
|
|
32
|
+
result.push(item);
|
|
33
|
+
}
|
|
34
|
+
return c.json(result);
|
|
35
|
+
});
|
|
36
|
+
api.get("/assistants/:assistant_id", async (c) => {
|
|
37
|
+
// Get Assistant
|
|
38
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
39
|
+
return c.json(await Assistants.get(assistantId));
|
|
40
|
+
});
|
|
41
|
+
api.delete("/assistants/:assistant_id", async (c) => {
|
|
42
|
+
// Delete Assistant
|
|
43
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
44
|
+
return c.json(await Assistants.delete(assistantId));
|
|
45
|
+
});
|
|
46
|
+
api.patch("/assistants/:assistant_id", zValidator("json", schemas.AssistantPatch), async (c) => {
|
|
47
|
+
// Patch Assistant
|
|
48
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
49
|
+
const payload = c.req.valid("json");
|
|
50
|
+
return c.json(await Assistants.patch(assistantId, payload));
|
|
51
|
+
});
|
|
52
|
+
const RunnableConfigSchema = z.object({
|
|
53
|
+
tags: z.array(z.string()).optional(),
|
|
54
|
+
metadata: z.record(z.unknown()).optional(),
|
|
55
|
+
run_name: z.string().optional(),
|
|
56
|
+
max_concurrency: z.number().optional(),
|
|
57
|
+
recursion_limit: z.number().optional(),
|
|
58
|
+
configurable: z.record(z.unknown()).optional(),
|
|
59
|
+
run_id: z.string().uuid().optional(),
|
|
60
|
+
});
|
|
61
|
+
// Translate a user-supplied snake_case run configuration into the camelCase
// RunnableConfig shape expected by LangChain runnables. Returns an empty
// object when no configuration was provided; unset fields stay `undefined`.
const getRunnableConfig = (userConfig) => {
  if (!userConfig) return {};
  const {
    configurable,
    tags,
    metadata,
    run_name: runName,
    max_concurrency: maxConcurrency,
    recursion_limit: recursionLimit,
    run_id: runId,
  } = userConfig;
  return {
    configurable,
    tags,
    metadata,
    runName,
    maxConcurrency,
    recursionLimit,
    runId,
  };
};
|
|
74
|
+
api.get("/assistants/:assistant_id/graph", zValidator("query", z.object({ xray: schemas.coercedBoolean.optional() })), async (c) => {
|
|
75
|
+
// Get Assistant Graph
|
|
76
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
77
|
+
const assistant = await Assistants.get(assistantId);
|
|
78
|
+
const { xray } = c.req.valid("query");
|
|
79
|
+
const graph = getGraph(assistant.graph_id);
|
|
80
|
+
return c.json(graph.getGraph({ ...getRunnableConfig(assistant.config), xray }).toJSON());
|
|
81
|
+
});
|
|
82
|
+
api.get("/assistants/:assistant_id/schemas", async (c) => {
|
|
83
|
+
// Get Assistant Schemas
|
|
84
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
85
|
+
const assistant = await Assistants.get(assistantId);
|
|
86
|
+
const graphSchema = await getGraphSchema(assistant.graph_id);
|
|
87
|
+
const rootGraphId = Object.keys(graphSchema).find((i) => !i.includes("|"));
|
|
88
|
+
if (!rootGraphId)
|
|
89
|
+
throw new Error("Failed to find root graph");
|
|
90
|
+
const rootGraphSchema = graphSchema[rootGraphId];
|
|
91
|
+
return c.json({
|
|
92
|
+
graph_id: assistant.graph_id,
|
|
93
|
+
input_schema: rootGraphSchema.input,
|
|
94
|
+
output_schema: rootGraphSchema.output,
|
|
95
|
+
state_schema: rootGraphSchema.state,
|
|
96
|
+
config_schema: rootGraphSchema.config,
|
|
97
|
+
});
|
|
98
|
+
});
|
|
99
|
+
// Get Assistant Subgraphs: list the subgraphs of the assistant's graph,
// optionally restricted to a single namespace and optionally recursing into
// nested subgraphs. Responds with a { namespace: schema } mapping.
api.get("/assistants/:assistant_id/subgraphs/:namespace?", zValidator("param", z.object({ assistant_id: z.string(), namespace: z.string().optional() })), zValidator("query", z.object({ recurse: schemas.coercedBoolean.optional() })), async (c) => {
    // Get Assistant Subgraphs
    const { assistant_id, namespace } = c.req.valid("param");
    const { recurse } = c.req.valid("query");
    const assistantId = getAssistantId(assistant_id);
    const assistant = await Assistants.get(assistantId);
    const graph = getGraph(assistant.graph_id);
    const graphSchema = await getGraphSchema(assistant.graph_id);
    // Schema keys are "<rootId>" for the root graph and "<rootId>|<ns>" for
    // subgraphs, so the root is the only key without a "|" separator.
    const rootGraphId = Object.keys(graphSchema).find((i) => !i.includes("|"));
    if (!rootGraphId) {
        throw new HTTPException(404, { message: "Failed to find root graph" });
    }
    const result = [];
    // Prefer the async generator when the installed langgraph provides it.
    const subgraphsGenerator = "getSubgraphsAsync" in graph
        ? graph.getSubgraphsAsync.bind(graph)
        : // @ts-expect-error older versions of langgraph don't have getSubgraphsAsync
            graph.getSubgraphs.bind(graph);
    for await (const [ns] of subgraphsGenerator(namespace, recurse)) {
        result.push([
            ns,
            // Fall back to the root schema when no namespaced schema was parsed.
            graphSchema[`${rootGraphId}|${ns}`] || graphSchema[rootGraphId],
        ]);
    }
    return c.json(Object.fromEntries(result));
});
|
|
124
|
+
api.post("/assistants/:assistant_id/latest", zValidator("json", schemas.AssistantLatestVersion), async (c) => {
|
|
125
|
+
// Set Latest Assistant Version
|
|
126
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
127
|
+
const { version } = c.req.valid("json");
|
|
128
|
+
return c.json(await Assistants.setLatest(assistantId, version));
|
|
129
|
+
});
|
|
130
|
+
api.post("/assistants/:assistant_id/versions", zValidator("json", z.object({
|
|
131
|
+
limit: z.number().min(1).max(1000).optional().default(10),
|
|
132
|
+
offset: z.number().min(0).optional().default(0),
|
|
133
|
+
metadata: z.record(z.unknown()).optional(),
|
|
134
|
+
})), async (c) => {
|
|
135
|
+
// Get Assistant Versions
|
|
136
|
+
const assistantId = getAssistantId(c.req.param("assistant_id"));
|
|
137
|
+
const { limit, offset, metadata } = c.req.valid("json");
|
|
138
|
+
return c.json(await Assistants.getVersions(assistantId, {
|
|
139
|
+
limit,
|
|
140
|
+
offset,
|
|
141
|
+
metadata,
|
|
142
|
+
}));
|
|
143
|
+
});
|
|
144
|
+
export default api;
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { HTTPException } from "hono/http-exception";
|
|
3
|
+
import { streamSSE } from "hono/streaming";
|
|
4
|
+
import { getAssistantId } from "../graph/load.mjs";
|
|
5
|
+
import { zValidator } from "@hono/zod-validator";
|
|
6
|
+
import * as schemas from "../schemas.mjs";
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { Runs, Threads } from "../storage/ops.mjs";
|
|
9
|
+
import { serialiseAsDict } from "../utils/serde.mjs";
|
|
10
|
+
import { getDisconnectAbortSignal, jsonExtra, waitKeepAlive, } from "../utils/hono.mjs";
|
|
11
|
+
import { logError, logger } from "../logging.mjs";
|
|
12
|
+
import { v4 as uuid4 } from "uuid";
|
|
13
|
+
const api = new Hono();
|
|
14
|
+
// Validate and persist a new run for `threadId` (or a stateless run when
// threadId is null/undefined). Applies the thread's multitask strategy:
// - "reject" (default): refuse to enqueue while another run is inflight (422);
// - "interrupt"/"rollback": enqueue and cancel the inflight runs.
// Returns the newly created run record.
const createValidRun = async (threadId, payload) => {
    const { assistant_id: assistantId, ...run } = payload;
    const runId = uuid4();
    // Normalize stream_mode to a non-empty array; default to ["values"].
    const streamMode = Array.isArray(payload.stream_mode)
        ? payload.stream_mode
        : payload.stream_mode != null
            ? [payload.stream_mode]
            : [];
    if (streamMode.length === 0)
        streamMode.push("values");
    const multitaskStrategy = payload.multitask_strategy ?? "reject";
    // "reject" means Runs.put must not insert when another run is inflight.
    const preventInsertInInflight = multitaskStrategy === "reject";
    // Fold checkpoint selectors into config.configurable without mutating
    // the caller's payload.config object itself.
    const config = { ...run.config };
    if (run.checkpoint_id) {
        config.configurable ??= {};
        config.configurable.checkpoint_id = run.checkpoint_id;
    }
    if (run.checkpoint) {
        config.configurable ??= {};
        Object.assign(config.configurable, run.checkpoint);
    }
    // Normalize feedback_keys to an array, collapsing empty to undefined.
    let feedbackKeys = run.feedback_keys != null
        ? Array.isArray(run.feedback_keys)
            ? run.feedback_keys
            : [run.feedback_keys]
        : undefined;
    if (!feedbackKeys?.length)
        feedbackKeys = undefined;
    // Runs.put returns [insertedOrExisting, ...inflightRuns].
    const [first, ...inflight] = await Runs.put(runId, getAssistantId(assistantId), {
        input: run.input,
        command: run.command,
        config,
        stream_mode: streamMode,
        interrupt_before: run.interrupt_before,
        interrupt_after: run.interrupt_after,
        webhook: run.webhook,
        feedback_keys: feedbackKeys,
        // Stateless runs are deleted after completion unless told otherwise.
        temporary: threadId == null && (run.on_completion ?? "delete") === "delete",
        subgraphs: run.stream_subgraphs ?? false,
    }, {
        threadId,
        metadata: run.metadata,
        status: "pending",
        multitaskStrategy,
        preventInsertInInflight,
        afterSeconds: payload.after_seconds,
        ifNotExists: payload.if_not_exists,
    });
    // first.run_id === runId means our run was actually inserted.
    if (first?.run_id === runId) {
        logger.info("Created run", { run_id: runId, thread_id: threadId });
        if ((multitaskStrategy === "interrupt" || multitaskStrategy === "rollback") &&
            inflight.length > 0) {
            try {
                await Runs.cancel(threadId, inflight.map((run) => run.run_id), { action: multitaskStrategy });
            }
            catch (error) {
                // Best-effort: inflight runs may already be finished/cancelled.
                logger.warn("Failed to cancel inflight runs, might be already cancelled", {
                    error,
                    run_ids: inflight.map((run) => run.run_id),
                    thread_id: threadId,
                });
            }
        }
        return first;
    }
    else if (multitaskStrategy === "reject") {
        // Insert was suppressed because another run is already inflight.
        throw new HTTPException(422, {
            message: "Thread is already running a task. Wait for it to finish or choose a different multitask strategy.",
        });
    }
    // Should not happen: non-reject strategies always insert the run.
    throw new HTTPException(500, {
        message: "Unreachable state when creating run",
    });
};
|
|
88
|
+
api.post("/runs/crons", zValidator("json", schemas.CronCreate), async () => {
|
|
89
|
+
// Create Thread Cron
|
|
90
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
91
|
+
});
|
|
92
|
+
api.post("/runs/crons/search", zValidator("json", schemas.CronSearch), async () => {
|
|
93
|
+
// Search Crons
|
|
94
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
95
|
+
});
|
|
96
|
+
api.delete("/runs/crons/:cron_id", zValidator("param", z.object({ cron_id: z.string().uuid() })), async () => {
|
|
97
|
+
// Delete Cron
|
|
98
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
99
|
+
});
|
|
100
|
+
api.post("/threads/:thread_id/runs/crons", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.CronCreate), async () => {
|
|
101
|
+
// Create Thread Cron
|
|
102
|
+
throw new HTTPException(500, { message: "Not implemented" });
|
|
103
|
+
});
|
|
104
|
+
api.post("/runs/stream", zValidator("json", schemas.RunCreate), async (c) => {
|
|
105
|
+
// Stream Stateless Run
|
|
106
|
+
const payload = c.req.valid("json");
|
|
107
|
+
const run = await createValidRun(undefined, payload);
|
|
108
|
+
return streamSSE(c, async (stream) => {
|
|
109
|
+
const cancelOnDisconnect = payload.on_disconnect === "cancel"
|
|
110
|
+
? getDisconnectAbortSignal(c, stream)
|
|
111
|
+
: undefined;
|
|
112
|
+
try {
|
|
113
|
+
for await (const { event, data } of Runs.Stream.join(run.run_id, undefined, { cancelOnDisconnect, ignore404: true })) {
|
|
114
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
catch (error) {
|
|
118
|
+
logError(error, { prefix: "Error streaming run" });
|
|
119
|
+
}
|
|
120
|
+
});
|
|
121
|
+
});
|
|
122
|
+
api.post("/runs/wait", zValidator("json", schemas.RunCreate), async (c) => {
|
|
123
|
+
// Wait Stateless Run
|
|
124
|
+
const payload = c.req.valid("json");
|
|
125
|
+
const run = await createValidRun(undefined, payload);
|
|
126
|
+
return waitKeepAlive(c, Runs.wait(run.run_id, undefined));
|
|
127
|
+
});
|
|
128
|
+
api.post("/runs", zValidator("json", schemas.RunCreate), async (c) => {
|
|
129
|
+
// Create Stateless Run
|
|
130
|
+
const payload = c.req.valid("json");
|
|
131
|
+
const run = await createValidRun(undefined, payload);
|
|
132
|
+
return jsonExtra(c, run);
|
|
133
|
+
});
|
|
134
|
+
api.post("/runs/batch", zValidator("json", schemas.RunBatchCreate), async (c) => {
|
|
135
|
+
// Batch Runs
|
|
136
|
+
const payload = c.req.valid("json");
|
|
137
|
+
const runs = await Promise.all(payload.map((run) => createValidRun(undefined, run)));
|
|
138
|
+
return jsonExtra(c, runs);
|
|
139
|
+
});
|
|
140
|
+
api.get("/threads/:thread_id/runs", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({
|
|
141
|
+
limit: z.coerce.number().nullish(),
|
|
142
|
+
offset: z.coerce.number().nullish(),
|
|
143
|
+
status: z.string().nullish(),
|
|
144
|
+
metadata: z.record(z.string(), z.unknown()).nullish(),
|
|
145
|
+
})), async (c) => {
|
|
146
|
+
// List runs
|
|
147
|
+
const { thread_id } = c.req.valid("param");
|
|
148
|
+
const { limit, offset, status, metadata } = c.req.valid("query");
|
|
149
|
+
const [runs] = await Promise.all([
|
|
150
|
+
Runs.search(thread_id, {
|
|
151
|
+
limit,
|
|
152
|
+
offset,
|
|
153
|
+
status,
|
|
154
|
+
metadata,
|
|
155
|
+
}),
|
|
156
|
+
Threads.get(thread_id),
|
|
157
|
+
]);
|
|
158
|
+
return jsonExtra(c, runs);
|
|
159
|
+
});
|
|
160
|
+
api.post("/threads/:thread_id/runs", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
161
|
+
// Create Run
|
|
162
|
+
const { thread_id } = c.req.valid("param");
|
|
163
|
+
const payload = c.req.valid("json");
|
|
164
|
+
const run = await createValidRun(thread_id, payload);
|
|
165
|
+
return jsonExtra(c, run);
|
|
166
|
+
});
|
|
167
|
+
api.post("/threads/:thread_id/runs/stream", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
168
|
+
// Stream Run
|
|
169
|
+
const { thread_id } = c.req.valid("param");
|
|
170
|
+
const payload = c.req.valid("json");
|
|
171
|
+
const run = await createValidRun(thread_id, payload);
|
|
172
|
+
return streamSSE(c, async (stream) => {
|
|
173
|
+
const cancelOnDisconnect = payload.on_disconnect === "cancel"
|
|
174
|
+
? getDisconnectAbortSignal(c, stream)
|
|
175
|
+
: undefined;
|
|
176
|
+
try {
|
|
177
|
+
for await (const { event, data } of Runs.Stream.join(run.run_id, thread_id, { cancelOnDisconnect })) {
|
|
178
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
catch (error) {
|
|
182
|
+
logError(error, { prefix: "Error streaming run" });
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
});
|
|
186
|
+
api.post("/threads/:thread_id/runs/wait", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", schemas.RunCreate), async (c) => {
|
|
187
|
+
// Wait Run
|
|
188
|
+
const { thread_id } = c.req.valid("param");
|
|
189
|
+
const payload = c.req.valid("json");
|
|
190
|
+
const run = await createValidRun(thread_id, payload);
|
|
191
|
+
return waitKeepAlive(c, Runs.join(run.run_id, thread_id));
|
|
192
|
+
});
|
|
193
|
+
api.get("/threads/:thread_id/runs/:run_id", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
194
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
195
|
+
const [run] = await Promise.all([
|
|
196
|
+
Runs.get(run_id, thread_id),
|
|
197
|
+
Threads.get(thread_id),
|
|
198
|
+
]);
|
|
199
|
+
return jsonExtra(c, run);
|
|
200
|
+
});
|
|
201
|
+
api.delete("/threads/:thread_id/runs/:run_id", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
202
|
+
// Delete Run
|
|
203
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
204
|
+
await Runs.delete(run_id, thread_id);
|
|
205
|
+
return c.body(null, 204);
|
|
206
|
+
});
|
|
207
|
+
api.get("/threads/:thread_id/runs/:run_id/join", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), async (c) => {
|
|
208
|
+
// Join Run Http
|
|
209
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
210
|
+
return jsonExtra(c, await Runs.join(run_id, thread_id));
|
|
211
|
+
});
|
|
212
|
+
api.get("/threads/:thread_id/runs/:run_id/stream", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), zValidator("query", z.object({ cancel_on_disconnect: schemas.coercedBoolean.optional() })), async (c) => {
|
|
213
|
+
// Stream Run Http
|
|
214
|
+
const { thread_id, run_id } = c.req.valid("param");
|
|
215
|
+
const { cancel_on_disconnect } = c.req.valid("query");
|
|
216
|
+
return streamSSE(c, async (stream) => {
|
|
217
|
+
const signal = cancel_on_disconnect
|
|
218
|
+
? getDisconnectAbortSignal(c, stream)
|
|
219
|
+
: undefined;
|
|
220
|
+
for await (const { event, data } of Runs.Stream.join(run_id, thread_id, {
|
|
221
|
+
cancelOnDisconnect: signal,
|
|
222
|
+
})) {
|
|
223
|
+
await stream.writeSSE({ data: serialiseAsDict(data), event });
|
|
224
|
+
}
|
|
225
|
+
});
|
|
226
|
+
});
|
|
227
|
+
// Cancel Run Http: request cancellation of a run. With ?wait=true the
// response blocks until the run has finished (204 No Content); otherwise the
// cancellation is only scheduled (202 Accepted).
api.post("/threads/:thread_id/runs/:run_id/cancel", zValidator("param", z.object({ thread_id: z.string().uuid(), run_id: z.string().uuid() })), zValidator("query", z.object({
    // NOTE(review): z.coerce.boolean() treats ANY non-empty string as true,
    // including "false" — other routes use schemas.coercedBoolean instead;
    // confirm this difference is intended.
    wait: z.coerce.boolean().optional().default(false),
    action: z.enum(["interrupt", "rollback"]).optional().default("interrupt"),
})), async (c) => {
    // Cancel Run Http
    const { thread_id, run_id } = c.req.valid("param");
    const { wait, action } = c.req.valid("query");
    await Runs.cancel(thread_id, [run_id], { action });
    if (wait)
        await Runs.join(run_id, thread_id);
    return c.body(null, wait ? 204 : 202);
});
|
|
239
|
+
export default api;
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { zValidator } from "@hono/zod-validator";
|
|
3
|
+
import * as schemas from "../schemas.mjs";
|
|
4
|
+
import { HTTPException } from "hono/http-exception";
|
|
5
|
+
import { store as storageStore } from "../storage/store.mjs";
|
|
6
|
+
const api = new Hono();
|
|
7
|
+
// Validate a store namespace path. A namespace must be a non-empty array of
// labels, and no label may be empty or contain a "." (periods are reserved
// as the namespace separator). Throws an HTTPException on invalid input:
// 400 when the namespace is missing/empty, 422 when a label is invalid.
const validateNamespace = (namespace) => {
  if (!namespace || namespace.length === 0) {
    throw new HTTPException(400, { message: "Namespace is required" });
  }
  namespace.forEach((label) => {
    if (!label || label.includes(".")) {
      throw new HTTPException(422, {
        message: "Namespace labels cannot be empty or contain periods. Received: " +
          namespace.join("."),
      });
    }
  });
};
|
|
20
|
+
/**
 * Convert a stored item into the public API shape: the internal camelCase
 * timestamp fields (`createdAt`/`updatedAt`) are exposed as snake_case
 * (`created_at`/`updated_at`); all other properties pass through unchanged.
 *
 * @param {object|null|undefined} item - stored item, or nullish when absent.
 * @returns {object|null} API-shaped copy, or null for nullish input.
 */
const mapItemsToApi = (item) => {
  if (item == null)
    return null;
  // Destructure instead of clone-then-`delete`: avoids mutating object
  // shapes (a known V8 deopt) and never touches the caller's object.
  const { createdAt, updatedAt, ...rest } = item;
  return { ...rest, created_at: createdAt, updated_at: updatedAt };
};
|
|
30
|
+
api.post("/store/namespaces", zValidator("json", schemas.StoreListNamespaces), async (c) => {
|
|
31
|
+
// List Namespaces
|
|
32
|
+
const payload = c.req.valid("json");
|
|
33
|
+
if (payload.prefix)
|
|
34
|
+
validateNamespace(payload.prefix);
|
|
35
|
+
if (payload.suffix)
|
|
36
|
+
validateNamespace(payload.suffix);
|
|
37
|
+
return c.json({
|
|
38
|
+
namespaces: await storageStore.listNamespaces({
|
|
39
|
+
limit: payload.limit ?? 100,
|
|
40
|
+
offset: payload.offset ?? 0,
|
|
41
|
+
prefix: payload.prefix,
|
|
42
|
+
suffix: payload.suffix,
|
|
43
|
+
maxDepth: payload.max_depth,
|
|
44
|
+
}),
|
|
45
|
+
});
|
|
46
|
+
});
|
|
47
|
+
api.post("/store/items/search", zValidator("json", schemas.StoreSearchItems), async (c) => {
|
|
48
|
+
// Search Items
|
|
49
|
+
const payload = c.req.valid("json");
|
|
50
|
+
if (payload.namespace_prefix)
|
|
51
|
+
validateNamespace(payload.namespace_prefix);
|
|
52
|
+
const items = await storageStore.search(payload.namespace_prefix, {
|
|
53
|
+
filter: payload.filter,
|
|
54
|
+
limit: payload.limit ?? 10,
|
|
55
|
+
offset: payload.offset ?? 0,
|
|
56
|
+
query: payload.query,
|
|
57
|
+
});
|
|
58
|
+
return c.json({ items: items.map(mapItemsToApi) });
|
|
59
|
+
});
|
|
60
|
+
api.put("/store/items", zValidator("json", schemas.StorePutItem), async (c) => {
|
|
61
|
+
// Put Item
|
|
62
|
+
const payload = c.req.valid("json");
|
|
63
|
+
if (payload.namespace)
|
|
64
|
+
validateNamespace(payload.namespace);
|
|
65
|
+
await storageStore.put(payload.namespace, payload.key, payload.value);
|
|
66
|
+
return c.body(null, 204);
|
|
67
|
+
});
|
|
68
|
+
api.delete("/store/items", zValidator("json", schemas.StoreDeleteItem), async (c) => {
|
|
69
|
+
// Delete Item
|
|
70
|
+
const payload = c.req.valid("json");
|
|
71
|
+
if (payload.namespace)
|
|
72
|
+
validateNamespace(payload.namespace);
|
|
73
|
+
await storageStore.delete(payload.namespace ?? [], payload.key);
|
|
74
|
+
return c.body(null, 204);
|
|
75
|
+
});
|
|
76
|
+
api.get("/store/items", zValidator("query", schemas.StoreGetItem), async (c) => {
|
|
77
|
+
// Get Item
|
|
78
|
+
const payload = c.req.valid("query");
|
|
79
|
+
const key = payload.key;
|
|
80
|
+
const namespace = payload.namespace;
|
|
81
|
+
return c.json(mapItemsToApi(await storageStore.get(namespace, key)));
|
|
82
|
+
});
|
|
83
|
+
export default api;
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import { zValidator } from "@hono/zod-validator";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { v4 as uuid4 } from "uuid";
|
|
4
|
+
import * as schemas from "../schemas.mjs";
|
|
5
|
+
import { Threads } from "../storage/ops.mjs";
|
|
6
|
+
import { z } from "zod";
|
|
7
|
+
import { stateSnapshotToThreadState } from "../state.mjs";
|
|
8
|
+
import { jsonExtra } from "../utils/hono.mjs";
|
|
9
|
+
const api = new Hono();
|
|
10
|
+
// Threads Routes
|
|
11
|
+
api.post("/threads", zValidator("json", schemas.ThreadCreate), async (c) => {
|
|
12
|
+
// Create Thread
|
|
13
|
+
const payload = c.req.valid("json");
|
|
14
|
+
const thread = await Threads.put(payload.thread_id || uuid4(), {
|
|
15
|
+
metadata: payload.metadata,
|
|
16
|
+
if_exists: payload.if_exists ?? "raise",
|
|
17
|
+
});
|
|
18
|
+
return jsonExtra(c, thread);
|
|
19
|
+
});
|
|
20
|
+
api.post("/threads/search", zValidator("json", schemas.ThreadSearchRequest), async (c) => {
|
|
21
|
+
// Search Threads
|
|
22
|
+
const payload = c.req.valid("json");
|
|
23
|
+
const result = [];
|
|
24
|
+
for await (const item of Threads.search({
|
|
25
|
+
status: payload.status,
|
|
26
|
+
values: payload.values,
|
|
27
|
+
metadata: payload.metadata,
|
|
28
|
+
limit: payload.limit ?? 10,
|
|
29
|
+
offset: payload.offset ?? 0,
|
|
30
|
+
})) {
|
|
31
|
+
result.push({
|
|
32
|
+
...item,
|
|
33
|
+
created_at: item.created_at.toISOString(),
|
|
34
|
+
updated_at: item.updated_at.toISOString(),
|
|
35
|
+
});
|
|
36
|
+
}
|
|
37
|
+
return jsonExtra(c, result);
|
|
38
|
+
});
|
|
39
|
+
api.get("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
|
|
40
|
+
// Get Latest Thread State
|
|
41
|
+
const { thread_id } = c.req.valid("param");
|
|
42
|
+
const { subgraphs } = c.req.valid("query");
|
|
43
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id } }, { subgraphs }));
|
|
44
|
+
return jsonExtra(c, state);
|
|
45
|
+
});
|
|
46
|
+
api.post("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
47
|
+
values: z
|
|
48
|
+
.union([
|
|
49
|
+
z.record(z.string(), z.unknown()),
|
|
50
|
+
z.array(z.record(z.string(), z.unknown())),
|
|
51
|
+
])
|
|
52
|
+
.nullish(),
|
|
53
|
+
as_node: z.string().optional(),
|
|
54
|
+
checkpoint_id: z.string().optional(),
|
|
55
|
+
checkpoint: schemas.CheckpointSchema.nullish(),
|
|
56
|
+
})), async (c) => {
|
|
57
|
+
// Update Thread State
|
|
58
|
+
const { thread_id } = c.req.valid("param");
|
|
59
|
+
const payload = c.req.valid("json");
|
|
60
|
+
const config = { configurable: { thread_id } };
|
|
61
|
+
if (payload.checkpoint_id) {
|
|
62
|
+
config.configurable ??= {};
|
|
63
|
+
config.configurable.checkpoint_id = payload.checkpoint_id;
|
|
64
|
+
}
|
|
65
|
+
if (payload.checkpoint) {
|
|
66
|
+
config.configurable ??= {};
|
|
67
|
+
Object.assign(config.configurable, payload.checkpoint);
|
|
68
|
+
}
|
|
69
|
+
const inserted = await Threads.State.post(config, payload.values, payload.as_node);
|
|
70
|
+
return jsonExtra(c, inserted);
|
|
71
|
+
});
|
|
72
|
+
api.get("/threads/:thread_id/state/:checkpoint_id", zValidator("param", z.object({ thread_id: z.string().uuid(), checkpoint_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
|
|
73
|
+
// Get Thread State At Checkpoint
|
|
74
|
+
const { thread_id, checkpoint_id } = c.req.valid("param");
|
|
75
|
+
const { subgraphs } = c.req.valid("query");
|
|
76
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, checkpoint_id } }, { subgraphs }));
|
|
77
|
+
return jsonExtra(c, state);
|
|
78
|
+
});
|
|
79
|
+
api.post("/threads/:thread_id/state/checkpoint", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
80
|
+
subgraphs: schemas.coercedBoolean.optional(),
|
|
81
|
+
checkpoint: schemas.CheckpointSchema.nullish(),
|
|
82
|
+
})), async (c) => {
|
|
83
|
+
// Get Thread State At Checkpoint Post
|
|
84
|
+
const { thread_id } = c.req.valid("param");
|
|
85
|
+
const { checkpoint, subgraphs } = c.req.valid("json");
|
|
86
|
+
const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, ...checkpoint } }, { subgraphs }));
|
|
87
|
+
return jsonExtra(c, state);
|
|
88
|
+
});
|
|
89
|
+
api.get("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({
|
|
90
|
+
limit: z
|
|
91
|
+
.string()
|
|
92
|
+
.optional()
|
|
93
|
+
.default("10")
|
|
94
|
+
.transform((value) => parseInt(value, 10)),
|
|
95
|
+
before: z.string().optional(),
|
|
96
|
+
})), async (c) => {
|
|
97
|
+
// Get Thread History
|
|
98
|
+
const { thread_id } = c.req.valid("param");
|
|
99
|
+
const { limit, before } = c.req.valid("query");
|
|
100
|
+
const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "" } }, { limit, before });
|
|
101
|
+
return jsonExtra(c, states.map(stateSnapshotToThreadState));
|
|
102
|
+
});
|
|
103
|
+
api.post("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
|
|
104
|
+
limit: z.number().optional().default(10),
|
|
105
|
+
before: z.string().optional(),
|
|
106
|
+
metadata: z.record(z.string(), z.unknown()).optional(),
|
|
107
|
+
checkpoint: z
|
|
108
|
+
.object({
|
|
109
|
+
checkpoint_id: z.string().uuid().optional(),
|
|
110
|
+
checkpoint_ns: z.string().optional(),
|
|
111
|
+
checkpoint_map: z.record(z.string(), z.unknown()).optional(),
|
|
112
|
+
})
|
|
113
|
+
.optional(),
|
|
114
|
+
})), async (c) => {
|
|
115
|
+
// Get Thread History Post
|
|
116
|
+
const { thread_id } = c.req.valid("param");
|
|
117
|
+
const { limit, before, metadata, checkpoint } = c.req.valid("json");
|
|
118
|
+
const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "", ...checkpoint } }, { limit, before, metadata });
|
|
119
|
+
return jsonExtra(c, states.map(stateSnapshotToThreadState));
|
|
120
|
+
});
|
|
121
|
+
api.get("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
122
|
+
// Get Thread
|
|
123
|
+
const { thread_id } = c.req.valid("param");
|
|
124
|
+
return jsonExtra(c, await Threads.get(thread_id));
|
|
125
|
+
});
|
|
126
|
+
api.delete("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
127
|
+
// Delete Thread
|
|
128
|
+
const { thread_id } = c.req.valid("param");
|
|
129
|
+
await Threads.delete(thread_id);
|
|
130
|
+
return new Response(null, { status: 204 });
|
|
131
|
+
});
|
|
132
|
+
api.patch("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({ metadata: z.record(z.string(), z.unknown()) })), async (c) => {
|
|
133
|
+
// Patch Thread
|
|
134
|
+
const { thread_id } = c.req.valid("param");
|
|
135
|
+
const { metadata } = c.req.valid("json");
|
|
136
|
+
return jsonExtra(c, await Threads.patch(thread_id, { metadata }));
|
|
137
|
+
});
|
|
138
|
+
api.post("/threads/:thread_id/copy", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
|
|
139
|
+
// Copy Thread
|
|
140
|
+
const { thread_id } = c.req.valid("param");
|
|
141
|
+
return jsonExtra(c, await Threads.copy(thread_id));
|
|
142
|
+
});
|
|
143
|
+
export default api;
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import "../preload.mjs";
|
|
2
|
+
import { asyncExitHook } from "exit-hook";
|
|
3
|
+
import * as process from "node:process";
|
|
4
|
+
import { startServer, StartServerSchema } from "../server.mjs";
|
|
5
|
+
import { connectToServer } from "./utils/ipc/client.mjs";
|
|
6
|
+
import { Client as LangSmithClient } from "langsmith";
|
|
7
|
+
import { logger } from "../logging.mjs";
|
|
8
|
+
logger.info(`Starting server...`);
|
|
9
|
+
const [ppid, payload] = process.argv.slice(-2);
|
|
10
|
+
const sendToParent = await connectToServer(+ppid);
|
|
11
|
+
// TODO: re-export langsmith/isTracingEnabled
|
|
12
|
+
// Tracing is enabled only when the first *truthy* LangSmith/LangChain tracing
// env var equals the literal string "true". Note the first-truthy semantics:
// e.g. LANGSMITH_TRACING_V2="false" wins over a later var set to "true".
const isTracingEnabled = () => {
    const candidates = [
        process.env?.LANGSMITH_TRACING_V2,
        process.env?.LANGCHAIN_TRACING_V2,
        process.env?.LANGSMITH_TRACING,
        process.env?.LANGCHAIN_TRACING,
    ];
    return candidates.find(Boolean) === "true";
};
|
|
19
|
+
const options = StartServerSchema.parse(JSON.parse(payload));
|
|
20
|
+
// Export PORT to the environment
|
|
21
|
+
process.env.PORT = options.port.toString();
|
|
22
|
+
const [{ host, cleanup }, organizationId] = await Promise.all([
|
|
23
|
+
startServer(options),
|
|
24
|
+
(async () => {
|
|
25
|
+
if (isTracingEnabled()) {
|
|
26
|
+
try {
|
|
27
|
+
// @ts-expect-error Private method
|
|
28
|
+
return new LangSmithClient()._getTenantId();
|
|
29
|
+
}
|
|
30
|
+
catch (error) {
|
|
31
|
+
logger.warn("Failed to get organization ID", { error });
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
return null;
|
|
35
|
+
})(),
|
|
36
|
+
]);
|
|
37
|
+
logger.info(`Server running at ${host}`);
|
|
38
|
+
let queryParams = `?baseUrl=http://${options.host}:${options.port}`;
|
|
39
|
+
if (organizationId)
|
|
40
|
+
queryParams += `&organizationId=${organizationId}`;
|
|
41
|
+
asyncExitHook(cleanup, { wait: 1000 });
|
|
42
|
+
sendToParent?.({ queryParams });
|