@langchain/langgraph-cli 0.0.0-preview.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +93 -0
- package/README.md +69 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +145 -0
- package/dist/cli/build.mjs +49 -0
- package/dist/cli/cli.mjs +11 -0
- package/dist/cli/dev.mjs +113 -0
- package/dist/cli/dev.node.entrypoint.mjs +40 -0
- package/dist/cli/dev.node.mjs +35 -0
- package/dist/cli/dev.python.mjs +128 -0
- package/dist/cli/docker.mjs +112 -0
- package/dist/cli/up.mjs +137 -0
- package/dist/cli/utils/analytics.mjs +39 -0
- package/dist/cli/utils/builder.mjs +7 -0
- package/dist/cli/utils/ipc/client.mjs +47 -0
- package/dist/cli/utils/ipc/server.mjs +93 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/cli/utils/version.mjs +13 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/docker.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +71 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +308 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +81 -0
- package/dist/logging.mjs +95 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +93 -0
- package/dist/schemas.mjs +399 -0
- package/dist/server.mjs +63 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +127 -0
- package/dist/storage/ops.mjs +786 -0
- package/dist/storage/persist.mjs +69 -0
- package/dist/storage/store.mjs +41 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/config.mjs +47 -0
- package/dist/utils/error.mjs +1 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +68 -0
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import { zValidator } from "@hono/zod-validator";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { v4 as uuid4 } from "uuid";
|
|
4
|
+
import * as schemas from "../schemas.mjs";
|
|
5
|
+
import { Threads } from "../storage/ops.mjs";
|
|
6
|
+
import { z } from "zod";
|
|
7
|
+
import { stateSnapshotToThreadState } from "../state.mjs";
|
|
8
|
+
import { jsonExtra } from "../utils/hono.mjs";
|
|
9
|
+
// Hono sub-application exposing the Threads REST API.
// Persistence is delegated to the `Threads` ops module; responses are
// serialized with `jsonExtra` (extended JSON encoding from utils/hono).
const api = new Hono();
// Threads Routes
api.post("/threads", zValidator("json", schemas.ThreadCreate), async (c) => {
    // Create Thread
    // A UUID is generated when no thread_id is supplied; `if_exists`
    // defaults to "raise" (duplicate thread_id is an error).
    const payload = c.req.valid("json");
    const thread = await Threads.put(payload.thread_id || uuid4(), {
        metadata: payload.metadata,
        if_exists: payload.if_exists ?? "raise",
    });
    return jsonExtra(c, thread);
});
api.post("/threads/search", zValidator("json", schemas.ThreadSearchRequest), async (c) => {
    // Search Threads
    // Paginated search (limit defaults to 10, offset to 0). Date fields
    // are converted to ISO-8601 strings for the wire format.
    const payload = c.req.valid("json");
    const result = [];
    for await (const item of Threads.search({
        status: payload.status,
        values: payload.values,
        metadata: payload.metadata,
        limit: payload.limit ?? 10,
        offset: payload.offset ?? 0,
    })) {
        result.push({
            ...item,
            created_at: item.created_at.toISOString(),
            updated_at: item.updated_at.toISOString(),
        });
    }
    return jsonExtra(c, result);
});
api.get("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
    // Get Latest Thread State
    // `subgraphs` (query, coerced boolean) controls whether subgraph
    // state is included in the snapshot.
    const { thread_id } = c.req.valid("param");
    const { subgraphs } = c.req.valid("query");
    const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id } }, { subgraphs }));
    return jsonExtra(c, state);
});
api.post("/threads/:thread_id/state", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
    values: z.union([
        z.record(z.string(), z.unknown()),
        z.array(z.record(z.string(), z.unknown())),
    ]),
    as_node: z.string().optional(),
    checkpoint_id: z.string().optional(),
    checkpoint: z.record(z.string(), z.unknown()).optional(),
})), async (c) => {
    // Update Thread State
    // Builds a runnable config from the thread id plus the optional
    // checkpoint selectors; `checkpoint` keys are merged wholesale into
    // `configurable`, so it can override `checkpoint_id`.
    const { thread_id } = c.req.valid("param");
    const payload = c.req.valid("json");
    const config = { configurable: { thread_id } };
    if (payload.checkpoint_id) {
        config.configurable ??= {};
        config.configurable.checkpoint_id = payload.checkpoint_id;
    }
    if (payload.checkpoint) {
        config.configurable ??= {};
        Object.assign(config.configurable, payload.checkpoint);
    }
    const inserted = await Threads.State.post(config, payload.values, payload.as_node);
    return jsonExtra(c, inserted);
});
api.get("/threads/:thread_id/state/:checkpoint_id", zValidator("param", z.object({ thread_id: z.string().uuid(), checkpoint_id: z.string().uuid() })), zValidator("query", z.object({ subgraphs: schemas.coercedBoolean.optional() })), async (c) => {
    // Get Thread State At Checkpoint
    const { thread_id, checkpoint_id } = c.req.valid("param");
    const { subgraphs } = c.req.valid("query");
    const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, checkpoint_id } }, { subgraphs }));
    return jsonExtra(c, state);
});
api.post("/threads/:thread_id/state/checkpoint", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
    subgraphs: schemas.coercedBoolean.optional(),
    checkpoint: z.object({
        checkpoint_id: z.string().uuid().optional(),
        checkpoint_ns: z.string().optional(),
        checkpoint_map: z.record(z.string(), z.unknown()).optional(),
    }),
})), async (c) => {
    // Get Thread State At Checkpoint Post
    // POST variant of the checkpoint lookup: the checkpoint selector is
    // carried in the JSON body and spread into `configurable`.
    const { thread_id } = c.req.valid("param");
    const { checkpoint, subgraphs } = c.req.valid("json");
    const state = stateSnapshotToThreadState(await Threads.State.get({ configurable: { thread_id, ...checkpoint } }, { subgraphs }));
    return jsonExtra(c, state);
});
api.get("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("query", z.object({
    // query params arrive as strings; limit is parsed to an int (default 10)
    limit: z
        .string()
        .optional()
        .default("10")
        .transform((value) => parseInt(value, 10)),
    before: z.string().optional(),
})), async (c) => {
    // Get Thread History
    // Lists states for the root checkpoint namespace ("").
    const { thread_id } = c.req.valid("param");
    const { limit, before } = c.req.valid("query");
    const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "" } }, { limit, before });
    return jsonExtra(c, states.map(stateSnapshotToThreadState));
});
api.post("/threads/:thread_id/history", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({
    limit: z.number().optional().default(10),
    before: z.string().optional(),
    metadata: z.record(z.string(), z.unknown()).optional(),
    checkpoint: z
        .object({
            checkpoint_id: z.string().uuid().optional(),
            checkpoint_ns: z.string().optional(),
            checkpoint_map: z.record(z.string(), z.unknown()).optional(),
        })
        .optional(),
})), async (c) => {
    // Get Thread History Post
    // POST variant with richer filters (metadata, checkpoint selector).
    // Note: a checkpoint with `checkpoint_ns` overrides the default "".
    const { thread_id } = c.req.valid("param");
    const { limit, before, metadata, checkpoint } = c.req.valid("json");
    const states = await Threads.State.list({ configurable: { thread_id, checkpoint_ns: "", ...checkpoint } }, { limit, before, metadata });
    return jsonExtra(c, states.map(stateSnapshotToThreadState));
});
api.get("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
    // Get Thread
    const { thread_id } = c.req.valid("param");
    return jsonExtra(c, await Threads.get(thread_id));
});
api.delete("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
    // Delete Thread
    // 204 No Content on success.
    const { thread_id } = c.req.valid("param");
    await Threads.delete(thread_id);
    return new Response(null, { status: 204 });
});
api.patch("/threads/:thread_id", zValidator("param", z.object({ thread_id: z.string().uuid() })), zValidator("json", z.object({ metadata: z.record(z.string(), z.unknown()) })), async (c) => {
    // Patch Thread
    // Only metadata is patchable via this endpoint.
    const { thread_id } = c.req.valid("param");
    const { metadata } = c.req.valid("json");
    return jsonExtra(c, await Threads.patch(thread_id, { metadata }));
});
api.post("/threads/:thread_id/copy", zValidator("param", z.object({ thread_id: z.string().uuid() })), async (c) => {
    // Copy Thread
    const { thread_id } = c.req.valid("param");
    return jsonExtra(c, await Threads.copy(thread_id));
});
export default api;
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { getDockerCapabilities } from "../docker/compose.mjs";
|
|
2
|
+
import { assembleLocalDeps, configToDocker, getBaseImage, } from "../docker/docker.mjs";
|
|
3
|
+
import { getExecaOptions } from "../docker/shell.mjs";
|
|
4
|
+
import { getConfig } from "../utils/config.mjs";
|
|
5
|
+
import { builder } from "./utils/builder.mjs";
|
|
6
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
7
|
+
import { $ } from "execa";
|
|
8
|
+
import * as path from "node:path";
|
|
9
|
+
import * as fs from "node:fs/promises";
|
|
10
|
+
import { logger } from "../logging.mjs";
|
|
11
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
12
|
+
/**
 * Log the exact command line a spawned process is running, then hand the
 * process back so the caller can await it.
 * @param proc - execa child process (exposes `spawnargs`)
 * @returns the same process, unchanged
 */
const stream = (proc) => {
    const commandLine = proc.spawnargs.join(" ");
    logger.info(`Running "${commandLine}"`);
    return proc;
};
|
|
16
|
+
// `langgraph build`: render the project's Dockerfile from its config and
// build a tagged image, optionally pulling the base image first.
builder
    .command("build")
    .description("Build LangGraph API server Docker image.")
    .requiredOption("-t, --tag <tag>", "Tag for the Docker image.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    .option("--no-pull", "Running the server with locally-built images. By default LangGraph will pull the latest images from the registry")
    .argument("[args...]")
    .passThroughOptions()
    // Analytics record only whether options deviate from defaults —
    // no option values are transmitted.
    .hook("preAction", withAnalytics((command) => ({
        config: command.opts().config !== process.cwd(),
        pull: command.opts().pull,
    })))
    .action(async (pass, params) => {
        const configPath = await getProjectPath(params.config);
        // Fail fast if Docker is not usable on this machine.
        await getDockerCapabilities();
        const projectDir = path.dirname(configPath);
        const config = getConfig(await fs.readFile(configPath, "utf-8"));
        // Inherit stdio so docker's progress output reaches the terminal.
        const opts = await getExecaOptions({
            cwd: projectDir,
            stderr: "inherit",
            stdout: "inherit",
        });
        const localDeps = await assembleLocalDeps(configPath, config);
        // `input` is the generated Dockerfile text, piped to `docker build -f -`.
        const input = await configToDocker(configPath, config, localDeps, {
            watch: false,
            dockerCommand: "build",
        });
        let exec = $({ ...opts, input });
        if (params.pull) {
            await stream(exec `docker pull ${getBaseImage(config)}`);
        }
        // NOTE(review): a fresh execa instance is created after the pull —
        // presumably because the piped stdin input is consumed by the first
        // invocation; confirm before simplifying.
        exec = $({ ...opts, input });
        // Extra CLI args (`pass`) are forwarded verbatim to docker build.
        await stream(exec `docker build -f - -t ${params.tag} ${projectDir} ${pass}`);
    });
|
package/dist/cli/cli.mjs
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { builder } from "./utils/builder.mjs";
|
|
3
|
+
import { flushAnalytics } from "./utils/analytics.mjs";
|
|
4
|
+
import { asyncExitHook, gracefulExit } from "exit-hook";
|
|
5
|
+
import "./dev.mjs";
|
|
6
|
+
import "./docker.mjs";
|
|
7
|
+
import "./build.mjs";
|
|
8
|
+
import "./up.mjs";
|
|
9
|
+
// Convert commander's thrown exit errors into a graceful process exit so
// registered exit hooks (e.g. the analytics flush) still run.
builder.exitOverride((error) => gracefulExit(error.exitCode));
// Flush any buffered analytics events on shutdown, waiting at most 2s.
asyncExitHook(() => flushAnalytics(), { wait: 2000 });
// Parse process.argv and dispatch to the subcommands registered by the
// side-effect imports above (dev/docker/build/up).
builder.parse();
|
package/dist/cli/dev.mjs
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import * as path from "node:path";
|
|
2
|
+
import * as fs from "node:fs/promises";
|
|
3
|
+
import { parse, populate } from "dotenv";
|
|
4
|
+
import { watch } from "chokidar";
|
|
5
|
+
import { z } from "zod";
|
|
6
|
+
import open from "open";
|
|
7
|
+
import { createIpcServer } from "./utils/ipc/server.mjs";
|
|
8
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
9
|
+
import { getConfig } from "../utils/config.mjs";
|
|
10
|
+
import { builder } from "./utils/builder.mjs";
|
|
11
|
+
import { logError, logger } from "../logging.mjs";
|
|
12
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
13
|
+
// `langgraph dev`: run the API server locally with hot reloading. Watches
// the config file (and any dotenv file it references) and restarts the
// child server process on change.
builder
    .command("dev")
    .description("Run LangGraph API server in development mode with hot reloading.")
    .option("-p, --port <number>", "port to run the server on", "2024")
    .option("-h, --host <string>", "host to bind to", "localhost")
    .option("--no-browser", "disable auto-opening the browser")
    .option("-n, --n-jobs-per-worker <number>", "number of workers to run", "10")
    .option("-c, --config <path>", "path to configuration file", process.cwd())
    .allowExcessArguments()
    .allowUnknownOption()
    // Analytics record only whether each option deviates from its default;
    // option values are never transmitted.
    .hook("preAction", withAnalytics((command) => ({
        config: command.opts().config !== process.cwd(),
        port: command.opts().port !== "2024",
        host: command.opts().host !== "localhost",
        n_jobs_per_worker: command.opts().nJobsPerWorker !== "10",
    })))
    .action(async (options, { args }) => {
        try {
            const configPath = await getProjectPath(options.config);
            const projectCwd = path.dirname(configPath);
            // IPC server lets the spawned dev server report its Studio query
            // params back to this CLI process.
            const [pid, server] = await createIpcServer();
            const watcher = watch([configPath], {
                ignoreInitial: true,
                cwd: projectCwd,
            });
            let hasOpenedFlag = false;
            let child = undefined;
            server.on("data", (data) => {
                const response = z.object({ queryParams: z.string() }).parse(data);
                // Open LangGraph Studio at most once, on the first report.
                if (options.browser && !hasOpenedFlag) {
                    hasOpenedFlag = true;
                    open(`https://smith.langchain.com/studio${response.queryParams}`);
                }
            });
            // check if .gitignore already contains .langgraph_api
            const gitignorePath = path.resolve(projectCwd, ".gitignore");
            const gitignoreContent = await fs
                .readFile(gitignorePath, "utf-8")
                .catch(() => "");
            if (!gitignoreContent.includes(".langgraph_api")) {
                logger.info("Updating .gitignore to prevent `.langgraph_api` from being committed.");
                await fs.appendFile(gitignorePath, "\n# LangGraph API\n.langgraph_api\n");
            }
            // Re-read config + env on every (re)launch and reconcile the
            // watcher's file list with what the config now references.
            const prepareContext = async () => {
                const config = getConfig(await fs.readFile(configPath, "utf-8"));
                const newWatch = [configPath];
                const env = { ...process.env };
                const configEnv = config?.env;
                if (configEnv) {
                    if (typeof configEnv === "string") {
                        // `env` points at a dotenv file: load it and watch it too.
                        const envPath = path.resolve(projectCwd, configEnv);
                        newWatch.push(envPath);
                        const envData = await fs.readFile(envPath, "utf-8");
                        populate(env, parse(envData));
                    }
                    else if (Array.isArray(configEnv)) {
                        throw new Error("Env storage is not supported by CLI.");
                    }
                    else if (typeof configEnv === "object") {
                        if (!process.env)
                            throw new Error("process.env is not defined");
                        populate(env, configEnv);
                    }
                }
                // Diff currently-watched files against the new list so only
                // the delta is (un)watched.
                const oldWatch = Object.entries(watcher.getWatched()).flatMap(([dir, files]) => files.map((file) => path.resolve(projectCwd, dir, file)));
                const addedTarget = newWatch.filter((target) => !oldWatch.includes(target));
                const removedTarget = oldWatch.filter((target) => !newWatch.includes(target));
                watcher.unwatch(removedTarget).add(addedTarget);
                return { config, env };
            };
            // (Re)start the dev server, killing any previous child first.
            const launchServer = async () => {
                const { config, env } = await prepareContext();
                if (child != null)
                    child.kill();
                if ("python_version" in config) {
                    logger.warn("Launching Python server from @langchain/langgraph-cli is experimental. Please use the `langgraph-cli` package from PyPi instead.");
                    const { spawnPythonServer } = await import("./dev.python.mjs");
                    child = await spawnPythonServer({ ...options, rest: args }, { configPath, config, env }, { pid, projectCwd });
                }
                else {
                    const { spawnNodeServer } = await import("./dev.node.mjs");
                    child = await spawnNodeServer({ ...options, rest: args }, { configPath, config, env }, { pid, projectCwd });
                }
            };
            watcher.on("all", async (_name, changedPath) => {
                logger.warn(`Detected changes in ${changedPath}, restarting server`);
                // Fix: await the relaunch and report failures. Previously this
                // promise was left floating, so a failed restart surfaced as an
                // unhandled rejection instead of a logged error.
                try {
                    await launchServer();
                }
                catch (error) {
                    logError(error, { prefix: "Failed to launch server" });
                }
            });
            // TODO: sometimes the server keeps sending stuff
            // while gracefully exiting
            process.on("exit", () => {
                watcher.close();
                server.close();
                child?.kill();
            });
            // Fix: await the initial launch so startup errors are handled by
            // the catch below rather than becoming an unhandled rejection.
            await launchServer();
        }
        catch (error) {
            logError(error, { prefix: "Failed to launch server" });
        }
    });
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import "../preload.mjs";
|
|
2
|
+
import { asyncExitHook } from "exit-hook";
|
|
3
|
+
import * as process from "node:process";
|
|
4
|
+
import { startServer, StartServerSchema } from "../server.mjs";
|
|
5
|
+
import { connectToServer } from "./utils/ipc/client.mjs";
|
|
6
|
+
import { Client as LangSmithClient } from "langsmith";
|
|
7
|
+
import { logger } from "../logging.mjs";
|
|
8
|
+
// Announce startup before binding; the concrete address is logged later.
logger.info(`Starting server...`);
// The parent CLI appends two trailing argv entries: its PID (used to
// derive the IPC pipe path) and the JSON-encoded server options payload.
const [ppid, payload] = process.argv.slice(-2);
// Connect back to the parent's IPC server. The handle is nullable — it is
// invoked with optional chaining at the call site.
const sendToParent = await connectToServer(+ppid);
|
|
12
|
+
// TODO: re-export langsmith/isTracingEnabled
/**
 * Check whether LangSmith tracing is switched on via environment flags.
 * The first non-empty variable wins (matching `||` short-circuiting), and
 * tracing is enabled only when that value is exactly "true".
 */
const isTracingEnabled = () => {
    const flag = [
        process.env?.LANGSMITH_TRACING_V2,
        process.env?.LANGCHAIN_TRACING_V2,
        process.env?.LANGSMITH_TRACING,
        process.env?.LANGCHAIN_TRACING,
    ].find((candidate) => candidate);
    return flag === "true";
};
|
|
19
|
+
// Parse the JSON-encoded server options handed over by the parent CLI.
const options = StartServerSchema.parse(JSON.parse(payload));
// Start the HTTP server and, when tracing is on, resolve the LangSmith
// organization id in parallel. The id is only used to enrich the Studio
// URL; failures degrade to `null`.
const [{ host, cleanup }, organizationId] = await Promise.all([
    startServer(options),
    (async () => {
        if (isTracingEnabled()) {
            try {
                // @ts-expect-error Private method
                // Fix: `await` is required here — without it a rejected
                // promise escapes this try/catch and rejects Promise.all
                // (crashing the top-level await) instead of being logged.
                return await new LangSmithClient()._getTenantId();
            }
            catch (error) {
                logger.warn("Failed to get organization ID", { error });
            }
        }
        return null;
    })(),
]);
logger.info(`Server running at ${host}`);
// Query string forwarded to the parent, which opens LangGraph Studio.
let queryParams = `?baseUrl=http://${options.host}:${options.port}`;
if (organizationId)
    queryParams += `&organizationId=${organizationId}`;
// Give the server up to 1s to clean up on exit.
asyncExitHook(cleanup, { wait: 1000 });
sendToParent?.({ queryParams });
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { fileURLToPath } from "node:url";
|
|
2
|
+
import { spawn } from "node:child_process";
|
|
3
|
+
import {} from "../utils/config.mjs";
|
|
4
|
+
/**
 * Spawn the Node.js dev server as a child process under `tsx watch`
 * (hot reload), printing the welcome banner with the local API and
 * Studio URLs first.
 *
 * @param args - CLI options (`host`, `port`, `nJobsPerWorker` as strings)
 * @param context - `{ config, env }`: parsed project config and env vars
 * @param options - `{ pid, projectCwd }`: parent PID for IPC and project dir
 * @returns the spawned child process (stdio inherited, plus an "ipc" channel)
 */
export async function spawnNodeServer(args, context, options) {
    const localUrl = `http://${args.host}:${args.port}`;
    const studioUrl = `https://smith.langchain.com/studio?baseUrl=${localUrl}`;
    // \x1b[36m...\x1b[0m wraps the URLs in cyan ANSI color.
    console.log(`

Welcome to

╦ ┌─┐┌┐┌┌─┐╔═╗┬─┐┌─┐┌─┐┬ ┬
║ ├─┤││││ ┬║ ╦├┬┘├─┤├─┘├─┤
╩═╝┴ ┴┘└┘└─┘╚═╝┴└─┴ ┴┴ ┴ ┴.js

- 🚀 API: \x1b[36m${localUrl}\x1b[0m
- 🎨 Studio UI: \x1b[36m${studioUrl}\x1b[0m

This in-memory server is designed for development and testing.
For production use, please use LangGraph Cloud.

`);
    // Run the entrypoint through tsx's watcher CLI so code changes restart
    // the server; the last two argv entries (parent PID + JSON payload) are
    // consumed by dev.node.entrypoint.mjs.
    return spawn(process.execPath, [
        fileURLToPath(new URL("../../cli.mjs", import.meta.resolve("tsx/esm/api"))),
        "watch",
        "--clear-screen=false",
        fileURLToPath(new URL(import.meta.resolve("./dev.node.entrypoint.mjs"))),
        options.pid.toString(),
        JSON.stringify({
            port: Number.parseInt(args.port, 10),
            nWorkers: Number.parseInt(args.nJobsPerWorker, 10),
            host: args.host,
            graphs: context.config.graphs,
            cwd: options.projectCwd,
        }),
    ], { stdio: ["inherit", "inherit", "inherit", "ipc"], env: context.env });
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
import { fileURLToPath } from "node:url";
|
|
3
|
+
import { Readable } from "node:stream";
|
|
4
|
+
import fs from "node:fs/promises";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import os from "node:os";
|
|
7
|
+
import { extract as tarExtract } from "tar";
|
|
8
|
+
import zipExtract from "extract-zip";
|
|
9
|
+
import { logger } from "../logging.mjs";
|
|
10
|
+
import { assembleLocalDeps } from "../docker/docker.mjs";
|
|
11
|
+
// Downloaded uv binaries are cached next to this module, keyed by the
// pinned release so a version bump never reuses a stale binary.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const UV_VERSION = "0.5.20";
const UV_BINARY_CACHE = path.join(__dirname, ".uv", UV_VERSION);
/**
 * Describe the current host for uv binary selection.
 * @returns {{ platform: string, arch: string, extension: string, binaryName: string }}
 */
function getPlatformInfo() {
    const platform = os.platform();
    const arch = os.arch();
    // Only Windows executables carry a file extension.
    const extension = platform === "win32" ? ".exe" : "";
    return { platform, arch, extension, binaryName: `uv${extension}` };
}
/**
 * Build the GitHub release-asset URL for the pinned uv version.
 * @param {{ platform: string, arch: string }} info - see getPlatformInfo
 * @throws {Error} for platforms or architectures uv does not ship.
 */
function getDownloadUrl(info) {
    const platformStr = info.platform === "darwin" ? "apple-darwin"
        : info.platform === "win32" ? "pc-windows-msvc"
            : info.platform === "linux" ? "unknown-linux-gnu"
                : null;
    if (platformStr === null) {
        throw new Error(`Unsupported platform: ${info.platform}`);
    }
    const archStr = info.arch === "x64" ? "x86_64"
        : info.arch === "arm64" ? "aarch64"
            : null;
    if (archStr === null) {
        throw new Error(`Unsupported architecture: ${info.arch}`);
    }
    // Windows releases are zipped; everything else ships as tar.gz.
    const suffix = info.platform === "win32" ? ".zip" : ".tar.gz";
    const fileName = `uv-${archStr}-${platformStr}${suffix}`;
    return `https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/${fileName}`;
}
|
|
58
|
+
/**
 * Download a uv release archive and install its binary into `destPath`.
 *
 * The archive is fetched into a temporary directory, extracted (zip on
 * Windows, tar.gz elsewhere), and the binary is moved into place with
 * 0755 permissions. The temp directory is removed even on failure.
 *
 * @param {string} url release-asset URL (from getDownloadUrl)
 * @param {string} destPath directory that receives the binary
 * @param {{ binaryName: string }} info platform info (see getPlatformInfo)
 * @returns {Promise<string>} absolute path of the installed binary
 * @throws {Error} on non-2xx response or missing response body
 */
async function downloadAndExtract(url, destPath, info) {
    const response = await fetch(url);
    if (!response.ok)
        throw new Error(`Failed to download uv: ${response.statusText}`);
    if (!response.body)
        throw new Error("No response body");
    const tempDirPath = await fs.mkdtemp(path.join(os.tmpdir(), "uv-"));
    const tempFilePath = path.join(tempDirPath, path.basename(url));
    try {
        // Stream the HTTP body straight to disk.
        // @ts-expect-error invalid types for response.body
        await fs.writeFile(tempFilePath, Readable.fromWeb(response.body));
        let sourceBinaryPath = tempDirPath;
        if (url.endsWith(".zip")) {
            // zip archives hold the binary at the archive root
            await zipExtract(tempFilePath, { dir: tempDirPath });
        }
        else {
            // tar.gz archives nest the binary inside a directory named after
            // the archive (file name with the ".tar.gz" suffix stripped)
            await tarExtract({ file: tempFilePath, cwd: tempDirPath });
            sourceBinaryPath = path.resolve(sourceBinaryPath, path.basename(tempFilePath).slice(0, ".tar.gz".length * -1));
        }
        sourceBinaryPath = path.resolve(sourceBinaryPath, info.binaryName);
        // Move binary to cache directory
        const targetBinaryPath = path.join(destPath, info.binaryName);
        await fs.rename(sourceBinaryPath, targetBinaryPath);
        // 0755: make the downloaded binary executable.
        await fs.chmod(targetBinaryPath, 0o755);
        return targetBinaryPath;
    }
    finally {
        await fs.rm(tempDirPath, { recursive: true, force: true });
    }
}
|
|
88
|
+
/**
 * Resolve the path to a uv binary for the current platform, downloading
 * and caching it under UV_BINARY_CACHE on first use.
 *
 * @returns {Promise<string>} absolute path to an executable uv binary
 * @throws {Error} if the host platform/arch is unsupported or the
 *   download fails
 */
export async function getUvBinary() {
    await fs.mkdir(UV_BINARY_CACHE, { recursive: true });
    const info = getPlatformInfo();
    const cachedBinaryPath = path.join(UV_BINARY_CACHE, info.binaryName);
    try {
        // fs.access throws when the file is missing — cache hit otherwise.
        await fs.access(cachedBinaryPath);
        return cachedBinaryPath;
    }
    catch {
        // Binary not found in cache, download it
        logger.info(`Downloading uv ${UV_VERSION} for ${info.platform}...`);
        const url = getDownloadUrl(info);
        return await downloadAndExtract(url, UV_BINARY_CACHE, info);
    }
}
|
|
103
|
+
/**
 * Spawn the Python `langgraph dev` server through a managed uv binary,
 * forwarding host/port/worker options, the project config, and any extra
 * CLI arguments.
 *
 * @param args - CLI options (`host`, `port`, `nJobsPerWorker`, `browser`, `rest`)
 * @param context - `{ configPath, config, env }` for the project
 * @param options - `{ projectCwd }`: working directory for the child
 * @returns the spawned child process (stdio inherited)
 */
export async function spawnPythonServer(args, context, options) {
    const localDeps = await assembleLocalDeps(context.configPath, context.config);
    // Forward local requirements files so uv installs them alongside the CLI.
    const requirementFiles = localDeps.rebuildFiles.filter((file) => file.endsWith(".txt"));
    const uvArgs = ["run", "--with", "langgraph-cli[inmem]"];
    for (const file of requirementFiles) {
        uvArgs.push("--with-requirements", file);
    }
    uvArgs.push("langgraph", "dev", "--port", args.port, "--host", args.host, "--n-jobs-per-worker", args.nJobsPerWorker, "--config", context.configPath);
    if (!args.browser) {
        uvArgs.push("--no-browser");
    }
    uvArgs.push(...args.rest);
    return spawn(await getUvBinary(), uvArgs, {
        stdio: ["inherit", "inherit", "inherit"],
        env: context.env,
        cwd: options.projectCwd,
    });
}
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import { assembleLocalDeps, configToCompose, configToDocker, } from "../docker/docker.mjs";
|
|
2
|
+
import { createCompose, getDockerCapabilities } from "../docker/compose.mjs";
|
|
3
|
+
import { getConfig } from "../utils/config.mjs";
|
|
4
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
5
|
+
import { builder } from "./utils/builder.mjs";
|
|
6
|
+
import * as fs from "node:fs/promises";
|
|
7
|
+
import * as path from "node:path";
|
|
8
|
+
import dedent from "dedent";
|
|
9
|
+
import { logger } from "../logging.mjs";
|
|
10
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
11
|
+
/**
 * Check whether a filesystem entry exists (and is accessible).
 * @param target - path to probe
 * @returns {Promise<boolean>} true if `fs.access` succeeds, false otherwise
 */
const fileExists = (target) => fs.access(target).then(() => true, () => false);
|
|
20
|
+
// `langgraph dockerfile`: render the project's Dockerfile to a file (or
// stdout via "-"), optionally scaffolding docker-compose.yml, .dockerignore
// and .env next to it.
builder
    .command("dockerfile")
    .description("Generate a Dockerfile for the LangGraph API server, with Docker Compose options.")
    .argument("<save-path>", "Path to save the Dockerfile")
    .option("--add-docker-compose", "Add additional files for running the LangGraph API server with docker-compose. These files include a docker-compose.yml, .env file, and a .dockerignore file.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    // Analytics record only whether options deviate from defaults.
    .hook("preAction", withAnalytics((command) => ({
        config: command.opts().config !== process.cwd(),
        add_docker_compose: !!command.opts().addDockerCompose,
    })))
    .action(async (savePath, options) => {
        const configPath = await getProjectPath(options.config);
        const config = getConfig(await fs.readFile(configPath, "utf-8"));
        const localDeps = await assembleLocalDeps(configPath, config);
        const dockerfile = await configToDocker(configPath, config, localDeps);
        // "-" means: print the Dockerfile to stdout and stop.
        if (savePath === "-") {
            process.stdout.write(dockerfile);
            process.stdout.write("\n");
            return;
        }
        const targetPath = path.resolve(process.cwd(), savePath);
        await fs.writeFile(targetPath, dockerfile);
        logger.info(`✅ Created: ${path.basename(targetPath)}`);
        if (options.addDockerCompose) {
            const { apiDef } = await configToCompose(configPath, config, {
                watch: false,
            });
            const capabilities = await getDockerCapabilities();
            const compose = createCompose(capabilities, { apiDef });
            const composePath = path.resolve(path.dirname(targetPath), "docker-compose.yml");
            await fs.writeFile(composePath, compose);
            // Fix: the message previously hardcoded ".docker-compose.yml",
            // which is not the name of the file actually written.
            logger.info(`✅ Created: ${path.basename(composePath)}`);
            const dockerignorePath = path.resolve(path.dirname(targetPath), ".dockerignore");
            // Fix: fileExists is async — the unawaited promise was always
            // truthy, so this branch never ran and the file was never created.
            if (!(await fileExists(dockerignorePath))) {
                await fs.writeFile(dockerignorePath, dedent `
                    # Ignore node_modules and other dependency directories
                    node_modules
                    bower_components
                    vendor

                    # Ignore logs and temporary files
                    *.log
                    *.tmp
                    *.swp

                    # Ignore .env files and other environment files
                    .env
                    .env.*
                    *.local

                    # Ignore git-related files
                    .git
                    .gitignore

                    # Ignore Docker-related files and configs
                    .dockerignore
                    docker-compose.yml

                    # Ignore build and cache directories
                    dist
                    build
                    .cache
                    __pycache__

                    # Ignore IDE and editor configurations
                    .vscode
                    .idea
                    *.sublime-project
                    *.sublime-workspace
                    .DS_Store # macOS-specific

                    # Ignore test and coverage files
                    coverage
                    *.coverage
                    *.test.js
                    *.spec.js
                    tests
                `);
                logger.info(`✅ Created: ${path.basename(dockerignorePath)}`);
            }
            const envPath = path.resolve(path.dirname(targetPath), ".env");
            // Fix: same missing-await bug as above — without the await the
            // .env scaffold was never written.
            if (!(await fileExists(envPath))) {
                await fs.writeFile(envPath, dedent `
                    # Uncomment the following line to add your LangSmith API key
                    # LANGSMITH_API_KEY=your-api-key
                    # Or if you have a LangGraph Cloud license key, then uncomment the following line:
                    # LANGGRAPH_CLOUD_LICENSE_KEY=your-license-key
                    # Add any other environment variables go below...
                `);
                logger.info(`✅ Created: ${path.basename(envPath)}`);
            }
        }
    });
|