@langchain/langgraph-cli 1.1.8 → 1.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/build.mjs +52 -0
- package/dist/cli/cli.mjs +13 -0
- package/dist/cli/cloudflare.mjs +172 -0
- package/dist/cli/dev.mjs +143 -0
- package/dist/cli/dev.python.mjs +129 -0
- package/dist/cli/docker.mjs +114 -0
- package/dist/cli/new.mjs +13 -0
- package/dist/cli/sysinfo.mjs +63 -0
- package/dist/cli/up.mjs +139 -0
- package/dist/cli/utils/analytics.mjs +39 -0
- package/dist/cli/utils/builder.mjs +7 -0
- package/dist/cli/utils/ipc/server.mjs +93 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/cli/utils/stream.mjs +90 -0
- package/dist/cli/utils/version.mjs +13 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/docker.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/utils/config.mjs +104 -0
- package/dist/utils/logging.mjs +96 -0
- package/package.json +17 -17
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { getDockerCapabilities } from "../docker/compose.mjs";
|
|
2
|
+
import { assembleLocalDeps, configToDocker, getBaseImage, } from "../docker/docker.mjs";
|
|
3
|
+
import { getExecaOptions } from "../docker/shell.mjs";
|
|
4
|
+
import { getConfig } from "../utils/config.mjs";
|
|
5
|
+
import { builder } from "./utils/builder.mjs";
|
|
6
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
7
|
+
import { $ } from "execa";
|
|
8
|
+
import * as path from "node:path";
|
|
9
|
+
import * as fs from "node:fs/promises";
|
|
10
|
+
import { logger } from "../utils/logging.mjs";
|
|
11
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
12
|
+
import { gracefulExit } from "exit-hook";
|
|
13
|
+
// Announce the exact command line being run, then return the process untouched
// so callers can keep awaiting it.
const stream = (proc) => {
    const commandLine = proc.spawnargs.join(" ");
    logger.info(`Running "${commandLine}"`);
    return proc;
};
|
|
17
|
+
// Register `langgraph build`: renders the project's Dockerfile from the config
// and drives `docker build` (optionally `docker pull`-ing the base image first).
builder
    .command("build")
    .description("Build LangGraph API server Docker image.")
    .requiredOption("-t, --tag <tag>", "Tag for the Docker image.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    .option("--no-pull", "Running the server with locally-built images. By default LangGraph will pull the latest images from the registry")
    // Extra args are forwarded verbatim to `docker build` (see the action below).
    .argument("[args...]")
    .passThroughOptions()
    .allowUnknownOption()
    // Route Commander's process.exit through exit-hook so async cleanup runs.
    .exitOverride((error) => gracefulExit(error.exitCode))
    // Analytics records only booleans about which options deviated from defaults.
    .hook("preAction", withAnalytics((command) => ({
    config: command.opts().config !== process.cwd(),
    pull: command.opts().pull,
})))
    .action(async (pass, params) => {
    const configPath = await getProjectPath(params.config);
    // Fails fast if Docker is unavailable or too old.
    await getDockerCapabilities();
    const projectDir = path.dirname(configPath);
    const config = getConfig(await fs.readFile(configPath, "utf-8"));
    const opts = await getExecaOptions({
        cwd: projectDir,
        stderr: "inherit",
        stdout: "inherit",
    });
    const localDeps = await assembleLocalDeps(configPath, config);
    // The generated Dockerfile is fed to `docker build -f -` via stdin.
    const input = await configToDocker(configPath, config, localDeps, {
        watch: false,
        dockerCommand: "build",
    });
    let exec = $({ ...opts, input });
    if (params.pull) {
        await stream(exec `docker pull ${getBaseImage(config)}`);
    }
    // Re-create the execa instance: the stdin input was consumed by the pull run.
    exec = $({ ...opts, input });
    await stream(exec `docker build -f - -t ${params.tag} ${projectDir} ${pass}`);
});
|
package/dist/cli/cli.mjs
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
#!/usr/bin/env node
// CLI entry point: side-effect imports below register each subcommand on the
// shared Commander `builder` program before it parses argv.
import { builder } from "./utils/builder.mjs";
import { flushAnalytics } from "./utils/analytics.mjs";
import { asyncExitHook, gracefulExit } from "exit-hook";
import "./dev.mjs";
import "./docker.mjs";
import "./build.mjs";
import "./up.mjs";
import "./new.mjs";
import "./sysinfo.mjs";
// Convert Commander's hard exits into graceful ones so exit hooks still run.
builder.exitOverride((error) => gracefulExit(error.exitCode));
// Give pending analytics up to 2s to flush before the process exits.
asyncExitHook(() => flushAnalytics(), { wait: 2000 });
builder.parse();
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
import * as os from "node:os";
|
|
2
|
+
import * as path from "node:path";
|
|
3
|
+
import * as fs from "node:fs/promises";
|
|
4
|
+
import { Readable, Transform, Writable } from "node:stream";
|
|
5
|
+
import { ChildProcess, spawn } from "node:child_process";
|
|
6
|
+
import { extract as tarExtract } from "tar";
|
|
7
|
+
import * as nodeStream from "node:stream/web";
|
|
8
|
+
import { fileURLToPath } from "node:url";
|
|
9
|
+
import { logger } from "../utils/logging.mjs";
|
|
10
|
+
import { BytesLineDecoder } from "./utils/stream.mjs";
|
|
11
|
+
// ESM has no __dirname builtin; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Pinned cloudflared release; bump deliberately to pick up a new version.
const CLOUDFLARED_VERSION = "2025.2.1";
// Downloaded binaries are cached per-version alongside the installed package.
const CLOUDFLARED_CACHE_DIR = path.join(__dirname, ".cloudflare", CLOUDFLARED_VERSION);
|
|
14
|
+
// Persist a WHATWG ReadableStream to disk at `path`; rejects if the stream is missing.
const writeFile = async (path, stream) => {
    if (stream == null) {
        throw new Error("Stream is null");
    }
    const nodeReadable = Readable.fromWeb(stream);
    return fs.writeFile(path, nodeReadable);
};
|
|
19
|
+
// Sink for cloudflared's "<timestamp> <level> <message>" log lines: maps its
// INF lines to debug (too chatty for info), ERR to error, everything else to info.
class CloudflareLoggerStream extends WritableStream {
    constructor() {
        const decoder = new TextDecoder();
        super({
            write(chunk) {
                const line = decoder.decode(chunk);
                const [, severity, ...words] = line.split(" ");
                const message = words.join(" ");
                if (severity === "INF") {
                    logger.debug(message);
                }
                else if (severity === "ERR") {
                    logger.error(message);
                }
                else {
                    logger.info(message);
                }
            },
        });
    }
    // Adapt this web stream into a Node.js Writable for use with `.pipe()`.
    fromWeb() {
        return Writable.fromWeb(this);
    }
}
|
|
43
|
+
// Scans cloudflared output chunks and forwards only the quick-tunnel URLs
// (https://<subdomain>.trycloudflare.com) it finds, one string per match.
class CloudflareUrlStream extends TransformStream {
    constructor() {
        const decoder = new TextDecoder();
        super({
            transform(chunk, controller) {
                const text = decoder.decode(chunk);
                const found = text.match(/https:\/\/[a-z0-9-]+\.trycloudflare\.com/);
                if (found?.[0]) {
                    controller.enqueue(found[0]);
                }
            },
        });
    }
    // Adapt to a Node Transform; objectMode because we emit strings, not bytes.
    fromWeb() {
        // @ts-expect-error
        return Transform.fromWeb(this, { objectMode: true });
    }
}
|
|
60
|
+
/**
 * Spawn a cloudflared quick tunnel pointing at http://localhost:<port>.
 * Returns the child process plus a promise that resolves with the public
 * tunnel URL once cloudflared prints it (on stderr).
 */
export async function startCloudflareTunnel(port) {
    // Downloads the pinned cloudflared release on first use.
    const targetBinaryPath = await ensureCloudflared();
    logger.info("Starting tunnel");
    const child = spawn(targetBinaryPath, ["tunnel", "--url", `http://localhost:${port}`], { stdio: ["inherit", "pipe", "pipe"] });
    // Mirror cloudflared's own log lines into our logger at matching levels.
    child.stdout
        .pipe(new BytesLineDecoder().fromWeb())
        .pipe(new CloudflareLoggerStream().fromWeb());
    child.stderr
        .pipe(new BytesLineDecoder().fromWeb())
        .pipe(new CloudflareLoggerStream().fromWeb());
    // cloudflared announces the assigned *.trycloudflare.com URL on stderr;
    // resolve with the first one seen. Note: never rejects if the process dies
    // before printing a URL.
    const tunnelUrl = new Promise((resolve) => {
        child.stderr
            .pipe(new CloudflareUrlStream().fromWeb())
            .once("data", (data) => {
            logger.info(`Tunnel URL: "${data}"`);
            resolve(data);
        });
    });
    return { child, tunnelUrl };
}
|
|
80
|
+
// Resolve the cloudflared release-asset names for the current machine.
// Returns { binary } for direct-download platforms, or { archive, binary }
// on macOS, where the release ships a .tgz containing a bare "cloudflared".
function getFiles() {
    const platform = getPlatform();
    const arch = getArchitecture();
    switch (platform) {
        case "windows":
            if (arch !== "386" && arch !== "amd64") {
                throw new Error(`Unsupported architecture: ${arch}`);
            }
            return { binary: `cloudflared-${platform}-${arch}.exe` };
        case "darwin":
            if (arch !== "arm64" && arch !== "amd64") {
                throw new Error(`Unsupported architecture: ${arch}`);
            }
            return {
                archive: `cloudflared-${platform}-${arch}.tgz`,
                binary: "cloudflared",
            };
        case "linux":
            if (arch !== "arm64" && arch !== "amd64" && arch !== "386") {
                throw new Error(`Unsupported architecture: ${arch}`);
            }
            return { binary: `cloudflared-${platform}-${arch}` };
        default:
            throw new Error(`Unsupported platform: ${platform}`);
    }
}
|
|
106
|
+
/**
 * Download the pinned cloudflared release for this platform into
 * CLOUDFLARED_CACHE_DIR, extracting the archive first when the platform ships
 * a tarball (macOS), and mark the binary executable (0o755).
 */
async function downloadCloudflared() {
    await fs.mkdir(CLOUDFLARED_CACHE_DIR, { recursive: true });
    logger.info("Requesting download of `cloudflared`");
    const { binary, archive } = getFiles();
    // macOS ships a .tgz archive; other platforms expose the raw binary.
    const downloadFile = archive ?? binary;
    const tempDirPath = await fs.mkdtemp(path.join(os.tmpdir(), "cloudflared-"));
    try {
        const tempFilePath = path.join(tempDirPath, downloadFile);
        const url = `https://github.com/cloudflare/cloudflared/releases/download/${CLOUDFLARED_VERSION}/${downloadFile}`;
        // Fixed: this was previously the plain string "Downloading `${archive}`",
        // so the placeholder was logged literally instead of interpolated.
        logger.debug(`Downloading \`${downloadFile}\``, { url, target: tempDirPath });
        const response = await fetch(url);
        if (!response.ok || !response.body) {
            throw new Error(`Failed to download cloudflared: ${response.statusText}`);
        }
        await writeFile(tempFilePath, response.body);
        if (archive != null) {
            if (path.extname(archive) !== ".tgz") {
                throw new Error(`Invalid archive type: "${path.extname(archive)}"`);
            }
            logger.debug("Extracting `cloudflared`");
            await tarExtract({ file: tempFilePath, cwd: tempDirPath });
        }
        const sourceBinaryPath = path.resolve(tempDirPath, binary);
        const targetBinaryPath = path.resolve(CLOUDFLARED_CACHE_DIR, binary);
        logger.debug("Moving `cloudflared` to target directory", {
            targetBinaryPath,
        });
        try {
            await fs.rename(sourceBinaryPath, targetBinaryPath);
        }
        catch (error) {
            // os.tmpdir() may live on a different filesystem than the package
            // directory; rename cannot cross devices, so fall back to a copy.
            if (error.code !== "EXDEV")
                throw error;
            await fs.copyFile(sourceBinaryPath, targetBinaryPath);
        }
        await fs.chmod(targetBinaryPath, 0o755);
    }
    finally {
        // Remove the scratch directory (mirrors the cleanup in dev.python.mjs).
        await fs.rm(tempDirPath, { recursive: true, force: true });
    }
}
|
|
135
|
+
// Return the path of the cached cloudflared binary, downloading the pinned
// release on first use (i.e. when the cached file is not accessible).
async function ensureCloudflared() {
    const { binary } = getFiles();
    const targetBinaryPath = path.resolve(CLOUDFLARED_CACHE_DIR, binary);
    const cached = await fs.access(targetBinaryPath).then(() => true, () => false);
    if (!cached) {
        await downloadCloudflared();
    }
    return targetBinaryPath;
}
|
|
146
|
+
// Translate Node's os.arch() identifiers into cloudflared release-asset names
// (amd64 / arm64 / 386). Throws for anything cloudflared does not publish.
function getArchitecture() {
    const arch = os.arch();
    if (arch === "x64") {
        return "amd64";
    }
    if (arch === "arm64") {
        return "arm64";
    }
    if (arch === "ia32" || arch === "x86") {
        return "386";
    }
    throw new Error(`Unsupported architecture: ${arch}`);
}
|
|
160
|
+
// Translate Node's os.platform() identifiers into cloudflared release-asset
// names; only darwin, linux and windows builds are published.
function getPlatform() {
    const platform = os.platform();
    const names = { darwin: "darwin", linux: "linux", win32: "windows" };
    const resolved = names[platform];
    if (resolved === undefined) {
        throw new Error(`Unsupported platform: ${platform}`);
    }
    return resolved;
}
|
package/dist/cli/dev.mjs
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import * as path from "node:path";
|
|
2
|
+
import * as fs from "node:fs/promises";
|
|
3
|
+
import { parse, populate } from "dotenv";
|
|
4
|
+
import { watch } from "chokidar";
|
|
5
|
+
import { z } from "zod/v3";
|
|
6
|
+
import open from "open";
|
|
7
|
+
import { startCloudflareTunnel } from "./cloudflare.mjs";
|
|
8
|
+
import { createIpcServer } from "./utils/ipc/server.mjs";
|
|
9
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
10
|
+
import { getConfig } from "../utils/config.mjs";
|
|
11
|
+
import { builder } from "./utils/builder.mjs";
|
|
12
|
+
import { logError, logger } from "../utils/logging.mjs";
|
|
13
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
14
|
+
import { gracefulExit } from "exit-hook";
|
|
15
|
+
// Register `langgraph dev`: runs the API server locally with hot reloading,
// restarting it whenever the config file (or referenced .env file) changes.
builder
    .command("dev")
    .description("Run LangGraph API server in development mode with hot reloading.")
    .option("-p, --port <number>", "port to run the server on", "2024")
    .option("-h, --host <string>", "host to bind to", "localhost")
    .option("--no-browser", "disable auto-opening the browser")
    .option("-n, --n-jobs-per-worker <number>", "number of workers to run", "10")
    .option("-c, --config <path>", "path to configuration file", process.cwd())
    .option("--tunnel", "use Cloudflare Tunnel to expose the server to the internet")
    .allowExcessArguments()
    .allowUnknownOption()
    .exitOverride((error) => gracefulExit(error.exitCode))
    // Analytics records only booleans about which options deviated from defaults.
    .hook("preAction", withAnalytics((command) => ({
    config: command.opts().config !== process.cwd(),
    port: command.opts().port !== "2024",
    host: command.opts().host !== "localhost",
    n_jobs_per_worker: command.opts().nJobsPerWorker !== "10",
    tunnel: Boolean(command.opts().tunnel),
})))
    .action(async (options, { args }) => {
    try {
        const configPath = await getProjectPath(options.config);
        const projectCwd = path.dirname(configPath);
        // IPC server lets the spawned API server signal back (e.g. ready + query params).
        const [pid, server] = await createIpcServer();
        // Watch the config file; .env targets are added dynamically in prepareContext.
        const watcher = watch([configPath], {
            ignoreInitial: true,
            cwd: projectCwd,
        });
        let hasOpenedFlag = false;
        let child = undefined;
        let tunnel = undefined;
        let hostUrl = "https://smith.langchain.com";
        let envNoBrowser = process.env.BROWSER === "none";
        // First "ready" message from the server triggers opening Studio once.
        server.on("data", async (data) => {
            const response = z.object({ queryParams: z.string() }).parse(data);
            if (options.browser && !envNoBrowser && !hasOpenedFlag) {
                hasOpenedFlag = true;
                const queryParams = new URLSearchParams(response.queryParams);
                // If tunneling, Studio must target the public tunnel URL instead.
                const tunnelUrl = await tunnel?.tunnelUrl;
                if (tunnelUrl)
                    queryParams.set("baseUrl", tunnelUrl);
                let queryParamsStr = queryParams.toString();
                if (queryParamsStr)
                    queryParamsStr = `?${queryParams.toString()}`;
                open(`${hostUrl}/studio${queryParamsStr}`);
            }
        });
        // check if .gitignore already contains .langgraph-api
        const gitignorePath = path.resolve(projectCwd, ".gitignore");
        const gitignoreContent = await fs
            .readFile(gitignorePath, "utf-8")
            .catch(() => "");
        if (!gitignoreContent.includes(".langgraph_api")) {
            logger.info("Updating .gitignore to prevent `.langgraph_api` from being committed.");
            await fs.appendFile(gitignorePath, "\n# LangGraph API\n.langgraph_api\n");
        }
        // Re-read config + env on every (re)launch and reconcile the watch list.
        const prepareContext = async () => {
            const config = getConfig(await fs.readFile(configPath, "utf-8"));
            const newWatch = [configPath];
            // Server env = process env overlaid with the config's env source.
            const env = { ...process.env };
            const configEnv = config?.env;
            if (configEnv) {
                if (typeof configEnv === "string") {
                    // env is a .env file path: load it and watch it for changes too.
                    const envPath = path.resolve(projectCwd, configEnv);
                    newWatch.push(envPath);
                    const envData = await fs.readFile(envPath, "utf-8");
                    populate(env, parse(envData));
                }
                else if (Array.isArray(configEnv)) {
                    throw new Error("Env storage is not supported by CLI.");
                }
                else if (typeof configEnv === "object") {
                    if (!process.env)
                        throw new Error("process.env is not defined");
                    populate(env, configEnv);
                }
            }
            // Diff currently-watched files against the desired set.
            const oldWatch = Object.entries(watcher.getWatched()).flatMap(([dir, files]) => files.map((file) => path.resolve(projectCwd, dir, file)));
            const addedTarget = newWatch.filter((target) => !oldWatch.includes(target));
            const removedTarget = oldWatch.filter((target) => !newWatch.includes(target));
            watcher.unwatch(removedTarget).add(addedTarget);
            // Best-effort: derive the LangSmith host URL from the configured endpoint.
            try {
                const { Client } = await import("langsmith");
                const apiUrl = env?.["LANGSMITH_ENDPOINT"] ||
                    env?.["LANGCHAIN_ENDPOINT"] ||
                    undefined;
                hostUrl = new Client({ apiUrl }).getHostUrl() || hostUrl;
            }
            catch {
                // pass
            }
            return { config, env, hostUrl };
        };
        // Kill any previous server/tunnel, then spawn a fresh one from the
        // latest context. Used for both initial start and watch-triggered restarts.
        const launchServer = async () => {
            const { config, env, hostUrl } = await prepareContext();
            if (child != null)
                child.kill();
            if (tunnel != null)
                tunnel.child.kill();
            if (options.tunnel)
                tunnel = await startCloudflareTunnel(options.port);
            envNoBrowser = process.env.BROWSER === "none" || env.BROWSER === "none";
            if ("python_version" in config) {
                // Python configs are delegated to the Python CLI via uv.
                logger.warn("Launching Python server from @langchain/langgraph-cli is experimental. Please use the `langgraph-cli` package from PyPi instead.");
                const { spawnPythonServer } = await import("./dev.python.mjs");
                child = await spawnPythonServer({ ...options, rest: args }, { configPath, config, env, hostUrl }, { pid, projectCwd });
            }
            else {
                const { spawnServer } = await import("@langchain/langgraph-api");
                child = await spawnServer(options, { config, env, hostUrl }, { pid, projectCwd });
            }
        };
        // Any change to a watched file triggers a full restart (fire-and-forget).
        watcher.on("all", async (_name, path) => {
            logger.warn(`Detected changes in ${path}, restarting server`);
            launchServer();
        });
        // TODO: sometimes the server keeps sending stuff
        // while gracefully exiting
        launchServer();
        // Tear down watcher, IPC server and child process on exit.
        process.on("exit", () => {
            watcher.close();
            server.close();
            child?.kill();
        });
    }
    catch (error) {
        logError(error, { prefix: "Failed to launch server" });
    }
});
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
import { fileURLToPath } from "node:url";
|
|
3
|
+
import { Readable } from "node:stream";
|
|
4
|
+
import fs from "node:fs/promises";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import os from "node:os";
|
|
7
|
+
import { extract as tarExtract } from "tar";
|
|
8
|
+
import zipExtract from "extract-zip";
|
|
9
|
+
import { logger } from "../utils/logging.mjs";
|
|
10
|
+
import { assembleLocalDeps } from "../docker/docker.mjs";
|
|
11
|
+
// ESM has no __dirname builtin; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Pinned uv release; binaries are cached per-version next to the package.
const UV_VERSION = "0.9.11";
const UV_BINARY_CACHE = path.join(__dirname, ".uv", UV_VERSION);
// Describe the current OS/arch and the uv binary filename to expect
// ("uv.exe" on Windows, "uv" elsewhere).
function getPlatformInfo() {
    const platform = os.platform();
    const arch = os.arch();
    const extension = platform === "win32" ? ".exe" : "";
    return {
        platform,
        arch,
        extension,
        binaryName: `uv${extension}`,
    };
}
// Build the GitHub release URL for the uv archive matching `info`
// (.zip on Windows, .tar.gz elsewhere). Throws for unsupported targets.
function getDownloadUrl(info) {
    const platformNames = {
        darwin: "apple-darwin",
        win32: "pc-windows-msvc",
        linux: "unknown-linux-gnu",
    };
    const platformStr = platformNames[info.platform];
    if (platformStr === undefined) {
        throw new Error(`Unsupported platform: ${info.platform}`);
    }
    const archNames = { x64: "x86_64", arm64: "aarch64" };
    const archStr = archNames[info.arch];
    if (archStr === undefined) {
        throw new Error(`Unsupported architecture: ${info.arch}`);
    }
    const suffix = info.platform === "win32" ? ".zip" : ".tar.gz";
    const fileName = `uv-${archStr}-${platformStr}${suffix}`;
    return `https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/${fileName}`;
}
|
|
58
|
+
/**
 * Download the uv release archive at `url`, extract it in a temp directory,
 * and install the single binary into `destPath`. Returns the installed path.
 * The temp directory is always removed, even on failure.
 */
async function downloadAndExtract(url, destPath, info) {
    const response = await fetch(url);
    if (!response.ok)
        throw new Error(`Failed to download uv: ${response.statusText}`);
    if (!response.body)
        throw new Error("No response body");
    const tempDirPath = await fs.mkdtemp(path.join(os.tmpdir(), "uv-"));
    const tempFilePath = path.join(tempDirPath, path.basename(url));
    try {
        // @ts-expect-error invalid types for response.body
        await fs.writeFile(tempFilePath, Readable.fromWeb(response.body));
        let sourceBinaryPath = tempDirPath;
        if (url.endsWith(".zip")) {
            // Windows archives are flat zips; binary lands directly in tempDirPath.
            await zipExtract(tempFilePath, { dir: tempDirPath });
        }
        else {
            await tarExtract({ file: tempFilePath, cwd: tempDirPath });
            // Tarballs contain a top-level dir named after the archive minus ".tar.gz".
            sourceBinaryPath = path.resolve(sourceBinaryPath, path.basename(tempFilePath).slice(0, ".tar.gz".length * -1));
        }
        sourceBinaryPath = path.resolve(sourceBinaryPath, info.binaryName);
        // Move binary to cache directory
        const targetBinaryPath = path.join(destPath, info.binaryName);
        // Just copy the file directly (it's a single executable, not a directory)
        await fs.copyFile(sourceBinaryPath, targetBinaryPath);
        await fs.chmod(targetBinaryPath, 0o755);
        return targetBinaryPath;
    }
    finally {
        await fs.rm(tempDirPath, { recursive: true, force: true });
    }
}
|
|
89
|
+
// Return the path to a cached uv binary for this machine, downloading the
// pinned release into UV_BINARY_CACHE on a cache miss.
export async function getUvBinary() {
    await fs.mkdir(UV_BINARY_CACHE, { recursive: true });
    const info = getPlatformInfo();
    const cachedBinaryPath = path.join(UV_BINARY_CACHE, info.binaryName);
    const isCached = await fs.access(cachedBinaryPath).then(() => true, () => false);
    if (isCached) {
        return cachedBinaryPath;
    }
    // Binary not found in cache, download it
    logger.info(`Downloading uv ${UV_VERSION} for ${info.platform}...`);
    const url = getDownloadUrl(info);
    return downloadAndExtract(url, UV_BINARY_CACHE, info);
}
|
|
104
|
+
/**
 * Launch the Python `langgraph dev` server via a uv-managed ephemeral
 * environment ("uv run --with langgraph-cli[inmem] ..."), forwarding the
 * CLI's port/host/worker/config options. Returns the spawned child process.
 */
export async function spawnPythonServer(args, context, options) {
    // Local dependencies declared in the config; *.txt entries are treated as
    // pip requirements files and injected via --with-requirements.
    const deps = await assembleLocalDeps(context.configPath, context.config);
    const requirements = deps.rebuildFiles.filter((i) => i.endsWith(".txt"));
    return spawn(await getUvBinary(), [
        "run",
        "--with",
        "langgraph-cli[inmem]",
        ...requirements?.flatMap((i) => ["--with-requirements", i]),
        "langgraph",
        "dev",
        "--port",
        args.port,
        "--host",
        args.host,
        "--n-jobs-per-worker",
        args.nJobsPerWorker,
        "--config",
        context.configPath,
        // Commander's --no-browser sets args.browser=false; translate back.
        ...(args.browser ? [] : ["--no-browser"]),
        // Unrecognized CLI args are passed through to the Python CLI verbatim.
        ...args.rest,
    ], {
        stdio: ["inherit", "inherit", "inherit"],
        env: context.env,
        cwd: options.projectCwd,
    });
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { assembleLocalDeps, configToCompose, configToDocker, } from "../docker/docker.mjs";
|
|
2
|
+
import { createCompose, getDockerCapabilities } from "../docker/compose.mjs";
|
|
3
|
+
import { getConfig } from "../utils/config.mjs";
|
|
4
|
+
import { getProjectPath } from "./utils/project.mjs";
|
|
5
|
+
import { builder } from "./utils/builder.mjs";
|
|
6
|
+
import * as fs from "node:fs/promises";
|
|
7
|
+
import * as path from "node:path";
|
|
8
|
+
import dedent from "dedent";
|
|
9
|
+
import { logger } from "../utils/logging.mjs";
|
|
10
|
+
import { withAnalytics } from "./utils/analytics.mjs";
|
|
11
|
+
import { gracefulExit } from "exit-hook";
|
|
12
|
+
// Async existence check: fs.access resolves for reachable paths and rejects
// otherwise, so map the two outcomes onto true/false.
const fileExists = (path) => fs.access(path).then(() => true, () => false);
|
|
21
|
+
// Register `langgraph dockerfile`: writes the generated Dockerfile to
// <save-path> (or stdout for "-"), and with --add-docker-compose also
// scaffolds docker-compose.yml, .dockerignore and .env next to it.
builder
    .command("dockerfile")
    .description("Generate a Dockerfile for the LangGraph API server, with Docker Compose options.")
    .argument("<save-path>", "Path to save the Dockerfile")
    .option("--add-docker-compose", "Add additional files for running the LangGraph API server with docker-compose. These files include a docker-compose.yml, .env file, and a .dockerignore file.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    .exitOverride((error) => gracefulExit(error.exitCode))
    // Analytics records only booleans about which options deviated from defaults.
    .hook("preAction", withAnalytics((command) => ({
    config: command.opts().config !== process.cwd(),
    add_docker_compose: !!command.opts().addDockerCompose,
})))
    .action(async (savePath, options) => {
    const configPath = await getProjectPath(options.config);
    const config = getConfig(await fs.readFile(configPath, "utf-8"));
    const localDeps = await assembleLocalDeps(configPath, config);
    const dockerfile = await configToDocker(configPath, config, localDeps);
    // "-" means: print the Dockerfile to stdout and write nothing to disk.
    if (savePath === "-") {
        process.stdout.write(dockerfile);
        process.stdout.write("\n");
        return;
    }
    const targetPath = path.resolve(process.cwd(), savePath);
    await fs.writeFile(targetPath, dockerfile);
    logger.info(`✅ Created: ${path.basename(targetPath)}`);
    if (options.addDockerCompose) {
        const { apiDef } = await configToCompose(configPath, config, {
            watch: false,
        });
        const capabilities = await getDockerCapabilities();
        const compose = createCompose(capabilities, { apiDef });
        const composePath = path.resolve(path.dirname(targetPath), "docker-compose.yml");
        await fs.writeFile(composePath, compose);
        // Fixed: the message previously said ".docker-compose.yml" (leading dot)
        // while the file actually written is "docker-compose.yml".
        logger.info(`✅ Created: ${path.basename(composePath)}`);
        const dockerignorePath = path.resolve(path.dirname(targetPath), ".dockerignore");
        // Fixed: fileExists is async — without `await` the returned Promise was
        // always truthy, so `!fileExists(...)` was always false and the
        // .dockerignore/.env files were never created.
        if (!(await fileExists(dockerignorePath))) {
            await fs.writeFile(dockerignorePath, dedent `
                # Ignore node_modules and other dependency directories
                node_modules
                bower_components
                vendor

                # Ignore logs and temporary files
                *.log
                *.tmp
                *.swp

                # Ignore .env files and other environment files
                .env
                .env.*
                *.local

                # Ignore git-related files
                .git
                .gitignore

                # Ignore Docker-related files and configs
                .dockerignore
                docker-compose.yml

                # Ignore build and cache directories
                dist
                build
                .cache
                __pycache__

                # Ignore IDE and editor configurations
                .vscode
                .idea
                *.sublime-project
                *.sublime-workspace
                .DS_Store # macOS-specific

                # Ignore test and coverage files
                coverage
                *.coverage
                *.test.js
                *.spec.js
                tests
            `);
            logger.info(`✅ Created: ${path.basename(dockerignorePath)}`);
        }
        const envPath = path.resolve(path.dirname(targetPath), ".env");
        if (!(await fileExists(envPath))) {
            await fs.writeFile(envPath, dedent `
                # Uncomment the following line to add your LangSmith API key
                # LANGSMITH_API_KEY=your-api-key
                # Or if you have a LangSmith Deployment license key, then uncomment the following line:
                # LANGGRAPH_CLOUD_LICENSE_KEY=your-license-key
                # Add any other environment variables go below...
            `);
            logger.info(`✅ Created: ${path.basename(envPath)}`);
        }
    }
});
|
package/dist/cli/new.mjs
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
#!/usr/bin/env node
import { builder } from "./utils/builder.mjs";
import { withAnalytics } from "./utils/analytics.mjs";
import { createNew } from "create-langgraph";
import { gracefulExit } from "exit-hook";
// Register `langgraph new`: scaffolds a fresh LangGraph project, delegating
// all the actual work to create-langgraph.
builder
    .command("new")
    .description("Create a new LangGraph project")
    .argument("[path]", "Path to create the project")
    .option("-t, --template <template>", "Template to use", "")
    // Record an anonymous usage event before the action runs.
    .hook("preAction", withAnalytics())
    // Route Commander's exits through exit-hook so async cleanup can flush.
    .exitOverride((error) => gracefulExit(error.exitCode))
    .action((path, options) => createNew(path, options.template));
|