@langchain/langgraph-cli 1.1.1 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -5
- package/CHANGELOG.md +0 -292
- package/dist/cli/build.mjs +0 -52
- package/dist/cli/cli.mjs +0 -13
- package/dist/cli/cloudflare.mjs +0 -172
- package/dist/cli/dev.mjs +0 -143
- package/dist/cli/dev.python.mjs +0 -129
- package/dist/cli/docker.mjs +0 -114
- package/dist/cli/new.mjs +0 -13
- package/dist/cli/sysinfo.mjs +0 -63
- package/dist/cli/up.mjs +0 -139
- package/dist/cli/utils/analytics.mjs +0 -39
- package/dist/cli/utils/builder.mjs +0 -7
- package/dist/cli/utils/ipc/server.mjs +0 -93
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +0 -29
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +0 -40
- package/dist/cli/utils/project.mjs +0 -18
- package/dist/cli/utils/stream.mjs +0 -90
- package/dist/cli/utils/version.mjs +0 -13
- package/dist/docker/compose.mjs +0 -185
- package/dist/docker/docker.mjs +0 -390
- package/dist/docker/shell.mjs +0 -62
- package/dist/utils/config.mjs +0 -104
- package/dist/utils/logging.mjs +0 -96
package/dist/cli/dev.mjs
DELETED
@@ -1,143 +0,0 @@
-import * as path from "node:path";
-import * as fs from "node:fs/promises";
-import { parse, populate } from "dotenv";
-import { watch } from "chokidar";
-import { z } from "zod/v3";
-import open from "open";
-import { startCloudflareTunnel } from "./cloudflare.mjs";
-import { createIpcServer } from "./utils/ipc/server.mjs";
-import { getProjectPath } from "./utils/project.mjs";
-import { getConfig } from "../utils/config.mjs";
-import { builder } from "./utils/builder.mjs";
-import { logError, logger } from "../utils/logging.mjs";
-import { withAnalytics } from "./utils/analytics.mjs";
-import { gracefulExit } from "exit-hook";
-builder
-    .command("dev")
-    .description("Run LangGraph API server in development mode with hot reloading.")
-    .option("-p, --port <number>", "port to run the server on", "2024")
-    .option("-h, --host <string>", "host to bind to", "localhost")
-    .option("--no-browser", "disable auto-opening the browser")
-    .option("-n, --n-jobs-per-worker <number>", "number of workers to run", "10")
-    .option("-c, --config <path>", "path to configuration file", process.cwd())
-    .option("--tunnel", "use Cloudflare Tunnel to expose the server to the internet")
-    .allowExcessArguments()
-    .allowUnknownOption()
-    .exitOverride((error) => gracefulExit(error.exitCode))
-    .hook("preAction", withAnalytics((command) => ({
-        config: command.opts().config !== process.cwd(),
-        port: command.opts().port !== "2024",
-        host: command.opts().host !== "localhost",
-        n_jobs_per_worker: command.opts().nJobsPerWorker !== "10",
-        tunnel: Boolean(command.opts().tunnel),
-    })))
-    .action(async (options, { args }) => {
-    try {
-        const configPath = await getProjectPath(options.config);
-        const projectCwd = path.dirname(configPath);
-        const [pid, server] = await createIpcServer();
-        const watcher = watch([configPath], {
-            ignoreInitial: true,
-            cwd: projectCwd,
-        });
-        let hasOpenedFlag = false;
-        let child = undefined;
-        let tunnel = undefined;
-        let hostUrl = "https://smith.langchain.com";
-        let envNoBrowser = process.env.BROWSER === "none";
-        server.on("data", async (data) => {
-            const response = z.object({ queryParams: z.string() }).parse(data);
-            if (options.browser && !envNoBrowser && !hasOpenedFlag) {
-                hasOpenedFlag = true;
-                const queryParams = new URLSearchParams(response.queryParams);
-                const tunnelUrl = await tunnel?.tunnelUrl;
-                if (tunnelUrl)
-                    queryParams.set("baseUrl", tunnelUrl);
-                let queryParamsStr = queryParams.toString();
-                if (queryParamsStr)
-                    queryParamsStr = `?${queryParams.toString()}`;
-                open(`${hostUrl}/studio${queryParamsStr}`);
-            }
-        });
-        // check if .gitignore already contains .langgraph-api
-        const gitignorePath = path.resolve(projectCwd, ".gitignore");
-        const gitignoreContent = await fs
-            .readFile(gitignorePath, "utf-8")
-            .catch(() => "");
-        if (!gitignoreContent.includes(".langgraph_api")) {
-            logger.info("Updating .gitignore to prevent `.langgraph_api` from being committed.");
-            await fs.appendFile(gitignorePath, "\n# LangGraph API\n.langgraph_api\n");
-        }
-        const prepareContext = async () => {
-            const config = getConfig(await fs.readFile(configPath, "utf-8"));
-            const newWatch = [configPath];
-            const env = { ...process.env };
-            const configEnv = config?.env;
-            if (configEnv) {
-                if (typeof configEnv === "string") {
-                    const envPath = path.resolve(projectCwd, configEnv);
-                    newWatch.push(envPath);
-                    const envData = await fs.readFile(envPath, "utf-8");
-                    populate(env, parse(envData));
-                }
-                else if (Array.isArray(configEnv)) {
-                    throw new Error("Env storage is not supported by CLI.");
-                }
-                else if (typeof configEnv === "object") {
-                    if (!process.env)
-                        throw new Error("process.env is not defined");
-                    populate(env, configEnv);
-                }
-            }
-            const oldWatch = Object.entries(watcher.getWatched()).flatMap(([dir, files]) => files.map((file) => path.resolve(projectCwd, dir, file)));
-            const addedTarget = newWatch.filter((target) => !oldWatch.includes(target));
-            const removedTarget = oldWatch.filter((target) => !newWatch.includes(target));
-            watcher.unwatch(removedTarget).add(addedTarget);
-            try {
-                const { Client } = await import("langsmith");
-                const apiUrl = env?.["LANGSMITH_ENDPOINT"] ||
-                    env?.["LANGCHAIN_ENDPOINT"] ||
-                    undefined;
-                hostUrl = new Client({ apiUrl }).getHostUrl() || hostUrl;
-            }
-            catch {
-                // pass
-            }
-            return { config, env, hostUrl };
-        };
-        const launchServer = async () => {
-            const { config, env, hostUrl } = await prepareContext();
-            if (child != null)
-                child.kill();
-            if (tunnel != null)
-                tunnel.child.kill();
-            if (options.tunnel)
-                tunnel = await startCloudflareTunnel(options.port);
-            envNoBrowser = process.env.BROWSER === "none" || env.BROWSER === "none";
-            if ("python_version" in config) {
-                logger.warn("Launching Python server from @langchain/langgraph-cli is experimental. Please use the `langgraph-cli` package from PyPi instead.");
-                const { spawnPythonServer } = await import("./dev.python.mjs");
-                child = await spawnPythonServer({ ...options, rest: args }, { configPath, config, env, hostUrl }, { pid, projectCwd });
-            }
-            else {
-                const { spawnServer } = await import("@langchain/langgraph-api");
-                child = await spawnServer(options, { config, env, hostUrl }, { pid, projectCwd });
-            }
-        };
-        watcher.on("all", async (_name, path) => {
-            logger.warn(`Detected changes in ${path}, restarting server`);
-            launchServer();
-        });
-        // TODO: sometimes the server keeps sending stuff
-        // while gracefully exiting
-        launchServer();
-        process.on("exit", () => {
-            watcher.close();
-            server.close();
-            child?.kill();
-        });
-    }
-    catch (error) {
-        logError(error, { prefix: "Failed to launch server" });
-    }
-});
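Based on the options declared above, a typical invocation of the removed `dev` command (a sketch assuming the package is run via npx and the project keeps its config in a langgraph.json file; the values shown are the declared defaults) would be:

    npx @langchain/langgraph-cli dev --port 2024 --host localhost --n-jobs-per-worker 10 --config ./langgraph.json

Per the action body, adding --tunnel exposes the server through a Cloudflare Tunnel, and --no-browser (or BROWSER=none in the environment) suppresses auto-opening LangGraph Studio.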
package/dist/cli/dev.python.mjs
DELETED
@@ -1,129 +0,0 @@
-import { spawn } from "node:child_process";
-import { fileURLToPath } from "node:url";
-import { Readable } from "node:stream";
-import fs from "node:fs/promises";
-import path from "node:path";
-import os from "node:os";
-import { extract as tarExtract } from "tar";
-import zipExtract from "extract-zip";
-import { logger } from "../utils/logging.mjs";
-import { assembleLocalDeps } from "../docker/docker.mjs";
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
-const UV_VERSION = "0.9.11";
-const UV_BINARY_CACHE = path.join(__dirname, ".uv", UV_VERSION);
-function getPlatformInfo() {
-    const platform = os.platform();
-    const arch = os.arch();
-    let binaryName = "uv";
-    let extension = "";
-    if (platform === "win32") {
-        extension = ".exe";
-    }
-    return {
-        platform,
-        arch,
-        extension,
-        binaryName: binaryName + extension,
-    };
-}
-function getDownloadUrl(info) {
-    let platformStr;
-    switch (info.platform) {
-        case "darwin":
-            platformStr = "apple-darwin";
-            break;
-        case "win32":
-            platformStr = "pc-windows-msvc";
-            break;
-        case "linux":
-            platformStr = "unknown-linux-gnu";
-            break;
-        default:
-            throw new Error(`Unsupported platform: ${info.platform}`);
-    }
-    let archStr;
-    switch (info.arch) {
-        case "x64":
-            archStr = "x86_64";
-            break;
-        case "arm64":
-            archStr = "aarch64";
-            break;
-        default:
-            throw new Error(`Unsupported architecture: ${info.arch}`);
-    }
-    const fileName = `uv-${archStr}-${platformStr}${info.platform === "win32" ? ".zip" : ".tar.gz"}`;
-    return `https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/${fileName}`;
-}
-async function downloadAndExtract(url, destPath, info) {
-    const response = await fetch(url);
-    if (!response.ok)
-        throw new Error(`Failed to download uv: ${response.statusText}`);
-    if (!response.body)
-        throw new Error("No response body");
-    const tempDirPath = await fs.mkdtemp(path.join(os.tmpdir(), "uv-"));
-    const tempFilePath = path.join(tempDirPath, path.basename(url));
-    try {
-        // @ts-expect-error invalid types for response.body
-        await fs.writeFile(tempFilePath, Readable.fromWeb(response.body));
-        let sourceBinaryPath = tempDirPath;
-        if (url.endsWith(".zip")) {
-            await zipExtract(tempFilePath, { dir: tempDirPath });
-        }
-        else {
-            await tarExtract({ file: tempFilePath, cwd: tempDirPath });
-            sourceBinaryPath = path.resolve(sourceBinaryPath, path.basename(tempFilePath).slice(0, ".tar.gz".length * -1));
-        }
-        sourceBinaryPath = path.resolve(sourceBinaryPath, info.binaryName);
-        // Move binary to cache directory
-        const targetBinaryPath = path.join(destPath, info.binaryName);
-        // Just copy the file directly (it's a single executable, not a directory)
-        await fs.copyFile(sourceBinaryPath, targetBinaryPath);
-        await fs.chmod(targetBinaryPath, 0o755);
-        return targetBinaryPath;
-    }
-    finally {
-        await fs.rm(tempDirPath, { recursive: true, force: true });
-    }
-}
-export async function getUvBinary() {
-    await fs.mkdir(UV_BINARY_CACHE, { recursive: true });
-    const info = getPlatformInfo();
-    const cachedBinaryPath = path.join(UV_BINARY_CACHE, info.binaryName);
-    try {
-        await fs.access(cachedBinaryPath);
-        return cachedBinaryPath;
-    }
-    catch {
-        // Binary not found in cache, download it
-        logger.info(`Downloading uv ${UV_VERSION} for ${info.platform}...`);
-        const url = getDownloadUrl(info);
-        return await downloadAndExtract(url, UV_BINARY_CACHE, info);
-    }
-}
-export async function spawnPythonServer(args, context, options) {
-    const deps = await assembleLocalDeps(context.configPath, context.config);
-    const requirements = deps.rebuildFiles.filter((i) => i.endsWith(".txt"));
-    return spawn(await getUvBinary(), [
-        "run",
-        "--with",
-        "langgraph-cli[inmem]",
-        ...requirements?.flatMap((i) => ["--with-requirements", i]),
-        "langgraph",
-        "dev",
-        "--port",
-        args.port,
-        "--host",
-        args.host,
-        "--n-jobs-per-worker",
-        args.nJobsPerWorker,
-        "--config",
-        context.configPath,
-        ...(args.browser ? [] : ["--no-browser"]),
-        ...args.rest,
-    ], {
-        stdio: ["inherit", "inherit", "inherit"],
-        env: context.env,
-        cwd: options.projectCwd,
-    });
-}
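For reference, spawnPythonServer above resolves a cached uv binary and delegates to the Python CLI; with the default dev options and a hypothetical requirements.txt among deps.rebuildFiles, the spawned command is roughly:

    uv run --with langgraph-cli[inmem] --with-requirements requirements.txt langgraph dev --port 2024 --host localhost --n-jobs-per-worker 10 --config <configPath>

This is a sketch derived from the argument array passed to spawn: the actual uv path comes from getUvBinary(), --no-browser is appended when the browser option is off, and any extra arguments in args.rest are passed through unchanged.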
package/dist/cli/docker.mjs
DELETED
@@ -1,114 +0,0 @@
-import { assembleLocalDeps, configToCompose, configToDocker, } from "../docker/docker.mjs";
-import { createCompose, getDockerCapabilities } from "../docker/compose.mjs";
-import { getConfig } from "../utils/config.mjs";
-import { getProjectPath } from "./utils/project.mjs";
-import { builder } from "./utils/builder.mjs";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import dedent from "dedent";
-import { logger } from "../utils/logging.mjs";
-import { withAnalytics } from "./utils/analytics.mjs";
-import { gracefulExit } from "exit-hook";
-const fileExists = async (path) => {
-    try {
-        await fs.access(path);
-        return true;
-    }
-    catch (e) {
-        return false;
-    }
-};
-builder
-    .command("dockerfile")
-    .description("Generate a Dockerfile for the LangGraph API server, with Docker Compose options.")
-    .argument("<save-path>", "Path to save the Dockerfile")
-    .option("--add-docker-compose", "Add additional files for running the LangGraph API server with docker-compose. These files include a docker-compose.yml, .env file, and a .dockerignore file.")
-    .option("-c, --config <path>", "Path to configuration file", process.cwd())
-    .exitOverride((error) => gracefulExit(error.exitCode))
-    .hook("preAction", withAnalytics((command) => ({
-        config: command.opts().config !== process.cwd(),
-        add_docker_compose: !!command.opts().addDockerCompose,
-    })))
-    .action(async (savePath, options) => {
-    const configPath = await getProjectPath(options.config);
-    const config = getConfig(await fs.readFile(configPath, "utf-8"));
-    const localDeps = await assembleLocalDeps(configPath, config);
-    const dockerfile = await configToDocker(configPath, config, localDeps);
-    if (savePath === "-") {
-        process.stdout.write(dockerfile);
-        process.stdout.write("\n");
-        return;
-    }
-    const targetPath = path.resolve(process.cwd(), savePath);
-    await fs.writeFile(targetPath, dockerfile);
-    logger.info(`✅ Created: ${path.basename(targetPath)}`);
-    if (options.addDockerCompose) {
-        const { apiDef } = await configToCompose(configPath, config, {
-            watch: false,
-        });
-        const capabilities = await getDockerCapabilities();
-        const compose = createCompose(capabilities, { apiDef });
-        const composePath = path.resolve(path.dirname(targetPath), "docker-compose.yml");
-        await fs.writeFile(composePath, compose);
-        logger.info("✅ Created: .docker-compose.yml");
-        const dockerignorePath = path.resolve(path.dirname(targetPath), ".dockerignore");
-        if (!fileExists(dockerignorePath)) {
-            await fs.writeFile(dockerignorePath, dedent `
-                # Ignore node_modules and other dependency directories
-                node_modules
-                bower_components
-                vendor
-
-                # Ignore logs and temporary files
-                *.log
-                *.tmp
-                *.swp
-
-                # Ignore .env files and other environment files
-                .env
-                .env.*
-                *.local
-
-                # Ignore git-related files
-                .git
-                .gitignore
-
-                # Ignore Docker-related files and configs
-                .dockerignore
-                docker-compose.yml
-
-                # Ignore build and cache directories
-                dist
-                build
-                .cache
-                __pycache__
-
-                # Ignore IDE and editor configurations
-                .vscode
-                .idea
-                *.sublime-project
-                *.sublime-workspace
-                .DS_Store # macOS-specific
-
-                # Ignore test and coverage files
-                coverage
-                *.coverage
-                *.test.js
-                *.spec.js
-                tests
-            `);
-            logger.info(`✅ Created: ${path.basename(dockerignorePath)}`);
-        }
-        const envPath = path.resolve(path.dirname(targetPath), ".env");
-        if (!fileExists(envPath)) {
-            await fs.writeFile(envPath, dedent `
-                # Uncomment the following line to add your LangSmith API key
-                # LANGSMITH_API_KEY=your-api-key
-                # Or if you have a LangSmith Deployment license key, then uncomment the following line:
-                # LANGGRAPH_CLOUD_LICENSE_KEY=your-license-key
-                # Add any other environment variables go below...
-            `);
-            logger.info(`✅ Created: ${path.basename(envPath)}`);
-        }
-    }
-});
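Per the action above, a save path of "-" writes the generated Dockerfile to stdout, while a regular path writes it to disk; with --add-docker-compose the command also emits docker-compose.yml, a default .dockerignore, and a stub .env alongside it. Assuming invocation via npx, for example:

    npx @langchain/langgraph-cli dockerfile ./Dockerfile --add-docker-compose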
package/dist/cli/new.mjs
DELETED
@@ -1,13 +0,0 @@
-#!/usr/bin/env node
-import { builder } from "./utils/builder.mjs";
-import { withAnalytics } from "./utils/analytics.mjs";
-import { createNew } from "create-langgraph";
-import { gracefulExit } from "exit-hook";
-builder
-    .command("new")
-    .description("Create a new LangGraph project")
-    .argument("[path]", "Path to create the project")
-    .option("-t, --template <template>", "Template to use", "")
-    .hook("preAction", withAnalytics())
-    .exitOverride((error) => gracefulExit(error.exitCode))
-    .action((path, options) => createNew(path, options.template));
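The removed `new` command forwards directly to create-langgraph; assuming invocation via npx, with a placeholder project path and template name:

    npx @langchain/langgraph-cli new ./my-agent --template <template>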
package/dist/cli/sysinfo.mjs
DELETED
@@ -1,63 +0,0 @@
-import { builder } from "./utils/builder.mjs";
-import { detect } from "package-manager-detector";
-import { $ } from "execa";
-builder
-    .command("sysinfo")
-    .description("Print system information")
-    .action(async () => {
-    const manager = await detect();
-    if (!manager)
-        throw new Error("No package manager detected");
-    console.log("Node version:", process.version);
-    console.log("Operating system:", process.platform, process.arch);
-    console.log("Package manager:", manager.name);
-    console.log("Package manager version:", manager.version ?? "N/A");
-    console.log("-".repeat(20));
-    const output = await (async () => {
-        switch (manager.name) {
-            case "npm":
-                return await $ `npm ls --depth=4`;
-            case "yarn":
-                if (manager.version === "berry") {
-                    return await $ `yarn info`;
-                }
-                return await $ `yarn list --depth=4`;
-            case "pnpm":
-                return await $ `pnpm ls --depth=4`;
-            case "bun":
-                return await $ `bun pm ls`;
-            default:
-                return await $ `npm ls`;
-        }
-    })();
-    const gatherMatch = (str, regex) => {
-        return [...new Set([...str.matchAll(regex)].map((match) => match[0]))];
-    };
-    const packages = gatherMatch(output.stdout, /(@langchain\/[^\s@]+|langsmith|langchain|zod|zod-to-json-schema)/g);
-    async function getPackageInfo(packageName) {
-        switch (manager?.name) {
-            case "npm":
-                return (await $ `npm explain ${packageName}`).stdout;
-            case "yarn":
-                return (await $ `yarn why ${packageName}`).stdout;
-            case "pnpm":
-                return (await $ `pnpm why ${packageName}`).stdout;
-            case "bun":
-                return (await $ `bun why ${packageName}`).stdout;
-            default:
-                return null;
-        }
-    }
-    function escapeRegExp(text) {
-        return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-    }
-    for (const pkg of packages) {
-        const info = await getPackageInfo(pkg);
-        if (!info)
-            continue;
-        const targetRegex = new RegExp(escapeRegExp(pkg) + "[@\\s][^\\s]*", "g");
-        console.log(pkg, "->", gatherMatch(info, targetRegex)
-            .map((i) => i.slice(pkg.length).trim())
-            .join(", "));
-    }
-});
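In other words, `sysinfo` detects the active package manager, prints Node and OS details, and then reports the resolved versions of LangChain-related packages via the manager's explain/why command. Assuming npx invocation:

    npx @langchain/langgraph-cli sysinfo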
package/dist/cli/up.mjs
DELETED
@@ -1,139 +0,0 @@
-import { builder } from "./utils/builder.mjs";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import { getConfig } from "../utils/config.mjs";
-import { getProjectPath } from "./utils/project.mjs";
-import { logger } from "../utils/logging.mjs";
-import { createCompose, getDockerCapabilities } from "../docker/compose.mjs";
-import { configToCompose, getBaseImage } from "../docker/docker.mjs";
-import { getExecaOptions } from "../docker/shell.mjs";
-import { $ } from "execa";
-import { createHash } from "node:crypto";
-import dedent from "dedent";
-import { withAnalytics } from "./utils/analytics.mjs";
-import { gracefulExit } from "exit-hook";
-const sha256 = (input) => createHash("sha256").update(input).digest("hex");
-const getProjectName = (configPath) => {
-    const cwd = path.dirname(configPath).toLocaleLowerCase();
-    return `${path.basename(cwd)}-${sha256(cwd)}`;
-};
-const stream = (proc) => {
-    logger.debug(`Running "${proc.spawnargs.join(" ")}"`);
-    return proc;
-};
-const waitForHealthcheck = async (port) => {
-    const now = Date.now();
-    while (Date.now() - now < 10_000) {
-        const ok = await fetch(`http://localhost:${port}/ok`).then((res) => res.ok, () => false);
-        await new Promise((resolve) => setTimeout(resolve, 100));
-        if (ok)
-            return true;
-    }
-    throw new Error("Healthcheck timed out");
-};
-builder
-    .command("up")
-    .description("Launch LangGraph API server.")
-    .option("-c, --config <path>", "Path to configuration file", process.cwd())
-    .option("-d, --docker-compose <path>", "Advanced: Path to docker-compose.yml file with additional services to launch")
-    .option("-p, --port <port>", "Port to run the server on", "8123")
-    .option("--recreate", "Force recreate containers and volumes", false)
-    .option("--no-pull", "Running the server with locally-built images. By default LangGraph will pull the latest images from the registry")
-    .option("--watch", "Restart on file changes", false)
-    .option("--wait", "Wait for services to start before returning. Implies --detach", false)
-    .option("--postgres-uri <uri>", "Postgres URI to use for the database. Defaults to launching a local database")
-    .exitOverride((error) => gracefulExit(error.exitCode))
-    .hook("preAction", withAnalytics((command) => ({
-        config: command.opts().config !== process.cwd(),
-        port: command.opts().port !== "8123",
-        postgres_uri: !!command.opts().postgresUri,
-        docker_compose: !!command.opts().dockerCompose,
-        recreate: command.opts().recreate,
-        pull: command.opts().pull,
-        watch: command.opts().watch,
-        wait: command.opts().wait,
-    })))
-    .action(async (params) => {
-    logger.info("Starting LangGraph API server...");
-    logger.warn(dedent `
-        For local dev, requires env var LANGSMITH_API_KEY with access to LangSmith Deployment.
-        For production use, requires a license key in env var LANGGRAPH_CLOUD_LICENSE_KEY.
-    `);
-    const configPath = await getProjectPath(params.config);
-    const config = getConfig(await fs.readFile(configPath, "utf-8"));
-    const cwd = path.dirname(configPath);
-    const capabilities = await getDockerCapabilities();
-    const fullRestartFiles = [configPath];
-    if (typeof config.env === "string") {
-        fullRestartFiles.push(path.resolve(cwd, config.env));
-    }
-    const { apiDef } = await configToCompose(configPath, config, {
-        watch: capabilities.watchAvailable,
-    });
-    const name = getProjectName(configPath);
-    const execOpts = await getExecaOptions({
-        cwd,
-        stdout: "inherit",
-        stderr: "inherit",
-    });
-    const exec = $(execOpts);
-    if (!config._INTERNAL_docker_tag && params.pull) {
-        // pull the image
-        logger.info(`Pulling image ${getBaseImage(config)}...`);
-        await stream(exec `docker pull ${getBaseImage(config)}`);
-    }
-    // remove dangling images
-    logger.info(`Pruning dangling images...`);
-    await stream(exec `docker image prune -f --filter ${`label=com.docker.compose.project=${name}`}`);
-    // remove stale containers
-    logger.info(`Pruning stale containers...`);
-    await stream(exec `docker container prune -f --filter ${`label=com.docker.compose.project=${name}`}`);
-    const input = createCompose(capabilities, {
-        port: +params.port,
-        postgresUri: params.postgresUri,
-        apiDef,
-    });
-    const args = ["--remove-orphans"];
-    if (params.recreate) {
-        args.push("--force-recreate", "--renew-anon-volumes");
-        try {
-            await stream(exec `docker volume rm langgraph-data`);
-        }
-        catch (e) {
-            // ignore
-        }
-    }
-    if (params.watch) {
-        if (capabilities.watchAvailable) {
-            args.push("--watch");
-        }
-        else {
-            logger.warn("Watch mode is not available. Please upgrade your Docker Engine.");
-        }
-    }
-    else if (params.wait) {
-        args.push("--wait");
-    }
-    else {
-        args.push("--abort-on-container-exit");
-    }
-    logger.info(`Launching docker-compose...`);
-    const cmd = capabilities.composeType === "plugin"
-        ? ["docker", "compose"]
-        : ["docker-compose"];
-    cmd.push("--project-directory", cwd, "--project-name", name);
-    const userCompose = params.dockerCompose || config.docker_compose_file;
-    if (userCompose)
-        cmd.push("-f", userCompose);
-    cmd.push("-f", "-");
-    const up = stream($({ ...execOpts, input }) `${cmd} up ${args}`);
-    waitForHealthcheck(+params.port).then(() => {
-        logger.info(`
-Ready!
-- API: http://localhost:${params.port}
-- Docs: http://localhost:${params.port}/docs
-- LangGraph Studio: https://smith.langchain.com/studio/?baseUrl=http://127.0.0.1:${params.port}
-`);
-    }, () => void 0);
-    await up.catch(() => void 0);
-});
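As the action shows, `up` generates a Compose definition in memory (createCompose) and feeds it to Docker on stdin via `-f -`; the assembled invocation is roughly the following sketch, with <cwd> and <name> derived from the config path, --force-recreate and --renew-anon-volumes added only with --recreate, and one of --watch, --wait, or --abort-on-container-exit appended depending on the flags passed:

    docker compose --project-directory <cwd> --project-name <name> [-f <user docker-compose.yml>] -f - up --remove-orphans [--force-recreate --renew-anon-volumes] [--watch | --wait | --abort-on-container-exit]

Once http://localhost:<port>/ok responds, the command prints the API, docs, and LangGraph Studio URLs.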
package/dist/cli/utils/analytics.mjs
DELETED
@@ -1,39 +0,0 @@
-import os from "node:os";
-import { version } from "./version.mjs";
-const SUPABASE_PUBLIC_API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Imt6cmxwcG9qaW5wY3l5YWlweG5iIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MTkyNTc1NzksImV4cCI6MjAzNDgzMzU3OX0.kkVOlLz3BxemA5nP-vat3K4qRtrDuO4SwZSR_htcX9c";
-const SUPABASE_URL = "https://kzrlppojinpcyyaipxnb.supabase.co";
-async function logData(data) {
-    try {
-        await fetch(`${SUPABASE_URL}/rest/v1/js_logs`, {
-            method: "POST",
-            headers: {
-                "Content-Type": "application/json",
-                apikey: SUPABASE_PUBLIC_API_KEY,
-                "User-Agent": "Mozilla/5.0",
-            },
-            body: JSON.stringify(data),
-        });
-    }
-    catch (error) {
-        // pass
-    }
-}
-let analyticsPromise = Promise.resolve();
-export function withAnalytics(fn, options) {
-    if (process.env.LANGGRAPH_CLI_NO_ANALYTICS === "1") {
-        return () => void 0;
-    }
-    return function (actionCommand) {
-        analyticsPromise = analyticsPromise.then(() => logData({
-            os: os.platform(),
-            os_version: os.release(),
-            node_version: process.version,
-            cli_version: version,
-            cli_command: options?.name ?? actionCommand.name(),
-            params: fn?.(actionCommand) ?? {},
-        }).catch(() => { }));
-    };
-}
-export async function flushAnalytics() {
-    await analyticsPromise;
-}
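Note that withAnalytics above short-circuits when LANGGRAPH_CLI_NO_ANALYTICS is set to "1", so telemetry can be disabled for any command from the environment, e.g. (assuming npx invocation):

    LANGGRAPH_CLI_NO_ANALYTICS=1 npx @langchain/langgraph-cli dev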