@langchain/langgraph-cli 0.0.0-preview.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +145 -0
- package/dist/cli/build.mjs +44 -0
- package/dist/cli/cli.mjs +7 -0
- package/dist/cli/dev.entrypoint.mjs +35 -0
- package/dist/cli/dev.mjs +133 -0
- package/dist/cli/dockerfile.mjs +35 -0
- package/dist/cli/utils/builder.mjs +16 -0
- package/dist/cli/utils/ipc/client.mjs +25 -0
- package/dist/cli/utils/ipc/server.mjs +71 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +7 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +18 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/dockerfile.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +71 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +308 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +81 -0
- package/dist/logging.mjs +50 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +91 -0
- package/dist/schemas.mjs +399 -0
- package/dist/server.mjs +63 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +123 -0
- package/dist/storage/ops.mjs +786 -0
- package/dist/storage/persist.mjs +69 -0
- package/dist/storage/store.mjs +37 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/config.mjs +35 -0
- package/dist/utils/error.mjs +1 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +62 -0
package/dist/cli/cli.mjs
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import "../preload.mjs";
import { asyncExitHook } from "exit-hook";
import * as process from "node:process";
import { startServer, StartServerSchema } from "../server.mjs";
import { connectToServer } from "./utils/ipc/client.mjs";
import { Client as LangSmithClient } from "langsmith";
import { logger } from "../logging.mjs";

logger.info(`Starting server...`);

// The parent dev process passes its PID and the serialized server options
// as the last two CLI arguments.
const [ppid, payload] = process.argv.slice(-2);
const sendToParent = await connectToServer(+ppid);

// TODO: re-export langsmith/isTracingEnabled
const isTracingEnabled = () => {
    const value = process.env?.LANGSMITH_TRACING_V2 ||
        process.env?.LANGCHAIN_TRACING_V2 ||
        process.env?.LANGSMITH_TRACING ||
        process.env?.LANGCHAIN_TRACING;
    return value === "true";
};

// Start the server and (when tracing is on) resolve the LangSmith
// organization ID in parallel.
const [{ host, cleanup }, organizationId] = await Promise.all([
    startServer(StartServerSchema.parse(JSON.parse(payload))),
    (async () => {
        if (isTracingEnabled()) {
            try {
                // @ts-expect-error Private method
                // `await` is required here: without it a rejected promise
                // escapes this try/catch and crashes the process instead of
                // being downgraded to a warning.
                return await new LangSmithClient()._getTenantId();
            }
            catch (error) {
                logger.warn("Failed to get organization ID", { error });
            }
        }
        return null;
    })(),
]);

// Give in-flight work up to 1s to shut down cleanly on exit.
asyncExitHook(cleanup, { wait: 1000 });
// Report readiness to the parent dev process (no-op if no IPC server).
sendToParent?.({ host, organizationId });
|
package/dist/cli/dev.mjs
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import * as path from "node:path";
import * as fs from "node:fs/promises";
import { fileURLToPath } from "node:url";
import { spawn } from "node:child_process";
import open from "open";
import * as dotenv from "dotenv";
import { logger } from "../logging.mjs";
import { ConfigSchema } from "../utils/config.mjs";
import { createIpcServer } from "./utils/ipc/server.mjs";
import { z } from "zod";
import { watch } from "chokidar";
import { builder } from "./utils/builder.mjs";
import { getProjectPath } from "./utils/project.mjs";

// tsx CLI used to run the server entrypoint with watch/hot-reload support.
const tsxTarget = new URL("../../cli.mjs", import.meta.resolve("tsx/esm/api"));
const entrypointTarget = new URL(import.meta.resolve("./dev.entrypoint.mjs"));

builder
    .command("dev")
    .description("Run LangGraph API server in development mode with hot reloading.")
    .option("-p, --port <number>", "port to run the server on", "2024")
    .option("-h, --host <string>", "host to bind to", "localhost")
    .option("--no-browser", "disable auto-opening the browser")
    .option("-n, --n-jobs-per-worker <number>", "number of workers to run", "10")
    .option("-c, --config <path>", "path to configuration file", process.cwd())
    .action(async (options) => {
        try {
            const configPath = await getProjectPath(options.config);
            const projectCwd = path.dirname(configPath);
            const [pid, server] = await createIpcServer();
            const watcher = watch([configPath], {
                ignoreInitial: true,
                cwd: projectCwd,
            });
            let hasOpenedFlag = false;
            let child = undefined;
            const localUrl = `http://${options.host}:${options.port}`;
            const studioUrl = `https://smith.langchain.com/studio?baseUrl=${localUrl}`;
            console.log(`
Welcome to

╦ ┌─┐┌┐┌┌─┐╔═╗┬─┐┌─┐┌─┐┬ ┬
║ ├─┤││││ ┬║ ╦├┬┘├─┤├─┘├─┤
╩═╝┴ ┴┘└┘└─┘╚═╝┴└─┴ ┴┴ ┴ ┴.js

- 🚀 API: \x1b[36m${localUrl}\x1b[0m
- 🎨 Studio UI: \x1b[36m${studioUrl}\x1b[0m

This in-memory server is designed for development and testing.
For production use, please use LangGraph Cloud.

`);
            // The child server reports its bound host (and the LangSmith
            // organization, when tracing is enabled) over the IPC pipe.
            server.on("data", (data) => {
                const { host, organizationId } = z
                    .object({ host: z.string(), organizationId: z.string().nullish() })
                    .parse(data);
                logger.info(`Server running at ${host}`);
                if (options.browser && !hasOpenedFlag) {
                    hasOpenedFlag = true;
                    open(organizationId
                        ? `${studioUrl}&organizationId=${organizationId}`
                        : studioUrl);
                }
            });
            // check if .gitignore already contains .langgraph_api
            const gitignorePath = path.resolve(projectCwd, ".gitignore");
            const gitignoreContent = await fs
                .readFile(gitignorePath, "utf-8")
                .catch(() => "");
            if (!gitignoreContent.includes(".langgraph_api")) {
                logger.info("Updating .gitignore to prevent `.langgraph_api` from being committed.");
                await fs.appendFile(gitignorePath, "\n# LangGraph API\n.langgraph_api\n");
            }
            /**
             * Re-read the project config, resolve the environment for the
             * child process, and sync the watcher with the files the config
             * now references. Returns the parsed config and env map.
             */
            const prepareContext = async () => {
                const config = ConfigSchema.parse(JSON.parse(await fs.readFile(configPath, "utf-8")));
                const newWatch = [configPath];
                const env = { ...process.env };
                const configEnv = config?.env;
                if (configEnv) {
                    if (typeof configEnv === "string") {
                        // `env` points at a dotenv file: load and watch it.
                        const envPath = path.resolve(projectCwd, configEnv);
                        newWatch.push(envPath);
                        const envData = await fs.readFile(envPath, "utf-8");
                        dotenv.populate(env, dotenv.parse(envData));
                    }
                    else if (Array.isArray(configEnv)) {
                        throw new Error("Env storage is not supported by CLI.");
                    }
                    else if (typeof configEnv === "object") {
                        if (!process.env)
                            throw new Error("process.env is not defined");
                        dotenv.populate(env, configEnv);
                    }
                }
                // Diff current watch targets against the new list so we only
                // add/remove what changed.
                const oldWatch = Object.entries(watcher.getWatched()).flatMap(([dir, files]) => files.map((file) => path.resolve(projectCwd, dir, file)));
                const addedTarget = newWatch.filter((target) => !oldWatch.includes(target));
                const removedTarget = oldWatch.filter((target) => !newWatch.includes(target));
                watcher.unwatch(removedTarget).add(addedTarget);
                return { config, env };
            };
            /**
             * (Re)start the tsx-wrapped server child process, killing any
             * previous instance first.
             */
            const launchTsx = async () => {
                const { config, env } = await prepareContext();
                if (child != null)
                    child.kill();
                child = spawn(process.execPath, [
                    fileURLToPath(tsxTarget),
                    "watch",
                    "--clear-screen=false",
                    fileURLToPath(entrypointTarget),
                    pid.toString(),
                    JSON.stringify({
                        port: Number.parseInt(options.port, 10),
                        nWorkers: Number.parseInt(options.nJobsPerWorker, 10),
                        host: options.host,
                        graphs: config.graphs,
                        cwd: projectCwd,
                    }),
                ], { stdio: ["inherit", "inherit", "inherit", "ipc"], env });
            };
            watcher.on("all", (_name, changedPath) => {
                logger.warn(`Detected changes in ${changedPath}, restarting server`);
                // Don't let a failed relaunch become an unhandled rejection.
                launchTsx().catch((error) => logger.error(error));
            });
            launchTsx().catch((error) => logger.error(error));
            process.on("exit", () => {
                watcher.close();
                server.close();
                child?.kill();
            });
        }
        catch (error) {
            logger.error(error);
        }
    });
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { assembleLocalDeps, configToCompose, configToDocker, } from "../docker/dockerfile.mjs";
import { createCompose, getDockerCapabilities } from "../docker/compose.mjs";
import { ConfigSchema } from "../utils/config.mjs";
import { getProjectPath } from "./utils/project.mjs";
import { builder } from "./utils/builder.mjs";
import * as fs from "node:fs/promises";
import * as path from "node:path";

builder
    .command("dockerfile")
    .description("Generate a Dockerfile for the LangGraph API server, with Docker Compose options.")
    .argument("<save-path>", "Path to save the Dockerfile")
    .option("--add-docker-compose", "Add additional files for running the LangGraph API server with docker-compose. These files include a docker-compose.yml, .env file, and a .dockerignore file.")
    .option("-c, --config <path>", "Path to configuration file", process.cwd())
    .action(async (savePath, options) => {
        const configPath = await getProjectPath(options.config);
        const config = ConfigSchema.parse(JSON.parse(await fs.readFile(configPath, "utf-8")));
        const localDeps = await assembleLocalDeps(configPath, config);
        const dockerfile = await configToDocker(configPath, config, localDeps);
        // A save path of "-" writes the Dockerfile to stdout instead of disk.
        if (savePath === "-") {
            console.log(dockerfile);
            return;
        }
        const saveDir = path.resolve(process.cwd(), savePath);
        // Create the target directory up front so the writes below cannot
        // fail with ENOENT when the user passes a not-yet-existing path.
        await fs.mkdir(saveDir, { recursive: true });
        await fs.writeFile(path.join(saveDir, "Dockerfile"), dockerfile);
        if (options.addDockerCompose) {
            const { apiDef } = await configToCompose(configPath, config, {
                watch: false,
            });
            const capabilities = await getDockerCapabilities();
            const compose = createCompose(capabilities, { apiDef });
            await fs.writeFile(path.join(saveDir, "docker-compose.yml"), compose);
            // TODO: add .dockerignore and .env files
        }
    });
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Command } from "@commander-js/extra-typings";
import * as fs from "node:fs/promises";
import * as url from "node:url";

/** Shared root command that every `langgraph` subcommand registers onto. */
export const builder = new Command()
    .name("langgraph")
    .description("LangGraph.js CLI")
    .enablePositionalOptions();

// Best-effort: wire up `--version` from the package manifest. A failure to
// read it is logged but never fatal.
try {
    const manifestPath = url.fileURLToPath(new URL("../../../package.json", import.meta.url));
    const manifest = JSON.parse(await fs.readFile(manifestPath, "utf-8"));
    builder.version(manifest.version);
}
catch (error) {
    console.error(error);
    // pass
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
// https://github.com/privatenumber/tsx/tree/28a3e7d2b8fd72b683aab8a98dd1fcee4624e4cb
import net from "node:net";
import { getPipePath } from "./utils/get-pipe-path.mjs";

/**
 * Connect to the IPC pipe owned by `processId` and resolve with a function
 * that sends length-prefixed JSON messages to it. Resolves with `undefined`
 * when nothing is listening on the pipe.
 */
export const connectToServer = (processId = process.ppid) => new Promise((resolve) => {
    const socket = net.createConnection(getPipePath(processId), () => {
        // Frame each message as a 4-byte big-endian length header followed
        // by the JSON payload.
        resolve((data) => {
            const body = Buffer.from(JSON.stringify(data));
            const header = Buffer.alloc(4);
            header.writeInt32BE(body.length, 0);
            socket.write(Buffer.concat([header, body]));
        });
    });
    /**
     * Ignore error when:
     * - Called as a loader and there is no server
     * - Nested process when using --test and the ppid is incorrect
     */
    socket.on("error", () => {
        resolve(undefined);
    });
    // Prevent Node from waiting for this socket to close before exiting
    socket.unref();
});
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
// https://github.com/privatenumber/tsx/tree/28a3e7d2b8fd72b683aab8a98dd1fcee4624e4cb
|
|
2
|
+
import net from "node:net";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import { tmpdir } from "./utils/temporary-directory.mjs";
|
|
5
|
+
import { getPipePath } from "./utils/get-pipe-path.mjs";
|
|
6
|
+
/**
 * Create a stream "data" handler that reassembles length-prefixed messages.
 *
 * Wire format: a 4-byte big-endian length header followed by the payload.
 * Chunks may arrive split or coalesced arbitrarily; each complete payload is
 * handed to `onMessage` as soon as it is available.
 */
const bufferData = (onMessage) => {
    let buffer = Buffer.alloc(0);
    return (data) => {
        buffer = Buffer.concat([buffer, data]);
        // `>= 4`: a complete header may be present even when nothing follows
        // it (the previous `> 4` would stall forever on a zero-length frame).
        while (buffer.length >= 4) {
            const messageLength = buffer.readInt32BE(0);
            if (buffer.length < 4 + messageLength) {
                break; // wait for the rest of the payload
            }
            // `subarray` replaces the deprecated `Buffer#slice` (both return
            // views, not copies).
            onMessage(buffer.subarray(4, 4 + messageLength));
            buffer = buffer.subarray(4 + messageLength);
        }
    };
};
|
|
23
|
+
/**
 * Listen on this process's IPC pipe and resolve with `[pid, server]`.
 * Decoded JSON messages from connected clients are re-emitted on the server
 * as "data" events.
 */
export const createIpcServer = async () => {
    const server = net.createServer((socket) => {
        const onChunk = bufferData((message) => {
            server.emit("data", JSON.parse(message.toString()));
        });
        socket.on("data", onChunk);
    });
    const pipePath = getPipePath(process.pid);
    await fs.promises.mkdir(tmpdir, { recursive: true });
    /**
     * Fix #457 (https://github.com/privatenumber/tsx/issues/457)
     *
     * Avoid the error "EADDRINUSE: address already in use"
     *
     * If the pipe file already exists, the previous process was closed
     * abnormally; it is safe to delete the stale pipe because PIDs are
     * unique at any given time.
     */
    await fs.promises.rm(pipePath, { force: true });
    await new Promise((resolve, reject) => {
        server.listen(pipePath, resolve);
        server.on("error", reject);
    });
    // Prevent Node from waiting for this socket to close before exiting
    server.unref();
    process.on("exit", () => {
        server.close();
        /**
         * Only clean on Unix
         *
         * https://nodejs.org/api/net.html#ipc-support:
         * On Windows the local domain is a named pipe in \\?\pipe\ or
         * \\.\pipe\; pipes do not persist and Windows removes them when the
         * owning process exits, so no cleanup is needed there.
         */
        if (process.platform !== "win32") {
            try {
                fs.rmSync(pipePath);
            }
            catch {
                // best effort — the pipe file may already be gone
            }
        }
    });
    return [process.pid, server];
};
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
// https://github.com/privatenumber/tsx/tree/28a3e7d2b8fd72b683aab8a98dd1fcee4624e4cb
import path from "node:path";
import { tmpdir } from "./temporary-directory.mjs";

/**
 * Per-process IPC pipe path. On Windows the path is prefixed into the named
 * pipe namespace (`\\?\pipe\`); elsewhere it is a plain Unix socket path.
 */
export const getPipePath = (processId) => {
    const pipePath = path.join(tmpdir, `${processId}.pipe`);
    if (process.platform === "win32") {
        return `\\\\?\\pipe\\${pipePath}`;
    }
    return pipePath;
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// https://github.com/privatenumber/tsx/tree/28a3e7d2b8fd72b683aab8a98dd1fcee4624e4cb
import path from "node:path";
import os from "node:os";

/**
 * Resolve an identifier for the current user so the cache directory is
 * unique per user and avoids permission clashes between accounts.
 */
const resolveUserId = () => {
    // POSIX (incl. virtual CI users, e.g. Docker): numeric effective uid.
    if (process.geteuid) {
        return process.geteuid();
    }
    // Windows has no uid, so key the directory on the username instead.
    return os.userInfo().username;
};

/** Per-user temporary directory used for the IPC pipe files. */
export const tmpdir = path.join(os.tmpdir(), `tsx-${resolveUserId()}`);
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import * as url from "node:url";
import * as fs from "node:fs/promises";
import * as path from "node:path";

/**
 * Resolve a user-supplied location (a directory, a `langgraph.json` file, or
 * a `file://` URL to either) to the absolute path of the project's
 * `langgraph.json` configuration file.
 *
 * Throws if the location does not exist, or if it is a file that is not
 * named `langgraph.json`.
 */
export async function getProjectPath(key) {
    const resolved = key.startsWith("file://")
        ? url.fileURLToPath(key)
        : path.resolve(process.cwd(), key);
    const stats = await fs.stat(resolved);
    if (stats.isDirectory()) {
        return path.join(resolved, "langgraph.json");
    }
    if (path.basename(resolved) === "langgraph.json") {
        return resolved;
    }
    throw new Error("Invalid path");
}
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import { $ } from "execa";
|
|
2
|
+
import * as yaml from "yaml";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
import { getExecaOptions } from "./shell.mjs";
|
|
5
|
+
// Default in-network connection string used when the caller does not provide
// a `postgresUri` to `createCompose`; points at the bundled
// `langgraph-postgres` service below.
export const DEFAULT_POSTGRES_URI = "postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable";
// Base compose service definition for the Redis instance backing the API.
// `interval`/`start_interval` are filled in by `createCompose` depending on
// the detected Docker capabilities.
const REDIS = {
    image: "redis:6",
    healthcheck: {
        test: "redis-cli ping",
        start_period: "10s",
        timeout: "1s",
        retries: 5,
    },
};
// Base compose service definition for the bundled Postgres (pgvector image,
// with the `vector` extension preloaded); data persists in the
// `langgraph-data` named volume declared by `createCompose`.
const DB = {
    image: "pgvector/pgvector:pg16",
    // TODO: make exposing postgres optional
    // ports: ['5433:5432'],
    expose: ["5432"],
    command: ["postgres", "-c", "shared_preload_libraries=vector"],
    environment: {
        POSTGRES_DB: "postgres",
        POSTGRES_USER: "postgres",
        POSTGRES_PASSWORD: "postgres",
    },
    volumes: ["langgraph-data:/var/lib/postgresql/data"],
    healthcheck: {
        test: "pg_isready -U postgres",
        start_period: "10s",
        timeout: "1s",
        retries: 5,
    },
};
|
34
|
+
/**
 * Parse a version string such as "24.0.7", "v20.10.1" or "2.24.6-desktop.1"
 * into numeric `{ major, minor, patch }` parts. Missing components default
 * to 0; a leading "v" on the major and any pre-release suffix on the patch
 * (e.g. "-desktop") are ignored.
 */
function parseVersion(input) {
    const [majorStr = "0", minorStr = "0", patchStr = "0"] = input.trim().split(".", 3);
    // Always pass the radix so parseInt cannot misinterpret the input.
    const major = Number.parseInt(majorStr.startsWith("v") ? majorStr.slice(1) : majorStr, 10);
    const minor = Number.parseInt(minorStr, 10);
    // Strip a pre-release suffix before parsing the patch number.
    const patch = Number.parseInt(patchStr.split("-").at(0) ?? "0", 10);
    return { major, minor, patch };
}
|
44
|
+
/**
 * Three-way comparison of two parsed versions ({ major, minor, patch }).
 * Returns -1, 0, or 1 as `a` is lower than, equal to, or higher than `b`.
 */
function compareVersion(a, b) {
    for (const part of ["major", "minor", "patch"]) {
        if (a[part] !== b[part]) {
            return Math.sign(a[part] - b[part]);
        }
    }
    return 0;
}
|
53
|
+
/**
 * Probe the local Docker installation and report what this CLI can rely on.
 *
 * Runs `docker info -f json` (via execa with project-level options from
 * `getExecaOptions`), validates the fields we need with zod, and detects
 * whether Compose is available as a CLI plugin or as the standalone
 * `docker-compose` binary.
 *
 * Returns `{ composeType, versionCompose, healthcheckStartInterval,
 * watchAvailable, buildAvailable, versionDocker }`.
 *
 * Throws when Docker is not installed/running, when no Compose is found, or
 * when the Docker server version is below 23.0.5.
 */
export async function getDockerCapabilities() {
    let rawInfo = null;
    try {
        const { stdout } = await $(await getExecaOptions()) `docker info -f json`;
        rawInfo = JSON.parse(stdout);
    }
    catch (error) {
        throw new Error("Docker not installed or not running: " + error);
    }
    // Validate only the fields consumed below; extra keys are ignored.
    const info = z
        .object({
            ServerVersion: z.string(),
            ClientInfo: z.object({
                Plugins: z.array(z.object({
                    Name: z.string(),
                    Version: z.string().optional(),
                })),
            }),
        })
        .safeParse(rawInfo);
    if (!info.success || !info.data.ServerVersion) {
        throw new Error("Docker not running");
    }
    // A plugin entry without a Version is treated as not installed.
    const composePlugin = info.data.ClientInfo.Plugins.find((i) => i.Name === "compose" && i.Version != null);
    const buildxPlugin = info.data.ClientInfo.Plugins.find((i) => i.Name === "buildx" && i.Version != null);
    let composeRes;
    if (composePlugin != null) {
        composeRes = {
            composeType: "plugin",
            versionCompose: parseVersion(composePlugin.Version),
        };
    }
    else {
        // No `docker compose` plugin: fall back to standalone docker-compose.
        try {
            const standalone = await $(await getExecaOptions()) `docker-compose --version --short`;
            composeRes = {
                composeType: "standalone",
                versionCompose: parseVersion(standalone.stdout),
            };
        }
        catch (error) {
            console.error(error);
            throw new Error("Docker Compose not installed");
        }
    }
    const versionDocker = parseVersion(info.data.ServerVersion);
    if (compareVersion(versionDocker, parseVersion("23.0.5")) < 0) {
        throw new Error("Please upgrade Docker to at least 23.0.5");
    }
    return {
        ...composeRes,
        // Healthcheck `start_interval` is only used for Engine >= 25.0.0.
        healthcheckStartInterval: compareVersion(versionDocker, parseVersion("25.0.0")) >= 0,
        // Compose watch mode is only used for Compose >= 2.25.0.
        watchAvailable: compareVersion(composeRes.versionCompose, parseVersion("2.25.0")) >= 0,
        buildAvailable: buildxPlugin != null,
        versionDocker,
    };
}
|
110
|
+
/** True for non-null, non-array objects (i.e. plain key/value mappings). */
function isPlainObject(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === "object";
}
|
|
113
|
+
/**
 * Build the docker-compose YAML for the LangGraph stack.
 *
 * Always includes the `langgraph-redis` and `langgraph-api` services; a
 * bundled `langgraph-postgres` service (plus its volume) is added only when
 * `options.postgresUri` is not provided. `options.apiDef` is shallow-merged
 * into the `langgraph-api` service (plain-object values merge key-wise,
 * everything else overwrites).
 *
 * Fix: the service templates (REDIS/DB) were previously shallow-spread, so
 * the per-call `healthcheck.interval` / `start_interval` assignments mutated
 * the shared module-level constants and leaked between invocations. The
 * nested healthcheck objects are now copied per call.
 */
export function createCompose(capabilities, options) {
    // Bundle Postgres only when the caller did not supply their own URI.
    const includeDb = !options.postgresUri;
    const postgresUri = options.postgresUri || DEFAULT_POSTGRES_URI;
    const compose = { services: {} };
    compose.services["langgraph-redis"] = {
        ...REDIS,
        healthcheck: { ...REDIS.healthcheck },
    };
    if (includeDb) {
        compose.volumes = {
            "langgraph-data": { driver: "local" },
        };
        compose.services["langgraph-postgres"] = {
            ...DB,
            healthcheck: { ...DB.healthcheck },
        };
        if (capabilities.healthcheckStartInterval) {
            compose.services["langgraph-postgres"].healthcheck.interval = "60s";
            compose.services["langgraph-postgres"].healthcheck.start_interval = "1s";
        }
        else {
            compose.services["langgraph-postgres"].healthcheck.interval = "5s";
        }
    }
    compose.services["langgraph-api"] = {
        ports: [options.port ? `${options.port}:8000` : "8000"],
        environment: {
            REDIS_URI: "redis://langgraph-redis:6379",
            POSTGRES_URI: postgresUri,
        },
        depends_on: {
            "langgraph-redis": { condition: "service_healthy" },
        },
    };
    if (includeDb) {
        compose.services["langgraph-api"].depends_on["langgraph-postgres"] = {
            condition: "service_healthy",
        };
    }
    if (capabilities.healthcheckStartInterval) {
        compose.services["langgraph-api"].healthcheck = {
            test: "python /api/healthcheck.py",
            interval: "60s",
            start_interval: "1s",
            start_period: "10s",
        };
        compose.services["langgraph-redis"].healthcheck.interval = "60s";
        compose.services["langgraph-redis"].healthcheck.start_interval = "1s";
    }
    else {
        compose.services["langgraph-redis"].healthcheck.interval = "5s";
    }
    // merge in with rest of the payload
    if (options.apiDef) {
        for (const key in options.apiDef) {
            const prevValue = compose.services["langgraph-api"][key];
            const newValue = options.apiDef[key];
            compose.services["langgraph-api"][key] =
                isPlainObject(prevValue) && isPlainObject(newValue)
                    ? { ...prevValue, ...newValue }
                    : newValue;
        }
    }
    return yaml.stringify(compose, { blockQuote: "literal" });
}
|