@langchain/langgraph-cli 1.1.8 → 1.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/build.mjs +52 -0
- package/dist/cli/cli.mjs +13 -0
- package/dist/cli/cloudflare.mjs +172 -0
- package/dist/cli/dev.mjs +143 -0
- package/dist/cli/dev.python.mjs +129 -0
- package/dist/cli/docker.mjs +114 -0
- package/dist/cli/new.mjs +13 -0
- package/dist/cli/sysinfo.mjs +63 -0
- package/dist/cli/up.mjs +139 -0
- package/dist/cli/utils/analytics.mjs +39 -0
- package/dist/cli/utils/builder.mjs +7 -0
- package/dist/cli/utils/ipc/server.mjs +93 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +29 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +40 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/cli/utils/stream.mjs +90 -0
- package/dist/cli/utils/version.mjs +13 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/docker.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/utils/config.mjs +104 -0
- package/dist/utils/logging.mjs +96 -0
- package/package.json +17 -17
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import { $ } from "execa";
|
|
2
|
+
import * as yaml from "yaml";
|
|
3
|
+
import { z } from "zod/v3";
|
|
4
|
+
import { getExecaOptions } from "./shell.mjs";
|
|
5
|
+
// Default in-network connection string for the bundled Postgres service
// (the "langgraph-postgres" service defined by DB below). Used when the
// caller does not supply their own postgresUri.
export const DEFAULT_POSTGRES_URI = "postgres://postgres:postgres@langgraph-postgres:5432/postgres?sslmode=disable";
// Base Compose service definition for the Redis container required by the
// LangGraph API (used for pub/sub and task queues).
const REDIS = {
    image: "redis:6",
    healthcheck: {
        test: "redis-cli ping",
        start_period: "10s",
        timeout: "1s",
        retries: 5,
    },
};
// Base Compose service definition for the bundled Postgres container.
// Uses the pgvector image so vector columns are available to the API.
const DB = {
    image: "pgvector/pgvector:pg16",
    // TODO: make exposing postgres optional
    // ports: ['5433:5432'],
    expose: ["5432"],
    command: ["postgres", "-c", "shared_preload_libraries=vector"],
    environment: {
        POSTGRES_DB: "postgres",
        POSTGRES_USER: "postgres",
        POSTGRES_PASSWORD: "postgres",
    },
    // Persist data across container restarts via a named volume.
    volumes: ["langgraph-data:/var/lib/postgresql/data"],
    healthcheck: {
        test: "pg_isready -U postgres",
        start_period: "10s",
        timeout: "1s",
        retries: 5,
    },
};
|
|
34
|
+
/**
 * Parse a version string such as "v23.0.5" or "2.25.0-rc1" into numeric parts.
 *
 * Missing components default to 0; a leading "v" on the major component and a
 * pre-release suffix on the patch component (e.g. "5-rc1") are tolerated.
 *
 * @param {string} input - raw version string (may have surrounding whitespace)
 * @returns {{ major: number, minor: number, patch: number }}
 */
function parseVersion(input) {
    const parts = input.trim().split(".", 3);
    const majorStr = parts[0] ?? "0";
    const minorStr = parts[1] ?? "0";
    const patchStr = parts[2] ?? "0";
    // Always pass the radix to parseInt: without it, exotic prefixes could be
    // reinterpreted in a different base.
    const major = Number.parseInt(majorStr.startsWith("v") ? majorStr.slice(1) : majorStr, 10);
    const minor = Number.parseInt(minorStr, 10);
    // Drop any pre-release suffix, e.g. "0-rc1" -> "0".
    const patch = Number.parseInt(patchStr.split("-").at(0) ?? "0", 10);
    return { major, minor, patch };
}
|
|
44
|
+
/**
 * Compare two parsed versions component-wise (major, then minor, then patch).
 *
 * @param {{ major: number, minor: number, patch: number }} a
 * @param {{ major: number, minor: number, patch: number }} b
 * @returns {number} -1, 0, or 1 — the sign of the first differing component.
 */
function compareVersion(a, b) {
    for (const component of ["major", "minor", "patch"]) {
        const delta = a[component] - b[component];
        if (delta !== 0) {
            return Math.sign(delta);
        }
    }
    return 0;
}
|
|
53
|
+
/**
 * Probe the local Docker installation and report its feature set.
 *
 * Runs `docker info -f json` and validates the output with zod. Compose is
 * detected first as the CLI plugin; when absent, `docker-compose --version
 * --short` (standalone binary) is tried as a fallback.
 *
 * @returns {Promise<object>} capabilities:
 *  - composeType: "plugin" | "standalone"
 *  - versionCompose: parsed Compose version
 *  - healthcheckStartInterval: Docker engine >= 25.0.0 (supports `start_interval`)
 *  - watchAvailable: Compose >= 2.25.0 (supports `docker compose watch`)
 *  - buildAvailable: whether the buildx plugin is present
 *  - versionDocker: parsed Docker engine version
 * @throws if Docker is not installed/running, Compose is missing, or the
 *         engine is older than 23.0.5.
 */
export async function getDockerCapabilities() {
    let rawInfo = null;
    try {
        // `docker info` fails (or returns no ServerVersion) when the daemon is down.
        const { stdout } = await $(await getExecaOptions()) `docker info -f json`;
        rawInfo = JSON.parse(stdout);
    }
    catch (error) {
        throw new Error("Docker not installed or not running: " + error);
    }
    // Validate only the fields we need; extra fields are ignored.
    const info = z
        .object({
        ServerVersion: z.string(),
        ClientInfo: z.object({
            Plugins: z.array(z.object({
                Name: z.string(),
                Version: z.string().optional(),
            })),
        }),
    })
        .safeParse(rawInfo);
    if (!info.success || !info.data.ServerVersion) {
        throw new Error("Docker not running");
    }
    // A plugin entry without a Version is treated as not installed.
    const composePlugin = info.data.ClientInfo.Plugins.find((i) => i.Name === "compose" && i.Version != null);
    const buildxPlugin = info.data.ClientInfo.Plugins.find((i) => i.Name === "buildx" && i.Version != null);
    let composeRes;
    if (composePlugin != null) {
        composeRes = {
            composeType: "plugin",
            versionCompose: parseVersion(composePlugin.Version),
        };
    }
    else {
        // Fall back to the legacy standalone docker-compose binary.
        try {
            const standalone = await $(await getExecaOptions()) `docker-compose --version --short`;
            composeRes = {
                composeType: "standalone",
                versionCompose: parseVersion(standalone.stdout),
            };
        }
        catch (error) {
            console.error(error);
            throw new Error("Docker Compose not installed");
        }
    }
    const versionDocker = parseVersion(info.data.ServerVersion);
    if (compareVersion(versionDocker, parseVersion("23.0.5")) < 0) {
        throw new Error("Please upgrade Docker to at least 23.0.5");
    }
    return {
        ...composeRes,
        // Docker 25+ supports healthcheck `start_interval`.
        healthcheckStartInterval: compareVersion(versionDocker, parseVersion("25.0.0")) >= 0,
        // Compose 2.25+ supports `docker compose watch`.
        watchAvailable: compareVersion(composeRes.versionCompose, parseVersion("2.25.0")) >= 0,
        buildAvailable: buildxPlugin != null,
        versionDocker,
    };
}
|
|
110
|
+
/**
 * Whether `value` is a non-null, non-array object (i.e. a plain record that
 * can be merged key-by-key).
 * @param {unknown} value
 * @returns {boolean}
 */
function isPlainObject(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === "object";
}
|
|
113
|
+
/**
 * Build a docker-compose YAML document for running the LangGraph API together
 * with its Redis (and optionally a bundled Postgres) service.
 *
 * @param {object} capabilities - result of getDockerCapabilities()
 * @param {object} options - { postgresUri?, port?, apiDef? }
 * @returns {string} the compose file serialized as YAML (literal block quotes).
 */
export function createCompose(capabilities, options) {
    // When no external Postgres is supplied, provision a bundled one and point
    // the API at it via the default in-network URI.
    const includeDb = !options.postgresUri;
    const postgresUri = options.postgresUri || DEFAULT_POSTGRES_URI;
    const compose = {
        services: {},
    };
    // BUG FIX: copy the nested `healthcheck` object as well. The shallow
    // `{ ...REDIS }` spread shared the healthcheck object with the module-level
    // constant, so the interval/start_interval mutations below leaked into
    // REDIS/DB and persisted across calls with different capabilities.
    compose.services["langgraph-redis"] = { ...REDIS, healthcheck: { ...REDIS.healthcheck } };
    if (includeDb) {
        compose.volumes = {
            "langgraph-data": { driver: "local" },
        };
        compose.services["langgraph-postgres"] = { ...DB, healthcheck: { ...DB.healthcheck } };
        if (capabilities.healthcheckStartInterval) {
            // Docker 25+: poll rapidly during startup, then back off.
            compose.services["langgraph-postgres"].healthcheck.interval = "60s";
            compose.services["langgraph-postgres"].healthcheck.start_interval = "1s";
        }
        else {
            compose.services["langgraph-postgres"].healthcheck.interval = "5s";
        }
    }
    compose.services["langgraph-api"] = {
        // Without an explicit host port Docker assigns an ephemeral one.
        ports: [options.port ? `${options.port}:8000` : "8000"],
        environment: {
            REDIS_URI: "redis://langgraph-redis:6379",
            POSTGRES_URI: postgresUri,
        },
        depends_on: {
            "langgraph-redis": { condition: "service_healthy" },
        },
    };
    if (includeDb) {
        compose.services["langgraph-api"].depends_on["langgraph-postgres"] = {
            condition: "service_healthy",
        };
    }
    if (capabilities.healthcheckStartInterval) {
        compose.services["langgraph-api"].healthcheck = {
            test: "python /api/healthcheck.py",
            interval: "60s",
            start_interval: "1s",
            start_period: "10s",
        };
        compose.services["langgraph-redis"].healthcheck.interval = "60s";
        compose.services["langgraph-redis"].healthcheck.start_interval = "1s";
    }
    else {
        compose.services["langgraph-redis"].healthcheck.interval = "5s";
    }
    // merge in with rest of the payload
    if (options.apiDef) {
        for (const key in options.apiDef) {
            const prevValue = compose.services["langgraph-api"][key];
            const newValue = options.apiDef[key];
            // Plain objects merge key-by-key (caller keys win); everything else
            // replaces the default wholesale.
            if (isPlainObject(prevValue) && isPlainObject(newValue)) {
                compose.services["langgraph-api"][key] = {
                    ...prevValue,
                    ...newValue,
                };
            }
            else {
                compose.services["langgraph-api"][key] = newValue;
            }
        }
    }
    return yaml.stringify(compose, { blockQuote: "literal" });
}
|
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
import dedent from "dedent";
|
|
2
|
+
import * as path from "node:path";
|
|
3
|
+
import { promises as fs } from "node:fs";
|
|
4
|
+
const dedenter = dedent.withOptions({ escapeSpecialCharacters: false });
|
|
5
|
+
/**
 * Whether a filesystem entry (file or directory) exists at `path`.
 * Any stat failure (missing entry, permission error) is reported as `false`.
 * @param {string} path
 * @returns {Promise<boolean>}
 */
async function exists(path) {
    return fs.stat(path).then(
        () => true,
        () => false,
    );
}
|
|
14
|
+
/**
 * Scan the local (path-based) entries of `config.dependencies` and classify
 * each directory for Docker image assembly.
 *
 * - "real" packages (contain pyproject.toml or setup.py) are pip-installable
 *   as-is and are copied to /deps/<name>.
 * - "faux" packages get a generated pyproject.toml and are copied to
 *   /deps/__outer_<name>/{<name>|src} depending on flat vs. src layout.
 *
 * @param {string} configPath - absolute path of langgraph.json
 * @param {object} config - parsed langgraph.json contents
 * @returns {Promise<object>} {
 *   pipReqs:      [hostRelPath, containerPath] pairs for requirements.txt files,
 *   realPkgs:     map of resolved host dir -> original dependency string,
 *   fauxPkgs:     map of resolved host dir -> [dependency string, container path],
 *   workingDir:   container dir for the "." dependency (if any),
 *   reloadDir:    container dir of the last non-"." faux package (if any),
 *   rebuildFiles: files whose change should trigger an image rebuild,
 * }
 * @throws when a local dep is missing, not a directory, escapes the project
 *         root, or shadows a reserved package name.
 */
export async function assembleLocalDeps(configPath, config) {
    // Names that would shadow the server's own runtime dependencies inside
    // the container; local packages must not reuse them.
    const reserved = new Set([
        "src",
        "langgraph-api",
        "langgraph_api",
        "langgraph",
        "langchain-core",
        "langchain_core",
        "pydantic",
        "orjson",
        "fastapi",
        "uvicorn",
        "psycopg",
        "httpx",
        "langsmith",
    ]);
    // Rejects reserved/duplicate names, then claims the name so that two
    // local deps cannot collide with each other either.
    function checkReserved(name, ref) {
        if (reserved.has(name)) {
            throw new Error(`Package name '${name}' used in local dep '${ref}' is reserved. Rename the directory.`);
        }
        reserved.add(name);
    }
    const pipReqs = [];
    const realPkgs = {};
    const fauxPkgs = {};
    const rebuildFiles = [];
    let workingDir;
    let reloadDir;
    const dependencies = "dependencies" in config ? config.dependencies : [];
    for (const localDep of dependencies) {
        // Only path-based deps (./foo, .) are local; PyPI specs are skipped.
        if (!localDep.startsWith("."))
            continue;
        const resolved = path.resolve(path.dirname(configPath), localDep);
        if (!(await exists(resolved))) {
            throw new Error(`Could not find local dependency: ${resolved}`);
        }
        else if (!(await fs.stat(resolved)).isDirectory()) {
            throw new Error(`Local dependency must be a directory: ${resolved}`);
        }
        else if (!resolved.startsWith(path.dirname(configPath))) {
            throw new Error(`Local dependency must be a subdirectory of the config file: ${resolved}`);
        }
        // if it's installable, add it to local_pkgs
        // otherwise, add it to faux_pkgs, and create a pyproject.toml
        const files = await fs.readdir(resolved);
        if (files.includes("pyproject.toml") || files.includes("setup.py")) {
            realPkgs[resolved] = localDep;
            if (localDep === ".") {
                workingDir = `/deps/${path.basename(resolved)}`;
            }
            // Changes to packaging metadata require a full image rebuild.
            if (files.includes("pyproject.toml")) {
                rebuildFiles.push(path.resolve(resolved, "pyproject.toml"));
            }
            if (files.includes("setup.py")) {
                rebuildFiles.push(path.resolve(resolved, "setup.py"));
            }
        }
        else {
            let containerPath;
            if (files.includes("__init__.py")) {
                // flat layout
                if (path.basename(resolved).includes("-")) {
                    throw new Error(`Package name '${path.basename(resolved)}' contains a hyphen. Rename the directory to use it as flat-layout package.`);
                }
                checkReserved(path.basename(resolved), localDep);
                containerPath = `/deps/__outer_${path.basename(resolved)}/${path.basename(resolved)}`;
            }
            else {
                // src-style layout: reserve each subdirectory that contains at
                // least one .py file, since each becomes an importable package.
                containerPath = `/deps/__outer_${path.basename(resolved)}/src`;
                for (const file of files) {
                    const rfile = path.resolve(resolved, file);
                    if (file !== "__pycache__" &&
                        !file.startsWith(".") &&
                        (await fs.stat(rfile)).isDirectory()) {
                        try {
                            for (const subfile of await fs.readdir(rfile)) {
                                if (subfile.endsWith(".py")) {
                                    checkReserved(file, localDep);
                                    break;
                                }
                            }
                        }
                        catch {
                            // pass
                        }
                    }
                }
            }
            fauxPkgs[resolved] = [localDep, containerPath];
            if (localDep === ".") {
                workingDir = containerPath;
            }
            else {
                // NOTE(review): only the last non-"." faux package wins here —
                // confirm multiple reload dirs are not expected.
                reloadDir = containerPath;
            }
            if (files.includes("requirements.txt")) {
                const rfile = path.resolve(resolved, "requirements.txt");
                rebuildFiles.push(rfile);
                // Store the host path POSIX-style so it is usable in a Dockerfile.
                pipReqs.push([
                    path
                        .relative(path.dirname(configPath), rfile)
                        .split(path.sep)
                        .join("/"),
                    `${containerPath}/requirements.txt`,
                ]);
            }
        }
    }
    if ("node_version" in config) {
        // JS projects: any lockfile/manifest change triggers a rebuild.
        for (const name of [
            "package.json",
            "package-lock.json",
            "yarn.lock",
            "pnpm-lock.yaml",
            "bun.lockb",
        ]) {
            const jsFile = path
                .resolve(path.dirname(configPath), name)
                .split(path.sep)
                .join("/");
            rebuildFiles.push(jsFile);
        }
        workingDir ??= `/deps/${path.basename(path.dirname(configPath))}`;
    }
    return { pipReqs, realPkgs, fauxPkgs, workingDir, reloadDir, rebuildFiles };
}
|
|
140
|
+
/**
 * Rewrite file-based graph specs in `config.graphs` (e.g. "./agent.py:graph")
 * so their module part points at the file's in-container location under /deps.
 * Module-style specs ("pkg.mod:attr", no "/") are left untouched.
 *
 * Mutates `config.graphs` in place.
 *
 * @param {string} configPath - absolute path of langgraph.json
 * @param {object} config - parsed config; `graphs` maps id -> "module:attr"
 *                          or { path, description }
 * @param {object} localDeps - result of assembleLocalDeps()
 * @throws on a malformed import string, a missing/non-file local module, or a
 *         module not covered by any entry in `dependencies`.
 */
async function updateGraphPaths(configPath, config, localDeps) {
    for (const [graphId, graphDef] of Object.entries(config.graphs)) {
        const importStr = typeof graphDef === "string" ? graphDef : graphDef.path;
        const description = typeof graphDef === "string" ? undefined : graphDef.description;
        let [moduleStr, attrStr] = importStr.split(":", 2);
        if (!moduleStr || !attrStr) {
            throw new Error(`Import string "${importStr}" must be in format "<module>:<attribute>".`);
        }
        if (moduleStr.includes("/")) {
            const resolved = path.resolve(path.dirname(configPath), moduleStr);
            if (!(await exists(resolved))) {
                throw new Error(`Could not find local module: ${resolved}`);
            }
            else if (!(await fs.stat(resolved)).isFile()) {
                throw new Error(`Local module must be a file: ${resolved}`);
            }
            else {
                // Map the host file onto its container path via whichever local
                // package (real first, then faux) contains it.
                let containerModule;
                for (const realPath of Object.keys(localDeps.realPkgs)) {
                    if (resolved.startsWith(realPath)) {
                        containerModule = path
                            .join("/deps", path.basename(realPath), path.relative(realPath, resolved))
                            .split(path.sep)
                            .join("/");
                        break;
                    }
                }
                if (containerModule == null) {
                    for (const [fauxPkg, [, destPath]] of Object.entries(localDeps.fauxPkgs)) {
                        if (resolved.startsWith(fauxPkg)) {
                            containerModule = path
                                .join(destPath, path.relative(fauxPkg, resolved))
                                .split(path.sep)
                                .join("/");
                            break;
                        }
                    }
                }
                // BUG FIX: this error used to be thrown inside the faux-package
                // loop, so the first non-matching entry aborted the search before
                // later entries could match (and no error fired when both lookup
                // tables were empty). Throw only after all candidates fail.
                if (containerModule == null) {
                    throw new Error(`Module '${importStr}' not found in 'dependencies' list. Add its containing package to 'dependencies' list.`);
                }
                moduleStr = containerModule;
                const resolvedPath = `${moduleStr}:${attrStr}`;
                config["graphs"][graphId] = description
                    ? {
                        path: resolvedPath,
                        description,
                    }
                    : resolvedPath;
            }
        }
    }
}
|
|
189
|
+
/**
 * Resolve the Docker base image for the project. JS configs (node_version)
 * take precedence over Python configs; `_INTERNAL_docker_tag` overrides the
 * version-derived tag when set.
 *
 * @param {object} config - parsed langgraph.json
 * @returns {string} fully-qualified image reference
 * @throws when the config declares neither a node nor a python version.
 */
export function getBaseImage(config) {
    const overrideTag = config._INTERNAL_docker_tag;
    if ("node_version" in config) {
        return `langchain/langgraphjs-api:${overrideTag || config.node_version}`;
    }
    if ("python_version" in config) {
        return `langchain/langgraph-api:${overrideTag || config.python_version}`;
    }
    throw new Error("Invalid config type");
}
|
|
198
|
+
/**
 * Generate the Dockerfile text for a LangGraph project.
 *
 * Emits, in order: the base image, user-supplied dockerfile_lines, pip config,
 * PyPI deps, requirements files, local package ADDs, generated pyproject.toml
 * stanzas for faux packages, editable installs, ENV lines carrying graphs /
 * ui / store / auth config, WORKDIR, and (for JS) the install + prebuild RUNs.
 * Also rewrites file-based graph paths in `config` via updateGraphPaths.
 *
 * @param {string} configPath - absolute path of langgraph.json
 * @param {object} config - parsed config (mutated: graph paths rewritten)
 * @param {object} localDeps - result of assembleLocalDeps()
 * @param {object} [options] - { watch?, dockerCommand? }
 * @returns {Promise<string>} newline-joined Dockerfile contents.
 */
export async function configToDocker(configPath, config, localDeps, options) {
    // figure out the package manager used here
    const testFile = async (file) => fs
        .stat(path.resolve(path.dirname(configPath), file))
        .then((a) => a.isFile())
        .catch(() => false);
    // Constrain pip installs against the server's pinned versions and skip
    // .pyc generation to keep layers small.
    let pipInstall = `PYTHONDONTWRITEBYTECODE=1 pip install -c /api/constraints.txt`;
    if ("python_version" in config && config.pip_config_file) {
        pipInstall = `PIP_CONFIG_FILE=/pipconfig.txt ${pipInstall}`;
    }
    // BuildKit cache mount so repeated builds reuse downloaded wheels.
    pipInstall = `--mount=type=cache,target=/root/.cache/pip ${pipInstall}`;
    const pipConfigFile = "python_version" in config && config.pip_config_file
        ? `ADD ${config.pip_config_file} /pipconfig.txt`
        : undefined;
    // Non-local (PyPI) dependency specs.
    const _pypiDeps = "python_version" in config
        ? config.dependencies.filter((dep) => !dep.startsWith("."))
        : [];
    await updateGraphPaths(configPath, config, localDeps);
    const pipPkgs = _pypiDeps.length
        ? `RUN ${pipInstall} ${_pypiDeps.join(" ")}`
        : undefined;
    // ADD each requirements.txt, then install them all in one RUN layer.
    const pipReqs = localDeps.pipReqs.map(([reqpath, destpath]) => `ADD ${reqpath} ${destpath}`);
    if (pipReqs.length) {
        pipReqs.push(`RUN ${pipInstall} ${localDeps.pipReqs
            .map(([, r]) => `-r ${r}`)
            .join(" ")}`);
    }
    const localPkg = Object.entries(localDeps.realPkgs).map(([fullpath, relpath]) => `ADD ${relpath} /deps/${path.basename(fullpath)}`);
    // For faux packages, also synthesize a minimal pyproject.toml inside the
    // image so pip can install them editable. `$$line` escapes `$` for
    // docker-compose variable interpolation; a plain `docker build` needs the
    // unescaped `$line`.
    const fauxPkgs = Object.entries(localDeps.fauxPkgs).flatMap(([fullpath, [relpath, destpath]]) => [
        `ADD ${relpath} ${destpath}`,
        dedenter `
        RUN set -ex && \
            for line in '[project]' \
                        'name = "${path.basename(fullpath)}"' \
                        'version = "0.1"' \
                        '[tool.setuptools.package-data]' \
                        '"*" = ["**/*"]'; do \
                echo "${options?.dockerCommand === "build" ? "$line" : "$$line"}" >> /deps/__outer_${path.basename(fullpath)}/pyproject.toml; \
            done
        `,
    ]);
    // JS project with no local packages at all: copy the whole context.
    if (!pipReqs.length &&
        !localPkg.length &&
        !fauxPkgs.length &&
        "node_version" in config) {
        pipReqs.push(`ADD . ${localDeps.workingDir}`);
    }
    // Detect the JS package manager from whichever lockfile exists.
    const [npm, yarn, pnpm, bun] = await Promise.all([
        testFile("package-lock.json"),
        testFile("yarn.lock"),
        testFile("pnpm-lock.yaml"),
        testFile("bun.lockb"),
    ]);
    let installCmd = "npm i";
    if (yarn) {
        installCmd = "yarn install";
    }
    else if (pnpm) {
        installCmd = "pnpm i --frozen-lockfile";
    }
    else if (npm) {
        installCmd = "npm ci";
    }
    else if (bun) {
        installCmd = "bun i";
    }
    // Assemble the Dockerfile; falsy/undefined entries are dropped below.
    const lines = [
        `FROM ${getBaseImage(config)}`,
        config.dockerfile_lines,
        pipConfigFile,
        pipPkgs,
        pipReqs,
        localPkg,
        fauxPkgs,
        "python_version" in config ? `RUN ${pipInstall} -e /deps/*` : undefined,
        `ENV LANGSERVE_GRAPHS='${JSON.stringify(config.graphs)}'`,
        !!config.ui && `ENV LANGGRAPH_UI='${JSON.stringify(config.ui)}'`,
        !!config.ui_config &&
            `ENV LANGGRAPH_UI_CONFIG='${JSON.stringify(config.ui_config)}'`,
        !!config.store && `ENV LANGGRAPH_STORE='${JSON.stringify(config.store)}'`,
        !!config.auth && `ENV LANGGRAPH_AUTH='${JSON.stringify(config.auth)}'`,
        !!localDeps.workingDir && `WORKDIR ${localDeps.workingDir}`,
        "node_version" in config
            ? [
                `RUN ${installCmd}`,
                `RUN (test ! -f /api/langgraph_api/js/build.mts && echo "Prebuild script not found, skipping") || tsx /api/langgraph_api/js/build.mts`,
            ]
            : undefined,
    ];
    if (options?.watch && (localDeps.workingDir || localDeps.reloadDir)) {
        // TODO: hacky, should add as entrypoint to the langgraph-api base image
        lines.push(`CMD exec uvicorn langgraph_api.server:app --log-config /api/logging.json --no-access-log --host 0.0.0.0 --port 8000 --reload --reload-dir ${localDeps.workingDir || localDeps.reloadDir}`);
    }
    return lines.flat().filter(Boolean).join("\n");
}
|
|
293
|
+
/**
 * Build the Compose `develop.watch` entries for hot-reload during `dev`/`up`.
 *
 * Packaging/manifest files map to "rebuild" actions; source directories map
 * to "sync" (or "rebuild" when no container target directory is known). All
 * emitted paths use forward slashes regardless of host platform.
 *
 * @param {string} configPath - absolute path of langgraph.json
 * @param {object} config - parsed config
 * @param {object} localDeps - result of assembleLocalDeps()
 * @returns {Promise<object[] | undefined>} watch entries, or undefined if none.
 */
export async function configToWatch(configPath, config, localDeps) {
    const projectDir = path.dirname(configPath);
    // Python projects sync each local dependency; JS projects sync the root.
    const sources = "python_version" in config
        ? config.dependencies.filter((dep) => dep.startsWith("."))
        : ["."];
    const ignore = "node_version" in config
        ? ["node_modules", "langgraph.json"]
        : ["langgraph.json"];
    if (typeof config.env === "string") {
        ignore.push(config.env);
    }
    const watch = [];
    for (const absPath of localDeps.rebuildFiles) {
        const relative = path.relative(projectDir, absPath);
        const alreadyTracked = watch.some((entry) => entry.path === relative);
        if (alreadyTracked) {
            continue;
        }
        watch.push({ path: relative, action: "rebuild" });
        // Rebuild triggers must not also fire the sync watchers below.
        ignore.push(relative);
    }
    const target = localDeps.workingDir || localDeps.reloadDir;
    for (const source of sources) {
        watch.push({
            path: source,
            action: target ? "sync" : "rebuild",
            target: target,
            ignore: ignore,
        });
    }
    if (watch.length === 0) {
        return undefined;
    }
    // Normalize every path to POSIX separators for the compose file.
    const toPosix = (value) => value.split(path.sep).join("/");
    return watch.map((entry) => ({
        ...entry,
        path: toPosix(entry.path),
        target: entry.target?.split(path.sep).join("/"),
        ignore: entry.ignore?.map((value) => toPosix(value)),
    }));
}
|
|
331
|
+
/**
 * Build the Compose service definition for the langgraph-api container from
 * the project config: an inline Dockerfile build, environment wiring, and
 * optional `develop.watch` entries.
 *
 * Environment precedence: an env file (config.env as string) or an inline env
 * map (config.env as object) always wins over options.extendEnv; when
 * config.env is an array, it lists variable names that must be present after
 * merging, and a missing one is an error.
 *
 * @param {string} configPath - absolute path of langgraph.json
 * @param {object} config - parsed config (graph paths get rewritten)
 * @param {object} [options] - { watch?, extendEnv?, dockerCommand? }
 * @returns {Promise<{ apiDef: object, rewrite?: { source, target } }>}
 *          apiDef feeds createCompose(); rewrite maps host project dir to the
 *          container working dir.
 * @throws when the env file cannot be read or required env vars are missing.
 */
export async function configToCompose(configPath, config, options) {
    const result = {};
    const localDeps = await assembleLocalDeps(configPath, config);
    const inline = await configToDocker(configPath, config, localDeps, options);
    result.pull_policy = "build";
    // Embed the generated Dockerfile directly in the compose file.
    result.build = {
        context: ".",
        dockerfile_inline: inline + "\n",
    };
    // Keys that must NOT be overridden by options.extendEnv later.
    const extendEnvIgnore = new Set();
    if (typeof config.env === "string") {
        // try to parse out the env file
        const envPath = path.resolve(path.dirname(configPath), config.env);
        try {
            // Naive KEY=VALUE parsing: only the key before the first "=" is kept.
            const envFileKeys = (await fs.readFile(envPath, "utf-8"))
                .split("\n")
                .map((lines) => lines.trim().split("=").at(0));
            for (const key of envFileKeys) {
                if (key)
                    extendEnvIgnore.add(key);
            }
        }
        catch {
            throw new Error(`Could not read env file: ${envPath}`);
        }
        result.env_file = config.env;
    }
    else if (!Array.isArray(config.env)) {
        // Inline env map: copy values and shield them from extendEnv.
        Object.entries(config.env).forEach(([k, v]) => {
            result.environment ??= {};
            result.environment[k] = v;
            extendEnvIgnore.add(k);
        });
    }
    if (options?.watch) {
        const watch = await configToWatch(configPath, config, localDeps);
        if (watch)
            result.develop = { watch };
    }
    if (options?.extendEnv) {
        Object.entries(options.extendEnv).forEach(([k, v]) => {
            if (extendEnvIgnore.has(k))
                return;
            result.environment ??= {};
            result.environment[k] = v;
        });
    }
    if (Array.isArray(config.env)) {
        // check if all the environment variables are present or not
        const missing = config.env.filter((k) => !result.environment?.[k]);
        if (missing.length)
            throw new Error(`Missing environment variables: ${missing.join(", ")}`);
    }
    return {
        apiDef: result,
        rewrite: localDeps.workingDir
            ? { source: path.dirname(configPath), target: localDeps.workingDir }
            : undefined,
    };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { $ } from "execa";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
let PATH = undefined;
|
|
4
|
+
// TODO: macOS related only
|
|
5
|
+
/**
 * Look up the current user's login shell via macOS Directory Services.
 * @returns {Promise<string>} e.g. "/bin/zsh"
 */
async function getUserShell() {
    const command = await $({
        shell: true,
    }) `dscl . -read ~/ UserShell | sed 's/UserShell: //'`;
    const shellPath = command.stdout.trim();
    return shellPath;
}
|
|
11
|
+
/**
 * Resolve with `PATH` only if the `docker` binary is discoverable on it;
 * rejects (propagating the `which` failure) otherwise.
 * @param {string} PATH - candidate PATH value
 * @returns {Promise<string>} the same PATH, once verified.
 */
async function verifyDockerPath(PATH) {
    const env = { PATH };
    await $({ env }) `which docker`;
    return PATH;
}
|
|
15
|
+
// TODO: macOS related only
|
|
16
|
+
/**
 * Obtain the user's PATH by echoing it from a login shell (GUI-launched
 * processes on macOS inherit a minimal PATH). Falls back to /bin/zsh when the
 * login shell cannot be determined.
 * @returns {Promise<string>} the trimmed PATH string.
 */
async function extractPathFromShell() {
    const pathToShell = await getUserShell().catch(() => "/bin/zsh");
    // csh-family shells have no combined "-lc" login flag.
    let args;
    if (pathToShell.includes("csh")) {
        args = ["-c", "echo $PATH"];
    }
    else {
        args = ["-lc", "echo $PATH"];
    }
    const result = await $(pathToShell, args);
    return result.stdout.trim();
}
|
|
24
|
+
// TODO: macOS related only
|
|
25
|
+
/**
 * Fallback PATH guess covering common system, Homebrew, Docker Desktop, and
 * Rancher Desktop binary locations on macOS.
 * @returns {Promise<string>} a colon-joined PATH string.
 */
async function guessUserPath() {
    const home = homedir();
    const candidates = [
        "/bin",
        "/usr/bin",
        "/sbin",
        "/usr/sbin",
        "/opt/homebrew/bin",
        "/opt/homebrew/sbin",
        `${home}/.local/bin`,
        "/Applications/Docker.app/Contents/Resources/bin",
        `${home}/.docker/bin`,
        // support for Rancher Desktop
        // https://github.com/langchain-ai/langgraph-studio/issues/24#issuecomment-2274046328
        // https://github.com/langchain-ai/langgraph-studio/issues/122
        `${home}/.rd/bin`,
        `/Applications/Rancher Desktop.app/Contents/Resources/resources/darwin/bin`,
    ];
    return candidates.join(":");
}
|
|
43
|
+
/**
 * Resolve (and cache in the module-level `PATH`) a PATH value on which the
 * `docker` binary is discoverable. Tries the login-shell PATH first, then the
 * hard-coded macOS candidate list; each candidate is validated with
 * `which docker` via verifyDockerPath.
 *
 * @returns {Promise<{ PATH: string }>} env fragment for execa.
 * @throws the underlying failure when neither source yields a usable PATH.
 */
async function getLoginPath() {
    // Cached from a previous successful resolution; skip the expensive probes.
    if (PATH)
        return { PATH };
    // Run both probes concurrently; allSettled so a shell failure still lets
    // the guessed-path fallback win.
    const [fromShell, fromBackup] = await Promise.allSettled([extractPathFromShell(), guessUserPath()].map((promise) => promise.then(verifyDockerPath)));
    if (fromShell.status === "fulfilled") {
        PATH = fromShell.value;
    }
    else if (fromBackup.status === "fulfilled") {
        PATH = fromBackup.value;
    }
    else {
        console.error("Failed to get PATH from shell or backup", fromShell.reason, fromBackup.reason);
        throw fromShell.reason || fromBackup.reason;
    }
    return { PATH };
}
|
|
59
|
+
/**
 * Build execa options whose `env` contains a PATH where `docker` is known to
 * be discoverable (see getLoginPath).
 *
 * NOTE(review): the resolved `env` (PATH only) replaces any `env` supplied in
 * `options` — confirm callers never pass additional environment variables.
 *
 * @param {object} [options] - base execa options to extend.
 * @returns {Promise<object>} options merged with the docker-capable env.
 */
export async function getExecaOptions(options) {
    const env = await getLoginPath();
    return { ...options, env };
}
|