@langchain/langgraph-cli 1.1.2 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -5
- package/CHANGELOG.md +0 -299
- package/dist/cli/build.mjs +0 -52
- package/dist/cli/cli.mjs +0 -13
- package/dist/cli/cloudflare.mjs +0 -172
- package/dist/cli/dev.mjs +0 -143
- package/dist/cli/dev.python.mjs +0 -129
- package/dist/cli/docker.mjs +0 -114
- package/dist/cli/new.mjs +0 -13
- package/dist/cli/sysinfo.mjs +0 -63
- package/dist/cli/up.mjs +0 -139
- package/dist/cli/utils/analytics.mjs +0 -39
- package/dist/cli/utils/builder.mjs +0 -7
- package/dist/cli/utils/ipc/server.mjs +0 -93
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +0 -29
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +0 -40
- package/dist/cli/utils/project.mjs +0 -18
- package/dist/cli/utils/stream.mjs +0 -90
- package/dist/cli/utils/version.mjs +0 -13
- package/dist/docker/compose.mjs +0 -185
- package/dist/docker/docker.mjs +0 -390
- package/dist/docker/shell.mjs +0 -62
- package/dist/utils/config.mjs +0 -104
- package/dist/utils/logging.mjs +0 -96
package/dist/docker/docker.mjs
DELETED
@@ -1,390 +0,0 @@
-import dedent from "dedent";
-import * as path from "node:path";
-import { promises as fs } from "node:fs";
-const dedenter = dedent.withOptions({ escapeSpecialCharacters: false });
-async function exists(path) {
-    try {
-        await fs.stat(path);
-        return true;
-    }
-    catch {
-        return false;
-    }
-}
-export async function assembleLocalDeps(configPath, config) {
-    const reserved = new Set([
-        "src",
-        "langgraph-api",
-        "langgraph_api",
-        "langgraph",
-        "langchain-core",
-        "langchain_core",
-        "pydantic",
-        "orjson",
-        "fastapi",
-        "uvicorn",
-        "psycopg",
-        "httpx",
-        "langsmith",
-    ]);
-    function checkReserved(name, ref) {
-        if (reserved.has(name)) {
-            throw new Error(`Package name '${name}' used in local dep '${ref}' is reserved. Rename the directory.`);
-        }
-        reserved.add(name);
-    }
-    const pipReqs = [];
-    const realPkgs = {};
-    const fauxPkgs = {};
-    const rebuildFiles = [];
-    let workingDir;
-    let reloadDir;
-    const dependencies = "dependencies" in config ? config.dependencies : [];
-    for (const localDep of dependencies) {
-        if (!localDep.startsWith("."))
-            continue;
-        const resolved = path.resolve(path.dirname(configPath), localDep);
-        if (!(await exists(resolved))) {
-            throw new Error(`Could not find local dependency: ${resolved}`);
-        }
-        else if (!(await fs.stat(resolved)).isDirectory()) {
-            throw new Error(`Local dependency must be a directory: ${resolved}`);
-        }
-        else if (!resolved.startsWith(path.dirname(configPath))) {
-            throw new Error(`Local dependency must be a subdirectory of the config file: ${resolved}`);
-        }
-        // if it's installable, add it to local_pkgs
-        // otherwise, add it to faux_pkgs, and create a pyproject.toml
-        const files = await fs.readdir(resolved);
-        if (files.includes("pyproject.toml") || files.includes("setup.py")) {
-            realPkgs[resolved] = localDep;
-            if (localDep === ".") {
-                workingDir = `/deps/${path.basename(resolved)}`;
-            }
-            if (files.includes("pyproject.toml")) {
-                rebuildFiles.push(path.resolve(resolved, "pyproject.toml"));
-            }
-            if (files.includes("setup.py")) {
-                rebuildFiles.push(path.resolve(resolved, "setup.py"));
-            }
-        }
-        else {
-            let containerPath;
-            if (files.includes("__init__.py")) {
-                // flat layout
-                if (path.basename(resolved).includes("-")) {
-                    throw new Error(`Package name '${path.basename(resolved)}' contains a hyphen. Rename the directory to use it as flat-layout package.`);
-                }
-                checkReserved(path.basename(resolved), localDep);
-                containerPath = `/deps/__outer_${path.basename(resolved)}/${path.basename(resolved)}`;
-            }
-            else {
-                containerPath = `/deps/__outer_${path.basename(resolved)}/src`;
-                for (const file of files) {
-                    const rfile = path.resolve(resolved, file);
-                    if (file !== "__pycache__" &&
-                        !file.startsWith(".") &&
-                        (await fs.stat(rfile)).isDirectory()) {
-                        try {
-                            for (const subfile of await fs.readdir(rfile)) {
-                                if (subfile.endsWith(".py")) {
-                                    checkReserved(file, localDep);
-                                    break;
-                                }
-                            }
-                        }
-                        catch {
-                            // pass
-                        }
-                    }
-                }
-            }
-            fauxPkgs[resolved] = [localDep, containerPath];
-            if (localDep === ".") {
-                workingDir = containerPath;
-            }
-            else {
-                reloadDir = containerPath;
-            }
-            if (files.includes("requirements.txt")) {
-                const rfile = path.resolve(resolved, "requirements.txt");
-                rebuildFiles.push(rfile);
-                pipReqs.push([
-                    path
-                        .relative(path.dirname(configPath), rfile)
-                        .split(path.sep)
-                        .join("/"),
-                    `${containerPath}/requirements.txt`,
-                ]);
-            }
-        }
-    }
-    if ("node_version" in config) {
-        for (const name of [
-            "package.json",
-            "package-lock.json",
-            "yarn.lock",
-            "pnpm-lock.yaml",
-            "bun.lockb",
-        ]) {
-            const jsFile = path
-                .resolve(path.dirname(configPath), name)
-                .split(path.sep)
-                .join("/");
-            rebuildFiles.push(jsFile);
-        }
-        workingDir ??= `/deps/${path.basename(path.dirname(configPath))}`;
-    }
-    return { pipReqs, realPkgs, fauxPkgs, workingDir, reloadDir, rebuildFiles };
-}
-async function updateGraphPaths(configPath, config, localDeps) {
-    for (const [graphId, graphDef] of Object.entries(config.graphs)) {
-        const importStr = typeof graphDef === "string" ? graphDef : graphDef.path;
-        const description = typeof graphDef === "string" ? undefined : graphDef.description;
-        let [moduleStr, attrStr] = importStr.split(":", 2);
-        if (!moduleStr || !attrStr) {
-            throw new Error(`Import string "${importStr}" must be in format "<module>:<attribute>".`);
-        }
-        if (moduleStr.includes("/")) {
-            const resolved = path.resolve(path.dirname(configPath), moduleStr);
-            if (!(await exists(resolved))) {
-                throw new Error(`Could not find local module: ${resolved}`);
-            }
-            else if (!(await fs.stat(resolved)).isFile()) {
-                throw new Error(`Local module must be a file: ${resolved}`);
-            }
-            else {
-                find: {
-                    for (const realPath of Object.keys(localDeps.realPkgs)) {
-                        if (resolved.startsWith(realPath)) {
-                            moduleStr = path
-                                .join("/deps", path.basename(realPath), path.relative(realPath, resolved))
-                                .split(path.sep)
-                                .join("/");
-                            break find;
-                        }
-                    }
-                    for (const [fauxPkg, [_, destPath]] of Object.entries(localDeps.fauxPkgs)) {
-                        if (resolved.startsWith(fauxPkg)) {
-                            moduleStr = path
-                                .join(destPath, path.relative(fauxPkg, resolved))
-                                .split(path.sep)
-                                .join("/");
-                            break find;
-                        }
-                        throw new Error(`Module '${importStr}' not found in 'dependencies' list. Add its containing package to 'dependencies' list.`);
-                    }
-                }
-                const resolvedPath = `${moduleStr}:${attrStr}`;
-                config["graphs"][graphId] = description
-                    ? {
-                        path: resolvedPath,
-                        description,
-                    }
-                    : resolvedPath;
-            }
-        }
-    }
-}
-export function getBaseImage(config) {
-    if ("node_version" in config) {
-        return `langchain/langgraphjs-api:${config._INTERNAL_docker_tag || config.node_version}`;
-    }
-    if ("python_version" in config) {
-        return `langchain/langgraph-api:${config._INTERNAL_docker_tag || config.python_version}`;
-    }
-    throw new Error("Invalid config type");
-}
-export async function configToDocker(configPath, config, localDeps, options) {
-    // figure out the package manager used here
-    const testFile = async (file) => fs
-        .stat(path.resolve(path.dirname(configPath), file))
-        .then((a) => a.isFile())
-        .catch(() => false);
-    let pipInstall = `PYTHONDONTWRITEBYTECODE=1 pip install -c /api/constraints.txt`;
-    if ("python_version" in config && config.pip_config_file) {
-        pipInstall = `PIP_CONFIG_FILE=/pipconfig.txt ${pipInstall}`;
-    }
-    pipInstall = `--mount=type=cache,target=/root/.cache/pip ${pipInstall}`;
-    const pipConfigFile = "python_version" in config && config.pip_config_file
-        ? `ADD ${config.pip_config_file} /pipconfig.txt`
-        : undefined;
-    const _pypiDeps = "python_version" in config
-        ? config.dependencies.filter((dep) => !dep.startsWith("."))
-        : [];
-    await updateGraphPaths(configPath, config, localDeps);
-    const pipPkgs = _pypiDeps.length
-        ? `RUN ${pipInstall} ${_pypiDeps.join(" ")}`
-        : undefined;
-    const pipReqs = localDeps.pipReqs.map(([reqpath, destpath]) => `ADD ${reqpath} ${destpath}`);
-    if (pipReqs.length) {
-        pipReqs.push(`RUN ${pipInstall} ${localDeps.pipReqs
-            .map(([, r]) => `-r ${r}`)
-            .join(" ")}`);
-    }
-    const localPkg = Object.entries(localDeps.realPkgs).map(([fullpath, relpath]) => `ADD ${relpath} /deps/${path.basename(fullpath)}`);
-    const fauxPkgs = Object.entries(localDeps.fauxPkgs).flatMap(([fullpath, [relpath, destpath]]) => [
-        `ADD ${relpath} ${destpath}`,
-        dedenter `
-        RUN set -ex && \
-            for line in '[project]' \
-                        'name = "${path.basename(fullpath)}"' \
-                        'version = "0.1"' \
-                        '[tool.setuptools.package-data]' \
-                        '"*" = ["**/*"]'; do \
-                echo "${options?.dockerCommand === "build" ? "$line" : "$$line"}" >> /deps/__outer_${path.basename(fullpath)}/pyproject.toml; \
-            done
-        `,
-    ]);
-    if (!pipReqs.length &&
-        !localPkg.length &&
-        !fauxPkgs.length &&
-        "node_version" in config) {
-        pipReqs.push(`ADD . ${localDeps.workingDir}`);
-    }
-    const [npm, yarn, pnpm, bun] = await Promise.all([
-        testFile("package-lock.json"),
-        testFile("yarn.lock"),
-        testFile("pnpm-lock.yaml"),
-        testFile("bun.lockb"),
-    ]);
-    let installCmd = "npm i";
-    if (yarn) {
-        installCmd = "yarn install";
-    }
-    else if (pnpm) {
-        installCmd = "pnpm i --frozen-lockfile";
-    }
-    else if (npm) {
-        installCmd = "npm ci";
-    }
-    else if (bun) {
-        installCmd = "bun i";
-    }
-    const lines = [
-        `FROM ${getBaseImage(config)}`,
-        config.dockerfile_lines,
-        pipConfigFile,
-        pipPkgs,
-        pipReqs,
-        localPkg,
-        fauxPkgs,
-        "python_version" in config ? `RUN ${pipInstall} -e /deps/*` : undefined,
-        `ENV LANGSERVE_GRAPHS='${JSON.stringify(config.graphs)}'`,
-        !!config.ui && `ENV LANGGRAPH_UI='${JSON.stringify(config.ui)}'`,
-        !!config.ui_config &&
-            `ENV LANGGRAPH_UI_CONFIG='${JSON.stringify(config.ui_config)}'`,
-        !!config.store && `ENV LANGGRAPH_STORE='${JSON.stringify(config.store)}'`,
-        !!config.auth && `ENV LANGGRAPH_AUTH='${JSON.stringify(config.auth)}'`,
-        !!localDeps.workingDir && `WORKDIR ${localDeps.workingDir}`,
-        "node_version" in config
-            ? [
-                `RUN ${installCmd}`,
-                `RUN (test ! -f /api/langgraph_api/js/build.mts && echo "Prebuild script not found, skipping") || tsx /api/langgraph_api/js/build.mts`,
-            ]
-            : undefined,
-    ];
-    if (options?.watch && (localDeps.workingDir || localDeps.reloadDir)) {
-        // TODO: hacky, should add as entrypoint to the langgraph-api base image
-        lines.push(`CMD exec uvicorn langgraph_api.server:app --log-config /api/logging.json --no-access-log --host 0.0.0.0 --port 8000 --reload --reload-dir ${localDeps.workingDir || localDeps.reloadDir}`);
-    }
-    return lines.flat().filter(Boolean).join("\n");
-}
-export async function configToWatch(configPath, config, localDeps) {
-    const projectDir = path.dirname(configPath);
-    const watch = [];
-    const watchSources = "python_version" in config
-        ? config.dependencies.filter((dep) => dep.startsWith("."))
-        : ["."];
-    const watchIgnore = "node_version" in config
-        ? ["node_modules", "langgraph.json"]
-        : ["langgraph.json"];
-    if (typeof config.env === "string") {
-        watchIgnore.push(config.env);
-    }
-    for (const absPath of localDeps.rebuildFiles) {
-        const relative = path.relative(projectDir, absPath);
-        if (watch.find((i) => i.path === relative))
-            continue;
-        watch.push({ path: relative, action: "rebuild" });
-        watchIgnore.push(relative);
-    }
-    for (const source of watchSources) {
-        const target = localDeps.workingDir || localDeps.reloadDir;
-        watch.push({
-            path: source,
-            action: target ? "sync" : "rebuild",
-            target: target,
-            ignore: watchIgnore,
-        });
-    }
-    if (watch.length > 0) {
-        return watch.map((item) => ({
-            ...item,
-            path: item.path.split(path.sep).join("/"),
-            target: item.target?.split(path.sep).join("/"),
-            ignore: item.ignore?.map((i) => i.split(path.sep).join("/")),
-        }));
-    }
-    return undefined;
-}
-export async function configToCompose(configPath, config, options) {
-    const result = {};
-    const localDeps = await assembleLocalDeps(configPath, config);
-    const inline = await configToDocker(configPath, config, localDeps, options);
-    result.pull_policy = "build";
-    result.build = {
-        context: ".",
-        dockerfile_inline: inline + "\n",
-    };
-    const extendEnvIgnore = new Set();
-    if (typeof config.env === "string") {
-        // try to parse out the env file
-        const envPath = path.resolve(path.dirname(configPath), config.env);
-        try {
-            const envFileKeys = (await fs.readFile(envPath, "utf-8"))
-                .split("\n")
-                .map((lines) => lines.trim().split("=").at(0));
-            for (const key of envFileKeys) {
-                if (key)
-                    extendEnvIgnore.add(key);
-            }
-        }
-        catch {
-            throw new Error(`Could not read env file: ${envPath}`);
-        }
-        result.env_file = config.env;
-    }
-    else if (!Array.isArray(config.env)) {
-        Object.entries(config.env).forEach(([k, v]) => {
-            result.environment ??= {};
-            result.environment[k] = v;
-            extendEnvIgnore.add(k);
-        });
-    }
-    if (options?.watch) {
-        const watch = await configToWatch(configPath, config, localDeps);
-        if (watch)
-            result.develop = { watch };
-    }
-    if (options?.extendEnv) {
-        Object.entries(options.extendEnv).forEach(([k, v]) => {
-            if (extendEnvIgnore.has(k))
-                return;
-            result.environment ??= {};
-            result.environment[k] = v;
-        });
-    }
-    if (Array.isArray(config.env)) {
-        // check if all the environment variables are present or not
-        const missing = config.env.filter((k) => !result.environment?.[k]);
-        if (missing.length)
-            throw new Error(`Missing environment variables: ${missing.join(", ")}`);
-    }
-    return {
-        apiDef: result,
-        rewrite: localDeps.workingDir
-            ? { source: path.dirname(configPath), target: localDeps.workingDir }
-            : undefined,
-    };
-}
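For context on how these deleted helpers were meant to chain together, here is an illustrative sketch (not taken from the package): the deep-import specifier, the sample langgraph.json contents, and the /app paths are assumptions, and running it presumes a real project exists at those paths.

import { getBaseImage, configToCompose } from "@langchain/langgraph-cli/dist/docker/docker.mjs"; // assumed deep-import path

// Hypothetical project: /app/langgraph.json declaring a single Python graph.
const configPath = "/app/langgraph.json";
const config = {
    python_version: "3.11",
    dependencies: ["."],
    graphs: { agent: "./src/agent.py:graph" },
    env: { OPENAI_API_KEY: "sk-..." },
    dockerfile_lines: [],
};

console.log(getBaseImage(config)); // -> "langchain/langgraph-api:3.11"

// configToCompose internally calls assembleLocalDeps to scan the local "." dependency
// and configToDocker to render an inline Dockerfile, then wraps the result into a
// compose service fragment (build.dockerfile_inline, environment, optional develop.watch).
const { apiDef } = await configToCompose(configPath, config, { watch: true });
console.log(apiDef.build.dockerfile_inline);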
package/dist/docker/shell.mjs
DELETED
@@ -1,62 +0,0 @@
-import { $ } from "execa";
-import { homedir } from "node:os";
-let PATH = undefined;
-// TODO: macOS related only
-async function getUserShell() {
-    const dscl = await $({
-        shell: true,
-    }) `dscl . -read ~/ UserShell | sed 's/UserShell: //'`;
-    return dscl.stdout.trim();
-}
-async function verifyDockerPath(PATH) {
-    await $({ env: { PATH } }) `which docker`;
-    return PATH;
-}
-// TODO: macOS related only
-async function extractPathFromShell() {
-    const pathToShell = await getUserShell().catch(() => "/bin/zsh");
-    const args = pathToShell.includes("csh")
-        ? ["-c", "echo $PATH"]
-        : ["-lc", "echo $PATH"];
-    const shell = await $(pathToShell, args);
-    return shell.stdout.trim();
-}
-// TODO: macOS related only
-async function guessUserPath() {
-    return [
-        "/bin",
-        "/usr/bin",
-        "/sbin",
-        "/usr/sbin",
-        "/opt/homebrew/bin",
-        "/opt/homebrew/sbin",
-        `${homedir()}/.local/bin`,
-        "/Applications/Docker.app/Contents/Resources/bin",
-        `${homedir()}/.docker/bin`,
-        // support for Rancher Desktop
-        // https://github.com/langchain-ai/langgraph-studio/issues/24#issuecomment-2274046328
-        // https://github.com/langchain-ai/langgraph-studio/issues/122
-        `${homedir()}/.rd/bin`,
-        `/Applications/Rancher Desktop.app/Contents/Resources/resources/darwin/bin`,
-    ].join(":");
-}
-async function getLoginPath() {
-    if (PATH)
-        return { PATH };
-    const [fromShell, fromBackup] = await Promise.allSettled([extractPathFromShell(), guessUserPath()].map((promise) => promise.then(verifyDockerPath)));
-    if (fromShell.status === "fulfilled") {
-        PATH = fromShell.value;
-    }
-    else if (fromBackup.status === "fulfilled") {
-        PATH = fromBackup.value;
-    }
-    else {
-        console.error("Failed to get PATH from shell or backup", fromShell.reason, fromBackup.reason);
-        throw fromShell.reason || fromBackup.reason;
-    }
-    return { PATH };
-}
-export async function getExecaOptions(options) {
-    const env = await getLoginPath();
-    return { ...options, env };
-}
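The module above existed to make the `docker` binary resolvable when the CLI is launched outside a login shell on macOS, where PATH is minimal. A minimal usage sketch, assuming the deep-import path is reachable:

import { execa } from "execa";
import { getExecaOptions } from "@langchain/langgraph-cli/dist/docker/shell.mjs"; // assumed deep-import path

// getExecaOptions merges a PATH recovered from the user's login shell (or, failing that,
// a guessed list that includes Docker Desktop and Rancher Desktop locations) into the
// execa options, so `docker` can be found even when launched from a GUI context.
const options = await getExecaOptions({ stdio: "inherit" });
await execa("docker", ["compose", "version"], options);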
package/dist/utils/config.mjs
DELETED
@@ -1,104 +0,0 @@
-import { z } from "zod/v3";
-import { extname } from "node:path";
-const GraphPathSchema = z.string().refine((i) => i.includes(":"), {
-    message: "Import string must be in format '<file>:<export>'",
-});
-const BaseConfigSchema = z.object({
-    docker_compose_file: z.string().optional(),
-    dockerfile_lines: z.array(z.string()).default([]),
-    graphs: z.record(z.union([
-        GraphPathSchema,
-        z.object({
-            path: GraphPathSchema,
-            description: z.string().optional(),
-        }),
-    ])),
-    ui: z.record(z.string()).optional(),
-    ui_config: z.object({ shared: z.array(z.string()).optional() }).optional(),
-    _INTERNAL_docker_tag: z.string().optional(),
-    env: z
-        .union([z.array(z.string()), z.record(z.string()), z.string()])
-        .default({}),
-    store: z
-        .object({
-            index: z
-                .object({
-                    dims: z.number().optional(),
-                    embed: z.string().optional(),
-                    fields: z.array(z.string()).optional(),
-                })
-                .optional(),
-        })
-        .optional(),
-    auth: z
-        .object({
-            path: z.string().optional(),
-            disable_studio_auth: z.boolean().default(false),
-        })
-        .optional(),
-    http: z
-        .object({
-            app: z.string().optional(),
-            disable_assistants: z.boolean().default(false),
-            disable_threads: z.boolean().default(false),
-            disable_runs: z.boolean().default(false),
-            disable_store: z.boolean().default(false),
-            disable_meta: z.boolean().default(false),
-            cors: z
-                .object({
-                    allow_origins: z.array(z.string()).optional(),
-                    allow_methods: z.array(z.string()).optional(),
-                    allow_headers: z.array(z.string()).optional(),
-                    allow_credentials: z.boolean().optional(),
-                    allow_origin_regex: z.string().optional(),
-                    expose_headers: z.array(z.string()).optional(),
-                    max_age: z.number().optional(),
-                })
-                .optional(),
-        })
-        .optional(),
-});
-const DEFAULT_PYTHON_VERSION = "3.11";
-const DEFAULT_NODE_VERSION = "20";
-const PYTHON_EXTENSIONS = [".py", ".pyx", ".pyd", ".pyi"];
-const PythonVersionSchema = z.union([z.literal("3.11"), z.literal("3.12")]);
-const NodeVersionSchema = z.union([z.literal("20"), z.literal("22")]);
-const PythonConfigSchema = BaseConfigSchema.merge(z.object({
-    pip_config_file: z.string().optional(),
-    dependencies: z
-        .array(z.string())
-        .nonempty("You need to specify at least one dependency"),
-})).merge(z.object({
-    python_version: PythonVersionSchema.default(DEFAULT_PYTHON_VERSION),
-    node_version: NodeVersionSchema.optional(),
-}));
-const NodeConfigSchema = BaseConfigSchema.merge(z.object({ node_version: NodeVersionSchema.default(DEFAULT_NODE_VERSION) }));
-const ConfigSchema = z.union([NodeConfigSchema, PythonConfigSchema]);
-// TODO: implement this in Python CLI
-export const getConfig = (config) => {
-    let input = typeof config === "string" ? JSON.parse(config) : config;
-    const { graphs } = BaseConfigSchema.parse(input);
-    const isPython = Object.values(graphs).map((graphDef) => {
-        const importStr = typeof graphDef === "string" ? graphDef : graphDef.path;
-        return PYTHON_EXTENSIONS.includes(extname(importStr.split(":")[0]));
-    });
-    const somePython = isPython.some((i) => i);
-    const someNode = !isPython.every((i) => i);
-    const node_version = someNode
-        ? input.node_version || DEFAULT_NODE_VERSION
-        : undefined;
-    const python_version = somePython
-        ? input.python_version || (someNode ? "3.12" : DEFAULT_PYTHON_VERSION)
-        : undefined;
-    if (node_version && python_version && python_version !== "3.12") {
-        throw new Error("Only Python 3.12 is supported with Node.js");
-    }
-    input = { ...input, node_version, python_version };
-    if (!input.node_version)
-        delete input.node_version;
-    if (!input.python_version)
-        delete input.python_version;
-    if (python_version)
-        return PythonConfigSchema.parse(input);
-    return NodeConfigSchema.parse(input);
-};
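To illustrate how getConfig normalized a raw langgraph.json, a small sketch follows (the deep-import path and sample values are assumptions):

import { getConfig } from "@langchain/langgraph-cli/dist/utils/config.mjs"; // assumed deep-import path

const config = getConfig({
    dependencies: ["."],
    graphs: { agent: "./src/agent.py:graph" },
    env: { OPENAI_API_KEY: "sk-..." },
});

// The graph path ends in ".py", so the Python schema is selected and defaults are applied:
console.log(config.python_version);   // "3.11"
console.log(config.dockerfile_lines); // []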
package/dist/utils/logging.mjs
DELETED
@@ -1,96 +0,0 @@
-import { createLogger, format, transports } from "winston";
-import { consoleFormat } from "winston-console-format";
-import { parse as stacktraceParser } from "stacktrace-parser";
-import { readFileSync } from "fs";
-import { codeFrameColumns } from "@babel/code-frame";
-import path from "node:path";
-const LOG_JSON = process.env.LOG_JSON === "true";
-const LOG_LEVEL = process.env.LOG_LEVEL || "debug";
-export const logger = createLogger({
-    level: LOG_LEVEL,
-    format: format.combine(format.errors({ stack: true }), format.timestamp(), format.json(), ...(!LOG_JSON
-        ? [
-            format.colorize({ all: true }),
-            format.padLevels(),
-            consoleFormat({
-                showMeta: true,
-                metaStrip: ["timestamp"],
-                inspectOptions: {
-                    depth: Infinity,
-                    colors: true,
-                    maxArrayLength: Infinity,
-                    breakLength: 120,
-                    compact: Infinity,
-                },
-            }),
-        ]
-        : [
-            format.printf((info) => {
-                const { timestamp, level, message, ...rest } = info;
-                let event;
-                if (typeof message === "string") {
-                    event = message;
-                }
-                else {
-                    event = JSON.stringify(message);
-                }
-                if (rest.stack) {
-                    rest.message = event;
-                    event = rest.stack;
-                }
-                return JSON.stringify({ timestamp, level, event, ...rest });
-            }),
-        ])),
-    transports: [new transports.Console()],
-});
-const formatStack = (stack) => {
-    if (!stack)
-        return stack;
-    const [firstFile] = stacktraceParser(stack).filter((item) => !item.file?.split(path.sep).includes("node_modules") &&
-        !item.file?.startsWith("node:"));
-    if (firstFile?.file && firstFile?.lineNumber) {
-        try {
-            const filePath = firstFile.file;
-            const line = firstFile.lineNumber;
-            const column = firstFile.column ?? 0;
-            const messageLines = stack.split("\n");
-            const spliceIndex = messageLines.findIndex((i) => i.includes(filePath));
-            const padding = " ".repeat(Math.max(0, messageLines[spliceIndex].indexOf("at")));
-            const highlightCode = process.stdout.isTTY;
-            let codeFrame = codeFrameColumns(readFileSync(filePath, "utf-8"), { start: { line, column } }, { highlightCode });
-            codeFrame = codeFrame
-                .split("\n")
-                .map((i) => padding + i + "\x1b[0m")
-                .join("\n");
-            if (highlightCode) {
-                codeFrame = "\x1b[36m" + codeFrame + "\x1b[31m";
-            }
-            // insert codeframe after the line but dont lose the stack
-            return [
-                ...messageLines.slice(0, spliceIndex + 1),
-                codeFrame,
-                ...messageLines.slice(spliceIndex + 1),
-            ].join("\n");
-        }
-        catch {
-            // pass
-        }
-    }
-    return stack;
-};
-export const logError = (error, options) => {
-    let message;
-    let context = options?.context;
-    if (error instanceof Error) {
-        message = formatStack(error.stack) || error.message;
-    }
-    else {
-        message = String(error);
-        context = { ...context, error };
-    }
-    if (options?.prefix != null)
-        message = `${options.prefix}:\n${message}`;
-    logger.error(message, ...(context != null ? [context] : []));
-};
-process.on("uncaughtException", (error) => logError(error));
-process.on("unhandledRejection", (error) => logError(error));