@langchain/langgraph-cli 0.0.0-preview.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/api/assistants.mjs +144 -0
- package/dist/api/runs.mjs +239 -0
- package/dist/api/store.mjs +83 -0
- package/dist/api/threads.mjs +145 -0
- package/dist/cli/build.mjs +44 -0
- package/dist/cli/cli.mjs +7 -0
- package/dist/cli/dev.entrypoint.mjs +35 -0
- package/dist/cli/dev.mjs +133 -0
- package/dist/cli/dockerfile.mjs +35 -0
- package/dist/cli/utils/builder.mjs +16 -0
- package/dist/cli/utils/ipc/client.mjs +25 -0
- package/dist/cli/utils/ipc/server.mjs +71 -0
- package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +7 -0
- package/dist/cli/utils/ipc/utils/temporary-directory.mjs +18 -0
- package/dist/cli/utils/project.mjs +18 -0
- package/dist/docker/compose.mjs +185 -0
- package/dist/docker/dockerfile.mjs +390 -0
- package/dist/docker/shell.mjs +62 -0
- package/dist/graph/load.hooks.mjs +17 -0
- package/dist/graph/load.mjs +71 -0
- package/dist/graph/load.utils.mjs +50 -0
- package/dist/graph/parser/parser.mjs +308 -0
- package/dist/graph/parser/parser.worker.mjs +7 -0
- package/dist/graph/parser/schema/types.mjs +1607 -0
- package/dist/graph/parser/schema/types.template.mts +81 -0
- package/dist/logging.mjs +50 -0
- package/dist/preload.mjs +3 -0
- package/dist/queue.mjs +91 -0
- package/dist/schemas.mjs +399 -0
- package/dist/server.mjs +63 -0
- package/dist/state.mjs +32 -0
- package/dist/storage/checkpoint.mjs +123 -0
- package/dist/storage/ops.mjs +786 -0
- package/dist/storage/persist.mjs +69 -0
- package/dist/storage/store.mjs +37 -0
- package/dist/stream.mjs +215 -0
- package/dist/utils/abort.mjs +8 -0
- package/dist/utils/config.mjs +35 -0
- package/dist/utils/error.mjs +1 -0
- package/dist/utils/hono.mjs +27 -0
- package/dist/utils/importMap.mjs +55 -0
- package/dist/utils/runnableConfig.mjs +45 -0
- package/dist/utils/serde.mjs +20 -0
- package/package.json +62 -0
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
import dedent from "dedent";
|
|
2
|
+
import * as path from "node:path";
|
|
3
|
+
import { promises as fs } from "node:fs";
|
|
4
|
+
const dedenter = dedent.withOptions({ escapeSpecialCharacters: false });
|
|
5
|
+
// Report whether a filesystem entry is present at the given path.
// `fs.stat` rejects for missing paths; any rejection is treated as "absent".
async function exists(path) {
    return fs.stat(path).then(
        () => true,
        () => false,
    );
}
|
|
14
|
+
/**
 * Classify every local ("./...") entry of `config.dependencies` so the
 * Dockerfile generator knows how to copy and install it.
 *
 * Returns:
 *  - pipReqs:      [hostRelPath, containerPath] pairs of requirements.txt files
 *  - realPkgs:     { absHostPath: depRef } for pip-installable packages
 *                  (have pyproject.toml or setup.py)
 *  - fauxPkgs:     { absHostPath: [depRef, containerPath] } for bare dirs that
 *                  get a synthesized pyproject.toml in the image
 *  - rebuildFiles: host files whose change should trigger an image rebuild
 *  - workingDir:   container dir for the "." dependency (if any)
 *  - reloadDir:    container dir of a non-"." faux package (used for --reload)
 */
export async function assembleLocalDeps(configPath, config) {
    if ("node_version" in config) {
        // Node projects: the whole project directory is copied as one package,
        // so only the manifest/lockfiles matter for rebuild detection.
        const rebuildFiles = [];
        const workingDir = `/deps/${path.basename(path.dirname(configPath))}`;
        rebuildFiles.push(path.resolve(path.dirname(configPath), "package.json"));
        rebuildFiles.push(path.resolve(path.dirname(configPath), "package-lock.json"));
        rebuildFiles.push(path.resolve(path.dirname(configPath), "yarn.lock"));
        rebuildFiles.push(path.resolve(path.dirname(configPath), "pnpm-lock.yaml"));
        return {
            pipReqs: [],
            realPkgs: {},
            fauxPkgs: {},
            rebuildFiles,
            workingDir,
        };
    }
    // Python package names that would shadow packages already installed in the
    // base image; local deps may not reuse them. The set grows as local
    // package names are claimed, so duplicates across deps are rejected too.
    const reserved = new Set([
        "src",
        "langgraph-api",
        "langgraph_api",
        "langgraph",
        "langchain-core",
        "langchain_core",
        "pydantic",
        "orjson",
        "fastapi",
        "uvicorn",
        "psycopg",
        "httpx",
        "langsmith",
    ]);
    // Throw if `name` collides with a reserved/claimed package name; otherwise claim it.
    function checkReserved(name, ref) {
        if (reserved.has(name)) {
            throw new Error(`Package name '${name}' used in local dep '${ref}' is reserved. Rename the directory.`);
        }
        reserved.add(name);
    }
    const pipReqs = [];
    const realPkgs = {};
    const fauxPkgs = {};
    const rebuildFiles = [];
    let workingDir;
    let reloadDir;
    for (const localDep of config.dependencies) {
        // Only local (relative-path) dependencies are handled here; PyPI names
        // are installed separately by the Dockerfile generator.
        if (!localDep.startsWith("."))
            continue;
        const resolved = path.resolve(path.dirname(configPath), localDep);
        if (!(await exists(resolved))) {
            throw new Error(`Could not find local dependency: ${resolved}`);
        }
        else if (!(await fs.stat(resolved)).isDirectory()) {
            throw new Error(`Local dependency must be a directory: ${resolved}`);
        }
        else if (!resolved.startsWith(path.dirname(configPath))) {
            // Docker build context is the config dir; anything outside it
            // cannot be ADDed into the image.
            throw new Error(`Local dependency must be a subdirectory of the config file: ${resolved}`);
        }
        // if it's installable, add it to local_pkgs
        // otherwise, add it to faux_pkgs, and create a pyproject.toml
        const files = await fs.readdir(resolved);
        if (files.includes("pyproject.toml") || files.includes("setup.py")) {
            realPkgs[resolved] = localDep;
            if (localDep === ".") {
                workingDir = `/deps/${path.basename(resolved)}`;
            }
            if (files.includes("pyproject.toml")) {
                rebuildFiles.push(path.resolve(resolved, "pyproject.toml"));
            }
            if (files.includes("setup.py")) {
                rebuildFiles.push(path.resolve(resolved, "setup.py"));
            }
        }
        else {
            let containerPath;
            if (files.includes("__init__.py")) {
                // flat layout
                if (path.basename(resolved).includes("-")) {
                    // Hyphens are not valid in Python import names.
                    throw new Error(`Package name '${path.basename(resolved)}' contains a hyphen. Rename the directory to use it as flat-layout package.`);
                }
                checkReserved(path.basename(resolved), localDep);
                containerPath = `/deps/__outer_${path.basename(resolved)}/${path.basename(resolved)}`;
            }
            else {
                // src-style layout: copy the dir as /src and reserve the names
                // of any subdirectory that contains Python files.
                containerPath = `/deps/__outer_${path.basename(resolved)}/src`;
                for (const file of files) {
                    const rfile = path.resolve(resolved, file);
                    if (file !== "__pycache__" &&
                        !file.startsWith(".") &&
                        (await fs.stat(rfile)).isDirectory()) {
                        try {
                            for (const subfile of await fs.readdir(rfile)) {
                                if (subfile.endsWith(".py")) {
                                    checkReserved(file, localDep);
                                    break;
                                }
                            }
                        }
                        catch {
                            // pass
                        }
                    }
                }
            }
            fauxPkgs[resolved] = [localDep, containerPath];
            if (localDep === ".") {
                workingDir = containerPath;
            }
            else {
                // NOTE: only the last non-"." faux package wins as reloadDir.
                reloadDir = containerPath;
            }
            if (files.includes("requirements.txt")) {
                const rfile = path.resolve(resolved, "requirements.txt");
                rebuildFiles.push(rfile);
                pipReqs.push([
                    path.relative(path.dirname(configPath), rfile),
                    `${containerPath}/requirements.txt`,
                ]);
            }
        }
    }
    return { pipReqs, realPkgs, fauxPkgs, workingDir, reloadDir, rebuildFiles };
}
|
|
135
|
+
async function updateGraphPaths(configPath, config, localDeps) {
|
|
136
|
+
for (const [graphId, importStr] of Object.entries(config.graphs)) {
|
|
137
|
+
let [moduleStr, attrStr] = importStr.split(":", 2);
|
|
138
|
+
if (!moduleStr || !attrStr) {
|
|
139
|
+
throw new Error(`Import string "${importStr}" must be in format "<module>:<attribute>".`);
|
|
140
|
+
}
|
|
141
|
+
if (moduleStr.includes("/")) {
|
|
142
|
+
const resolved = path.resolve(path.dirname(configPath), moduleStr);
|
|
143
|
+
if (!(await exists(resolved))) {
|
|
144
|
+
throw new Error(`Could not find local module: ${resolved}`);
|
|
145
|
+
}
|
|
146
|
+
else if (!(await fs.stat(resolved)).isFile()) {
|
|
147
|
+
throw new Error(`Local module must be a file: ${resolved}`);
|
|
148
|
+
}
|
|
149
|
+
else {
|
|
150
|
+
find: {
|
|
151
|
+
for (const realPath of Object.keys(localDeps.realPkgs)) {
|
|
152
|
+
if (resolved.startsWith(realPath)) {
|
|
153
|
+
moduleStr = `/deps/${path.basename(realPath)}/${path.relative(realPath, resolved)}`;
|
|
154
|
+
break find;
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
for (const [fauxPkg, [_, destPath]] of Object.entries(localDeps.fauxPkgs)) {
|
|
158
|
+
if (resolved.startsWith(fauxPkg)) {
|
|
159
|
+
moduleStr = `${destPath}/${path.relative(fauxPkg, resolved)}`;
|
|
160
|
+
break find;
|
|
161
|
+
}
|
|
162
|
+
throw new Error(`Module '${importStr}' not found in 'dependencies' list. Add its containing package to 'dependencies' list.`);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
config["graphs"][graphId] = `${moduleStr}:${attrStr}`;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
/**
 * Pick the Docker base image for the project.
 * Python configs map to langchain/langgraph-api, Node configs to
 * langchain/langgraphjs-api; `_INTERNAL_docker_tag` overrides the
 * version-derived tag when set.
 * @throws {Error} when the config declares neither runtime version.
 */
export function getBaseImage(config) {
    const isPython = "python_version" in config;
    const isNode = "node_version" in config;
    if (!isPython && !isNode) {
        throw new Error("Invalid config type");
    }
    // Python wins when both versions are (unexpectedly) present.
    const repo = isPython ? "langchain/langgraph-api" : "langchain/langgraphjs-api";
    const version = isPython ? config.python_version : config.node_version;
    return `${repo}:${config._INTERNAL_docker_tag || version}`;
}
|
|
179
|
+
/**
 * Render the Dockerfile text for a config, dispatching on whether it is a
 * Python or a Node project.
 * @throws {Error} when the config declares neither runtime version.
 */
export async function configToDocker(configPath, config, localDeps, options) {
    const renderer = "python_version" in config
        ? pythonConfigToDocker
        : "node_version" in config
            ? nodeConfigToDocker
            : null;
    if (renderer == null) {
        throw new Error("Invalid config type");
    }
    return renderer(configPath, config, localDeps, options);
}
|
|
188
|
+
/**
 * Render the Dockerfile for a Node project: copy the project into the image,
 * install dependencies with whichever package manager the lockfile implies,
 * export the graph/store/auth config via env vars and run the prebuild script.
 * With `options.watch`, appends a CMD that hot-reloads the server.
 */
export async function nodeConfigToDocker(configPath, config, localDeps, options) {
    // figure out the package manager used here
    const projectFolder = path.dirname(configPath);
    const hasFile = (file) => fs
        .stat(path.resolve(projectFolder, file))
        .then((stats) => stats.isFile())
        .catch(() => false);
    const [hasNpmLock, hasYarnLock, hasPnpmLock] = await Promise.all([
        hasFile("package-lock.json"),
        hasFile("yarn.lock"),
        hasFile("pnpm-lock.yaml"),
    ]);
    // Preference order: yarn, pnpm, npm ci; plain `npm i` when no lockfile.
    let installCmd;
    if (hasYarnLock) {
        installCmd = "yarn install";
    }
    else if (hasPnpmLock) {
        installCmd = "pnpm i --frozen-lockfile";
    }
    else if (hasNpmLock) {
        installCmd = "npm ci";
    }
    else {
        installCmd = "npm i";
    }
    const lines = [
        `FROM ${getBaseImage(config)}`,
        ...config.dockerfile_lines,
        `ADD . ${localDeps.workingDir}`,
        `RUN cd ${localDeps.workingDir} && ${installCmd}`,
        `ENV LANGSERVE_GRAPHS='${JSON.stringify(config.graphs)}'`,
        ...(config.store
            ? [`ENV LANGGRAPH_STORE='${JSON.stringify(config.store)}'`]
            : []),
        ...(config.auth
            ? [`ENV LANGGRAPH_AUTH='${JSON.stringify(config.auth)}'`]
            : []),
        `WORKDIR ${localDeps.workingDir}`,
        `RUN (test ! -f /api/langgraph_api/js/build.mts && echo "Prebuild script not found, skipping") || tsx /api/langgraph_api/js/build.mts`,
    ];
    if (options?.watch) {
        // TODO: hacky, should add as entrypoint to the langgraph-api base image
        lines.push(`CMD exec uvicorn langgraph_api.server:app --log-config /api/logging.json --no-access-log --host 0.0.0.0 --port 8000 --reload --reload-dir ${localDeps.workingDir}`);
    }
    return lines.filter(Boolean).join("\n");
}
|
|
231
|
+
/**
 * Render the Dockerfile for a Python project: install PyPI deps, per-package
 * requirements files, real local packages, and synthesized ("faux") packages,
 * then export graph/store/auth config via env vars.
 * With `options.watch`, appends a CMD that hot-reloads the server.
 *
 * Note: mutates `config.graphs` via updateGraphPaths to use container paths.
 */
export async function pythonConfigToDocker(configPath, config, localDeps, options) {
    let pipInstall = `PYTHONDONTWRITEBYTECODE=1 pip install -c /api/constraints.txt`;
    if (config.pip_config_file) {
        pipInstall = `PIP_CONFIG_FILE=/pipconfig.txt ${pipInstall}`;
    }
    // BuildKit cache mount keeps pip's wheel cache across builds.
    pipInstall = `--mount=type=cache,target=/root/.cache/pip ${pipInstall}`;
    const pipConfigFileStr = config.pip_config_file
        ? [`ADD ${config.pip_config_file} /pipconfig.txt`].join("\n")
        : "";
    const pypiDeps = config.dependencies.filter((dep) => !dep.startsWith("."));
    // Rewrite graph file paths to their in-container locations.
    await updateGraphPaths(configPath, config, localDeps);
    const pipPkgsStr = pypiDeps.length
        ? `RUN ${pipInstall} ${pypiDeps.join(" ")}`
        : "";
    let pipReqStr;
    if (localDeps.pipReqs.length) {
        const pipReqsStr = localDeps.pipReqs
            .map(([reqpath, destpath]) => `ADD ${reqpath} ${destpath}`)
            .join("\n");
        // BUG FIX: the "-r <file>" flags must be joined with spaces; the previous
        // code interpolated the bare array into the template string, which joins
        // with commas and produced a broken `pip install -r a,-r b` command.
        pipReqStr = `${pipReqsStr}\nRUN ${pipInstall} ${localDeps.pipReqs.map(([, r]) => `-r ${r}`).join(" ")}`;
    }
    else {
        pipReqStr = "";
    }
    const localPkgStr = Object.entries(localDeps.realPkgs)
        .map(([fullpath, relpath]) => `ADD ${relpath} /deps/${path.basename(fullpath)}`)
        .join("\n");
    // Faux packages get a minimal generated pyproject.toml so `pip install -e`
    // can pick them up. `$$line` escapes `$` for docker compose interpolation;
    // a plain `docker build` needs the single `$line` form.
    const fauxPkgsStr = Object.entries(localDeps.fauxPkgs)
        .map(([fullpath, [relpath, destpath]]) => {
        const x = dedenter `
        ADD ${relpath} ${destpath}
        RUN set -ex && \
            for line in '[project]' \
                        'name = "${path.basename(fullpath)}"' \
                        'version = "0.1"' \
                        '[tool.setuptools.package-data]' \
                        '"*" = ["**/*"]'; do \
                echo "${options?.dockerCommand === "build" ? "$line" : "$$line"}" >> /deps/__outer_${path.basename(fullpath)}/pyproject.toml; \
            done
        `;
        return x;
    })
        .join("\n");
    const lines = [
        `FROM ${getBaseImage(config)}`,
        ...config.dockerfile_lines,
        pipConfigFileStr,
        pipPkgsStr,
        pipReqStr,
        localPkgStr,
        fauxPkgsStr,
        `RUN ${pipInstall} -e /deps/*`,
        `ENV LANGSERVE_GRAPHS='${JSON.stringify(config.graphs)}'`,
        ...(config.store
            ? [`ENV LANGGRAPH_STORE='${JSON.stringify(config.store)}'`]
            : []),
        ...(config.auth
            ? [`ENV LANGGRAPH_AUTH='${JSON.stringify(config.auth)}'`]
            : []),
        localDeps.workingDir ? `WORKDIR ${localDeps.workingDir}` : null,
    ];
    if (options?.watch && (localDeps.workingDir || localDeps.reloadDir)) {
        // TODO: hacky, should add as entrypoint to the langgraph-api base image
        lines.push(`CMD exec uvicorn langgraph_api.server:app --log-config /api/logging.json --no-access-log --host 0.0.0.0 --port 8000 --reload --reload-dir ${localDeps.workingDir || localDeps.reloadDir}`);
    }
    return lines.filter(Boolean).join("\n");
}
|
|
298
|
+
/**
 * Build the docker compose `develop.watch` entries for a project:
 * rebuild triggers for manifest files plus sync (or rebuild) entries for
 * the watched source directories. Returns undefined when nothing to watch.
 */
export async function configToWatch(configPath, config) {
    const localDeps = await assembleLocalDeps(configPath, config);
    const projectDir = path.dirname(configPath);
    // Python projects watch each local dependency; Node watches the whole project.
    const watchSources = "python_version" in config
        ? config.dependencies.filter((dep) => dep.startsWith("."))
        : ["."];
    const watchIgnore = "node_version" in config
        ? ["node_modules", "langgraph.json"]
        : ["langgraph.json"];
    if (typeof config.env === "string") {
        watchIgnore.push(config.env);
    }
    const watch = [];
    const seen = new Set();
    for (const absPath of localDeps.rebuildFiles) {
        const relative = path.relative(projectDir, absPath);
        if (seen.has(relative))
            continue;
        seen.add(relative);
        // Manifest changes require a full image rebuild, not a sync.
        watch.push({ path: relative, action: "rebuild" });
        watchIgnore.push(relative);
    }
    const target = localDeps.workingDir || localDeps.reloadDir;
    for (const source of watchSources) {
        watch.push({
            path: source,
            // Without a container target we cannot sync, so fall back to rebuild.
            action: target ? "sync" : "rebuild",
            target: target,
            ignore: watchIgnore,
        });
    }
    return watch.length > 0 ? watch : undefined;
}
|
|
331
|
+
/**
 * Build the docker compose service definition for the API container.
 *
 * Returns { apiDef, rewrite } where `apiDef` is the compose service object
 * (inline Dockerfile, env wiring, optional watch config) and `rewrite` maps
 * the host project dir to its container path when a working dir exists.
 *
 * `config.env` may be: a string (path to an env file), an object (inline
 * key/value pairs), or an array (names that must be supplied externally).
 */
export async function configToCompose(configPath, config, options) {
    const result = {};
    const localDeps = await assembleLocalDeps(configPath, config);
    const inline = await configToDocker(configPath, config, localDeps, options);
    result.pull_policy = "build";
    result.build = {
        context: ".",
        dockerfile_inline: inline + "\n",
    };
    // Keys already provided by the project env must not be overridden by
    // options.extendEnv below.
    const extendEnvIgnore = new Set();
    if (typeof config.env === "string") {
        // try to parse out the env file
        const envPath = path.resolve(path.dirname(configPath), config.env);
        try {
            // NOTE(review): naive parse — `KEY=VALUE` per line; comment lines
            // and `export KEY=...` prefixes are taken verbatim as keys.
            const envFileKeys = (await fs.readFile(envPath, "utf-8"))
                .split("\n")
                .map((lines) => lines.trim().split("=").at(0));
            for (const key of envFileKeys) {
                if (key)
                    extendEnvIgnore.add(key);
            }
        }
        catch {
            throw new Error(`Could not read env file: ${envPath}`);
        }
        result.env_file = config.env;
    }
    else if (!Array.isArray(config.env)) {
        // Inline env object: copy values directly into the service environment.
        Object.entries(config.env).forEach(([k, v]) => {
            result.environment ??= {};
            result.environment[k] = v;
            extendEnvIgnore.add(k);
        });
    }
    if (options?.watch) {
        const watch = await configToWatch(configPath, config);
        if (watch)
            result.develop = { watch };
    }
    if (options?.extendEnv) {
        // Extra env supplied by the caller; project-provided keys win.
        Object.entries(options.extendEnv).forEach(([k, v]) => {
            if (extendEnvIgnore.has(k))
                return;
            result.environment ??= {};
            result.environment[k] = v;
        });
    }
    if (Array.isArray(config.env)) {
        // check if all the environment variables are present or not
        const missing = config.env.filter((k) => !result.environment?.[k]);
        if (missing.length)
            throw new Error(`Missing environment variables: ${missing.join(", ")}`);
    }
    return {
        apiDef: result,
        rewrite: localDeps.workingDir
            ? { source: path.dirname(configPath), target: localDeps.workingDir }
            : undefined,
    };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { $ } from "execa";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
// Memoized login-shell PATH containing the docker CLI; resolved lazily by getLoginPath().
let PATH = undefined;
|
|
4
|
+
// TODO: macOS related only
// Look up the current user's login shell via macOS Directory Services.
async function getUserShell() {
    const run = $({ shell: true });
    const { stdout } = await run `dscl . -read ~/ UserShell | sed 's/UserShell: //'`;
    return stdout.trim();
}
|
|
11
|
+
// Confirm `docker` is resolvable on the candidate PATH; rejects when it is not.
// Returns the candidate unchanged so it can be chained with .then().
async function verifyDockerPath(candidatePath) {
    await $({ env: { PATH: candidatePath } }) `which docker`;
    return candidatePath;
}
|
|
15
|
+
// TODO: macOS related only
// Ask the user's login shell to print its PATH. csh-family shells don't
// accept the combined "-lc" login flag, so they get plain "-c".
async function extractPathFromShell() {
    const shellBin = await getUserShell().catch(() => "/bin/zsh");
    const flag = shellBin.includes("csh") ? "-c" : "-lc";
    const result = await $(shellBin, [flag, "echo $PATH"]);
    return result.stdout.trim();
}
|
|
24
|
+
// TODO: macOS related only
// Fallback PATH assembled from well-known binary locations (system dirs,
// Homebrew, Docker Desktop, Rancher Desktop) for when the login shell
// cannot be interrogated.
async function guessUserPath() {
    const home = homedir();
    const candidates = [
        "/bin",
        "/usr/bin",
        "/sbin",
        "/usr/sbin",
        "/opt/homebrew/bin",
        "/opt/homebrew/sbin",
        `${home}/.local/bin`,
        "/Applications/Docker.app/Contents/Resources/bin",
        `${home}/.docker/bin`,
        // support for Rancher Desktop
        // https://github.com/langchain-ai/langgraph-studio/issues/24#issuecomment-2274046328
        // https://github.com/langchain-ai/langgraph-studio/issues/122
        `${home}/.rd/bin`,
        `/Applications/Rancher Desktop.app/Contents/Resources/resources/darwin/bin`,
    ];
    return candidates.join(":");
}
|
|
43
|
+
// Resolve (and memoize in the module-level `PATH`) an env object whose PATH
// can locate the docker CLI. Tries the user's login shell first and falls
// back to a list of well-known install locations; both candidates are
// verified concurrently with `which docker`.
async function getLoginPath() {
    // Serve the memoized value once a previous call succeeded.
    if (PATH)
        return { PATH };
    // allSettled: we want both outcomes so the fallback can win when the
    // shell-derived PATH lacks docker.
    const [fromShell, fromBackup] = await Promise.allSettled([extractPathFromShell(), guessUserPath()].map((promise) => promise.then(verifyDockerPath)));
    if (fromShell.status === "fulfilled") {
        PATH = fromShell.value;
    }
    else if (fromBackup.status === "fulfilled") {
        PATH = fromBackup.value;
    }
    else {
        // Neither candidate contained docker; surface the first failure.
        console.error("Failed to get PATH from shell or backup", fromShell.reason, fromBackup.reason);
        throw fromShell.reason || fromBackup.reason;
    }
    return { PATH };
}
|
|
59
|
+
/**
 * Augment execa options with an env whose PATH can locate docker.
 * NOTE(review): the resolved env replaces any `env` already present in
 * `options` — confirm callers never pass their own `env`.
 */
export async function getExecaOptions(options) {
    return { ...options, env: await getLoginPath() };
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
// Node module-resolution hook ensuring the @langchain/langgraph packages
// found in the /api folder take precedence over any user-provided copies
// found in /deps. Does not attempt to semver check for too old packages.
const OVERRIDE_RESOLVE = new Set([
    "@langchain/langgraph",
    "@langchain/langgraph-checkpoint",
]);
export async function resolve(specifier, context, nextResolve) {
    if (!OVERRIDE_RESOLVE.has(specifier)) {
        return nextResolve(specifier, context);
    }
    // Re-anchor resolution next to load.mts so Node searches /api's
    // dependency tree before the user's.
    const parentURL = new URL("./load.mts", import.meta.url).toString();
    return await nextResolve(specifier, { ...context, parentURL });
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
import * as uuid from "uuid";
|
|
3
|
+
import { Assistants } from "../storage/ops.mjs";
|
|
4
|
+
import { HTTPException } from "hono/http-exception";
|
|
5
|
+
import { resolveGraph, runGraphSchemaWorker, } from "./load.utils.mjs";
|
|
6
|
+
import { checkpointer } from "../storage/checkpoint.mjs";
|
|
7
|
+
import { store } from "../storage/store.mjs";
|
|
8
|
+
import { logger } from "../logging.mjs";
|
|
9
|
+
// Compiled graph instances keyed by graph_id; populated by registerFromEnv().
export const GRAPHS = {};
// Source specs ({ sourceFile, exportSymbol }) keyed by graph_id.
export const GRAPH_SPEC = {};
// Cached schemas produced by the parser worker, keyed by graph_id.
export const GRAPH_SCHEMA = {};
// Fixed namespace for deriving deterministic assistant UUIDs via uuid.v5.
export const NAMESPACE_GRAPH = uuid.parse("6ba7b821-9dad-11d1-80b4-00c04fd430c8");
// Free-form per-graph config payload parsed from the LANGGRAPH_CONFIG env var.
const ConfigSchema = z.record(z.unknown());
|
|
14
|
+
// Map a graph id to its deterministic assistant UUID. Ids that are not
// registered graphs are passed through unchanged (assumed to already be
// assistant ids).
export const getAssistantId = (graphId) => graphId in GRAPHS ? uuid.v5(graphId, NAMESPACE_GRAPH) : graphId;
|
|
19
|
+
/**
 * Register every graph spec: resolve + compile each graph, cache it in
 * GRAPHS/GRAPH_SPEC, and ensure a system assistant exists for it.
 *
 * @param specs   map of graph_id -> "<file>:<export>" spec strings
 * @param options { cwd } base directory for resolving spec file paths
 * @returns array of the resolved (compiled) graphs
 */
export async function registerFromEnv(specs, options) {
    // Optional per-graph config payload supplied via env var.
    const envConfig = process.env.LANGGRAPH_CONFIG
        ? ConfigSchema.parse(JSON.parse(process.env.LANGGRAPH_CONFIG))
        : undefined;
    return await Promise.all(Object.entries(specs).map(async ([graphId, rawSpec]) => {
        logger.info(`Registering graph with id '${graphId}'`, {
            graph_id: graphId,
        });
        const config = envConfig?.[graphId];
        const { resolved, ...spec } = await resolveGraph(rawSpec, {
            cwd: options.cwd,
        });
        // registering the graph runtime
        GRAPHS[graphId] = resolved;
        GRAPH_SPEC[graphId] = spec;
        // Deterministic assistant id (uuid.v5 of the graph id); "do_nothing"
        // keeps any user-modified assistant intact on restart.
        await Assistants.put(uuid.v5(graphId, NAMESPACE_GRAPH), {
            graph_id: graphId,
            metadata: { created_by: "system" },
            config: config ?? {},
            if_exists: "do_nothing",
        });
        return resolved;
    }));
}
|
|
43
|
+
/**
 * Fetch a registered compiled graph, wiring in a checkpointer and store.
 *
 * Note: this mutates the SHARED cached graph instance in GRAPHS — every
 * caller sees the checkpointer/store set by the most recent call.
 *
 * @param options optional { checkpointer, store }; an explicitly-passed
 *   `checkpointer: null` clears the checkpointer (coerced to undefined),
 *   while omitting the key falls back to the default checkpointer.
 * @throws {HTTPException} 404 when the graph id is unknown
 */
export function getGraph(graphId, options) {
    if (!GRAPHS[graphId])
        throw new HTTPException(404, { message: `Graph "${graphId}" not found` });
    // TODO: have a check for the type of graph
    const compiled = GRAPHS[graphId];
    if (typeof options?.checkpointer !== "undefined") {
        compiled.checkpointer = options?.checkpointer ?? undefined;
    }
    else {
        compiled.checkpointer = checkpointer;
    }
    compiled.store = options?.store ?? store;
    return compiled;
}
|
|
57
|
+
/**
 * Extract (via the parser worker) and return the schema for a graph.
 * @throws {HTTPException} 404 when no spec is registered for the id
 * @throws {Error} when the worker fails to extract the schema
 */
export async function getGraphSchema(graphId) {
    if (!GRAPH_SPEC[graphId])
        throw new HTTPException(404, {
            message: `Spec for "${graphId}" not found`,
        });
    // NOTE(review): `|| true` makes this condition always truthy, so the
    // GRAPH_SCHEMA cache is never actually consulted and the worker runs on
    // every call — confirm whether the cache bypass is intentional (e.g. to
    // pick up source changes in dev) before removing it.
    if (!GRAPH_SCHEMA[graphId] || true) {
        try {
            GRAPH_SCHEMA[graphId] = await runGraphSchemaWorker(GRAPH_SPEC[graphId]);
        }
        catch (error) {
            throw new Error(`Failed to extract schema for "${graphId}": ${error}`);
        }
    }
    return GRAPH_SCHEMA[graphId];
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Worker } from "node:worker_threads";
|
|
2
|
+
import * as fs from "node:fs/promises";
|
|
3
|
+
import * as path from "node:path";
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
5
|
+
import * as uuid from "uuid";
|
|
6
|
+
// NOTE(review): appears unused within this module — possibly kept for parity
// with load.mjs; confirm before removing.
export const GRAPHS = {};
// Same fixed uuid.v5 namespace as load.mjs, for deterministic assistant ids.
export const NAMESPACE_GRAPH = uuid.parse("6ba7b821-9dad-11d1-80b4-00c04fd430c8");
|
|
8
|
+
/**
 * Load a graph from a "<file>[:<export>]" spec.
 *
 * Resolves the file against `options.cwd`, imports it, picks the named export
 * (or "default"), awaits factory functions/promises, and compiles anything
 * exposing a `compile()` method.
 *
 * @param options { cwd, onlyFilePresence? } — with `onlyFilePresence`, only
 *   validates that the file exists and returns `resolved: undefined` (and the
 *   original relative path as `sourceFile`).
 * @throws when the file is missing or the export is nullish
 */
export async function resolveGraph(spec, options) {
    const [userFile, exportSymbol] = spec.split(":", 2);
    const sourceFile = path.resolve(options.cwd, userFile);
    // validate file exists (fs.stat rejects when missing)
    await fs.stat(sourceFile);
    if (options?.onlyFilePresence) {
        return { sourceFile: userFile, exportSymbol, resolved: undefined };
    }
    // A "graph" here means an uncompiled builder: anything with a compile() method.
    const isGraph = (graph) => {
        if (typeof graph !== "object" || graph == null)
            return false;
        return "compile" in graph && typeof graph.compile === "function";
    };
    const graph = await import(pathToFileURL(sourceFile).toString()).then((module) => module[exportSymbol || "default"]);
    // obtain the graph, and if not compiled, compile it
    const resolved = await (async () => {
        if (!graph)
            // BUG FIX: error message previously misspelled "nullish" as "nullush".
            throw new Error("Failed to load graph: graph is nullish");
        // The export may be a factory function or a promise of a graph.
        const graphLike = typeof graph === "function" ? await graph() : await graph;
        if (isGraph(graphLike))
            return graphLike.compile();
        return graphLike;
    })();
    return { sourceFile, exportSymbol, resolved };
}
|
|
33
|
+
/**
 * Extract the schema for a graph spec in a worker thread.
 *
 * Resolves with the worker's first message; rejects on worker error or after
 * a 30s timeout. The worker is always terminated and the timer always
 * cleared, so a failure cannot keep the event loop (and process) alive.
 */
export async function runGraphSchemaWorker(spec) {
    const SCHEMA_RESOLVE_TIMEOUT_MS = 30_000;
    return await new Promise((resolve, reject) => {
        const worker = new Worker(fileURLToPath(new URL("./parser/parser.worker.mjs", import.meta.url)));
        // Shared teardown for every exit path.
        const cleanup = () => {
            clearTimeout(timeoutId);
            worker.terminate();
        };
        // Set a timeout to reject if the worker takes too long
        const timeoutId = setTimeout(() => {
            cleanup();
            reject(new Error("Schema extract worker timed out"));
        }, SCHEMA_RESOLVE_TIMEOUT_MS);
        worker.on("message", (result) => {
            cleanup();
            resolve(result);
        });
        // BUG FIX: previously the "error" path neither cleared the 30s timer nor
        // terminated the worker, leaving the event loop alive after a failure.
        worker.on("error", (error) => {
            cleanup();
            reject(error);
        });
        worker.postMessage(spec);
    });
}
|