@langchain/langgraph-cli 0.0.0-preview.4

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (45)
  1. package/README.md +24 -0
  2. package/dist/api/assistants.mjs +144 -0
  3. package/dist/api/runs.mjs +239 -0
  4. package/dist/api/store.mjs +83 -0
  5. package/dist/api/threads.mjs +145 -0
  6. package/dist/cli/build.mjs +44 -0
  7. package/dist/cli/cli.mjs +7 -0
  8. package/dist/cli/dev.entrypoint.mjs +35 -0
  9. package/dist/cli/dev.mjs +133 -0
  10. package/dist/cli/dockerfile.mjs +35 -0
  11. package/dist/cli/utils/builder.mjs +16 -0
  12. package/dist/cli/utils/ipc/client.mjs +25 -0
  13. package/dist/cli/utils/ipc/server.mjs +71 -0
  14. package/dist/cli/utils/ipc/utils/get-pipe-path.mjs +7 -0
  15. package/dist/cli/utils/ipc/utils/temporary-directory.mjs +18 -0
  16. package/dist/cli/utils/project.mjs +18 -0
  17. package/dist/docker/compose.mjs +185 -0
  18. package/dist/docker/dockerfile.mjs +390 -0
  19. package/dist/docker/shell.mjs +62 -0
  20. package/dist/graph/load.hooks.mjs +17 -0
  21. package/dist/graph/load.mjs +71 -0
  22. package/dist/graph/load.utils.mjs +50 -0
  23. package/dist/graph/parser/parser.mjs +308 -0
  24. package/dist/graph/parser/parser.worker.mjs +7 -0
  25. package/dist/graph/parser/schema/types.mjs +1607 -0
  26. package/dist/graph/parser/schema/types.template.mts +81 -0
  27. package/dist/logging.mjs +50 -0
  28. package/dist/preload.mjs +3 -0
  29. package/dist/queue.mjs +91 -0
  30. package/dist/schemas.mjs +399 -0
  31. package/dist/server.mjs +63 -0
  32. package/dist/state.mjs +32 -0
  33. package/dist/storage/checkpoint.mjs +123 -0
  34. package/dist/storage/ops.mjs +786 -0
  35. package/dist/storage/persist.mjs +69 -0
  36. package/dist/storage/store.mjs +37 -0
  37. package/dist/stream.mjs +215 -0
  38. package/dist/utils/abort.mjs +8 -0
  39. package/dist/utils/config.mjs +35 -0
  40. package/dist/utils/error.mjs +1 -0
  41. package/dist/utils/hono.mjs +27 -0
  42. package/dist/utils/importMap.mjs +55 -0
  43. package/dist/utils/runnableConfig.mjs +45 -0
  44. package/dist/utils/serde.mjs +20 -0
  45. package/package.json +62 -0
package/dist/storage/persist.mjs ADDED
@@ -0,0 +1,69 @@
+ import * as path from "node:path";
+ import * as fs from "node:fs/promises";
+ import * as superjson from "superjson";
+ // Add custom transformers for Uint8Array
+ superjson.registerCustom({
+     isApplicable: (v) => v instanceof Uint8Array,
+     serialize: (v) => Buffer.from(v).toString("base64"),
+     deserialize: (v) => new Uint8Array(Buffer.from(v, "base64")),
+ }, "Uint8Array");
+ export class FileSystemPersistence {
+     filepath = null;
+     data = null;
+     defaultSchema;
+     name;
+     flushTimeout = undefined;
+     constructor(name, defaultSchema) {
+         this.name = name;
+         this.defaultSchema = defaultSchema;
+     }
+     async initialize(cwd) {
+         this.filepath = path.resolve(cwd, ".langgraph_api", `${this.name}`);
+         try {
+             this.data = superjson.parse(await fs.readFile(this.filepath, "utf-8"));
+         }
+         catch {
+             this.data = this.defaultSchema();
+         }
+         await fs
+             .mkdir(path.dirname(this.filepath), { recursive: true })
+             .catch(() => void 0);
+         return this;
+     }
+     async persist() {
+         if (this.data == null || this.filepath == null)
+             return;
+         clearTimeout(this.flushTimeout);
+         await fs.writeFile(this.filepath, superjson.stringify(this.data), "utf-8");
+     }
+     schedulePersist() {
+         clearTimeout(this.flushTimeout);
+         this.flushTimeout = setTimeout(() => this.persist(), 3000);
+     }
+     async flush() {
+         await this.persist();
+     }
+     async with(fn) {
+         if (this.filepath == null || this.data == null) {
+             throw new Error(`${this.name} not initialized`);
+         }
+         try {
+             return await fn(this.data);
+         }
+         finally {
+             this.schedulePersist();
+         }
+     }
+     async *withGenerator(fn) {
+         if (this.filepath == null || this.data == null) {
+             throw new Error(`${this.name} not initialized`);
+         }
+         try {
+             const gen = typeof fn === "function" ? fn(this.data) : fn;
+             yield* gen;
+         }
+         finally {
+             this.schedulePersist();
+         }
+     }
+ }
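
For orientation, a minimal usage sketch of FileSystemPersistence (not part of the package): the constructor takes a snapshot file name plus a factory for the default data shape, initialize(cwd) loads or creates the snapshot under .langgraph_api/, with(fn) runs a callback against the live data and schedules a debounced (~3 s) write, and flush() forces the pending write. The "counts" shape and file name below are hypothetical.

import { FileSystemPersistence } from "./persist.mjs";

// Hypothetical snapshot holding a single Map of counters (superjson handles Map natively).
const counters = new FileSystemPersistence(".example.counts.json", () => ({
    counts: new Map(),
}));

await counters.initialize(process.cwd());
await counters.with(({ counts }) => {
    // Mutations happen in memory; a debounced write is scheduled afterwards.
    counts.set("runs", (counts.get("runs") ?? 0) + 1);
});
await counters.flush(); // force the pending write before exiting
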
package/dist/storage/store.mjs ADDED
@@ -0,0 +1,37 @@
+ import { InMemoryStore as BaseMemoryStore, } from "@langchain/langgraph";
+ import { FileSystemPersistence } from "./persist.mjs";
+ const conn = new FileSystemPersistence(".langgraphjs_api.store.json", () => ({
+     data: new Map(),
+     vectors: new Map(),
+ }));
+ class InMemoryStore extends BaseMemoryStore {
+     async initialize(cwd) {
+         await conn.initialize(cwd);
+         await conn.with(({ data, vectors }) => {
+             Object.assign(this, { data, vectors });
+         });
+         return conn;
+     }
+     async clear() {
+         await conn.with(({ data, vectors }) => {
+             data.clear();
+             vectors.clear();
+         });
+     }
+     async batch(operations) {
+         return await conn.with(() => super.batch(operations));
+     }
+     async get(...args) {
+         return await conn.with(() => super.get(...args));
+     }
+     async search(...args) {
+         return await conn.with(() => super.search(...args));
+     }
+     async put(...args) {
+         return await conn.with(() => super.put(...args));
+     }
+     async listNamespaces(...args) {
+         return await conn.with(() => super.listNamespaces(...args));
+     }
+ }
+ export const store = new InMemoryStore();
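
A hedged sketch of how the exported store singleton can be used: it behaves like LangGraph's InMemoryStore, except that initialize(cwd) loads .langgraph_api/.langgraphjs_api.store.json into memory and every operation runs through conn.with, so changes are flushed back to disk on the debounced schedule. The namespace, key, and value below are made up.

import { store } from "./store.mjs";

await store.initialize(process.cwd());
// Standard BaseStore operations; each call schedules a debounced flush to disk.
await store.put(["example", "docs"], "greeting", { text: "hello" });
const item = await store.get(["example", "docs"], "greeting");
console.log(item?.value); // { text: "hello" }
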
package/dist/stream.mjs ADDED
@@ -0,0 +1,215 @@
+ import { getGraph } from "./graph/load.mjs";
+ import { Client as LangSmithClient } from "langsmith";
+ import { Command, Send, } from "@langchain/langgraph";
+ import { runnableConfigToCheckpoint, taskRunnableConfigToCheckpoint, } from "./utils/runnableConfig.mjs";
+ import { BaseMessageChunk, isBaseMessage } from "@langchain/core/messages";
+ import { logger } from "./logging.mjs";
+ const getLangGraphCommand = (command) => {
+     let goto = command.goto != null && !Array.isArray(command.goto)
+         ? [command.goto]
+         : command.goto;
+     return new Command({
+         goto: goto?.map((item) => {
+             if (typeof item !== "string")
+                 return new Send(item.node, item.input);
+             return item;
+         }),
+         update: command.update,
+         resume: command.resume,
+     });
+ };
+ const isRunnableConfig = (config) => {
+     if (typeof config !== "object" || config == null)
+         return false;
+     return ("configurable" in config &&
+         typeof config.configurable === "object" &&
+         config.configurable != null);
+ };
+ function preprocessDebugCheckpointTask(task) {
+     if (!isRunnableConfig(task.state) ||
+         !taskRunnableConfigToCheckpoint(task.state)) {
+         return task;
+     }
+     const cloneTask = { ...task };
+     cloneTask.checkpoint = taskRunnableConfigToCheckpoint(task.state);
+     delete cloneTask.state;
+     return cloneTask;
+ }
+ const isConfigurablePresent = (config) => typeof config === "object" &&
+     config != null &&
+     "configurable" in config &&
+     typeof config.configurable === "object" &&
+     config.configurable != null;
+ const deleteInternalConfigurableFields = (config) => {
+     if (isConfigurablePresent(config)) {
+         const newConfig = {
+             ...config,
+             configurable: Object.fromEntries(Object.entries(config.configurable).filter(([key]) => !key.startsWith("__"))),
+         };
+         delete newConfig.callbacks;
+         return newConfig;
+     }
+     return config;
+ };
+ function preprocessDebugCheckpoint(payload) {
+     const result = {
+         ...payload,
+         checkpoint: runnableConfigToCheckpoint(payload["config"]),
+         parent_checkpoint: runnableConfigToCheckpoint(payload["parentConfig"]),
+         tasks: payload["tasks"].map(preprocessDebugCheckpointTask),
+     };
+     // Handle LangGraph JS camelCase vs snake_case
+     // TODO: use stream to LangGraph.JS
+     result.parent_config = payload["parentConfig"];
+     delete result.parentConfig;
+     result.config = deleteInternalConfigurableFields(result.config);
+     result.parent_config = deleteInternalConfigurableFields(result.parent_config);
+     return result;
+ }
+ export async function* streamState(run, attempt = 1, options) {
+     const kwargs = run.kwargs;
+     const graphId = kwargs.config?.configurable?.graph_id;
+     if (!graphId || typeof graphId !== "string") {
+         throw new Error("Invalid or missing graph_id");
+     }
+     const graph = getGraph(graphId, {
+         checkpointer: kwargs.temporary ? null : undefined,
+     });
+     const userStreamMode = kwargs.stream_mode ?? [];
+     const libStreamMode = new Set(userStreamMode.filter((mode) => mode !== "events" && mode !== "messages-tuple") ?? []);
+     if (userStreamMode.includes("messages-tuple")) {
+         libStreamMode.add("messages");
+     }
+     if (userStreamMode.includes("messages")) {
+         libStreamMode.add("values");
+     }
+     if (!libStreamMode.has("debug"))
+         libStreamMode.add("debug");
+     yield {
+         event: "metadata",
+         data: { run_id: run.run_id, attempt },
+     };
+     const metadata = {
+         ...kwargs.config?.metadata,
+         run_attempt: attempt,
+         // TODO: get langgraph version from NPM / load.hooks.mjs
+         langgraph_version: "0.2.35",
+         langgraph_plan: "developer",
+         langgraph_host: "self-hosted",
+     };
+     const events = graph.streamEvents(kwargs.command != null
+         ? getLangGraphCommand(kwargs.command)
+         : (kwargs.input ?? null), {
+         version: "v2",
+         interruptAfter: kwargs.interrupt_after,
+         interruptBefore: kwargs.interrupt_before,
+         tags: kwargs.config?.tags,
+         configurable: kwargs.config?.configurable,
+         recursionLimit: kwargs.config?.recursion_limit,
+         subgraphs: kwargs.subgraphs,
+         metadata,
+         runId: run.run_id,
+         streamMode: [...libStreamMode],
+         signal: options?.signal,
+     });
+     const messages = {};
+     const completedIds = new Set();
+     for await (const event of events) {
+         if (event.tags?.includes("langsmith:hidden"))
+             continue;
+         if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
+             const [ns, mode, chunk] = (kwargs.subgraphs ? event.data.chunk : [null, ...event.data.chunk]);
+             // Listen for debug events and capture checkpoint
+             let data = chunk;
+             if (mode === "debug") {
+                 const debugChunk = chunk;
+                 if (debugChunk.type === "checkpoint") {
+                     const debugCheckpoint = preprocessDebugCheckpoint(debugChunk.payload);
+                     options?.onCheckpoint?.(debugCheckpoint);
+                     data = { ...debugChunk, payload: debugCheckpoint };
+                 }
+                 else if (debugChunk.type === "task_result") {
+                     const debugResult = preprocessDebugCheckpointTask(debugChunk.payload);
+                     options?.onTaskResult?.(debugResult);
+                     data = { ...debugChunk, payload: debugResult };
+                 }
+             }
+             if (mode === "messages") {
+                 if (userStreamMode.includes("messages-tuple")) {
+                     yield { event: "messages", data };
+                 }
+             }
+             else if (mode === "custom") {
+                 logger.warn("unhandled custom mode", { mode, chunk });
+             }
+             else if (userStreamMode.includes(mode)) {
+                 if (kwargs.subgraphs && ns?.length) {
+                     yield { event: `${mode}|${ns.join("|")}`, data };
+                 }
+                 else {
+                     yield { event: mode, data };
+                 }
+             }
+         }
+         else if (userStreamMode.includes("events")) {
+             yield { event: "events", data: event };
+         }
+         // TODO: we still rely on the old messages mode based off streamMode=values
+         // In order to fully switch to library messages mode, we need to ensure that
+         // `StreamMessagesHandler` sends the final message, which requires the following:
+         // - handleLLMEnd does not send the final message b/c handleLLMNewToken sets the this.emittedChatModelRunIds[runId] flag. Python does not do that
+         // - handleLLMEnd receives the final message as BaseMessageChunk rather than BaseMessage, which from the outside will become indistinguishable.
+         // - handleLLMEnd should not dedupe the message
+         // - Don't think there's a utility that would convert a BaseMessageChunk to a BaseMessage?
+         if (userStreamMode.includes("messages")) {
+             if (event.event === "on_chain_stream" && event.run_id === run.run_id) {
+                 const newMessages = [];
+                 const [_, chunk] = event.data.chunk;
+                 let chunkMessages = [];
+                 if (typeof chunk === "object" &&
+                     chunk != null &&
+                     "messages" in chunk &&
+                     !isBaseMessage(chunk)) {
+                     chunkMessages = chunk?.messages;
+                 }
+                 if (!Array.isArray(chunkMessages)) {
+                     chunkMessages = [chunkMessages];
+                 }
+                 for (const message of chunkMessages) {
+                     if (!message.id || completedIds.has(message.id))
+                         continue;
+                     completedIds.add(message.id);
+                     newMessages.push(message);
+                 }
+                 if (newMessages.length > 0) {
+                     yield { event: "messages/complete", data: newMessages };
+                 }
+             }
+             else if (event.event === "on_chat_model_stream" &&
+                 !event.tags?.includes("nostream")) {
+                 const message = event.data.chunk;
+                 if (!message.id)
+                     continue;
+                 if (messages[message.id] == null) {
+                     messages[message.id] = message;
+                     yield {
+                         event: "messages/metadata",
+                         data: { [message.id]: { metadata: event.metadata } },
+                     };
+                 }
+                 else {
+                     messages[message.id] = messages[message.id].concat(message);
+                 }
+                 yield { event: "messages/partial", data: [messages[message.id]] };
+             }
+         }
+     }
+     if (kwargs.feedback_keys) {
+         const client = new LangSmithClient();
+         const data = Object.fromEntries(await Promise.all(kwargs.feedback_keys.map(async (feedback) => {
+             const { url } = await client.createPresignedFeedbackToken(run.run_id, feedback);
+             return [feedback, url];
+         })));
+         yield { event: "feedback", data };
+     }
+ }
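
To make the event shape concrete, a hedged consumption sketch of streamState: it yields { event, data } pairs, starting with "metadata", then the requested stream modes, and optionally "feedback" at the end. The run object below only mirrors the kwargs fields read above, assumes a graph registered under the hypothetical id "agent", and is illustrative rather than the server's actual run record.

// Hypothetical run object, shaped after the kwargs fields used in streamState.
const run = {
    run_id: "00000000-0000-0000-0000-000000000000",
    kwargs: {
        config: { configurable: { graph_id: "agent" } },
        input: { messages: [{ role: "user", content: "hi" }] },
        stream_mode: ["values"],
        temporary: true, // skip the checkpointer for this throwaway run
    },
};

for await (const { event, data } of streamState(run)) {
    // e.g. "metadata", "values", "messages/partial", "feedback"
    console.log(event, data);
}
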
package/dist/utils/abort.mjs ADDED
@@ -0,0 +1,8 @@
+ export const combineAbortSignals = (...input) => {
+     const signals = input.filter((item) => item != null);
+     if ("any" in AbortSignal)
+         return AbortSignal.any(signals);
+     const abortController = new AbortController();
+     signals.forEach((signal) => signal.addEventListener("abort", () => abortController.abort()));
+     return abortController.signal;
+ };
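
A small usage sketch: combineAbortSignals drops null/undefined entries and uses the native AbortSignal.any where available (Node 20+), falling back to a manual fan-in, so a caller can merge an optional client signal with, say, a timeout.

const client = new AbortController();
// Aborts when either the client aborts or the 30 s timeout fires; undefined entries are ignored.
const signal = combineAbortSignals(client.signal, AbortSignal.timeout(30_000), undefined);

signal.addEventListener("abort", () => {
    console.log("aborted: client disconnect or timeout");
});
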
package/dist/utils/config.mjs ADDED
@@ -0,0 +1,35 @@
+ import { z } from "zod";
+ const AuthConfigSchema = z.object({
+     path: z.string().optional(),
+     disable_studio_auth: z.boolean().default(false),
+ });
+ const IndexConfigSchema = z.object({
+     dims: z.number().optional(),
+     embed: z.string().optional(),
+     fields: z.array(z.string()).optional(),
+ });
+ const StoreConfigSchema = z.object({
+     index: IndexConfigSchema.optional(),
+ });
+ const BaseConfigSchema = z.object({
+     docker_compose_file: z.string().optional(),
+     dockerfile_lines: z.array(z.string()).default([]),
+     graphs: z.record(z.string()).default({}),
+     env: z
+         .union([z.array(z.string()), z.record(z.string()), z.string()])
+         .default({}),
+     store: StoreConfigSchema.optional(),
+     _INTERNAL_docker_tag: z.string().optional(),
+     auth: AuthConfigSchema.optional(),
+ });
+ export const PythonConfigSchema = BaseConfigSchema.merge(z.object({
+     python_version: z
+         .union([z.literal("3.11"), z.literal("3.12")])
+         .default("3.11"),
+     pip_config_file: z.string().optional(),
+     dependencies: z
+         .array(z.string())
+         .nonempty("You need to specify at least one dependency"),
+ }));
+ export const NodeConfigSchema = BaseConfigSchema.merge(z.object({ node_version: z.literal("20") }));
+ export const ConfigSchema = z.union([NodeConfigSchema, PythonConfigSchema]);
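
For reference, a hypothetical config object for a Node project that would satisfy ConfigSchema (the union tries NodeConfigSchema first, which requires node_version: "20"); the graph path and env file below are made-up values, not taken from the package.

const config = ConfigSchema.parse({
    node_version: "20",
    graphs: { agent: "./src/agent.mts:graph" },
    env: ".env",
});
// Defaults are applied on parse, e.g. dockerfile_lines becomes [].
console.log(config);
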
package/dist/utils/error.mjs ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/utils/hono.mjs ADDED
@@ -0,0 +1,27 @@
+ import { serialiseAsDict } from "./serde.mjs";
+ import { stream } from "hono/streaming";
+ import { StreamingApi } from "hono/utils/stream";
+ export function jsonExtra(c, object) {
+     return new Response(serialiseAsDict(object), {
+         ...c.res,
+         headers: { ...c.res.headers, "Content-Type": "application/json" },
+     });
+ }
+ export function waitKeepAlive(c, promise) {
+     return stream(c, async (stream) => {
+         // keep sending newlines until the promise resolves
+         let keepAlive = Promise.resolve();
+         const timer = setInterval(() => {
+             keepAlive = keepAlive.then(() => stream.write("\n"));
+         }, 1000);
+         const result = await promise;
+         clearInterval(timer);
+         await keepAlive;
+         await stream.write(serialiseAsDict(result));
+     });
+ }
+ export const getDisconnectAbortSignal = (c, stream) => {
+     // https://github.com/honojs/hono/issues/1770
+     stream.onAbort(() => { });
+     return c.req.raw.signal;
+ };
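
A hedged wiring sketch for these helpers inside a Hono app: waitKeepAlive streams newline heartbeats while a long-running promise settles, and jsonExtra serialises LangChain objects via serialiseAsDict. The route paths and the runToCompletion stub are hypothetical.

import { Hono } from "hono";
import { jsonExtra, waitKeepAlive } from "./hono.mjs";

// Hypothetical long-running work standing in for a real run.
const runToCompletion = async () => {
    await new Promise((resolve) => setTimeout(resolve, 5_000));
    return { status: "done" };
};

const app = new Hono();
app.post("/example/wait", (c) => waitKeepAlive(c, runToCompletion()));
app.get("/example/state", (c) => jsonExtra(c, { messages: [] }));
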
package/dist/utils/importMap.mjs ADDED
@@ -0,0 +1,55 @@
+ import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate, } from "@langchain/core/prompts";
+ import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk, } from "@langchain/core/messages";
+ import { StringPromptValue } from "@langchain/core/prompt_values";
+ export const prompts__prompt = {
+     PromptTemplate,
+ };
+ export const schema__messages = {
+     AIMessage,
+     AIMessageChunk,
+     BaseMessage,
+     BaseMessageChunk,
+     ChatMessage,
+     ChatMessageChunk,
+     FunctionMessage,
+     FunctionMessageChunk,
+     HumanMessage,
+     HumanMessageChunk,
+     SystemMessage,
+     SystemMessageChunk,
+     ToolMessage,
+     ToolMessageChunk,
+ };
+ export const schema = {
+     AIMessage,
+     AIMessageChunk,
+     BaseMessage,
+     BaseMessageChunk,
+     ChatMessage,
+     ChatMessageChunk,
+     FunctionMessage,
+     FunctionMessageChunk,
+     HumanMessage,
+     HumanMessageChunk,
+     SystemMessage,
+     SystemMessageChunk,
+     ToolMessage,
+     ToolMessageChunk,
+ };
+ export const prompts__chat = {
+     AIMessagePromptTemplate,
+     ChatMessagePromptTemplate,
+     ChatPromptTemplate,
+     HumanMessagePromptTemplate,
+     MessagesPlaceholder,
+     SystemMessagePromptTemplate,
+ };
+ export const prompts__image = {
+     ImagePromptTemplate,
+ };
+ export const prompts__pipeline = {
+     PipelinePromptTemplate,
+ };
+ export const prompts__base = {
+     StringPromptValue,
+ };
package/dist/utils/runnableConfig.mjs ADDED
@@ -0,0 +1,45 @@
+ import { z } from "zod";
+ const ConfigSchema = z.object({
+     configurable: z.object({
+         thread_id: z.string(),
+         checkpoint_id: z.string(),
+         checkpoint_ns: z.string().nullish(),
+         checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
+     }),
+ });
+ export const runnableConfigToCheckpoint = (config) => {
+     if (!config || !config.configurable || !config.configurable.thread_id) {
+         return null;
+     }
+     const parsed = ConfigSchema.safeParse(config);
+     if (!parsed.success)
+         return null;
+     return {
+         thread_id: parsed.data.configurable.thread_id,
+         checkpoint_id: parsed.data.configurable.checkpoint_id,
+         checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
+         checkpoint_map: parsed.data.configurable.checkpoint_map || null,
+     };
+ };
+ const TaskConfigSchema = z.object({
+     configurable: z.object({
+         thread_id: z.string(),
+         checkpoint_id: z.string().nullish(),
+         checkpoint_ns: z.string().nullish(),
+         checkpoint_map: z.record(z.string(), z.unknown()).nullish(),
+     }),
+ });
+ export const taskRunnableConfigToCheckpoint = (config) => {
+     if (!config || !config.configurable || !config.configurable.thread_id) {
+         return null;
+     }
+     const parsed = TaskConfigSchema.safeParse(config);
+     if (!parsed.success)
+         return null;
+     return {
+         thread_id: parsed.data.configurable.thread_id,
+         checkpoint_id: parsed.data.configurable.checkpoint_id || null,
+         checkpoint_ns: parsed.data.configurable.checkpoint_ns || "",
+         checkpoint_map: parsed.data.configurable.checkpoint_map || null,
+     };
+ };
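
Concretely, these helpers map a RunnableConfig onto the snake_case checkpoint shape used by the API and return null when required fields are missing; the ids below are placeholders.

const checkpoint = runnableConfigToCheckpoint({
    configurable: { thread_id: "thread-1", checkpoint_id: "ckpt-1" },
});
// => { thread_id: "thread-1", checkpoint_id: "ckpt-1", checkpoint_ns: "", checkpoint_map: null }

// The task variant tolerates a missing checkpoint_id:
const taskCheckpoint = taskRunnableConfigToCheckpoint({
    configurable: { thread_id: "thread-1" },
});
// => { thread_id: "thread-1", checkpoint_id: null, checkpoint_ns: "", checkpoint_map: null }
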
package/dist/utils/serde.mjs ADDED
@@ -0,0 +1,20 @@
+ export const serialiseAsDict = (obj) => {
+     return JSON.stringify(obj, function (key, value) {
+         const rawValue = this[key];
+         if (rawValue != null &&
+             typeof rawValue === "object" &&
+             "toDict" in rawValue &&
+             typeof rawValue.toDict === "function") {
+             // TODO: we need to upstream this to LangChainJS
+             const { type, data } = rawValue.toDict();
+             return { ...data, type };
+         }
+         return value;
+     }, 2);
+ };
+ export const serializeError = (error) => {
+     if (error instanceof Error) {
+         return { error: error.name, message: error.message };
+     }
+     return { error: "Error", message: JSON.stringify(error) };
+ };
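
As an illustration, serialiseAsDict special-cases objects exposing toDict() (LangChain messages do) so they serialise as { ...data, type } rather than their class internals, and serializeError normalises anything thrown into { error, message }. The snippet below is a hedged sketch, not part of the package.

import { HumanMessage } from "@langchain/core/messages";

console.log(serialiseAsDict({ messages: [new HumanMessage("hi")] }));
// the message is emitted with its data fields plus "type": "human"

console.log(serializeError(new TypeError("bad input")));
// => { error: "TypeError", message: "bad input" }
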
package/package.json ADDED
@@ -0,0 +1,62 @@
+ {
+   "name": "@langchain/langgraph-cli",
+   "version": "0.0.0-preview.4",
+   "type": "module",
+   "engines": {
+     "node": ">=18"
+   },
+   "main": "./dist/server.mjs",
+   "bin": {
+     "langgraph": "dist/cli/cli.mjs"
+   },
+   "files": [
+     "dist/"
+   ],
+   "scripts": {
+     "build": "node scripts/spawn.mjs build",
+     "prepack": "pnpm run build",
+     "typecheck": "tsc --noEmit",
+     "cli": "tsx src/cli/cli.mts",
+     "cli:watch": "tsx watch src/cli/cli.mts",
+     "test:cli": "pnpm run cli dev --no-browser --config ./tests/graphs/langgraph.json",
+     "test": "vitest",
+     "test:ci": "node scripts/spawn.mjs test"
+   },
+   "dependencies": {
+     "@commander-js/extra-typings": "^13.0.0",
+     "@hono/node-server": "^1.12.0",
+     "@hono/zod-validator": "^0.2.2",
+     "@types/json-schema": "^7.0.15",
+     "@typescript/vfs": "^1.6.0",
+     "chokidar": "^4.0.3",
+     "commander": "^13.0.0",
+     "dedent": "^1.5.3",
+     "dotenv": "^16.4.7",
+     "execa": "^9.5.2",
+     "exit-hook": "^4.0.0",
+     "hono": "^4.5.4",
+     "langsmith": "^0.2.15",
+     "open": "^10.1.0",
+     "superjson": "^2.2.2",
+     "tsx": "^4.19.2",
+     "uuid": "^10.0.0",
+     "winston": "^3.17.0",
+     "winston-console-format": "^1.0.8",
+     "yaml": "^2.7.0",
+     "zod": "^3.23.8"
+   },
+   "peerDependencies": {
+     "@langchain/core": "^0.3.27",
+     "@langchain/langgraph": "^0.2.39",
+     "@langchain/langgraph-checkpoint": "^0.0.13",
+     "typescript": "^5.5.4"
+   },
+   "devDependencies": {
+     "@langchain/langgraph-sdk": "^0.0.33",
+     "@types/node": "^22.2.0",
+     "@types/uuid": "^10.0.0",
+     "postgres": "^3.4.5",
+     "prettier": "^3.3.3",
+     "vitest": "^2.1.8"
+   }
+ }