langgraph-api 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic. Click here for more details.
- LICENSE +93 -0
- langgraph_api/__init__.py +0 -0
- langgraph_api/api/__init__.py +63 -0
- langgraph_api/api/assistants.py +326 -0
- langgraph_api/api/meta.py +71 -0
- langgraph_api/api/openapi.py +32 -0
- langgraph_api/api/runs.py +463 -0
- langgraph_api/api/store.py +116 -0
- langgraph_api/api/threads.py +263 -0
- langgraph_api/asyncio.py +201 -0
- langgraph_api/auth/__init__.py +0 -0
- langgraph_api/auth/langsmith/__init__.py +0 -0
- langgraph_api/auth/langsmith/backend.py +67 -0
- langgraph_api/auth/langsmith/client.py +145 -0
- langgraph_api/auth/middleware.py +41 -0
- langgraph_api/auth/noop.py +14 -0
- langgraph_api/cli.py +209 -0
- langgraph_api/config.py +70 -0
- langgraph_api/cron_scheduler.py +60 -0
- langgraph_api/errors.py +52 -0
- langgraph_api/graph.py +314 -0
- langgraph_api/http.py +168 -0
- langgraph_api/http_logger.py +89 -0
- langgraph_api/js/.gitignore +2 -0
- langgraph_api/js/build.mts +49 -0
- langgraph_api/js/client.mts +849 -0
- langgraph_api/js/global.d.ts +6 -0
- langgraph_api/js/package.json +33 -0
- langgraph_api/js/remote.py +673 -0
- langgraph_api/js/server_sent_events.py +126 -0
- langgraph_api/js/src/graph.mts +88 -0
- langgraph_api/js/src/hooks.mjs +12 -0
- langgraph_api/js/src/parser/parser.mts +443 -0
- langgraph_api/js/src/parser/parser.worker.mjs +12 -0
- langgraph_api/js/src/schema/types.mts +2136 -0
- langgraph_api/js/src/schema/types.template.mts +74 -0
- langgraph_api/js/src/utils/importMap.mts +85 -0
- langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
- langgraph_api/js/src/utils/serde.mts +21 -0
- langgraph_api/js/tests/api.test.mts +1566 -0
- langgraph_api/js/tests/compose-postgres.yml +56 -0
- langgraph_api/js/tests/graphs/.gitignore +1 -0
- langgraph_api/js/tests/graphs/agent.mts +127 -0
- langgraph_api/js/tests/graphs/error.mts +17 -0
- langgraph_api/js/tests/graphs/langgraph.json +8 -0
- langgraph_api/js/tests/graphs/nested.mts +44 -0
- langgraph_api/js/tests/graphs/package.json +7 -0
- langgraph_api/js/tests/graphs/weather.mts +57 -0
- langgraph_api/js/tests/graphs/yarn.lock +159 -0
- langgraph_api/js/tests/parser.test.mts +870 -0
- langgraph_api/js/tests/utils.mts +17 -0
- langgraph_api/js/yarn.lock +1340 -0
- langgraph_api/lifespan.py +41 -0
- langgraph_api/logging.py +121 -0
- langgraph_api/metadata.py +101 -0
- langgraph_api/models/__init__.py +0 -0
- langgraph_api/models/run.py +229 -0
- langgraph_api/patch.py +42 -0
- langgraph_api/queue.py +245 -0
- langgraph_api/route.py +118 -0
- langgraph_api/schema.py +190 -0
- langgraph_api/serde.py +124 -0
- langgraph_api/server.py +48 -0
- langgraph_api/sse.py +118 -0
- langgraph_api/state.py +67 -0
- langgraph_api/stream.py +289 -0
- langgraph_api/utils.py +60 -0
- langgraph_api/validation.py +141 -0
- langgraph_api-0.0.1.dist-info/LICENSE +93 -0
- langgraph_api-0.0.1.dist-info/METADATA +26 -0
- langgraph_api-0.0.1.dist-info/RECORD +86 -0
- langgraph_api-0.0.1.dist-info/WHEEL +4 -0
- langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
- langgraph_license/__init__.py +0 -0
- langgraph_license/middleware.py +21 -0
- langgraph_license/validation.py +11 -0
- langgraph_storage/__init__.py +0 -0
- langgraph_storage/checkpoint.py +94 -0
- langgraph_storage/database.py +190 -0
- langgraph_storage/ops.py +1523 -0
- langgraph_storage/queue.py +108 -0
- langgraph_storage/retry.py +27 -0
- langgraph_storage/store.py +28 -0
- langgraph_storage/ttl_dict.py +54 -0
- logging.json +22 -0
- openapi.json +4304 -0
|
@@ -0,0 +1,849 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
import { Hono } from "hono";
|
|
3
|
+
import { serve } from "@hono/node-server";
|
|
4
|
+
import { zValidator } from "@hono/zod-validator";
|
|
5
|
+
import { streamSSE } from "hono/streaming";
|
|
6
|
+
import { HTTPException } from "hono/http-exception";
|
|
7
|
+
import pRetry from "p-retry";
|
|
8
|
+
import {
|
|
9
|
+
BaseStore,
|
|
10
|
+
Item,
|
|
11
|
+
Operation,
|
|
12
|
+
OperationResults,
|
|
13
|
+
type Checkpoint,
|
|
14
|
+
type CheckpointMetadata,
|
|
15
|
+
type CheckpointTuple,
|
|
16
|
+
type CompiledGraph,
|
|
17
|
+
} from "@langchain/langgraph";
|
|
18
|
+
import {
|
|
19
|
+
BaseCheckpointSaver,
|
|
20
|
+
type ChannelVersions,
|
|
21
|
+
type ChannelProtocol,
|
|
22
|
+
} from "@langchain/langgraph-checkpoint";
|
|
23
|
+
import { createHash } from "node:crypto";
|
|
24
|
+
import * as fs from "node:fs/promises";
|
|
25
|
+
import * as path from "node:path";
|
|
26
|
+
import { serialiseAsDict } from "./src/utils/serde.mjs";
|
|
27
|
+
import * as importMap from "./src/utils/importMap.mjs";
|
|
28
|
+
|
|
29
|
+
import { createLogger, format, transports } from "winston";
|
|
30
|
+
import { Agent, fetch } from "undici";
|
|
31
|
+
|
|
32
|
+
import { load } from "@langchain/core/load";
|
|
33
|
+
import { BaseMessageChunk, isBaseMessage } from "@langchain/core/messages";
|
|
34
|
+
import type { PyItem, PyResult } from "./src/utils/pythonSchemas.mts";
|
|
35
|
+
import type { RunnableConfig } from "@langchain/core/runnables";
|
|
36
|
+
import {
|
|
37
|
+
runGraphSchemaWorker,
|
|
38
|
+
GraphSchema,
|
|
39
|
+
resolveGraph,
|
|
40
|
+
GraphSpec,
|
|
41
|
+
} from "./src/graph.mts";
|
|
42
|
+
|
|
43
|
+
const logger = createLogger({
|
|
44
|
+
level: "debug",
|
|
45
|
+
format: format.combine(
|
|
46
|
+
format.errors({ stack: true }),
|
|
47
|
+
format.timestamp(),
|
|
48
|
+
format.json(),
|
|
49
|
+
format.printf((info) => {
|
|
50
|
+
const { timestamp, level, message, ...rest } = info;
|
|
51
|
+
|
|
52
|
+
let event;
|
|
53
|
+
if (typeof message === "string") {
|
|
54
|
+
event = message;
|
|
55
|
+
} else {
|
|
56
|
+
event = JSON.stringify(message);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
if (rest.stack) {
|
|
60
|
+
rest.message = event;
|
|
61
|
+
event = rest.stack;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
return JSON.stringify({ timestamp, level, event, ...rest });
|
|
65
|
+
})
|
|
66
|
+
),
|
|
67
|
+
transports: [
|
|
68
|
+
new transports.Console({
|
|
69
|
+
handleExceptions: true,
|
|
70
|
+
handleRejections: true,
|
|
71
|
+
}),
|
|
72
|
+
],
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
// Cache of extracted graph schemas, keyed by graph ID. Mutable because it is
// lazily filled by getOrExtractSchema() and may be pre-populated from
// `client.schemas.json` in main().
let GRAPH_SCHEMA: Record<string, Record<string, GraphSchema>> = {};
// Compiled graphs registered at startup, keyed by graph ID.
const GRAPH_RESOLVED: Record<string, CompiledGraph<string>> = {};
// Raw graph specs kept for later schema extraction.
const GRAPH_SPEC: Record<string, GraphSpec> = {};
|
|
78
|
+
|
|
79
|
+
function getGraph(graphId: string) {
|
|
80
|
+
if (!GRAPH_RESOLVED[graphId])
|
|
81
|
+
throw new HTTPException(404, { message: `Graph "${graphId}" not found` });
|
|
82
|
+
return GRAPH_RESOLVED[graphId];
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
async function getOrExtractSchema(graphId: string) {
|
|
86
|
+
if (!(graphId in GRAPH_SPEC)) {
|
|
87
|
+
throw new Error(`Spec for ${graphId} not found`);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if (!GRAPH_SCHEMA[graphId]) {
|
|
91
|
+
try {
|
|
92
|
+
const timer = logger.startTimer();
|
|
93
|
+
GRAPH_SCHEMA[graphId] = await runGraphSchemaWorker(GRAPH_SPEC[graphId]);
|
|
94
|
+
timer.done({ message: `Extracting schema for ${graphId} finished` });
|
|
95
|
+
} catch (error) {
|
|
96
|
+
throw new Error(`Failed to extract schema for "${graphId}": ${error}`);
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
return GRAPH_SCHEMA[graphId];
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
// Unix domain sockets used to communicate with the Python host process.
// The "http://checkpointer" / "http://store" host names used elsewhere are
// placeholders — actual routing happens via these socket paths.
const GRAPH_SOCKET = "./graph.sock";
const CHECKPOINTER_SOCKET = "./checkpointer.sock";
const STORE_SOCKET = "./store.sock";

// undici dispatchers pinned to the respective Unix sockets.
const checkpointerDispatcher = new Agent({
  connect: { socketPath: CHECKPOINTER_SOCKET },
});
const storeDispatcher = new Agent({ connect: { socketPath: STORE_SOCKET } });
|
|
111
|
+
|
|
112
|
+
// Wire format of a RunnableConfig as sent by the Python side (snake_case
// keys). Converted to the camelCase LangChain shape by getRunnableConfig().
const RunnableConfigSchema = z.object({
  tags: z.array(z.string()).optional(),
  metadata: z.record(z.unknown()).optional(),
  run_name: z.string().optional(),
  max_concurrency: z.number().optional(),
  recursion_limit: z.number().optional(),
  configurable: z.record(z.unknown()).optional(),
  run_id: z.string().uuid().optional(),
});
|
|
121
|
+
|
|
122
|
+
const getRunnableConfig = (
|
|
123
|
+
userConfig: z.infer<typeof RunnableConfigSchema> | null | undefined
|
|
124
|
+
) => {
|
|
125
|
+
if (!userConfig) return {};
|
|
126
|
+
return {
|
|
127
|
+
configurable: userConfig.configurable,
|
|
128
|
+
tags: userConfig.tags,
|
|
129
|
+
metadata: userConfig.metadata,
|
|
130
|
+
runName: userConfig.run_name,
|
|
131
|
+
maxConcurrency: userConfig.max_concurrency,
|
|
132
|
+
recursionLimit: userConfig.recursion_limit,
|
|
133
|
+
runId: userConfig.run_id,
|
|
134
|
+
};
|
|
135
|
+
};
|
|
136
|
+
|
|
137
|
+
function tryFetch(...args: Parameters<typeof fetch>) {
|
|
138
|
+
return pRetry(
|
|
139
|
+
async () => {
|
|
140
|
+
const response = await fetch(...args).catch((error) => {
|
|
141
|
+
throw new Error(`${args[0]} connecfailed: ${error}`);
|
|
142
|
+
});
|
|
143
|
+
|
|
144
|
+
if (!response.ok) {
|
|
145
|
+
let errorMessage = `${args[0]} failed: HTTP ${response.status}`;
|
|
146
|
+
try {
|
|
147
|
+
errorMessage += `: ${await response.text()}`;
|
|
148
|
+
} catch {}
|
|
149
|
+
throw new Error(errorMessage);
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
return response;
|
|
153
|
+
},
|
|
154
|
+
{
|
|
155
|
+
retries: 3,
|
|
156
|
+
factor: 2,
|
|
157
|
+
minTimeout: 1000,
|
|
158
|
+
onFailedAttempt: (error) => void logger.error(error),
|
|
159
|
+
}
|
|
160
|
+
);
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
/**
 * Checkpoint saver that proxies every operation to the Python host over
 * the checkpointer Unix socket. Requests are plain JSON; responses from
 * get_tuple/list are LangChain-serialised payloads revived via `load`.
 */
class RemoteCheckpointer extends BaseCheckpointSaver<number | string> {
  // Fetch a single checkpoint tuple for `config`; undefined when absent.
  async getTuple(config: RunnableConfig): Promise<CheckpointTuple | undefined> {
    const res = await tryFetch("http://checkpointer/get_tuple", {
      dispatcher: checkpointerDispatcher,
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ config }),
    });

    // Revive LangChain-serialised objects (messages etc.) from the response.
    const text = await res.text();
    const result = (await load(text, {
      importMap,
      optionalImportEntrypoints: [],
      optionalImportsMap: {},
      secretsMap: {},
    })) as any;

    if (!result) return undefined;
    // Map Python snake_case fields to the JS CheckpointTuple shape.
    return {
      checkpoint: result.checkpoint,
      config: result.config,
      metadata: result.metadata,
      parentConfig: result.parent_config,
      pendingWrites: result.pending_writes,
    };
  }
  // Stream checkpoint tuples matching `config`, newest-first per server
  // semantics; the whole response is fetched before yielding.
  async *list(
    config: RunnableConfig,
    options?: {
      limit?: number;
      before?: RunnableConfig;
      filter?: Record<string, any>;
    }
  ): AsyncGenerator<CheckpointTuple> {
    const res = await tryFetch("http://checkpointer/list", {
      dispatcher: checkpointerDispatcher,
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ config, ...options }),
    });

    const text = await res.text();
    const result = (await load(text, {
      importMap,
      optionalImportEntrypoints: [],
      optionalImportsMap: {},
      secretsMap: {},
    })) as any;

    for (const item of result) {
      yield {
        checkpoint: item.checkpoint,
        config: item.config,
        metadata: item.metadata,
        parentConfig: item.parent_config,
        pendingWrites: item.pending_writes,
      } satisfies CheckpointTuple;
    }
  }
  // Persist a checkpoint; returns the updated config from the server.
  async put(
    config: RunnableConfig,
    checkpoint: Checkpoint,
    metadata: CheckpointMetadata,
    newVersions: ChannelVersions
  ): Promise<RunnableConfig> {
    const response = await tryFetch("http://checkpointer/put", {
      dispatcher: checkpointerDispatcher,
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        config,
        checkpoint,
        metadata,
        new_versions: newVersions,
      }),
    });

    return (await response.json()) as RunnableConfig;
  }

  // Persist intermediate channel writes for a task.
  async putWrites(
    config: RunnableConfig,
    writes: [string, unknown][],
    taskId: string
  ): Promise<void> {
    // Implementation of the inherited abstract member 'putWrites'
    await tryFetch("http://checkpointer/put_writes", {
      dispatcher: checkpointerDispatcher,
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ config, writes, taskId }),
    });
  }

  // Compute the next channel version: a 32-char zero-padded counter,
  // optionally suffixed with an MD5 hash of the channel's checkpointed
  // value (the hash is best-effort; channels that cannot checkpoint are
  // silently skipped and the bare counter is returned).
  getNextVersion(
    current: number | string | undefined,
    _channel: ChannelProtocol
  ): string {
    let currentVersion = 0;

    if (current == null) {
      currentVersion = 0;
    } else if (typeof current === "number") {
      currentVersion = current;
    } else if (typeof current === "string") {
      // Versions may be "counter.hash"; only the counter part is numeric.
      currentVersion = Number.parseInt(current.split(".")[0], 10);
    }

    const nextVersion = String(currentVersion + 1).padStart(32, "0");
    try {
      const hash = createHash("md5")
        .update(serialiseAsDict(_channel.checkpoint()))
        .digest("hex");
      return `${nextVersion}.${hash}`;
    } catch {}

    return nextVersion;
  }
}
|
|
282
|
+
|
|
283
|
+
function camelToSnake(operation: Operation) {
|
|
284
|
+
const snakeCaseKeys = (obj: Record<string, any>): Record<string, any> => {
|
|
285
|
+
return Object.fromEntries(
|
|
286
|
+
Object.entries(obj).map(([key, value]) => {
|
|
287
|
+
const snakeKey = key.replace(
|
|
288
|
+
/[A-Z]/g,
|
|
289
|
+
(letter) => `_${letter.toLowerCase()}`
|
|
290
|
+
);
|
|
291
|
+
if (
|
|
292
|
+
typeof value === "object" &&
|
|
293
|
+
value !== null &&
|
|
294
|
+
!Array.isArray(value)
|
|
295
|
+
) {
|
|
296
|
+
return [snakeKey, snakeCaseKeys(value)];
|
|
297
|
+
}
|
|
298
|
+
return [snakeKey, value];
|
|
299
|
+
})
|
|
300
|
+
);
|
|
301
|
+
};
|
|
302
|
+
|
|
303
|
+
if ("namespace" in operation && "key" in operation) {
|
|
304
|
+
return {
|
|
305
|
+
namespace: operation.namespace,
|
|
306
|
+
key: operation.key,
|
|
307
|
+
...("value" in operation ? { value: operation.value } : {}),
|
|
308
|
+
};
|
|
309
|
+
} else if ("namespacePrefix" in operation) {
|
|
310
|
+
return {
|
|
311
|
+
namespace_prefix: operation.namespacePrefix,
|
|
312
|
+
filter: operation.filter,
|
|
313
|
+
limit: operation.limit,
|
|
314
|
+
offset: operation.offset,
|
|
315
|
+
};
|
|
316
|
+
} else if ("matchConditions" in operation) {
|
|
317
|
+
return {
|
|
318
|
+
match_conditions: operation.matchConditions?.map((condition) => ({
|
|
319
|
+
match_type: condition.matchType,
|
|
320
|
+
path: condition.path,
|
|
321
|
+
})),
|
|
322
|
+
max_depth: operation.maxDepth,
|
|
323
|
+
limit: operation.limit,
|
|
324
|
+
offset: operation.offset,
|
|
325
|
+
};
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
return snakeCaseKeys(operation) as Operation;
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
function pyItemToJs(item?: PyItem): Item | undefined {
|
|
332
|
+
if (!item) {
|
|
333
|
+
return undefined;
|
|
334
|
+
}
|
|
335
|
+
return {
|
|
336
|
+
namespace: item.namespace,
|
|
337
|
+
key: item.key,
|
|
338
|
+
value: item.value,
|
|
339
|
+
createdAt: item.created_at,
|
|
340
|
+
updatedAt: item.updated_at,
|
|
341
|
+
};
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
export class RemoteStore extends BaseStore {
|
|
345
|
+
async batch<Op extends Operation[]>(
|
|
346
|
+
operations: Op
|
|
347
|
+
): Promise<OperationResults<Op>> {
|
|
348
|
+
const response = await tryFetch("http://store/items/batch", {
|
|
349
|
+
dispatcher: storeDispatcher,
|
|
350
|
+
method: "POST",
|
|
351
|
+
headers: { "Content-Type": "application/json" },
|
|
352
|
+
body: JSON.stringify({ operations: operations.map(camelToSnake) }),
|
|
353
|
+
});
|
|
354
|
+
|
|
355
|
+
const results = (await response.json()) as PyResult[];
|
|
356
|
+
return results.map((result) => {
|
|
357
|
+
if (Array.isArray(result)) {
|
|
358
|
+
return result.map((item) => pyItemToJs(item));
|
|
359
|
+
} else if (
|
|
360
|
+
result &&
|
|
361
|
+
typeof result === "object" &&
|
|
362
|
+
"value" in result &&
|
|
363
|
+
"key" in result
|
|
364
|
+
) {
|
|
365
|
+
return pyItemToJs(result);
|
|
366
|
+
}
|
|
367
|
+
return result;
|
|
368
|
+
}) as OperationResults<Op>;
|
|
369
|
+
}
|
|
370
|
+
|
|
371
|
+
async get(namespace: string[], key: string): Promise<Item | null> {
|
|
372
|
+
const queryParams = new URLSearchParams({
|
|
373
|
+
namespace: namespace.join("."),
|
|
374
|
+
key,
|
|
375
|
+
});
|
|
376
|
+
const urlWithParams = `http://store/items?${queryParams.toString()}`;
|
|
377
|
+
const response = await tryFetch(urlWithParams, {
|
|
378
|
+
dispatcher: storeDispatcher,
|
|
379
|
+
method: "GET",
|
|
380
|
+
});
|
|
381
|
+
return (await response.json()) as Item | null;
|
|
382
|
+
}
|
|
383
|
+
|
|
384
|
+
async search(
|
|
385
|
+
namespacePrefix: string[],
|
|
386
|
+
options?: {
|
|
387
|
+
filter?: Record<string, any>;
|
|
388
|
+
limit?: number;
|
|
389
|
+
offset?: number;
|
|
390
|
+
}
|
|
391
|
+
): Promise<Item[]> {
|
|
392
|
+
const response = await tryFetch("http://store/items/search", {
|
|
393
|
+
dispatcher: storeDispatcher,
|
|
394
|
+
method: "POST",
|
|
395
|
+
headers: { "Content-Type": "application/json" },
|
|
396
|
+
body: JSON.stringify({ namespace_prefix: namespacePrefix, ...options }),
|
|
397
|
+
});
|
|
398
|
+
return (await response.json()) as Item[];
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
async put(
|
|
402
|
+
namespace: string[],
|
|
403
|
+
key: string,
|
|
404
|
+
value: Record<string, any>
|
|
405
|
+
): Promise<void> {
|
|
406
|
+
await tryFetch("http://store/items", {
|
|
407
|
+
dispatcher: storeDispatcher,
|
|
408
|
+
method: "PUT",
|
|
409
|
+
headers: { "Content-Type": "application/json" },
|
|
410
|
+
body: JSON.stringify({ namespace, key, value }),
|
|
411
|
+
});
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
async delete(namespace: string[], key: string): Promise<void> {
|
|
415
|
+
await tryFetch("http://store/items", {
|
|
416
|
+
dispatcher: storeDispatcher,
|
|
417
|
+
method: "DELETE",
|
|
418
|
+
headers: { "Content-Type": "application/json" },
|
|
419
|
+
body: JSON.stringify({ namespace, key }),
|
|
420
|
+
});
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
async listNamespaces(options: {
|
|
424
|
+
prefix?: string[];
|
|
425
|
+
suffix?: string[];
|
|
426
|
+
maxDepth?: number;
|
|
427
|
+
limit?: number;
|
|
428
|
+
offset?: number;
|
|
429
|
+
}): Promise<string[][]> {
|
|
430
|
+
const response = await tryFetch("http://store/list/namespaces", {
|
|
431
|
+
dispatcher: storeDispatcher,
|
|
432
|
+
method: "POST",
|
|
433
|
+
headers: { "Content-Type": "application/json" },
|
|
434
|
+
body: JSON.stringify({ max_depth: options?.maxDepth, ...options }),
|
|
435
|
+
});
|
|
436
|
+
|
|
437
|
+
const data = (await response.json()) as { namespaces: string[][] };
|
|
438
|
+
return data.namespaces;
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
// Stream modes natively supported by the graph's streamEvents.
const StreamModeSchema = z.union([
  z.literal("updates"),
  z.literal("debug"),
  z.literal("values"),
]);

// Client-facing stream modes; "messages" is emulated on top of "values"
// in the /streamEvents handler.
const ExtraStreamModeSchema = z.union([
  StreamModeSchema,
  z.literal("messages"),
]);

// Directory containing this module (ESM equivalent of CommonJS __dirname).
const __dirname = new URL(".", import.meta.url).pathname;
|
|
454
|
+
|
|
455
|
+
/**
 * Entry point: resolve all configured graphs, register the HTTP routes the
 * Python host calls (streamEvents, getGraph, getSubgraphs, getState,
 * updateState, getSchema, getStateHistory, /ok), and serve the app over
 * the graph Unix socket.
 */
async function main() {
  const app = new Hono();
  const checkpointer = new RemoteCheckpointer();
  const store = new RemoteStore();

  // Map of graphId -> spec string, provided by the launcher.
  // NOTE(review): assumes LANGSERVE_GRAPHS is always set — verify in cli.
  const specs = z
    .record(z.string())
    .parse(JSON.parse(process.env.LANGSERVE_GRAPHS));

  // Best-effort: warm the schema cache from disk unless disabled via flag.
  if (!process.argv.includes("--skip-schema-cache")) {
    try {
      GRAPH_SCHEMA = JSON.parse(
        await fs.readFile(path.resolve(__dirname, "client.schemas.json"), {
          encoding: "utf-8",
        })
      );
    } catch {
      // pass
    }
  }

  // Resolve all graphs in parallel and wire in the remote checkpointer/store.
  await Promise.all(
    Object.entries(specs).map(async ([graphId, rawSpec]) => {
      logger.info(`Resolving graph ${graphId}`);
      const { resolved, ...spec } = await resolveGraph(rawSpec);

      // TODO: make sure the types do not need to be upfront
      // @ts-expect-error Overriding checkpointer with different value type
      resolved.checkpointer = checkpointer;
      resolved.store = store;

      // registering the graph runtime
      GRAPH_RESOLVED[graphId] = resolved;
      GRAPH_SPEC[graphId] = spec;
    })
  );

  // Stream graph execution events over SSE, with optional emulation of the
  // "messages" stream mode on top of "values".
  app.post(
    "/:graphId/streamEvents",
    zValidator(
      "json",
      z.object({
        input: z.unknown(),
        stream_mode: z
          .union([ExtraStreamModeSchema, z.array(ExtraStreamModeSchema)])
          .optional(),
        config: RunnableConfigSchema.nullish(),
        interrupt_before: z
          .union([z.array(z.string()), z.literal("*")])
          .nullish(),
        interrupt_after: z
          .union([z.array(z.string()), z.literal("*")])
          .nullish(),
        subgraphs: z.boolean().optional(),
      })
    ),
    async (c) => {
      const graph = getGraph(c.req.param("graphId"));
      const payload = c.req.valid("json");

      // Normalise stream_mode into an array as requested by the client.
      const userStreamMode =
        payload.stream_mode == null
          ? []
          : Array.isArray(payload.stream_mode)
          ? payload.stream_mode
          : [payload.stream_mode];

      // Modes actually passed to the graph; "messages" maps to "values".
      const graphStreamMode: Set<"updates" | "debug" | "values"> = new Set();
      if (payload.stream_mode) {
        for (const mode of userStreamMode) {
          if (mode === "messages") {
            graphStreamMode.add("values");
          } else {
            graphStreamMode.add(mode);
          }
        }
      }

      const config = getRunnableConfig(payload.config);

      return streamSSE(c, async (stream) => {
        // Accumulators for the emulated "messages" mode: partial chunks
        // keyed by message id, and ids already emitted as complete.
        const messages: Record<string, BaseMessageChunk> = {};
        const completedIds = new Set<string>();

        // Empty interrupt lists are treated the same as absent ones.
        let interruptBefore: typeof payload.interrupt_before =
          payload.interrupt_before ?? undefined;

        if (Array.isArray(interruptBefore) && interruptBefore.length === 0)
          interruptBefore = undefined;

        let interruptAfter: typeof payload.interrupt_after =
          payload.interrupt_after ?? undefined;

        if (Array.isArray(interruptAfter) && interruptAfter.length === 0)
          interruptAfter = undefined;

        const streamMode = [...graphStreamMode];

        try {
          for await (const data of graph.streamEvents(payload.input, {
            ...config,
            version: "v2",
            streamMode,
            subgraphs: payload.subgraphs,
            interruptBefore,
            interruptAfter,
          })) {
            // TODO: upstream this fix to LangGraphJS
            if (streamMode.length === 1 && !Array.isArray(data.data.chunk)) {
              data.data.chunk = [streamMode[0], data.data.chunk];
            }

            // With subgraphs the chunk is prefixed with a namespace path.
            if (payload.subgraphs) {
              if (
                Array.isArray(data.data.chunk) &&
                data.data.chunk.length === 2
              ) {
                data.data.chunk = [[], ...data.data.chunk];
              }
            }

            await stream.writeSSE({
              event: "streamLog",
              data: serialiseAsDict(data),
            });

            // Emulated "messages" stream mode.
            if (userStreamMode.includes("messages")) {
              if (
                data.event === "on_chain_stream" &&
                data.run_id === config.runId
              ) {
                // Top-level values chunk: extract complete messages.
                const newMessages: Array<BaseMessageChunk> = [];
                const [_, chunk]: [string, any] = data.data.chunk;

                let chunkMessages: Array<BaseMessageChunk> = [];
                if (
                  typeof chunk === "object" &&
                  chunk != null &&
                  "messages" in chunk &&
                  !isBaseMessage(chunk)
                ) {
                  chunkMessages = chunk?.messages;
                }

                if (!Array.isArray(chunkMessages)) {
                  chunkMessages = [chunkMessages];
                }

                for (const message of chunkMessages) {
                  if (!message.id || completedIds.has(message.id)) continue;
                  completedIds.add(message.id);
                  newMessages.push(message);
                }

                if (newMessages.length > 0) {
                  await stream.writeSSE({
                    event: "streamLog",
                    data: serialiseAsDict({
                      event: "on_custom_event",
                      name: "messages/complete",
                      data: newMessages,
                    }),
                  });
                }
              } else if (
                data.event === "on_chat_model_stream" &&
                !data.tags?.includes("nostream")
              ) {
                // Token-level chunk: emit metadata once, then partials.
                const message: BaseMessageChunk = data.data.chunk;

                if (!message.id) continue;

                if (messages[message.id] == null) {
                  messages[message.id] = message;
                  await stream.writeSSE({
                    event: "streamLog",
                    data: serialiseAsDict({
                      event: "on_custom_event",
                      name: "messages/metadata",
                      data: { [message.id]: { metadata: data.metadata } },
                    }),
                  });
                } else {
                  messages[message.id] = messages[message.id].concat(message);
                }

                await stream.writeSSE({
                  event: "streamLog",
                  data: serialiseAsDict({
                    event: "on_custom_event",
                    name: "messages/partial",
                    data: [messages[message.id]],
                  }),
                });
              }
            }
          }
        } catch (error) {
          // Surface the error to the client as an SSE "error" event.
          const errorName = error instanceof Error ? error.name : "Error";
          const errorMessage =
            error instanceof Error ? error.message : JSON.stringify(error);

          await stream.writeSSE({
            event: "error",
            data: serialiseAsDict({
              error: errorName,
              message: errorMessage,
            }),
          });

          // Still print out the error, as the stack
          // trace is not carried over in Python
          logger.error(error);
        }
      });
    }
  );

  // Return the drawable graph structure as JSON.
  app.post(
    "/:graphId/getGraph",
    zValidator(
      "json",
      z.object({
        config: RunnableConfigSchema.nullish(),
        xray: z.union([z.number(), z.boolean()]).nullish(),
      })
    ),
    async (c) => {
      const graphId = c.req.param("graphId");
      const graph = getGraph(graphId);
      return c.json(
        graph
          .getGraph({
            ...getRunnableConfig(c.req.valid("json").config),
            xray: c.req.valid("json").xray ?? undefined,
          })
          .toJSON()
      );
    }
  );

  // Return the schema of each subgraph, keyed by subgraph name.
  app.post(
    "/:graphId/getSubgraphs",
    zValidator(
      "json",
      z.object({
        namespace: z.string().nullish(),
        recurse: z.boolean().nullish(),
      })
    ),

    async (c) => {
      const graphId = c.req.param("graphId");
      const graph = getGraph(graphId);

      const payload = c.req.valid("json");
      const result: Array<[name: string, Record<string, any>]> = [];

      const graphSchema = await getOrExtractSchema(graphId);
      // Schema keys are "root|sub|..." paths; the root has no "|".
      const rootGraphId = Object.keys(graphSchema).find(
        (i) => !i.includes("|")
      );

      if (!rootGraphId)
        throw new HTTPException(500, { message: "Failed to find root graph" });

      for (const [name] of graph.getSubgraphs(
        payload.namespace ?? undefined,
        payload.recurse ?? undefined
      )) {
        // Fall back to the root schema when the subgraph has no own entry.
        const schema =
          graphSchema[`${rootGraphId}|${name}`] || graphSchema[rootGraphId];
        result.push([name, schema]);
      }

      return c.json(Object.fromEntries(result));
    }
  );

  // Return the current checkpointed state for a thread config.
  app.post(
    "/:graphId/getState",
    zValidator(
      "json",
      z.object({
        config: RunnableConfigSchema,
        subgraphs: z.boolean().nullish(),
      })
    ),
    async (c) => {
      const graph = getGraph(c.req.param("graphId"));
      const payload = c.req.valid("json");

      const state = await graph.getState(getRunnableConfig(payload.config), {
        subgraphs: payload.subgraphs ?? undefined,
      });
      // TODO: just send the JSON directly, don't ser/de twice
      return c.json(JSON.parse(serialiseAsDict(state)));
    }
  );

  // Apply a state update (optionally as a specific node) and return the
  // resulting config.
  app.post(
    "/:graphId/updateState",
    zValidator(
      "json",
      z.object({
        config: RunnableConfigSchema,
        values: z.unknown(),
        as_node: z.string().nullish(),
      })
    ),
    async (c) => {
      const graph = getGraph(c.req.param("graphId"));
      const payload = c.req.valid("json");

      const config = await graph.updateState(
        getRunnableConfig(payload.config),
        payload.values,
        payload.as_node ?? undefined
      );

      return c.json(config);
    }
  );

  // Return the root graph's input/output/config schemas.
  app.post("/:graphId/getSchema", async (c) => {
    const schemas = await getOrExtractSchema(c.req.param("graphId"));
    const rootGraphId = Object.keys(schemas).find((i) => !i.includes("|"));
    if (!rootGraphId) {
      throw new HTTPException(500, { message: "Failed to find root graph" });
    }
    return c.json(schemas[rootGraphId]);
  });

  // Stream the state history for a thread config as SSE events.
  app.post(
    "/:graphId/getStateHistory",
    zValidator(
      "json",
      z.object({
        config: RunnableConfigSchema,
        limit: z.number().nullish(),
        before: RunnableConfigSchema.nullish(),
        filter: z.record(z.unknown()).nullish(),
      })
    ),
    async (c) => {
      const graph = getGraph(c.req.param("graphId"));
      const payload = c.req.valid("json");

      return streamSSE(c, async (stream) => {
        for await (const item of graph.getStateHistory(
          getRunnableConfig(payload.config),
          {
            limit: payload.limit ?? undefined,
            before: payload.before
              ? getRunnableConfig(payload.before)
              : undefined,
            filter: payload.filter ?? undefined,
          }
        )) {
          await stream.writeSSE({
            data: serialiseAsDict(item),
            event: "getStateHistory",
          });
        }
      });
    }
  );

  // Liveness probe.
  app.get("/ok", (c) => c.json({ ok: true }));

  app.onError((err, c) => {
    logger.error(err);
    if (err instanceof HTTPException && err.status === 401) {
      return err.getResponse();
    }
    return c.text("Internal server error", 500);
  });

  // Remove a stale socket file from a previous run, then listen on the
  // Unix socket (passed via `port`, which node-server accepts as a path).
  await fs.unlink(GRAPH_SOCKET).catch(() => void 0);
  serve(
    {
      fetch: app.fetch,
      hostname: "localhost",
      port: GRAPH_SOCKET as any,
    },
    (c) => logger.info(`Listening to ${c}`)
  );
}
|
|
843
|
+
|
|
844
|
+
process.on("uncaughtExceptionMonitor", (error) => {
|
|
845
|
+
logger.error(error);
|
|
846
|
+
process.exit(1);
|
|
847
|
+
});
|
|
848
|
+
|
|
849
|
+
main();
|