langgraph-api 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic. Click here for more details.

@@ -0,0 +1,856 @@
1
+ /// <reference types="./global.d.ts" />
2
+
3
+ import { z } from "zod";
4
+ import zeromq from "zeromq";
5
+ import PQueue from "p-queue";
6
+ import { v4 as uuid4 } from "uuid";
7
+ import {
8
+ BaseStore,
9
+ Item,
10
+ Operation,
11
+ Command,
12
+ OperationResults,
13
+ type Checkpoint,
14
+ type CheckpointMetadata,
15
+ type CheckpointTuple,
16
+ type CompiledGraph,
17
+ } from "@langchain/langgraph";
18
+ import {
19
+ BaseCheckpointSaver,
20
+ type ChannelVersions,
21
+ type ChannelProtocol,
22
+ } from "@langchain/langgraph-checkpoint";
23
+ import { createHash } from "node:crypto";
24
+ import * as fs from "node:fs/promises";
25
+ import * as path from "node:path";
26
+ import { serialiseAsDict, serializeError } from "./src/utils/serde.mjs";
27
+ import * as importMap from "./src/utils/importMap.mjs";
28
+
29
+ import { createLogger, format, transports } from "winston";
30
+
31
+ import { load } from "@langchain/core/load";
32
+ import { BaseMessageChunk, isBaseMessage } from "@langchain/core/messages";
33
+ import type { PyItem, PyResult } from "./src/utils/pythonSchemas.mts";
34
+ import type { RunnableConfig } from "@langchain/core/runnables";
35
+ import {
36
+ runGraphSchemaWorker,
37
+ GraphSchema,
38
+ resolveGraph,
39
+ GraphSpec,
40
+ } from "./src/graph.mts";
41
+
42
+ const logger = createLogger({
43
+ level: "debug",
44
+ format: format.combine(
45
+ format.errors({ stack: true }),
46
+ format.timestamp(),
47
+ format.json(),
48
+ format.printf((info) => {
49
+ const { timestamp, level, message, ...rest } = info;
50
+
51
+ let event;
52
+ if (typeof message === "string") {
53
+ event = message;
54
+ } else {
55
+ event = JSON.stringify(message);
56
+ }
57
+
58
+ if (rest.stack) {
59
+ rest.message = event;
60
+ event = rest.stack;
61
+ }
62
+
63
+ return JSON.stringify({ timestamp, level, event, ...rest });
64
+ })
65
+ ),
66
+ transports: [
67
+ new transports.Console({
68
+ handleExceptions: true,
69
+ handleRejections: true,
70
+ }),
71
+ ],
72
+ });
73
+
74
// Cache of extracted schemas, keyed by graph id, then by (sub)graph name.
// May be pre-populated from "client.schemas.json" at startup (see main()).
let GRAPH_SCHEMA: Record<string, Record<string, GraphSchema>> = {};
// Compiled graphs registered at startup, keyed by graph id.
const GRAPH_RESOLVED: Record<string, CompiledGraph<string>> = {};
// Raw graph specs parsed from LANGSERVE_GRAPHS, keyed by graph id.
const GRAPH_SPEC: Record<string, GraphSpec> = {};
77
+
78
+ function getGraph(graphId: string) {
79
+ if (!GRAPH_RESOLVED[graphId]) throw new Error(`Graph "${graphId}" not found`);
80
+ return GRAPH_RESOLVED[graphId];
81
+ }
82
+
83
+ async function getOrExtractSchema(graphId: string) {
84
+ if (!(graphId in GRAPH_SPEC)) {
85
+ throw new Error(`Spec for ${graphId} not found`);
86
+ }
87
+
88
+ if (!GRAPH_SCHEMA[graphId]) {
89
+ try {
90
+ const timer = logger.startTimer();
91
+ GRAPH_SCHEMA[graphId] = await runGraphSchemaWorker(GRAPH_SPEC[graphId]);
92
+ timer.done({ message: `Extracting schema for ${graphId} finished` });
93
+ } catch (error) {
94
+ throw new Error(`Failed to extract schema for "${graphId}": ${error}`);
95
+ }
96
+ }
97
+
98
+ return GRAPH_SCHEMA[graphId];
99
+ }
100
+
101
// ROUTER socket address: serves request/response traffic from clients.
const CLIENT_ADDR = "tcp://*:5556";
// DEALER socket address: forwards checkpointer/store calls to the remote side.
const REMOTE_ADDR = "tcp://0.0.0.0:5555";

// How often an in-flight request re-sends a bare keep-alive frame
// (see createSendWithTTL).
const CLIENT_HEARTBEAT_INTERVAL_MS = 5_000;

const clientRouter = new zeromq.Router();
const remoteDealer = new zeromq.Dealer();
108
+
109
// Wire-format (snake_case) schema for RunnableConfig payloads; converted
// to the camelCase shape LangChain expects by getRunnableConfig.
const RunnableConfigSchema = z.object({
  tags: z.array(z.string()).optional(),
  metadata: z.record(z.unknown()).optional(),
  run_name: z.string().optional(),
  max_concurrency: z.number().optional(),
  recursion_limit: z.number().optional(),
  configurable: z.record(z.unknown()).optional(),
  run_id: z.string().uuid().optional(),
});
118
+
119
+ const getRunnableConfig = (
120
+ userConfig: z.infer<typeof RunnableConfigSchema> | null | undefined
121
+ ) => {
122
+ if (!userConfig) return {};
123
+ return {
124
+ configurable: userConfig.configurable,
125
+ tags: userConfig.tags,
126
+ metadata: userConfig.metadata,
127
+ runName: userConfig.run_name,
128
+ maxConcurrency: userConfig.max_concurrency,
129
+ recursionLimit: userConfig.recursion_limit,
130
+ runId: userConfig.run_id,
131
+ };
132
+ };
133
+
134
+ const textEncoder = new TextEncoder();
135
+ const textDecoder = new TextDecoder();
136
+
137
+ // TODO: consider swapping to msgpackr
138
+ const packPlain = (value: unknown) => textEncoder.encode(JSON.stringify(value));
139
+ const pack = (value: unknown) => textEncoder.encode(serialiseAsDict(value));
140
+
141
+ function unpackPlain<T>(value: AllowSharedBufferSource) {
142
+ return JSON.parse(textDecoder.decode(value)) as T;
143
+ }
144
+
145
+ function unpack<T>(value: AllowSharedBufferSource) {
146
+ return load<T>(textDecoder.decode(value), {
147
+ importMap,
148
+ optionalImportEntrypoints: [],
149
+ optionalImportsMap: {},
150
+ secretsMap: {},
151
+ });
152
+ }
153
+
154
+ interface Future<T> {
155
+ resolve: (value: T) => void;
156
+ reject: (reason?: any) => void;
157
+ promise: Promise<T>;
158
+ }
159
+
160
+ const remoteTasks: Record<string, Future<unknown>> = {};
161
+
162
+ const createFuture = (id: string) => {
163
+ const newPromise = new Promise<unknown>((resolve, reject) => {
164
+ remoteTasks[id] = { resolve, reject, promise: null! };
165
+ });
166
+ remoteTasks[id].promise = newPromise;
167
+ };
168
+
169
+ // Only a singular read is allowed at a time
170
+ const queue = new PQueue({ concurrency: 1 });
171
+ const scheduleRead = async (): Promise<void> => {
172
+ type ResponsePayload =
173
+ | { method: string; id: string; success: true; data: unknown }
174
+ | {
175
+ method: string;
176
+ id: string;
177
+ success: false;
178
+ data: { error: string; message: string };
179
+ };
180
+
181
+ const [buf] = await remoteDealer.receive();
182
+ const response = await unpack<ResponsePayload>(buf);
183
+
184
+ const future = remoteTasks[response.id];
185
+ if (!future) throw new Error(`No future for ${response.id}`);
186
+
187
+ if (response.success) {
188
+ future.resolve(response.data);
189
+ } else {
190
+ future.reject(new Error(response.data.message || response.data.error));
191
+ }
192
+ };
193
+
194
+ interface RouterPacket {
195
+ header: Buffer;
196
+ input: {
197
+ method: string;
198
+ id: string;
199
+ data: Record<string, any>;
200
+ };
201
+ }
202
+
203
+ async function* getRouterPackets(): AsyncGenerator<RouterPacket> {
204
+ for await (const [header, binary] of clientRouter) {
205
+ const data = unpackPlain<RouterPacket["input"]>(binary);
206
+ yield { header, input: data };
207
+ }
208
+ }
209
+
210
+ async function sendRecv<T = any>(
211
+ method: `${"checkpointer" | "store"}_${string}`,
212
+ data: unknown
213
+ ): Promise<T> {
214
+ const id = uuid4();
215
+ createFuture(id);
216
+
217
+ try {
218
+ await remoteDealer.send(packPlain({ method, id, data }));
219
+ queue.add(scheduleRead, { timeout: 10_000, throwOnTimeout: true });
220
+
221
+ return (await remoteTasks[id].promise) as T;
222
+ } finally {
223
+ delete remoteTasks[id];
224
+ }
225
+ }
226
+
227
+ const createSendWithTTL = (packet: RouterPacket) => {
228
+ const { header, input } = packet;
229
+ const { method, id } = input;
230
+
231
+ let timer: NodeJS.Timeout | undefined = undefined;
232
+ const sendData = async (result?: { success: boolean; data: unknown }) => {
233
+ clearTimeout(timer);
234
+ await clientRouter.send([header, pack({ method, id, ...result })]);
235
+ timer = setTimeout(() => sendData(), CLIENT_HEARTBEAT_INTERVAL_MS);
236
+ };
237
+
238
+ return { sendData, clear: () => clearTimeout(timer) };
239
+ };
240
+
241
+ const handleInvoke = async <T extends z.ZodType<any>>(
242
+ packet: RouterPacket,
243
+ schema: T,
244
+ request: (rawPayload: z.infer<T>) => Promise<any>
245
+ ) => {
246
+ const { sendData, clear } = createSendWithTTL(packet);
247
+ try {
248
+ const data = await request(schema.parse(packet.input.data));
249
+ await sendData({ success: true, data });
250
+ } catch (error) {
251
+ logger.error(error);
252
+ const data = serializeError(error);
253
+ await sendData({ success: false, data });
254
+ } finally {
255
+ clear();
256
+ }
257
+ };
258
+
259
+ const handleStream = async <T extends z.ZodType<any>>(
260
+ packet: RouterPacket,
261
+ schema: T,
262
+ request: (rawPayload: z.infer<T>) => AsyncGenerator<any>
263
+ ) => {
264
+ const { sendData, clear } = createSendWithTTL(packet);
265
+
266
+ let done = false;
267
+ try {
268
+ const generator = request(schema.parse(packet.input.data));
269
+ while (!done) {
270
+ const data = await generator.next();
271
+ done = data.done ?? false;
272
+ await sendData({ success: true, data });
273
+ }
274
+ } catch (error) {
275
+ logger.error(error);
276
+ const data = serializeError(error);
277
+ await sendData({ success: false, data });
278
+ } finally {
279
+ clear();
280
+ }
281
+ };
282
+
283
+ class RemoteCheckpointer extends BaseCheckpointSaver<number | string> {
284
+ async getTuple(config: RunnableConfig): Promise<CheckpointTuple | undefined> {
285
+ const result = await sendRecv("checkpointer_get_tuple", { config });
286
+
287
+ if (!result) return undefined;
288
+ return {
289
+ checkpoint: result.checkpoint,
290
+ config: result.config,
291
+ metadata: result.metadata,
292
+ parentConfig: result.parent_config,
293
+ pendingWrites: result.pending_writes,
294
+ };
295
+ }
296
+
297
+ async *list(
298
+ config: RunnableConfig,
299
+ options?: {
300
+ limit?: number;
301
+ before?: RunnableConfig;
302
+ filter?: Record<string, any>;
303
+ }
304
+ ): AsyncGenerator<CheckpointTuple> {
305
+ const result = await sendRecv("checkpointer_list", { config, ...options });
306
+
307
+ for (const item of result) {
308
+ yield {
309
+ checkpoint: item.checkpoint,
310
+ config: item.config,
311
+ metadata: item.metadata,
312
+ parentConfig: item.parent_config,
313
+ pendingWrites: item.pending_writes,
314
+ };
315
+ }
316
+ }
317
+
318
+ async put(
319
+ config: RunnableConfig,
320
+ checkpoint: Checkpoint,
321
+ metadata: CheckpointMetadata,
322
+ newVersions: ChannelVersions
323
+ ): Promise<RunnableConfig> {
324
+ return await sendRecv<RunnableConfig>("checkpointer_put", {
325
+ config,
326
+ checkpoint,
327
+ metadata,
328
+ new_versions: newVersions,
329
+ });
330
+ }
331
+
332
+ async putWrites(
333
+ config: RunnableConfig,
334
+ writes: [string, unknown][],
335
+ taskId: string
336
+ ): Promise<void> {
337
+ await sendRecv("checkpointer_put_writes", { config, writes, taskId });
338
+ }
339
+
340
+ getNextVersion(
341
+ current: number | string | undefined,
342
+ _channel: ChannelProtocol
343
+ ): string {
344
+ let currentVersion = 0;
345
+
346
+ if (current == null) {
347
+ currentVersion = 0;
348
+ } else if (typeof current === "number") {
349
+ currentVersion = current;
350
+ } else if (typeof current === "string") {
351
+ currentVersion = Number.parseInt(current.split(".")[0], 10);
352
+ }
353
+
354
+ const nextVersion = String(currentVersion + 1).padStart(32, "0");
355
+ try {
356
+ const hash = createHash("md5")
357
+ .update(serialiseAsDict(_channel.checkpoint()))
358
+ .digest("hex");
359
+ return `${nextVersion}.${hash}`;
360
+ } catch {}
361
+
362
+ return nextVersion;
363
+ }
364
+ }
365
+
366
+ function camelToSnake(operation: Operation) {
367
+ const snakeCaseKeys = (obj: Record<string, any>): Record<string, any> => {
368
+ return Object.fromEntries(
369
+ Object.entries(obj).map(([key, value]) => {
370
+ const snakeKey = key.replace(
371
+ /[A-Z]/g,
372
+ (letter) => `_${letter.toLowerCase()}`
373
+ );
374
+ if (
375
+ typeof value === "object" &&
376
+ value !== null &&
377
+ !Array.isArray(value)
378
+ ) {
379
+ return [snakeKey, snakeCaseKeys(value)];
380
+ }
381
+ return [snakeKey, value];
382
+ })
383
+ );
384
+ };
385
+
386
+ if ("namespace" in operation && "key" in operation) {
387
+ return {
388
+ namespace: operation.namespace,
389
+ key: operation.key,
390
+ ...("value" in operation ? { value: operation.value } : {}),
391
+ };
392
+ } else if ("namespacePrefix" in operation) {
393
+ return {
394
+ namespace_prefix: operation.namespacePrefix,
395
+ filter: operation.filter,
396
+ limit: operation.limit,
397
+ offset: operation.offset,
398
+ };
399
+ } else if ("matchConditions" in operation) {
400
+ return {
401
+ match_conditions: operation.matchConditions?.map((condition) => ({
402
+ match_type: condition.matchType,
403
+ path: condition.path,
404
+ })),
405
+ max_depth: operation.maxDepth,
406
+ limit: operation.limit,
407
+ offset: operation.offset,
408
+ };
409
+ }
410
+
411
+ return snakeCaseKeys(operation) as Operation;
412
+ }
413
+
414
+ function pyItemToJs(item?: PyItem): Item | undefined {
415
+ if (!item) {
416
+ return undefined;
417
+ }
418
+ return {
419
+ namespace: item.namespace,
420
+ key: item.key,
421
+ value: item.value,
422
+ createdAt: item.created_at,
423
+ updatedAt: item.updated_at,
424
+ };
425
+ }
426
+
427
+ export class RemoteStore extends BaseStore {
428
+ async batch<Op extends Operation[]>(
429
+ operations: Op
430
+ ): Promise<OperationResults<Op>> {
431
+ const results = await sendRecv<PyResult[]>("store_batch", {
432
+ operations: operations.map(camelToSnake),
433
+ });
434
+
435
+ return results.map((result) => {
436
+ if (Array.isArray(result)) {
437
+ return result.map((item) => pyItemToJs(item));
438
+ } else if (
439
+ result &&
440
+ typeof result === "object" &&
441
+ "value" in result &&
442
+ "key" in result
443
+ ) {
444
+ return pyItemToJs(result);
445
+ }
446
+ return result;
447
+ }) as OperationResults<Op>;
448
+ }
449
+
450
+ async get(namespace: string[], key: string): Promise<Item | null> {
451
+ return await sendRecv<Item | null>("store_get", {
452
+ namespace: namespace.join("."),
453
+ key,
454
+ });
455
+ }
456
+
457
+ async search(
458
+ namespacePrefix: string[],
459
+ options?: {
460
+ filter?: Record<string, any>;
461
+ limit?: number;
462
+ offset?: number;
463
+ }
464
+ ): Promise<Item[]> {
465
+ return await sendRecv<Item[]>("store_search", {
466
+ namespace_prefix: namespacePrefix,
467
+ ...options,
468
+ });
469
+ }
470
+
471
+ async put(
472
+ namespace: string[],
473
+ key: string,
474
+ value: Record<string, any>
475
+ ): Promise<void> {
476
+ await sendRecv("store_put", { namespace, key, value });
477
+ }
478
+
479
+ async delete(namespace: string[], key: string): Promise<void> {
480
+ await sendRecv("store_delete", { namespace, key });
481
+ }
482
+
483
+ async listNamespaces(options: {
484
+ prefix?: string[];
485
+ suffix?: string[];
486
+ maxDepth?: number;
487
+ limit?: number;
488
+ offset?: number;
489
+ }): Promise<string[][]> {
490
+ const data = await sendRecv<{ namespaces: string[][] }>(
491
+ "store_list_namespaces",
492
+ { max_depth: options?.maxDepth, ...options }
493
+ );
494
+ return data.namespaces;
495
+ }
496
+ }
497
+
498
// Stream modes the graph supports natively.
const StreamModeSchema = z.union([
  z.literal("updates"),
  z.literal("debug"),
  z.literal("values"),
]);

// Client-facing stream modes; "messages" is synthesised from "values"
// events inside streamEventsRequest rather than passed to the graph.
const ExtraStreamModeSchema = z.union([
  StreamModeSchema,
  z.literal("messages"),
]);
508
+
509
// Payload for the "streamEvents" method (snake_case wire format).
const StreamEventsPayload = z.object({
  graph_id: z.string(),
  input: z.unknown(),
  // When present, a Command is constructed from it and used instead of
  // "input" — presumably to resume from an interrupt; see streamEventsRequest.
  command: z.object({ resume: z.unknown() }).nullish(),
  stream_mode: z
    .union([ExtraStreamModeSchema, z.array(ExtraStreamModeSchema)])
    .optional(),
  config: RunnableConfigSchema.nullish(),
  // "*" selects all nodes; an empty array is treated as "no interrupts".
  interrupt_before: z.union([z.array(z.string()), z.literal("*")]).nullish(),
  interrupt_after: z.union([z.array(z.string()), z.literal("*")]).nullish(),
  subgraphs: z.boolean().optional(),
});
521
+
522
// Stream graph events to the client. Graph-native stream modes are passed
// through; the synthetic "messages" mode is derived here by watching
// "values" chunks and chat-model token streams, emitting custom
// messages/metadata, messages/partial and messages/complete events.
async function* streamEventsRequest(
  rawPayload: z.infer<typeof StreamEventsPayload>
) {
  const { graph_id: graphId, ...payload } = rawPayload;
  const graph = getGraph(graphId);

  // A command (e.g. resume) takes precedence over plain input.
  const input = payload.command ? new Command(payload.command) : payload.input;

  // Normalise stream_mode into an array of requested modes.
  const userStreamMode =
    payload.stream_mode == null
      ? []
      : Array.isArray(payload.stream_mode)
      ? payload.stream_mode
      : [payload.stream_mode];

  // Modes actually forwarded to the graph: "messages" is implemented on
  // top of "values", so it is mapped accordingly.
  const graphStreamMode: Set<"updates" | "debug" | "values"> = new Set();
  if (payload.stream_mode) {
    for (const mode of userStreamMode) {
      if (mode === "messages") {
        graphStreamMode.add("values");
      } else {
        graphStreamMode.add(mode);
      }
    }
  }

  const config = getRunnableConfig(payload.config);

  // Accumulated chat-model chunks by message id, and ids already emitted
  // as complete (to avoid duplicates).
  const messages: Record<string, BaseMessageChunk> = {};
  const completedIds = new Set<string>();

  // Empty interrupt arrays are treated the same as "not set".
  let interruptBefore: typeof payload.interrupt_before =
    payload.interrupt_before ?? undefined;

  if (Array.isArray(interruptBefore) && interruptBefore.length === 0)
    interruptBefore = undefined;

  let interruptAfter: typeof payload.interrupt_after =
    payload.interrupt_after ?? undefined;

  if (Array.isArray(interruptAfter) && interruptAfter.length === 0)
    interruptAfter = undefined;

  const streamMode = [...graphStreamMode];

  for await (const data of graph.streamEvents(input, {
    ...config,
    version: "v2",
    streamMode,
    subgraphs: payload.subgraphs,
    interruptBefore,
    interruptAfter,
  })) {
    // TODO: upstream this fix to LangGraphJS
    // With a single stream mode the chunk arrives untagged; wrap it so the
    // client always sees [mode, chunk].
    if (streamMode.length === 1 && !Array.isArray(data.data.chunk)) {
      data.data.chunk = [streamMode[0], data.data.chunk];
    }

    // With subgraphs enabled, prepend an (empty) namespace path so chunks
    // are uniformly [namespace, mode, chunk].
    if (payload.subgraphs) {
      if (Array.isArray(data.data.chunk) && data.data.chunk.length === 2) {
        data.data.chunk = [[], ...data.data.chunk];
      }
    }

    yield data;

    if (userStreamMode.includes("messages")) {
      // Top-level "values" chunk for this run: harvest any newly completed
      // messages from the state.
      if (data.event === "on_chain_stream" && data.run_id === config.runId) {
        const newMessages: Array<BaseMessageChunk> = [];
        const [_, chunk]: [string, any] = data.data.chunk;

        // Extract state.messages unless the chunk itself is a message.
        let chunkMessages: Array<BaseMessageChunk> = [];
        if (
          typeof chunk === "object" &&
          chunk != null &&
          "messages" in chunk &&
          !isBaseMessage(chunk)
        ) {
          chunkMessages = chunk?.messages;
        }

        if (!Array.isArray(chunkMessages)) {
          chunkMessages = [chunkMessages];
        }

        for (const message of chunkMessages) {
          // Only messages with ids can be deduplicated; emit each id once.
          if (!message.id || completedIds.has(message.id)) continue;
          completedIds.add(message.id);
          newMessages.push(message);
        }

        if (newMessages.length > 0) {
          yield {
            event: "on_custom_event",
            name: "messages/complete",
            data: newMessages,
          };
        }
      } else if (
        data.event === "on_chat_model_stream" &&
        !data.tags?.includes("nostream")
      ) {
        const message: BaseMessageChunk = data.data.chunk;

        if (!message.id) continue;

        if (messages[message.id] == null) {
          // First chunk for this message: record it and announce metadata.
          messages[message.id] = message;
          yield {
            event: "on_custom_event",
            name: "messages/metadata",
            data: { [message.id]: { metadata: data.metadata } },
          };
        } else {
          // Subsequent chunks are merged into the accumulated message.
          messages[message.id] = messages[message.id].concat(message);
        }

        yield {
          event: "on_custom_event",
          name: "messages/partial",
          data: [messages[message.id]],
        };
      }
    }
  }
}
648
+
649
+ const GetGraphPayload = z.object({
650
+ graph_id: z.string(),
651
+ config: RunnableConfigSchema.nullish(),
652
+ xray: z.union([z.number(), z.boolean()]).nullish(),
653
+ });
654
+
655
+ async function getGraphRequest(rawPayload: z.infer<typeof GetGraphPayload>) {
656
+ const { graph_id: graphId, ...payload } = rawPayload;
657
+ const graph = getGraph(graphId);
658
+ return graph
659
+ .getGraph({
660
+ ...getRunnableConfig(payload.config),
661
+ xray: payload.xray ?? undefined,
662
+ })
663
+ .toJSON();
664
+ }
665
+
666
+ const GetSubgraphsPayload = z.object({
667
+ graph_id: z.string(),
668
+ namespace: z.string().nullish(),
669
+ recurse: z.boolean().nullish(),
670
+ });
671
+
672
+ async function getSubgraphsRequest(
673
+ rawPayload: z.infer<typeof GetSubgraphsPayload>
674
+ ) {
675
+ const { graph_id: graphId, ...payload } = rawPayload;
676
+ const graph = getGraph(graphId);
677
+ const result: Array<[name: string, Record<string, any>]> = [];
678
+
679
+ const graphSchema = await getOrExtractSchema(graphId);
680
+ const rootGraphId = Object.keys(graphSchema).find((i) => !i.includes("|"));
681
+
682
+ if (!rootGraphId) throw new Error("Failed to find root graph");
683
+
684
+ for (const [name] of graph.getSubgraphs(
685
+ payload.namespace ?? undefined,
686
+ payload.recurse ?? undefined
687
+ )) {
688
+ const schema =
689
+ graphSchema[`${rootGraphId}|${name}`] || graphSchema[rootGraphId];
690
+ result.push([name, schema]);
691
+ }
692
+
693
+ // TODO: make this a stream
694
+ return Object.fromEntries(result);
695
+ }
696
+
697
+ const GetStatePayload = z.object({
698
+ graph_id: z.string(),
699
+ config: RunnableConfigSchema,
700
+ subgraphs: z.boolean().nullish(),
701
+ });
702
+
703
+ async function getStateRequest(rawPayload: z.infer<typeof GetStatePayload>) {
704
+ const { graph_id: graphId, ...payload } = rawPayload;
705
+ const graph = getGraph(graphId);
706
+
707
+ const state = await graph.getState(getRunnableConfig(payload.config), {
708
+ subgraphs: payload.subgraphs ?? undefined,
709
+ });
710
+
711
+ return state;
712
+ }
713
+
714
+ const UpdateStatePayload = z.object({
715
+ graph_id: z.string(),
716
+ config: RunnableConfigSchema,
717
+ values: z.unknown(),
718
+ as_node: z.string().nullish(),
719
+ });
720
+
721
+ async function updateStateRequest(
722
+ rawPayload: z.infer<typeof UpdateStatePayload>
723
+ ) {
724
+ const { graph_id: graphId, ...payload } = rawPayload;
725
+ const graph = getGraph(graphId);
726
+
727
+ const config = await graph.updateState(
728
+ getRunnableConfig(payload.config),
729
+ payload.values,
730
+ payload.as_node ?? undefined
731
+ );
732
+
733
+ return config;
734
+ }
735
+
736
+ const GetSchemaPayload = z.object({ graph_id: z.string() });
737
+
738
+ async function getSchemaRequest(payload: z.infer<typeof GetSchemaPayload>) {
739
+ const { graph_id: graphId } = payload;
740
+ const schemas = await getOrExtractSchema(graphId);
741
+ const rootGraphId = Object.keys(schemas).find((i) => !i.includes("|"));
742
+ if (!rootGraphId) {
743
+ throw new Error("Failed to find root graph");
744
+ }
745
+ return schemas[rootGraphId];
746
+ }
747
+
748
+ const GetStateHistoryPayload = z.object({
749
+ graph_id: z.string(),
750
+ config: RunnableConfigSchema,
751
+ limit: z.number().nullish(),
752
+ before: RunnableConfigSchema.nullish(),
753
+ filter: z.record(z.unknown()).nullish(),
754
+ });
755
+
756
+ async function* getStateHistoryRequest(
757
+ rawPayload: z.infer<typeof GetStateHistoryPayload>
758
+ ) {
759
+ const { graph_id: graphId, ...payload } = rawPayload;
760
+ const graph = getGraph(graphId);
761
+
762
+ for await (const item of graph.getStateHistory(
763
+ getRunnableConfig(payload.config),
764
+ {
765
+ limit: payload.limit ?? undefined,
766
+ before: payload.before ? getRunnableConfig(payload.before) : undefined,
767
+ filter: payload.filter ?? undefined,
768
+ }
769
+ )) {
770
+ yield item;
771
+ }
772
+ }
773
+
774
// Directory of this module; used to locate the cached schema file.
const __dirname = new URL(".", import.meta.url).pathname;

// Entrypoint: wire up both sockets, register every graph listed in the
// LANGSERVE_GRAPHS env var, then serve client requests until the process
// exits.
async function main() {
  remoteDealer.connect(REMOTE_ADDR);
  await clientRouter.bind(CLIENT_ADDR);

  const checkpointer = new RemoteCheckpointer();
  const store = new RemoteStore();

  // LANGSERVE_GRAPHS is a JSON object mapping graph ids to spec strings.
  const specs = z
    .record(z.string())
    .parse(JSON.parse(process.env.LANGSERVE_GRAPHS ?? "{}"));

  // Best-effort warm start: pre-populate the schema cache from disk unless
  // explicitly disabled via --skip-schema-cache.
  if (!process.argv.includes("--skip-schema-cache")) {
    try {
      GRAPH_SCHEMA = JSON.parse(
        await fs.readFile(path.resolve(__dirname, "client.schemas.json"), {
          encoding: "utf-8",
        })
      );
    } catch {
      // pass
    }
  }

  // Resolve and register all graphs concurrently.
  await Promise.all(
    Object.entries(specs).map(async ([graphId, rawSpec]) => {
      logger.info(`Resolving graph ${graphId}`);
      const { resolved, ...spec } = await resolveGraph(rawSpec);

      // TODO: make sure the types do not need to be upfront
      // @ts-expect-error Overriding checkpointer with different value type
      resolved.checkpointer = checkpointer;
      resolved.store = store;

      // registering the graph runtime
      GRAPH_RESOLVED[graphId] = resolved;
      GRAPH_SPEC[graphId] = spec;
    })
  );

  // Dispatch loop. Handlers are intentionally not awaited so multiple
  // requests can be serviced concurrently; each handler reports its own
  // errors back to the client.
  for await (const packet of getRouterPackets()) {
    switch (packet.input.method) {
      case "streamEvents":
        handleStream(packet, StreamEventsPayload, streamEventsRequest);
        break;
      case "getGraph":
        handleInvoke(packet, GetGraphPayload, getGraphRequest);
        break;
      case "getSubgraphs":
        handleInvoke(packet, GetSubgraphsPayload, getSubgraphsRequest);
        break;
      case "getState":
        handleInvoke(packet, GetStatePayload, getStateRequest);
        break;
      case "updateState":
        handleInvoke(packet, UpdateStatePayload, updateStateRequest);
        break;
      case "getSchema":
        handleInvoke(packet, GetSchemaPayload, getSchemaRequest);
        break;
      case "getStateHistory":
        handleStream(packet, GetStateHistoryPayload, getStateHistoryRequest);
        break;
      case "ok":
        // Health-check: replies "ok" without touching any graph.
        handleInvoke(packet, z.any(), () => Promise.resolve("ok"));
        break;
      default:
        // Unknown method: log locally and report the error to the client.
        logger.error(`Unknown method: ${packet.input.method}`);
        handleInvoke(packet, z.any(), () => {
          throw new Error(`Unknown method: ${packet.input.method}`);
        });
        break;
    }
  }
}
850
+
851
// Fail fast: log any uncaught exception, then exit non-zero. The monitor
// hook observes the exception without suppressing Node's default handling.
process.on("uncaughtExceptionMonitor", (error) => {
  logger.error(error);
  process.exit(1);
});

main();