langgraph-api 0.1.15__py3-none-any.whl → 0.1.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of langgraph-api might be problematic.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.1.15"
+ __version__ = "0.1.17"
langgraph_api/config.py CHANGED
@@ -18,6 +18,11 @@ class CorsConfig(TypedDict, total=False):
      max_age: int


+ class ConfigurableHeaders(TypedDict):
+     includes: list[str] | None
+     excludes: list[str] | None
+
+
  class HttpConfig(TypedDict, total=False):
      app: str
      """Import path for a custom Starlette/FastAPI app to mount"""
@@ -39,6 +44,7 @@ class HttpConfig(TypedDict, total=False):
      """Disable /mcp routes"""
      mount_prefix: str
      """Prefix for mounted routes. E.g., "/my-deployment/api"."""
+     configurable_headers: ConfigurableHeaders | None


  class ThreadTTLConfig(TypedDict, total=False):
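The new ConfigurableHeaders block presumably controls which incoming request headers are passed through to a run's configurable values. A minimal sketch of an HttpConfig value using it is below; the header names and values are invented for illustration, only `configurable_headers`, `includes`, and `excludes` come from the diff above.

    # Hedged sketch only: `mount_prefix` and `configurable_headers` are real
    # HttpConfig keys per the diff, but the concrete values here are made up.
    http_config = {
        "mount_prefix": "/my-deployment/api",
        "configurable_headers": {
            "includes": ["x-user-id", "x-org-id"],
            "excludes": None,
        },
    }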
langgraph_api/graph.py CHANGED
@@ -487,7 +487,7 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
      return graph


- @functools.lru_cache
+ @functools.lru_cache(maxsize=1)
  def _get_init_embeddings() -> Callable[[str, ...], "Embeddings"] | None:
      try:
          from langchain.embeddings import init_embeddings
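Bare @functools.lru_cache (used without a call) defaults to maxsize=128; since _get_init_embeddings takes no arguments it can only ever hold one entry, so maxsize=1 makes the single-slot cache explicit. A small illustrative sketch with invented names:

    import functools

    @functools.lru_cache(maxsize=1)
    def load_optional_dependency() -> bool:
        # The body runs once; later calls return the cached result.
        print("importing...")
        return True

    load_optional_dependency()  # prints "importing..."
    load_optional_dependency()  # cache hit, nothing printed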
langgraph_api/js/base.py CHANGED
@@ -27,3 +27,6 @@ class BaseRemotePregel(Runnable):

      # Config passed from get_graph()
      config: Config
+
+     async def get_nodes_executed(self) -> int:
+         return 0
@@ -1,26 +1,49 @@
  /// <reference types="./global.d.ts" />
+ import "./src/preload.mjs";

  import { z } from "zod";
  import * as fs from "node:fs/promises";
  import * as path from "node:path";
  import {
- GraphSchema,
+ type GraphSchema,
  resolveGraph,
  runGraphSchemaWorker,
  } from "./src/graph.mts";
  import { build } from "@langchain/langgraph-ui";
+ import { checkLangGraphSemver } from "@langchain/langgraph-api/semver";
  import { filterValidExportPath } from "./src/utils/files.mts";

  const __dirname = new URL(".", import.meta.url).pathname;

  async function main() {
  const specs = Object.entries(
- z.record(z.string()).parse(JSON.parse(process.env.LANGSERVE_GRAPHS))
+ z.record(z.string()).parse(JSON.parse(process.env.LANGSERVE_GRAPHS)),
  ).filter(([_, spec]) => filterValidExportPath(spec));

  const GRAPH_SCHEMAS: Record<string, Record<string, GraphSchema> | false> = {};
- let failed = false;

+ const semver = await checkLangGraphSemver();
+ const invalidPackages = semver.filter(
+ (s) => !s.satisfies && s.version !== "0.0.0",
+ );
+ if (invalidPackages.length > 0) {
+ console.error(
+ `Some LangGraph.js dependencies required by the LangGraph API server are not up to date. \n` +
+ `Please make sure to upgrade them to the required version:\n` +
+ invalidPackages
+ .map(
+ (i) =>
+ `- ${i.name}@${i.version} is not up to date. Required: ${i.required}`,
+ )
+ .join("\n") +
+ "\n" +
+ "Visit https://langchain-ai.github.io/langgraphjs/cloud/deployment/setup_javascript/ for more information.",
+ );
+
+ process.exit(1);
+ }
+
+ let failed = false;
  try {
  await Promise.all(
  specs.map(async ([graphId, rawSpec]) => {
@@ -38,13 +61,13 @@ async function main() {
  console.error(`[${graphId}]: Error extracting schema: ${error}`);
  GRAPH_SCHEMAS[graphId] = false;
  }
- })
+ }),
  );

  await fs.writeFile(
  path.resolve(__dirname, "client.schemas.json"),
  JSON.stringify(GRAPH_SCHEMAS),
- { encoding: "utf-8" }
+ { encoding: "utf-8" },
  );
  } catch (error) {
  console.error(`Error resolving graphs: ${error}`);
@@ -76,7 +76,7 @@ const logger = createLogger({
  }

  return JSON.stringify({ timestamp, level, event, ...rest });
- })
+ }),
  ),
  transports: [
  new transports.Console({
@@ -91,6 +91,20 @@ let GRAPH_OPTIONS: {
  checkpointer?: BaseCheckpointSaver<string | number>;
  store?: BaseStore;
  } = {};
+ let nodesExecuted = 0;
+ function incrementNodes() {
+ nodesExecuted++;
+ }
+
+ const version = await (async () => {
+ try {
+ const packageJson = await import("@langchain/langgraph/package.json");
+ return packageJson["version"];
+ } catch (error) {
+ logger.error(error);
+ }
+ return undefined;
+ })();

  const GRAPH_RESOLVED: Record<
  string,
@@ -101,7 +115,7 @@ const GRAPH_SPEC: Record<string, GraphSpec> = {};
  async function getGraph(
  graphId: string,
  config: { configurable?: Record<string, unknown> },
- name: string | null | undefined
+ name: string | null | undefined,
  ) {
  if (!GRAPH_RESOLVED[graphId])
  throw new HTTPException(404, { message: `Graph "${graphId}" not found` });
@@ -145,7 +159,7 @@ async function getOrExtractSchema(graphId: string) {
  try {
  timeoutMs = Number.parseInt(
  process.env.LANGGRAPH_SCHEMA_RESOLVE_TIMEOUT_MS || "30000",
- 10
+ 10,
  );
  if (Number.isNaN(timeoutMs) || timeoutMs <= 0) timeoutMs = undefined;
  } catch {
@@ -178,7 +192,7 @@ const RunnableConfigSchema = z.object({
  });

  const getRunnableConfig = (
- userConfig: z.infer<typeof RunnableConfigSchema> | null | undefined
+ userConfig: z.infer<typeof RunnableConfigSchema> | null | undefined,
  ) => {
  if (!userConfig) return {};
  return {
@@ -214,13 +228,13 @@ function tryFetch(...args: Parameters<typeof fetch>) {
  factor: 2,
  minTimeout: 1000,
  onFailedAttempt: (error) => void logger.error(error),
- }
+ },
  );
  }

  async function sendRecv<T = any>(
  method: `${"checkpointer" | "store"}_${string}`,
- data: unknown
+ data: unknown,
  ): Promise<T> {
  const res = await tryFetch(`http://localhost:${REMOTE_PORT}/${method}`, {
  method: "POST",
@@ -240,7 +254,7 @@ const HEARTBEAT_MS = 5_000;
  const handleInvoke = <T extends z.ZodType<any>>(
  name: string,
  _schema: T,
- handler: (rawPayload: z.infer<T>) => Promise<any>
+ handler: (rawPayload: z.infer<T>) => Promise<any>,
  ) => {
  return async (c: Context<any, any, { in: z.infer<T>; out: any }>) => {
  const graphId = c.req.param("graphId");
@@ -262,7 +276,7 @@ const handleInvoke = <T extends z.ZodType<any>>(
  let interval = setInterval(() => enqueueWrite(" "), HEARTBEAT_MS);

  const response = JSON.stringify(
- await handler({ graph_id: graphId, ...body })
+ await handler({ graph_id: graphId, ...body }),
  );

  clearInterval(interval);
@@ -274,7 +288,7 @@ const handleInvoke = <T extends z.ZodType<any>>(
  const handleStream = <T extends z.ZodType<any>>(
  name: string,
  _schema: T,
- handler: (rawPayload: z.infer<T>) => AsyncGenerator<any, void, unknown>
+ handler: (rawPayload: z.infer<T>) => AsyncGenerator<any, void, unknown>,
  ) => {
  return (c: Context<any, any, { in: z.infer<T>; out: any }>) => {
  const graphId = c.req.param("graphId");
@@ -333,7 +347,7 @@ class RemoteCheckpointer extends BaseCheckpointSaver<number | string> {
  limit?: number;
  before?: RunnableConfig;
  filter?: Record<string, any>;
- }
+ },
  ): AsyncGenerator<CheckpointTuple> {
  const result = await sendRecv("checkpointer_list", { config, ...options });

@@ -352,7 +366,7 @@ class RemoteCheckpointer extends BaseCheckpointSaver<number | string> {
  config: RunnableConfig,
  checkpoint: Checkpoint,
  metadata: CheckpointMetadata,
- newVersions: ChannelVersions
+ newVersions: ChannelVersions,
  ): Promise<RunnableConfig> {
  return await sendRecv<RunnableConfig>("checkpointer_put", {
  config,
@@ -365,14 +379,14 @@ class RemoteCheckpointer extends BaseCheckpointSaver<number | string> {
  async putWrites(
  config: RunnableConfig,
  writes: [string, unknown][],
- taskId: string
+ taskId: string,
  ): Promise<void> {
  await sendRecv("checkpointer_put_writes", { config, writes, taskId });
  }

  getNextVersion(
  current: number | string | undefined,
- _channel: ChannelProtocol
+ _channel: ChannelProtocol,
  ): string {
  let currentVersion = 0;

@@ -402,7 +416,7 @@ function camelToSnake(operation: Operation) {
  Object.entries(obj).map(([key, value]) => {
  const snakeKey = key.replace(
  /[A-Z]/g,
- (letter) => `_${letter.toLowerCase()}`
+ (letter) => `_${letter.toLowerCase()}`,
  );
  if (
  typeof value === "object" &&
@@ -412,7 +426,7 @@ function camelToSnake(operation: Operation) {
  return [snakeKey, snakeCaseKeys(value)];
  }
  return [snakeKey, value];
- })
+ }),
  );
  };

@@ -459,7 +473,7 @@ function pyItemToJs(item?: PyItem): Item | undefined {

  export class RemoteStore extends BaseStore {
  async batch<Op extends Operation[]>(
- operations: Op
+ operations: Op,
  ): Promise<OperationResults<Op>> {
  const results = await sendRecv<PyResult[]>("store_batch", {
  operations: operations.map(camelToSnake),
@@ -493,7 +507,7 @@ export class RemoteStore extends BaseStore {
  filter?: Record<string, any>;
  limit?: number;
  offset?: number;
- }
+ },
  ): Promise<Item[]> {
  return await sendRecv<Item[]>("store_search", {
  namespace_prefix: namespacePrefix,
@@ -504,7 +518,7 @@ export class RemoteStore extends BaseStore {
  async put(
  namespace: string[],
  key: string,
- value: Record<string, any>
+ value: Record<string, any>,
  ): Promise<void> {
  await sendRecv("store_put", { namespace, key, value });
  }
@@ -522,7 +536,7 @@ export class RemoteStore extends BaseStore {
  }): Promise<string[][]> {
  const data = await sendRecv<{ namespaces: string[][] }>(
  "store_list_namespaces",
- { max_depth: options?.maxDepth, ...options }
+ { max_depth: options?.maxDepth, ...options },
  );
  return data.namespaces;
  }
@@ -569,7 +583,7 @@ const StreamEventsPayload = z.object({
  });

  function reviveCommand(
- command: z.infer<typeof StreamEventsPayload>["command"]
+ command: z.infer<typeof StreamEventsPayload>["command"],
  ): Command | undefined {
  if (command == null) return undefined;
  let { goto, update, resume, graph } = command;
@@ -589,12 +603,17 @@ function reviveCommand(
  }

  async function* streamEventsRequest(
- rawPayload: z.infer<typeof StreamEventsPayload>
+ rawPayload: z.infer<typeof StreamEventsPayload>,
  ) {
  const { graph_id: graphId, ...payload } = rawPayload;
  const config = getRunnableConfig(payload.config);
  const graph = await getGraph(graphId, config, payload.graph_name);
  const input = reviveCommand(payload.command) ?? payload.input;
+ // TODO Check if it's a remote graph and don't set in that case
+ config.configurable = {
+ ...config.configurable,
+ ["__pregel_node_finished"]: incrementNodes,
+ };

  const userStreamMode =
  payload.stream_mode == null
@@ -635,6 +654,11 @@ async function* streamEventsRequest(

  const streamMode = [...graphStreamMode];

+ if (version != null) {
+ config.metadata ??= {};
+ config.metadata.langgraph_version = version;
+ }
+
  for await (const data of graph.streamEvents(input, {
  ...config,
  version: "v2",
@@ -731,7 +755,7 @@ async function getGraphRequest(rawPayload: z.infer<typeof GetGraphPayload>) {
  const graph = await getGraph(
  graphId,
  getRunnableConfig(payload.graph_config),
- payload.graph_name
+ payload.graph_name,
  );

  const drawable = await graph.getGraphAsync({
@@ -750,7 +774,7 @@ const GetSubgraphsPayload = z.object({
  });

  async function getSubgraphsRequest(
- rawPayload: z.infer<typeof GetSubgraphsPayload>
+ rawPayload: z.infer<typeof GetSubgraphsPayload>,
  ) {
  const { graph_id: graphId, ...payload } = rawPayload;
  const graphConfig = getRunnableConfig(payload.graph_config);
@@ -764,7 +788,7 @@ async function getSubgraphsRequest(

  for await (const [name] of graph.getSubgraphsAsync(
  payload.namespace ?? undefined,
- payload.recurse ?? undefined
+ payload.recurse ?? undefined,
  )) {
  const schema =
  graphSchema[`${rootGraphId}|${name}`] || graphSchema[rootGraphId];
@@ -806,7 +830,7 @@ const UpdateStatePayload = z.object({
  });

  async function updateStateRequest(
- rawPayload: z.infer<typeof UpdateStatePayload>
+ rawPayload: z.infer<typeof UpdateStatePayload>,
  ) {
  const { graph_id: graphId, ...payload } = rawPayload;
  const graphConfig = getRunnableConfig(payload.graph_config);
@@ -815,7 +839,7 @@ async function updateStateRequest(
  const config = await graph.updateState(
  getRunnableConfig(payload.config),
  payload.values,
- payload.as_node ?? undefined
+ payload.as_node ?? undefined,
  );

  return config;
@@ -850,7 +874,7 @@ const GetStateHistoryPayload = z.object({
  });

  async function* getStateHistoryRequest(
- rawPayload: z.infer<typeof GetStateHistoryPayload>
+ rawPayload: z.infer<typeof GetStateHistoryPayload>,
  ) {
  const { graph_id: graphId, ...payload } = rawPayload;
  const config = getRunnableConfig(payload.graph_config);
@@ -862,7 +886,7 @@ async function* getStateHistoryRequest(
  limit: payload.limit ?? undefined,
  before: payload.before ? getRunnableConfig(payload.before) : undefined,
  filter: payload.filter ?? undefined,
- }
+ },
  )) {
  yield item;
  }
@@ -879,7 +903,9 @@ async function main() {
  };

  const specs = Object.entries(
- z.record(z.string()).parse(JSON.parse(process.env.LANGSERVE_GRAPHS ?? "{}"))
+ z
+ .record(z.string())
+ .parse(JSON.parse(process.env.LANGSERVE_GRAPHS ?? "{}")),
  ).filter(([_, spec]) => filterValidExportPath(spec));

  if (!process.argv.includes("--skip-schema-cache")) {
@@ -887,7 +913,7 @@ async function main() {
  GRAPH_SCHEMA = JSON.parse(
  await fs.readFile(path.resolve(__dirname, "client.schemas.json"), {
  encoding: "utf-8",
- })
+ }),
  );
  } catch {
  // pass
@@ -901,43 +927,43 @@ async function main() {

  GRAPH_RESOLVED[graphId] = resolved;
  GRAPH_SPEC[graphId] = spec;
- })
+ }),
  );

  app.post(
  "/:graphId/streamEvents",
  zValidator("json", StreamEventsPayload),
- handleStream("streamEvents", StreamEventsPayload, streamEventsRequest)
+ handleStream("streamEvents", StreamEventsPayload, streamEventsRequest),
  );

  app.post(
  "/:graphId/getGraph",
  zValidator("json", GetGraphPayload),
- handleInvoke("getGraph", GetGraphPayload, getGraphRequest)
+ handleInvoke("getGraph", GetGraphPayload, getGraphRequest),
  );

  app.post(
  "/:graphId/getSubgraphs",
  zValidator("json", GetSubgraphsPayload),
- handleInvoke("getSubgraphs", GetSubgraphsPayload, getSubgraphsRequest)
+ handleInvoke("getSubgraphs", GetSubgraphsPayload, getSubgraphsRequest),
  );

  app.post(
  "/:graphId/getState",
  zValidator("json", GetStatePayload),
- handleInvoke("getState", GetStatePayload, getStateRequest)
+ handleInvoke("getState", GetStatePayload, getStateRequest),
  );

  app.post(
  "/:graphId/updateState",
  zValidator("json", UpdateStatePayload),
- handleInvoke("updateState", UpdateStatePayload, updateStateRequest)
+ handleInvoke("updateState", UpdateStatePayload, updateStateRequest),
  );

  app.post(
  "/:graphId/getSchema",
  zValidator("json", GetSchemaPayload),
- handleInvoke("getSchema", GetSchemaPayload, getSchemaRequest)
+ handleInvoke("getSchema", GetSchemaPayload, getSchemaRequest),
  );

  app.post(
@@ -946,8 +972,26 @@ async function main() {
  handleStream(
  "getStateHistory",
  GetStateHistoryPayload,
- getStateHistoryRequest
- )
+ getStateHistoryRequest,
+ ),
+ );
+ app.post(
+ "/:graphId/getNodesExecuted",
+ zValidator("json", GetNodesExecutedPayload),
+ handleInvoke(
+ "getNodesExecuted",
+ GetNodesExecutedPayload,
+ getNodesExecutedRequest,
+ ),
+ );
+ app.post(
+ "/:graphId/getNodesExecuted",
+ zValidator("json", GetNodesExecutedPayload),
+ handleInvoke(
+ "getNodesExecuted",
+ GetNodesExecutedPayload,
+ getNodesExecutedRequest,
+ ),
  );

  // Load LANGGRAPH_AUTH
@@ -972,7 +1016,7 @@ async function main() {
  headers.delete("x-langgraph-auth-method");

  const context = await authenticate(
- new Request(authUrl, { headers, method })
+ new Request(authUrl, { headers, method }),
  );

  return c.json(context);
@@ -984,7 +1028,7 @@ async function main() {
  status: error.res?.status ?? error.status,
  headers: error.res?.headers,
  },
- error.status as StatusCode
+ error.status as StatusCode,
  );
  }

@@ -1018,7 +1062,7 @@ async function main() {
  });

  serve({ fetch: app.fetch, hostname: "localhost", port: GRAPH_PORT }, (c) =>
- logger.info(`Listening to ${c.address}:${c.port}`)
+ logger.info(`Listening to ${c.address}:${c.port}`),
  );
  }

@@ -1027,5 +1071,17 @@ process.on("uncaughtExceptionMonitor", (error) => {
  gracefulExit();
  });

+ const GetNodesExecutedPayload = z.object({
+ graph_id: z.string(),
+ });
+
+ async function getNodesExecutedRequest(
+ _payload: z.infer<typeof GetNodesExecutedPayload>,
+ ) {
+ const value = nodesExecuted;
+ nodesExecuted = 0;
+ return { nodesExecuted: value };
+ }
+
  asyncExitHook(() => awaitAllCallbacks(), { wait: 3_000 });
  main();
@@ -24,8 +24,8 @@
  "undici": "^6.21.1",
  "uuid": "^10.0.0",
  "winston": "^3.17.0",
- "@langchain/langgraph-api": "~0.0.21",
- "@langchain/langgraph-ui": "~0.0.21",
+ "@langchain/langgraph-api": "~0.0.29",
+ "@langchain/langgraph-ui": "~0.0.29",
  "zod": "^3.23.8"
  },
  "resolutions": {
@@ -315,26 +315,30 @@ class RemotePregel(BaseRemotePregel):
          *,
          xray: int | bool = False,
      ) -> dict[str, Any]:
-         raise Exception("Not implemented")
+         raise NotImplementedError()

      def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
-         raise Exception("Not implemented")
+         raise NotImplementedError()

      def get_output_schema(
          self, config: RunnableConfig | None = None
      ) -> type[BaseModel]:
-         raise Exception("Not implemented")
+         raise NotImplementedError()

      def config_schema(self) -> type[BaseModel]:
-         raise Exception("Not implemented")
+         raise NotImplementedError()

      async def invoke(self, input: Any, config: RunnableConfig | None = None):
-         raise Exception("Not implemented")
+         raise NotImplementedError()

      def copy(self, update: dict[str, Any] | None = None) -> Self:
          attrs = {**self.__dict__, **(update or {})}
          return self.__class__(**attrs)

+     async def fetch_nodes_executed(self):
+         result = await _client_invoke("getNodesExecuted", {"graph_id": self.graph_id})
+         return result["nodesExecuted"]
+


  async def run_js_process(paths_str: str, watch: bool = False):
@@ -346,9 +350,22 @@ async def run_js_process(paths_str: str, watch: bool = False):
      attempt = 0
      while not asyncio.current_task().cancelled():
          client_file = os.path.join(os.path.dirname(__file__), "client.mts")
-         args = ("tsx", client_file)
-         if watch:
-             args = ("tsx", "watch", client_file, "--skip-schema-cache")
+         client_preload_file = os.path.join(
+             os.path.dirname(__file__), "src", "preload.mjs"
+         )
+
+         args = (
+             (
+                 "tsx",
+                 "watch",
+                 "--import",
+                 client_preload_file,
+                 client_file,
+                 "--skip-schema-cache",
+             )
+             if watch
+             else ("tsx", "--import", client_preload_file, client_file)
+         )
          try:
              process = await asyncio.create_subprocess_exec(
                  *args,
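The rewritten branch now passes src/preload.mjs via the --import flag in both watch and non-watch modes, which lines up with the removal of the in-module register("./hooks.mjs", ...) call in the last hunk below. A standalone sketch of the same tuple construction, with illustrative paths (the package resolves them relative to __file__):

    import os

    # Illustrative paths only.
    base = "/opt/app/langgraph_api/js"
    client_file = os.path.join(base, "client.mts")
    client_preload_file = os.path.join(base, "src", "preload.mjs")

    for watch in (False, True):
        args = (
            ("tsx", "watch", "--import", client_preload_file, client_file, "--skip-schema-cache")
            if watch
            else ("tsx", "--import", client_preload_file, client_file)
        )
        print(args)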
@@ -843,12 +860,14 @@ async def handle_js_auth_event(
                  "resource": ctx.resource,
                  "action": ctx.action,
                  "value": value,
-                 "context": {
-                     "user": cast(DotDict, ctx.user).dict(),
-                     "scopes": ctx.permissions,
-                 }
-                 if ctx
-                 else None,
+                 "context": (
+                     {
+                         "user": cast(DotDict, ctx.user).dict(),
+                         "scopes": ctx.permissions,
+                     }
+                     if ctx
+                     else None
+                 ),
              }
          ),
      )
@@ -5,9 +5,6 @@ import type { CompiledGraph, Graph } from "@langchain/langgraph";
  import * as path from "node:path";
  import type { JSONSchema7 } from "json-schema";

- // enforce API @langchain/langgraph precedence
- register("./hooks.mjs", import.meta.url);
-
  export interface GraphSchema {
  state: JSONSchema7 | undefined;
  input: JSONSchema7 | undefined;