langgraph-api 0.0.37__py3-none-any.whl → 0.0.39__py3-none-any.whl

This diff shows the changes between publicly released versions of a package available in one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic. See the release details below for more information.

@@ -140,9 +140,9 @@ if "inmem" in MIGRATIONS_PATH:
140
140
  async def truncate(request: Request):
141
141
  from langgraph_storage.checkpoint import Checkpointer
142
142
 
143
- Checkpointer().clear()
143
+ await asyncio.to_thread(Checkpointer().clear)
144
144
  async with connect() as conn:
145
- conn.clear()
145
+ await asyncio.to_thread(conn.clear)
146
146
  return JSONResponse({"ok": True})
147
147
 
148
148
  routes.insert(0, Route("/internal/truncate", truncate, methods=["POST"]))
langgraph_api/api/mcp.py CHANGED
@@ -351,7 +351,9 @@ async def handle_tools_list(
351
351
 
352
352
  # Get assistants from the API
353
353
  # For now set a large limit to get all assistants
354
- assistants = await client.assistants.search(offset=cursor, limit=DEFAULT_PAGE_SIZE)
354
+ assistants = await client.assistants.search(
355
+ offset=cursor, limit=DEFAULT_PAGE_SIZE, headers=request.headers
356
+ )
355
357
 
356
358
  if len(assistants) == DEFAULT_PAGE_SIZE:
357
359
  next_cursor = cursor + DEFAULT_PAGE_SIZE
@@ -370,7 +372,7 @@ async def handle_tools_list(
370
372
  else:
371
373
  seen_names.add(name)
372
374
 
373
- schemas = await client.assistants.get_schemas(id_)
375
+ schemas = await client.assistants.get_schemas(id_, headers=request.headers)
374
376
  tools.append(
375
377
  {
376
378
  "name": name,
@@ -408,7 +410,9 @@ async def handle_tools_call(
408
410
  }
409
411
 
410
412
  arguments = params.get("arguments", {})
411
- assistants = await client.assistants.search(limit=MAX_ASSISTANTS)
413
+ assistants = await client.assistants.search(
414
+ limit=MAX_ASSISTANTS, headers=request.headers
415
+ )
412
416
  matching_assistant = [
413
417
  assistant for assistant in assistants if assistant["name"] == tool_name
414
418
  ]
@@ -437,7 +441,11 @@ async def handle_tools_call(
437
441
  tool_name = matching_assistant[0]["assistant_id"]
438
442
 
439
443
  value = await client.runs.wait(
440
- thread_id=None, assistant_id=tool_name, input=arguments, raise_error=False
444
+ thread_id=None,
445
+ assistant_id=tool_name,
446
+ input=arguments,
447
+ raise_error=False,
448
+ headers=request.headers,
441
449
  )
442
450
 
443
451
  if "__error__" in value:
@@ -379,13 +379,16 @@ def _depends() -> Any:
379
379
  return None
380
380
 
381
381
 
382
+ _EXCLUDED = ("values", "keys", "items", "dict")
383
+
384
+
382
385
  class DotDict:
383
386
  def __init__(self, dictionary: dict[str, Any]):
384
387
  self._dict = dictionary
385
388
  for key, value in dictionary.items():
386
389
  if isinstance(value, dict):
387
390
  setattr(self, key, DotDict(value))
388
- else:
391
+ elif key not in _EXCLUDED:
389
392
  setattr(self, key, value)
390
393
 
391
394
  def __getattr__(self, name):
@@ -393,6 +396,9 @@ class DotDict:
393
396
  raise AttributeError(f"'DotDict' object has no attribute '{name}'")
394
397
  return self._dict[name]
395
398
 
399
+ def __contains__(self, key: str) -> bool:
400
+ return key in self._dict
401
+
396
402
  def __getitem__(self, key):
397
403
  return self._dict[key]
398
404
 
@@ -409,6 +415,21 @@ class DotDict:
409
415
  def dict(self):
410
416
  return self._dict
411
417
 
418
+ def items(self):
419
+ return self._dict.items()
420
+
421
+ def values(self):
422
+ return self._dict.values()
423
+
424
+ def keys(self):
425
+ return self._dict.keys()
426
+
427
+ def __iter__(self):
428
+ return iter(self._dict)
429
+
430
+ def __len__(self):
431
+ return len(self._dict)
432
+
412
433
 
413
434
  class ProxyUser(BaseUser):
414
435
  """A proxy that wraps a user object to ensure it has all BaseUser properties.
@@ -462,6 +483,9 @@ class ProxyUser(BaseUser):
462
483
  **d,
463
484
  }
464
485
 
486
+ def __contains__(self, key: str) -> bool:
487
+ return key in self._user
488
+
465
489
  def __getitem__(self, key):
466
490
  return self._user[key]
467
491
 
langgraph_api/cli.py CHANGED
@@ -131,6 +131,7 @@ def run_server(
131
131
  http: typing.Optional["HttpConfig"] = None,
132
132
  studio_url: str | None = None,
133
133
  disable_persistence: bool = False,
134
+ allow_blocking: bool = False,
134
135
  **kwargs: typing.Any,
135
136
  ):
136
137
  """Run the LangGraph API server."""
@@ -191,6 +192,8 @@ def run_server(
191
192
  LANGGRAPH_HTTP=json.dumps(http) if http else None,
192
193
  LANGGRAPH_API_URL=local_url,
193
194
  LANGGRAPH_DISABLE_FILE_PERSISTENCE=str(disable_persistence).lower(),
195
+ # If true, we will not raise on blocking IO calls (via blockbuster)
196
+ LANGGRAPH_ALLOW_BLOCKING=str(allow_blocking).lower(),
194
197
  # See https://developer.chrome.com/blog/private-network-access-update-2024-03
195
198
  ALLOW_PRIVATE_NETWORK="true",
196
199
  )
langgraph_api/config.py CHANGED
@@ -304,3 +304,5 @@ USES_INDEXING = (
304
304
  and STORE_CONFIG.get("index").get("embed")
305
305
  )
306
306
  USES_CUSTOM_APP = HTTP_CONFIG and HTTP_CONFIG.get("app")
307
+
308
+ API_VARIANT = env("LANGSMITH_LANGGRAPH_API_VARIANT", cast=str, default="")
langgraph_api/graph.py CHANGED
@@ -6,6 +6,7 @@ import inspect
6
6
  import json
7
7
  import os
8
8
  import sys
9
+ import warnings
9
10
  from collections.abc import AsyncIterator, Callable
10
11
  from contextlib import asynccontextmanager
11
12
  from itertools import filterfalse
@@ -22,6 +23,7 @@ from langgraph.store.base import BaseStore
22
23
  from starlette.exceptions import HTTPException
23
24
 
24
25
  from langgraph_api import asyncio as lg_asyncio
26
+ from langgraph_api import config
25
27
  from langgraph_api.js.base import BaseRemotePregel
26
28
  from langgraph_api.schema import Config
27
29
 
@@ -111,18 +113,14 @@ async def get_graph(
111
113
  status_code=424,
112
114
  detail=f"Graph '{graph_id}' is not valid. Review graph registration.",
113
115
  )
114
- if isinstance(graph_obj, BaseRemotePregel):
115
- graph_obj.checkpointer = checkpointer
116
- graph_obj.name = graph_id
117
- yield graph_obj
118
- return
119
-
120
116
  update = {
121
117
  "checkpointer": checkpointer,
122
118
  "store": store,
123
119
  }
124
120
  if graph_obj.name == "LangGraph":
125
121
  update["name"] = graph_id
122
+ if isinstance(graph_obj, BaseRemotePregel):
123
+ update["config"] = config
126
124
  yield graph_obj.copy(update=update)
127
125
 
128
126
 
@@ -237,7 +235,7 @@ async def collect_graphs_from_env(register: bool = False) -> None:
237
235
  py_specs = list(filterfalse(is_js_spec, specs))
238
236
 
239
237
  if js_specs:
240
- if os.environ.get("LANGSMITH_LANGGRAPH_API_VARIANT") == "local_dev":
238
+ if config.API_VARIANT == "local_dev":
241
239
  raise NotImplementedError(
242
240
  "LangGraph.JS graphs are not yet supported in local development mode. "
243
241
  "To run your JS graphs, either use the LangGraph Studio application "
@@ -270,7 +268,7 @@ async def collect_graphs_from_env(register: bool = False) -> None:
270
268
  await wait_until_js_ready()
271
269
 
272
270
  for spec in js_specs:
273
- graph = RemotePregel.load(graph_id=spec.id)
271
+ graph = RemotePregel(graph_id=spec.id)
274
272
  if register:
275
273
  await register_graph(spec.id, graph, spec.config)
276
274
 
@@ -316,7 +314,7 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
316
314
  modspec.loader.exec_module(module)
317
315
  except ImportError as e:
318
316
  e.add_note(f"Could not import python module for graph:\n{spec}")
319
- if os.environ.get("LANGSMITH_LANGGRAPH_API_VARIANT") == "local_dev":
317
+ if config.API_VARIANT == "local_dev":
320
318
  e.add_note(
321
319
  "This error likely means you haven't installed your project and its dependencies yet. Before running the server, install your project:\n\n"
322
320
  "If you are using requirements.txt:\n"
@@ -374,7 +372,29 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
374
372
  elif isinstance(graph, Graph):
375
373
  graph = graph.compile()
376
374
  elif isinstance(graph, Pregel):
377
- pass
375
+ # We don't want to fail real deployments, but this will help folks catch unnecessary custom components
376
+ # before they deploy
377
+ if config.API_VARIANT == "local_dev":
378
+ has_checkpointer = graph.checkpointer is not None
379
+ has_store = graph.store is not None
380
+ if has_checkpointer or has_store:
381
+ components = []
382
+ if has_checkpointer:
383
+ components.append("checkpointer")
384
+ if has_store:
385
+ components.append("store")
386
+ component_list = " and ".join(components)
387
+
388
+ raise ValueError(
389
+ f"Heads up! Your graph '{spec.variable}' from '{spec.path}' includes a custom {component_list}. "
390
+ f"With LangGraph API, persistence is handled automatically by the platform, "
391
+ f"so providing a custom {component_list} here isn't necessary and will be ignored when deployed.\n\n"
392
+ f"To simplify your setup and use the built-in persistence, please remove the custom {component_list} "
393
+ f"from your graph definition. If you are looking to customize which postgres database to connect to,"
394
+ " please set the `POSTGRES_URI_CUSTOM` environment variable."
395
+ " See https://langchain-ai.github.io/langgraph/cloud/reference/env_var/#postgres_uri_custom for more details."
396
+ )
397
+
378
398
  else:
379
399
  raise ValueError(
380
400
  f"Variable '{spec.variable}' in module '{spec.path}' is not a Graph or Graph factory function"
@@ -466,7 +486,7 @@ def resolve_embeddings(index_config: dict) -> "Embeddings":
466
486
 
467
487
  except ImportError as e:
468
488
  e.add_note(f"Could not import embeddings module:\n{module_name}\n\n")
469
- if os.environ.get("LANGSMITH_LANGGRAPH_API_VARIANT") == "local_dev":
489
+ if config.API_VARIANT == "local_dev":
470
490
  e.add_note(
471
491
  "If you're in development mode, make sure you've installed your project "
472
492
  "and its dependencies:\n"
@@ -488,4 +508,10 @@ def resolve_embeddings(index_config: dict) -> "Embeddings":
488
508
  " or specify 'embed' as a path to a "
489
509
  "variable in a Python file instead."
490
510
  )
491
- return init_embeddings(embed)
511
+ # Capture warnings
512
+ with warnings.catch_warnings():
513
+ warnings.filterwarnings(
514
+ "ignore",
515
+ message=("The function `init_embeddings` is in beta."),
516
+ )
517
+ return init_embeddings(embed)
langgraph_api/js/base.py CHANGED
@@ -1,9 +1,12 @@
1
1
  from langchain_core.runnables import Runnable
2
2
 
3
+ from langgraph_api.schema import Config
4
+
3
5
 
4
6
  class BaseRemotePregel(Runnable):
5
- # TODO: implement name overriding
6
7
  name: str = "LangGraph"
7
8
 
8
- # TODO: implement graph_id overriding
9
9
  graph_id: str
10
+
11
+ # Config passed from get_graph()
12
+ config: Config
@@ -54,14 +54,21 @@ async function main() {
54
54
  .record(z.string())
55
55
  .parse(JSON.parse(process.env.LANGGRAPH_UI || "{}"));
56
56
 
57
+ const uiConfig = z
58
+ .object({ shared: z.array(z.string()).optional() })
59
+ .parse(JSON.parse(process.env.LANGGRAPH_UI_CONFIG || "{}"));
60
+
57
61
  if (Object.keys(uiSpecs).length > 0) {
58
62
  try {
59
63
  const schemas: Record<string, { assets: string[]; name: string }> = {};
60
64
  await Promise.all(
61
- Object.entries(uiSpecs).map(async ([graphId, uiUserPath]) => {
65
+ Object.entries(uiSpecs).map(async ([graphId, userPath]) => {
62
66
  console.info(`[${graphId}]: Building UI`);
63
- const userPath = path.resolve(process.cwd(), uiUserPath);
64
- const files = await build(graphId, userPath);
67
+ const files = await build(graphId, {
68
+ userPath,
69
+ cwd: process.cwd(),
70
+ config: uiConfig,
71
+ });
65
72
  await Promise.all([
66
73
  ...files.map(async (item) => {
67
74
  const folder = path.resolve(__dirname, "ui", graphId);
@@ -42,6 +42,7 @@ import {
42
42
  resolveGraph,
43
43
  GraphSpec,
44
44
  filterValidGraphSpecs,
45
+ type CompiledGraphFactory,
45
46
  } from "./src/graph.mts";
46
47
  import { asyncExitHook, gracefulExit } from "exit-hook";
47
48
  import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
@@ -79,13 +80,44 @@ const logger = createLogger({
79
80
  });
80
81
 
81
82
  let GRAPH_SCHEMA: Record<string, Record<string, GraphSchema> | false> = {};
82
- const GRAPH_RESOLVED: Record<string, CompiledGraph<string>> = {};
83
+ let GRAPH_OPTIONS: {
84
+ checkpointer?: BaseCheckpointSaver<string | number>;
85
+ store?: BaseStore;
86
+ } = {};
87
+
88
+ const GRAPH_RESOLVED: Record<
89
+ string,
90
+ CompiledGraph<string> | CompiledGraphFactory<string>
91
+ > = {};
83
92
  const GRAPH_SPEC: Record<string, GraphSpec> = {};
84
93
 
85
- function getGraph(graphId: string) {
94
+ async function getGraph(
95
+ graphId: string,
96
+ config: { configurable?: Record<string, unknown> },
97
+ name: string | null | undefined
98
+ ) {
86
99
  if (!GRAPH_RESOLVED[graphId])
87
100
  throw new HTTPException(404, { message: `Graph "${graphId}" not found` });
88
- return GRAPH_RESOLVED[graphId];
101
+
102
+ const resolved =
103
+ typeof GRAPH_RESOLVED[graphId] === "function"
104
+ ? await GRAPH_RESOLVED[graphId](config)
105
+ : GRAPH_RESOLVED[graphId];
106
+
107
+ if (GRAPH_OPTIONS.checkpointer) {
108
+ // @ts-expect-error BaseCheckpointSaver<string | number> != BaseCheckpointSaver<string>
109
+ resolved.checkpointer = GRAPH_OPTIONS.checkpointer;
110
+ }
111
+
112
+ if (GRAPH_OPTIONS.store) {
113
+ resolved.store = GRAPH_OPTIONS.store;
114
+ }
115
+
116
+ if (resolved.name == null || resolved.name === "LangGraph") {
117
+ resolved.name = name ?? undefined;
118
+ }
119
+
120
+ return resolved;
89
121
  }
90
122
 
91
123
  async function getOrExtractSchema(graphId: string) {
@@ -504,6 +536,8 @@ const ExtraStreamModeSchema = z.union([
504
536
 
505
537
  const StreamEventsPayload = z.object({
506
538
  graph_id: z.string(),
539
+ graph_name: z.string().nullish(),
540
+ graph_config: RunnableConfigSchema.nullish(),
507
541
  input: z.unknown(),
508
542
  command: z.object({ resume: z.unknown() }).nullish(),
509
543
  stream_mode: z
@@ -519,7 +553,8 @@ async function* streamEventsRequest(
519
553
  rawPayload: z.infer<typeof StreamEventsPayload>
520
554
  ) {
521
555
  const { graph_id: graphId, ...payload } = rawPayload;
522
- const graph = getGraph(graphId);
556
+ const config = getRunnableConfig(payload.config);
557
+ const graph = await getGraph(graphId, config, payload.graph_name);
523
558
 
524
559
  const input = payload.command ? new Command(payload.command) : payload.input;
525
560
 
@@ -545,8 +580,6 @@ async function* streamEventsRequest(
545
580
  }
546
581
  }
547
582
 
548
- const config = getRunnableConfig(payload.config);
549
-
550
583
  const messages: Record<string, BaseMessageChunk> = {};
551
584
  const completedIds = new Set<string>();
552
585
 
@@ -648,13 +681,20 @@ async function* streamEventsRequest(
648
681
 
649
682
  const GetGraphPayload = z.object({
650
683
  graph_id: z.string(),
684
+ graph_name: z.string().nullish(),
685
+ graph_config: RunnableConfigSchema.nullish(),
651
686
  config: RunnableConfigSchema.nullish(),
652
687
  xray: z.union([z.number(), z.boolean()]).nullish(),
653
688
  });
654
689
 
655
690
  async function getGraphRequest(rawPayload: z.infer<typeof GetGraphPayload>) {
656
691
  const { graph_id: graphId, ...payload } = rawPayload;
657
- const graph = getGraph(graphId);
692
+
693
+ const graph = await getGraph(
694
+ graphId,
695
+ getRunnableConfig(payload.graph_config),
696
+ payload.graph_name
697
+ );
658
698
 
659
699
  const drawable = await graph.getGraphAsync({
660
700
  ...getRunnableConfig(payload.config),
@@ -665,6 +705,8 @@ async function getGraphRequest(rawPayload: z.infer<typeof GetGraphPayload>) {
665
705
 
666
706
  const GetSubgraphsPayload = z.object({
667
707
  graph_id: z.string(),
708
+ graph_config: RunnableConfigSchema.nullish(),
709
+ graph_name: z.string().nullish(),
668
710
  namespace: z.string().nullish(),
669
711
  recurse: z.boolean().nullish(),
670
712
  });
@@ -673,7 +715,8 @@ async function getSubgraphsRequest(
673
715
  rawPayload: z.infer<typeof GetSubgraphsPayload>
674
716
  ) {
675
717
  const { graph_id: graphId, ...payload } = rawPayload;
676
- const graph = getGraph(graphId);
718
+ const graphConfig = getRunnableConfig(payload.graph_config);
719
+ const graph = await getGraph(graphId, graphConfig, payload.graph_name);
677
720
  const result: Array<[name: string, Record<string, any>]> = [];
678
721
 
679
722
  const graphSchema = await getOrExtractSchema(graphId);
@@ -696,13 +739,16 @@ async function getSubgraphsRequest(
696
739
 
697
740
  const GetStatePayload = z.object({
698
741
  graph_id: z.string(),
742
+ graph_config: RunnableConfigSchema.nullish(),
743
+ graph_name: z.string().nullish(),
699
744
  config: RunnableConfigSchema,
700
745
  subgraphs: z.boolean().nullish(),
701
746
  });
702
747
 
703
748
  async function getStateRequest(rawPayload: z.infer<typeof GetStatePayload>) {
704
749
  const { graph_id: graphId, ...payload } = rawPayload;
705
- const graph = getGraph(graphId);
750
+ const graphConfig = getRunnableConfig(payload.graph_config);
751
+ const graph = await getGraph(graphId, graphConfig, payload.graph_name);
706
752
 
707
753
  const state = await graph.getState(getRunnableConfig(payload.config), {
708
754
  subgraphs: payload.subgraphs ?? undefined,
@@ -714,6 +760,8 @@ async function getStateRequest(rawPayload: z.infer<typeof GetStatePayload>) {
714
760
 
715
761
  const UpdateStatePayload = z.object({
716
762
  graph_id: z.string(),
763
+ graph_config: RunnableConfigSchema.nullish(),
764
+ graph_name: z.string().nullish(),
717
765
  config: RunnableConfigSchema,
718
766
  values: z.unknown(),
719
767
  as_node: z.string().nullish(),
@@ -723,7 +771,8 @@ async function updateStateRequest(
723
771
  rawPayload: z.infer<typeof UpdateStatePayload>
724
772
  ) {
725
773
  const { graph_id: graphId, ...payload } = rawPayload;
726
- const graph = getGraph(graphId);
774
+ const graphConfig = getRunnableConfig(payload.graph_config);
775
+ const graph = await getGraph(graphId, graphConfig, payload.graph_name);
727
776
 
728
777
  const config = await graph.updateState(
729
778
  getRunnableConfig(payload.config),
@@ -734,10 +783,16 @@ async function updateStateRequest(
734
783
  return config;
735
784
  }
736
785
 
737
- const GetSchemaPayload = z.object({ graph_id: z.string() });
786
+ const GetSchemaPayload = z.object({
787
+ graph_id: z.string(),
788
+ graph_config: RunnableConfigSchema.nullish(),
789
+ graph_name: z.string().nullish(),
790
+ });
738
791
 
739
792
  async function getSchemaRequest(payload: z.infer<typeof GetSchemaPayload>) {
740
793
  const { graph_id: graphId } = payload;
794
+ // TODO: add support for schema inference with dynamic graphs (now that Zod is supported)
795
+
741
796
  const schemas = await getOrExtractSchema(graphId);
742
797
  const rootGraphId = Object.keys(schemas).find((i) => !i.includes("|"));
743
798
  if (!rootGraphId) {
@@ -748,6 +803,8 @@ async function getSchemaRequest(payload: z.infer<typeof GetSchemaPayload>) {
748
803
 
749
804
  const GetStateHistoryPayload = z.object({
750
805
  graph_id: z.string(),
806
+ graph_config: RunnableConfigSchema.nullish(),
807
+ graph_name: z.string().nullish(),
751
808
  config: RunnableConfigSchema,
752
809
  limit: z.number().nullish(),
753
810
  before: RunnableConfigSchema.nullish(),
@@ -758,7 +815,8 @@ async function* getStateHistoryRequest(
758
815
  rawPayload: z.infer<typeof GetStateHistoryPayload>
759
816
  ) {
760
817
  const { graph_id: graphId, ...payload } = rawPayload;
761
- const graph = getGraph(graphId);
818
+ const config = getRunnableConfig(payload.graph_config);
819
+ const graph = await getGraph(graphId, config, payload.graph_name);
762
820
 
763
821
  for await (const item of graph.getStateHistory(
764
822
  getRunnableConfig(payload.config),
@@ -776,8 +834,11 @@ const __dirname = new URL(".", import.meta.url).pathname;
776
834
 
777
835
  async function main() {
778
836
  const app = new Hono();
779
- const checkpointer = new RemoteCheckpointer();
780
- const store = new RemoteStore();
837
+
838
+ GRAPH_OPTIONS = {
839
+ checkpointer: new RemoteCheckpointer(),
840
+ store: new RemoteStore(),
841
+ };
781
842
 
782
843
  const specs = filterValidGraphSpecs(
783
844
  z.record(z.string()).parse(JSON.parse(process.env.LANGSERVE_GRAPHS ?? "{}"))
@@ -800,12 +861,6 @@ async function main() {
800
861
  logger.info(`Resolving graph ${graphId}`);
801
862
  const { resolved, ...spec } = await resolveGraph(rawSpec);
802
863
 
803
- // TODO: make sure the types do not need to be upfront
804
- // @ts-expect-error Overriding checkpointer with different value type
805
- resolved.checkpointer = checkpointer;
806
- resolved.store = store;
807
-
808
- // registering the graph runtime
809
864
  GRAPH_RESOLVED[graphId] = resolved;
810
865
  GRAPH_SPEC[graphId] = spec;
811
866
  })
@@ -2,6 +2,7 @@ declare namespace NodeJS {
2
2
  interface ProcessEnv {
3
3
  LANGSERVE_GRAPHS: string;
4
4
  LANGGRAPH_UI?: string;
5
+ LANGGRAPH_UI_CONFIG?: string;
5
6
  PORT: string;
6
7
  }
7
8
  }
@@ -24,14 +24,14 @@
24
24
  "undici": "^6.21.1",
25
25
  "uuid": "^10.0.0",
26
26
  "winston": "^3.17.0",
27
- "@langchain/langgraph-api": "~0.0.14",
27
+ "@langchain/langgraph-api": "~0.0.19",
28
28
  "zod": "^3.23.8"
29
29
  },
30
30
  "resolutions": {
31
31
  "esbuild": "^0.25.0"
32
32
  },
33
33
  "devDependencies": {
34
- "@langchain/langgraph-sdk": "^0.0.31",
34
+ "@langchain/langgraph-sdk": "^0.0.60",
35
35
  "@types/react": "^19.0.8",
36
36
  "@types/react-dom": "^19.0.3",
37
37
  "@types/node": "^22.2.0",
@@ -5,7 +5,7 @@ import shutil
5
5
  import ssl
6
6
  from collections.abc import AsyncIterator
7
7
  from contextlib import AbstractContextManager
8
- from typing import Any, Literal
8
+ from typing import Any, Literal, Self
9
9
 
10
10
  import certifi
11
11
  import httpx
@@ -34,6 +34,7 @@ from langgraph_api.js.base import BaseRemotePregel
34
34
  from langgraph_api.js.errors import RemoteException
35
35
  from langgraph_api.js.sse import SSEDecoder, aiter_lines_raw
36
36
  from langgraph_api.route import ApiResponse
37
+ from langgraph_api.schema import Config
37
38
  from langgraph_api.serde import json_dumpb
38
39
  from langgraph_api.utils import AsyncConnectionProto
39
40
 
@@ -89,11 +90,16 @@ async def _client_invoke(method: str, data: dict[str, Any]):
89
90
 
90
91
 
91
92
  class RemotePregel(BaseRemotePregel):
92
- @staticmethod
93
- def load(graph_id: str):
94
- model = RemotePregel()
95
- model.graph_id = graph_id
96
- return model
93
+ def __init__(
94
+ self,
95
+ graph_id: str,
96
+ *,
97
+ config: Config | None = None,
98
+ **kwargs: Any,
99
+ ):
100
+ super().__init__()
101
+ self.graph_id = graph_id
102
+ self.config = config
97
103
 
98
104
  async def astream_events(
99
105
  self,
@@ -108,6 +114,8 @@ class RemotePregel(BaseRemotePregel):
108
114
 
109
115
  data = {
110
116
  "graph_id": self.graph_id,
117
+ "graph_config": self.config,
118
+ "graph_name": self.name,
111
119
  "command" if isinstance(input, Command) else "input": input,
112
120
  "config": config,
113
121
  **kwargs,
@@ -129,7 +137,14 @@ class RemotePregel(BaseRemotePregel):
129
137
  xray: int | bool = False,
130
138
  ) -> DrawableGraph:
131
139
  response = await _client_invoke(
132
- "getGraph", {"graph_id": self.graph_id, "config": config, "xray": xray}
140
+ "getGraph",
141
+ {
142
+ "graph_id": self.graph_id,
143
+ "graph_config": self.config,
144
+ "graph_name": self.name,
145
+ "config": config,
146
+ "xray": xray,
147
+ },
133
148
  )
134
149
 
135
150
  nodes: list[Any] = response.pop("nodes")
@@ -157,11 +172,22 @@ class RemotePregel(BaseRemotePregel):
157
172
  )
158
173
 
159
174
  async def fetch_subgraphs(
160
- self, *, namespace: str | None = None, recurse: bool = False
175
+ self,
176
+ *,
177
+ namespace: str | None = None,
178
+ config: RunnableConfig | None = None,
179
+ recurse: bool = False,
161
180
  ) -> dict[str, dict]:
162
181
  return await _client_invoke(
163
182
  "getSubgraphs",
164
- {"graph_id": self.graph_id, "namespace": namespace, "recurse": recurse},
183
+ {
184
+ "graph_id": self.graph_id,
185
+ "graph_config": self.config,
186
+ "graph_name": self.name,
187
+ "namespace": namespace,
188
+ "recurse": recurse,
189
+ "config": config,
190
+ },
165
191
  )
166
192
 
167
193
  def _convert_state_snapshot(self, item: dict) -> StateSnapshot:
@@ -214,7 +240,13 @@ class RemotePregel(BaseRemotePregel):
214
240
  return self._convert_state_snapshot(
215
241
  await _client_invoke(
216
242
  "getState",
217
- {"graph_id": self.graph_id, "config": config, "subgraphs": subgraphs},
243
+ {
244
+ "graph_id": self.graph_id,
245
+ "graph_config": self.config,
246
+ "graph_name": self.name,
247
+ "config": config,
248
+ "subgraphs": subgraphs,
249
+ },
218
250
  )
219
251
  )
220
252
 
@@ -228,6 +260,8 @@ class RemotePregel(BaseRemotePregel):
228
260
  "updateState",
229
261
  {
230
262
  "graph_id": self.graph_id,
263
+ "graph_config": self.config,
264
+ "graph_name": self.name,
231
265
  "config": config,
232
266
  "values": values,
233
267
  "as_node": as_node,
@@ -247,6 +281,8 @@ class RemotePregel(BaseRemotePregel):
247
281
  "getStateHistory",
248
282
  {
249
283
  "graph_id": self.graph_id,
284
+ "graph_config": self.config,
285
+ "graph_name": self.name,
250
286
  "config": config,
251
287
  "limit": limit,
252
288
  "filter": filter,
@@ -277,6 +313,10 @@ class RemotePregel(BaseRemotePregel):
277
313
  async def invoke(self, input: Any, config: RunnableConfig | None = None):
278
314
  raise Exception("Not implemented")
279
315
 
316
+ def copy(self, update: dict[str, Any] | None = None) -> Self:
317
+ attrs = {**self.__dict__, **(update or {})}
318
+ return self.__class__(**attrs)
319
+
280
320
 
281
321
  async def run_js_process(paths_str: str, watch: bool = False):
282
322
  # check if tsx is available