langgraph-api 0.2.27__py3-none-any.whl → 0.2.28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

Files changed (40)
  1. langgraph_api/__init__.py +1 -1
  2. langgraph_api/api/assistants.py +4 -4
  3. langgraph_api/api/store.py +10 -6
  4. langgraph_api/config.py +1 -0
  5. langgraph_api/graph.py +28 -5
  6. langgraph_api/js/remote.py +16 -11
  7. langgraph_api/metadata.py +28 -16
  8. langgraph_api/store.py +127 -0
  9. langgraph_api/stream.py +17 -7
  10. langgraph_api/worker.py +1 -1
  11. {langgraph_api-0.2.27.dist-info → langgraph_api-0.2.28.dist-info}/METADATA +24 -30
  12. {langgraph_api-0.2.27.dist-info → langgraph_api-0.2.28.dist-info}/RECORD +42 -65
  13. {langgraph_api-0.2.27.dist-info → langgraph_api-0.2.28.dist-info}/WHEEL +1 -1
  14. langgraph_api-0.2.28.dist-info/entry_points.txt +2 -0
  15. langgraph_api/js/tests/api.test.mts +0 -2194
  16. langgraph_api/js/tests/auth.test.mts +0 -648
  17. langgraph_api/js/tests/compose-postgres.auth.yml +0 -59
  18. langgraph_api/js/tests/compose-postgres.yml +0 -59
  19. langgraph_api/js/tests/graphs/.gitignore +0 -1
  20. langgraph_api/js/tests/graphs/agent.css +0 -1
  21. langgraph_api/js/tests/graphs/agent.mts +0 -187
  22. langgraph_api/js/tests/graphs/agent.ui.tsx +0 -10
  23. langgraph_api/js/tests/graphs/agent_simple.mts +0 -105
  24. langgraph_api/js/tests/graphs/auth.mts +0 -106
  25. langgraph_api/js/tests/graphs/command.mts +0 -48
  26. langgraph_api/js/tests/graphs/delay.mts +0 -30
  27. langgraph_api/js/tests/graphs/dynamic.mts +0 -24
  28. langgraph_api/js/tests/graphs/error.mts +0 -17
  29. langgraph_api/js/tests/graphs/http.mts +0 -76
  30. langgraph_api/js/tests/graphs/langgraph.json +0 -11
  31. langgraph_api/js/tests/graphs/nested.mts +0 -44
  32. langgraph_api/js/tests/graphs/package.json +0 -13
  33. langgraph_api/js/tests/graphs/weather.mts +0 -57
  34. langgraph_api/js/tests/graphs/yarn.lock +0 -242
  35. langgraph_api/js/tests/utils.mts +0 -17
  36. langgraph_api-0.2.27.dist-info/LICENSE +0 -93
  37. langgraph_api-0.2.27.dist-info/entry_points.txt +0 -3
  38. logging.json +0 -22
  39. openapi.json +0 -4562
  40. /LICENSE → /langgraph_api-0.2.28.dist-info/licenses/LICENSE +0 -0
langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.2.27"
+__version__ = "0.2.28"
langgraph_api/api/assistants.py CHANGED
@@ -9,6 +9,7 @@ from starlette.exceptions import HTTPException
 from starlette.responses import Response
 from starlette.routing import BaseRoute
 
+from langgraph_api import store as api_store
 from langgraph_api.graph import get_assistant_id, get_graph
 from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
@@ -25,7 +26,6 @@ from langgraph_runtime.checkpoint import Checkpointer
 from langgraph_runtime.database import connect
 from langgraph_runtime.ops import Assistants
 from langgraph_runtime.retry import retry_db
-from langgraph_runtime.store import Store
 
 logger = structlog.stdlib.get_logger(__name__)
 
@@ -194,7 +194,7 @@ async def get_assistant_graph(
 assistant["graph_id"],
 config,
 checkpointer=Checkpointer(conn),
-store=Store(),
+store=(await api_store.get_store()),
 ) as graph:
 xray: bool | int = False
 xray_query = request.query_params.get("xray")
@@ -240,7 +240,7 @@ async def get_assistant_subgraphs(
 assistant["graph_id"],
 config,
 checkpointer=Checkpointer(conn),
-store=Store(),
+store=(await api_store.get_store()),
 ) as graph:
 namespace = request.path_params.get("namespace")
 
@@ -286,7 +286,7 @@ async def get_assistant_schemas(
 assistant["graph_id"],
 config,
 checkpointer=Checkpointer(conn),
-store=Store(),
+store=(await api_store.get_store()),
 ) as graph:
 if isinstance(graph, BaseRemotePregel):
 schemas = await graph.fetch_state_schema()
langgraph_api/api/store.py CHANGED
@@ -6,6 +6,7 @@ from starlette.routing import BaseRoute
 
 from langgraph_api.auth.custom import handle_event as _handle_event
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
+from langgraph_api.store import get_store
 from langgraph_api.utils import get_auth_ctx
 from langgraph_api.validation import (
 StoreDeleteRequest,
@@ -14,7 +15,6 @@ from langgraph_api.validation import (
 StoreSearchRequest,
 )
 from langgraph_runtime.retry import retry_db
-from langgraph_runtime.store import Store
 
 
 def _validate_namespace(namespace: tuple[str, ...]) -> Response | None:
@@ -57,7 +57,9 @@ async def put_item(request: ApiRequest):
 "value": payload["value"],
 }
 await handle_event("put", handler_payload)
-await Store().aput(namespace, handler_payload["key"], handler_payload["value"])
+await (await get_store()).aput(
+namespace, handler_payload["key"], handler_payload["value"]
+)
 return Response(status_code=204)
 
 
@@ -75,7 +77,7 @@ async def get_item(request: ApiRequest):
 "key": key,
 }
 await handle_event("get", handler_payload)
-result = await Store().aget(namespace, key)
+result = await (await get_store()).aget(namespace, key)
 return ApiResponse(result.dict() if result is not None else None)
 
 
@@ -91,7 +93,9 @@ async def delete_item(request: ApiRequest):
 "key": payload["key"],
 }
 await handle_event("delete", handler_payload)
-await Store().adelete(handler_payload["namespace"], handler_payload["key"])
+await (await get_store()).adelete(
+handler_payload["namespace"], handler_payload["key"]
+)
 return Response(status_code=204)
 
 
@@ -114,7 +118,7 @@ async def search_items(request: ApiRequest):
 "query": query,
 }
 await handle_event("search", handler_payload)
-items = await Store().asearch(
+items = await (await get_store()).asearch(
 handler_payload["namespace"],
 filter=handler_payload["filter"],
 limit=handler_payload["limit"],
@@ -145,7 +149,7 @@ async def list_namespaces(request: ApiRequest):
 "offset": offset,
 }
 await handle_event("list_namespaces", handler_payload)
-result = await Store().alist_namespaces(
+result = await (await get_store()).alist_namespaces(
 prefix=handler_payload["namespace"],
 suffix=handler_payload["suffix"],
 max_depth=handler_payload["max_depth"],
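
The handlers above now resolve the store asynchronously instead of constructing `Store()` inline. A minimal sketch of the resulting call pattern, assuming the surrounding runtime is already set up (the namespace and payload values are illustrative):

```python
from langgraph_api.store import get_store


async def put_and_get_example() -> None:
    # get_store() is itself a coroutine, hence the double await in the handlers above
    store = await get_store()
    await store.aput(("users", "123"), "prefs", {"theme": "dark"})
    item = await store.aget(("users", "123"), "prefs")
    print(item.dict() if item is not None else None)
```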
langgraph_api/config.py CHANGED
@@ -109,6 +109,7 @@ class TTLConfig(TypedDict, total=False):
 
 
 class StoreConfig(TypedDict, total=False):
+path: str
 index: IndexConfig
 ttl: TTLConfig
 
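The new `path` key lets a deployment point `StoreConfig` at its own store implementation, which is loaded by the new `langgraph_api/store.py` module further down. A hedged sketch of such a config dict; the file path, export name, and index settings are illustrative only:

```python
from langgraph_api.config import StoreConfig

# Illustrative values only. "path" uses the same "file.py:attribute" style that
# the loader in langgraph_api/store.py understands.
store_config: StoreConfig = {
    "path": "./my_app/store.py:make_store",
    "index": {"dims": 1536, "embed": "openai:text-embedding-3-small"},
}
```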
langgraph_api/graph.py CHANGED
@@ -3,7 +3,6 @@ import functools
 import glob
 import importlib.util
 import inspect
-import json
 import os
 import sys
 import warnings
@@ -14,6 +13,7 @@ from random import choice
 from typing import TYPE_CHECKING, Any, NamedTuple
 from uuid import UUID, uuid5
 
+import orjson
 import structlog
 from langchain_core.runnables.config import run_in_executor, var_child_runnable_config
 from langgraph.checkpoint.base import BaseCheckpointSaver
@@ -60,6 +60,12 @@ async def register_graph(
 if callable(graph):
 FACTORY_ACCEPTS_CONFIG[graph_id] = len(inspect.signature(graph).parameters) > 0
 async with connect() as conn:
+graph_name = getattr(graph, "name", None) if isinstance(graph, Pregel) else None
+assistant_name = (
+graph_name
+if graph_name is not None and graph_name != "LangGraph"
+else graph_id
+)
 await Assistants.put(
 conn,
 str(uuid5(NAMESPACE_GRAPH, graph_id)),
@@ -67,7 +73,7 @@
 metadata={"created_by": "system"},
 config=config or {},
 if_exists="do_nothing",
-name=graph_id,
+name=assistant_name,
 description=description,
 )
 
@@ -200,10 +206,19 @@ def _load_graph_config_from_env() -> dict | None:
 config_str = os.getenv("LANGGRAPH_CONFIG")
 if not config_str:
 return None
+try:
+config_per_id = orjson.loads(config_str)
+except orjson.JSONDecodeError as e:
+raise ValueError(
+"Provided environment variable LANGGRAPH_CONFIG must be a valid JSON object"
+f"\nFound: {config_str}"
+) from e
 
-config_per_id = json.loads(config_str)
 if not isinstance(config_per_id, dict):
-raise ValueError("LANGGRAPH_CONFIG must be a JSON object")
+raise ValueError(
+"Provided environment variable LANGGRAPH_CONFIG must be a JSON object"
+f"\nFound: {config_str}"
+)
 
 return config_per_id
 
@@ -218,7 +233,15 @@ async def collect_graphs_from_env(register: bool = False) -> None:
 specs = []
 # graphs-config can be either a mapping from graph id to path where the graph
 # is defined or graph id to a dictionary containing information about the graph.
-graphs_config = json.loads(paths_str)
+try:
+graphs_config = orjson.loads(paths_str)
+except orjson.JSONDecodeError as e:
+raise ValueError(
+"LANGSERVE_GRAPHS must be a valid JSON object."
+f"\nFound: {paths_str}"
+"\n The LANGSERVE_GRAPHS environment variable is typically set"
+'from the "graphs" field in your configuration (langgraph.json) file.'
+) from e
 
 for key, value in graphs_config.items():
 if isinstance(value, dict) and "path" in value:
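
Both environment variables are now parsed with `orjson` and rejected up front with a descriptive error when they are not JSON objects. A small sketch of well-formed values; the graph id, path, and config contents are made up for illustration:

```python
import os

import orjson

# LANGSERVE_GRAPHS: graph id -> path, normally derived from the "graphs" field
# of langgraph.json. LANGGRAPH_CONFIG: graph id -> per-graph config.
os.environ["LANGSERVE_GRAPHS"] = orjson.dumps({"agent": "./graphs/agent.py:graph"}).decode()
os.environ["LANGGRAPH_CONFIG"] = orjson.dumps({"agent": {"tags": ["prod"]}}).decode()

# A non-object value such as '"agent"' would now fail fast with the ValueError
# raised above instead of surfacing a bare JSON decode error later.
```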
langgraph_api/js/remote.py CHANGED
@@ -39,6 +39,7 @@ from starlette.exceptions import HTTPException
 from starlette.requests import HTTPConnection, Request
 from starlette.routing import Route
 
+from langgraph_api import store as api_store
 from langgraph_api.auth.custom import DotDict, ProxyUser
 from langgraph_api.config import LANGGRAPH_AUTH_TYPE
 from langgraph_api.js.base import BaseRemotePregel
@@ -70,6 +71,12 @@ _client = httpx.AsyncClient(
 )
 
 
+def _snapshot_defaults():
+if not hasattr(StateSnapshot, "interrupts"):
+return {}
+return {"interrupts": tuple()}
+
+
 def default_command(obj):
 if isinstance(obj, Send):
 return {"node": obj.node, "args": obj.arg}
@@ -251,7 +258,7 @@ class RemotePregel(BaseRemotePregel):
 item.get("parentConfig"),
 _convert_tasks(item.get("tasks", [])),
 # TODO: add handling of interrupts when multiple resumes land in JS
-tuple(),
+**_snapshot_defaults(),
 )
 
 async def aget_state(
@@ -473,10 +480,8 @@ def _get_passthrough_checkpointer(conn: AsyncConnectionProto):
 return checkpointer
 
 
-def _get_passthrough_store():
-from langgraph_runtime.store import Store
-
-return Store()
+async def _get_passthrough_store():
+return await api_store.get_store()
 
 
 # Setup a HTTP server on top of CHECKPOINTER_SOCKET unix socket
@@ -574,7 +579,7 @@ async def run_remote_checkpointer():
 else:
 raise ValueError(f"Unknown operation type: {op}")
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 results = await store.abatch(processed_operations)
 
 # Handle potentially undefined or non-dict results
@@ -613,7 +618,7 @@ async def run_remote_checkpointer():
 
 namespaces = namespaces_str.split(".")
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 result = await store.aget(namespaces, key)
 
 return result
@@ -626,7 +631,7 @@ async def run_remote_checkpointer():
 value = payload["value"]
 index = payload.get("index")
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 await store.aput(namespace, key, value, index=index)
 
 return {"success": True}
@@ -639,7 +644,7 @@ async def run_remote_checkpointer():
 offset = payload.get("offset", 0)
 query = payload.get("query")
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 result = await store.asearch(
 namespace_prefix, filter=filter, limit=limit, offset=offset, query=query
 )
@@ -652,7 +657,7 @@ async def run_remote_checkpointer():
 namespace = tuple(payload["namespace"])
 key = payload["key"]
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 await store.adelete(namespace, key)
 
 return {"success": True}
@@ -665,7 +670,7 @@ async def run_remote_checkpointer():
 limit = payload.get("limit", 100)
 offset = payload.get("offset", 0)
 
-store = _get_passthrough_store()
+store = await _get_passthrough_store()
 result = await store.alist_namespaces(
 prefix=prefix,
 suffix=suffix,
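
`_snapshot_defaults()` only injects an `interrupts` value when the installed `langgraph` exposes that field on `StateSnapshot`, so the remote Pregel keeps working on both older and newer versions. The same pattern in isolation, using hypothetical stand-in classes rather than the real `StateSnapshot`:

```python
from typing import NamedTuple


class OldSnapshot(NamedTuple):      # stand-in: no "interrupts" field
    values: dict


class NewSnapshot(NamedTuple):      # stand-in: has "interrupts" with a default
    values: dict
    interrupts: tuple = ()


def snapshot_defaults(cls) -> dict:
    # Pass "interrupts" only when the target class actually defines it
    return {"interrupts": ()} if hasattr(cls, "interrupts") else {}


print(OldSnapshot({"v": 1}, **snapshot_defaults(OldSnapshot)))
print(NewSnapshot({"v": 1}, **snapshot_defaults(NewSnapshot)))
```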
langgraph_api/metadata.py CHANGED
@@ -1,5 +1,6 @@
 import asyncio
 import os
+from collections import defaultdict
 from datetime import UTC, datetime
 
 import langgraph.version
@@ -36,8 +37,8 @@ PLAN = "enterprise" if plus_features_enabled() else "developer"
 USER_API_URL = os.getenv("LANGGRAPH_API_URL", None)
 
 LOGS: list[dict] = []
-RUN_COUNTER = 0
-NODE_COUNTER = 0
+RUN_COUNTER = defaultdict(int)
+NODE_COUNTER = defaultdict(int)
 FROM_TIMESTAMP = datetime.now(UTC).isoformat()
 
 if (
@@ -49,14 +50,12 @@ else:
 METADATA_ENDPOINT = "https://api.smith.langchain.com/v1/metadata/submit"
 
 
-def incr_runs(*, incr: int = 1) -> None:
-global RUN_COUNTER
-RUN_COUNTER += incr
+def incr_runs(*, graph_id: str | None = None, incr: int = 1) -> None:
+RUN_COUNTER[graph_id] += incr
 
 
-def incr_nodes(_, *, incr: int = 1) -> None:
-global NODE_COUNTER
-NODE_COUNTER += incr
+def incr_nodes(*_, graph_id: str | None = None, incr: int = 1) -> None:
+NODE_COUNTER[graph_id] += incr
 
 
 def append_log(log: dict) -> None:
@@ -89,13 +88,23 @@ async def metadata_loop() -> None:
 # we don't need a lock as long as there's no awaits in this block
 from_timestamp = FROM_TIMESTAMP
 to_timestamp = datetime.now(UTC).isoformat()
-nodes = NODE_COUNTER
-runs = RUN_COUNTER
+nodes = NODE_COUNTER.copy()
+runs = RUN_COUNTER.copy()
 logs = LOGS.copy()
 LOGS.clear()
-RUN_COUNTER = 0
-NODE_COUNTER = 0
+RUN_COUNTER.clear()
+NODE_COUNTER.clear()
 FROM_TIMESTAMP = to_timestamp
+graph_measures = {
+f"langgraph.platform.graph_runs.{graph_id}": runs.get(graph_id, 0)
+for graph_id in runs
+}
+graph_measures.update(
+{
+f"langgraph.platform.graph_nodes.{graph_id}": nodes.get(graph_id, 0)
+for graph_id in nodes
+}
+)
 
 payload = {
 "license_key": LANGGRAPH_CLOUD_LICENSE_KEY,
@@ -120,8 +129,9 @@ async def metadata_loop() -> None:
 "user_app.uses_store_ttl": str(USES_STORE_TTL),
 },
 "measures": {
-"langgraph.platform.runs": runs,
-"langgraph.platform.nodes": nodes,
+"langgraph.platform.runs": sum(runs.values()),
+"langgraph.platform.nodes": sum(nodes.values()),
+**graph_measures,
 },
 "logs": logs,
 }
@@ -134,8 +144,10 @@ async def metadata_loop() -> None:
 )
 except Exception as e:
 # retry on next iteration
-incr_runs(incr=runs)
-incr_nodes("", incr=nodes)
+for graph_id, incr in runs.items():
+incr_runs(graph_id=graph_id, incr=incr)
+for graph_id, incr in nodes.items():
+incr_nodes(graph_id=graph_id, incr=incr)
 FROM_TIMESTAMP = from_timestamp
 await logger.ainfo("Metadata submission skipped.", error=str(e))
 await asyncio.sleep(INTERVAL)
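
Run and node counts are now tracked per graph in `defaultdict(int)` counters and reported both in aggregate and as per-graph measures. A small self-contained sketch of that aggregation, using made-up graph ids:

```python
from collections import defaultdict

RUN_COUNTER: defaultdict[str | None, int] = defaultdict(int)


def incr_runs(*, graph_id: str | None = None, incr: int = 1) -> None:
    RUN_COUNTER[graph_id] += incr


# Hypothetical traffic
incr_runs(graph_id="agent")
incr_runs(graph_id="agent")
incr_runs(graph_id="weather")

runs = RUN_COUNTER.copy()
RUN_COUNTER.clear()

measures = {
    "langgraph.platform.runs": sum(runs.values()),
    **{f"langgraph.platform.graph_runs.{gid}": n for gid, n in runs.items()},
}
print(measures)
# {'langgraph.platform.runs': 3,
#  'langgraph.platform.graph_runs.agent': 2,
#  'langgraph.platform.graph_runs.weather': 1}
```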
langgraph_api/store.py ADDED
@@ -0,0 +1,127 @@
+import asyncio
+import importlib.util
+import sys
+import threading
+from collections.abc import Callable
+from contextlib import AsyncExitStack, asynccontextmanager
+from random import choice
+from typing import Any
+
+import structlog
+from langchain_core.runnables.config import run_in_executor
+from langgraph.graph import Graph
+from langgraph.pregel import Pregel
+from langgraph.store.base import BaseStore
+
+from langgraph_api import config
+
+logger = structlog.stdlib.get_logger(__name__)
+
+CUSTOM_STORE: BaseStore | Callable[[], BaseStore] | None = None
+STORE_STACK = threading.local()
+
+
+async def get_store() -> BaseStore:
+if CUSTOM_STORE:
+if not hasattr(STORE_STACK, "stack"):
+stack = AsyncExitStack()
+STORE_STACK.stack = stack
+store = await stack.enter_async_context(_yield_store(CUSTOM_STORE))
+STORE_STACK.store = store
+await logger.ainfo(f"Using custom store: {store}", kind=str(type(store)))
+return store
+return STORE_STACK.store
+else:
+from langgraph_runtime.store import Store
+
+return Store()
+
+
+async def exit_store():
+if not CUSTOM_STORE:
+return
+if not hasattr(STORE_STACK, "stack"):
+return
+await STORE_STACK.stack.aclose()
+
+
+@asynccontextmanager
+async def _yield_store(value: Any):
+if isinstance(value, BaseStore):
+yield value
+elif hasattr(value, "__aenter__") and hasattr(value, "__aexit__"):
+async with value as ctx_value:
+yield ctx_value
+elif hasattr(value, "__enter__") and hasattr(value, "__exit__"):
+with value as ctx_value:
+yield ctx_value
+elif asyncio.iscoroutine(value):
+yield await value
+elif callable(value):
+async with _yield_store(value()) as ctx_value:
+yield ctx_value
+else:
+raise ValueError(
+f"Unsupported store type: {type(value)}. Expected an instance of BaseStore "
+"or a function or async generator that returns one."
+)
+
+
+async def collect_store_from_env() -> None:
+global CUSTOM_STORE
+if not config.STORE_CONFIG or not (store_path := config.STORE_CONFIG.get("path")):
+return
+await logger.ainfo(
+f"Heads up! You are configuring a custom long-term memory store at {store_path}\n\n"
+"This store will be used IN STEAD OF the default postgres + pgvector store."
+"Some functionality, such as TTLs and vector search, may not be available."
+"Search performance & other capabilities will depend on the quality of your implementation."
+)
+# Try to load. The loaded object can either be a BaseStore instance, a function that generates it, etc.
+value = await run_in_executor(None, _load_store, store_path)
+CUSTOM_STORE = value
+
+
+def _load_store(store_path: str) -> Any:
+if "/" in store_path or ".py:" in store_path:
+modname = "".join(choice("abcdefghijklmnopqrstuvwxyz") for _ in range(24))
+path_name, function = store_path.rsplit(":", 1)
+module_name = path_name.rstrip(":")
+# Load from file path
+modspec = importlib.util.spec_from_file_location(modname, module_name)
+if modspec is None:
+raise ValueError(f"Could not find store file: {path_name}")
+module = importlib.util.module_from_spec(modspec)
+sys.modules[module_name] = module
+modspec.loader.exec_module(module)
+
+else:
+path_name, function = store_path.rsplit(".", 1)
+module = importlib.import_module(path_name)
+
+try:
+store: BaseStore | Callable[[config.StoreConfig], BaseStore] = module.__dict__[
+function
+]
+except KeyError as e:
+available = [k for k in module.__dict__ if not k.startswith("__")]
+suggestion = ""
+if available:
+likely = [
+k for k in available if isinstance(module.__dict__[k], Graph | Pregel)
+]
+if likely:
+likely_ = "\n".join(
+[f"\t- {path_name}:{k}" if path_name else k for k in likely]
+)
+suggestion = f"\nDid you mean to use one of the following?\n{likely_}"
+elif available:
+suggestion = f"\nFound the following exports: {', '.join(available)}"
+
+raise ValueError(
+f"Could not find store '{store_path}'. "
+f"Please check that:\n"
+f"1. The file exports a variable named '{store_path}'\n"
+f"2. The variable name in your config matches the export name{suggestion}"
+) from e
+return store
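
`collect_store_from_env()` hands whatever `store.path` references to `_yield_store()`, which accepts a `BaseStore` instance, a sync or async context manager, a coroutine, or a callable returning any of those. A hedged sketch of a user module such a path could point at; the file name and the `InMemoryStore` choice are examples only:

```python
# my_app/store.py -- referenced from the store config as "./my_app/store.py:make_store"
from langgraph.store.base import BaseStore
from langgraph.store.memory import InMemoryStore


def make_store() -> BaseStore:
    # Any BaseStore works here; a custom store replaces the default
    # Postgres + pgvector store, so TTLs and vector search are up to the implementation.
    return InMemoryStore()
```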
langgraph_api/stream.py CHANGED
@@ -1,6 +1,6 @@
+import functools
 from collections.abc import AsyncIterator, Callable
 from contextlib import AsyncExitStack, aclosing
-from functools import lru_cache
 from typing import Any, cast
 
 import langgraph.version
@@ -23,6 +23,7 @@ from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload
 from pydantic import ValidationError
 from pydantic.v1 import ValidationError as ValidationErrorLegacy
 
+from langgraph_api import store as api_store
 from langgraph_api.asyncio import ValueEvent, wait_if_not_done
 from langgraph_api.command import map_cmd
 from langgraph_api.graph import get_graph
@@ -33,7 +34,6 @@ from langgraph_api.serde import json_dumpb
 from langgraph_api.utils import AsyncConnectionProto
 from langgraph_runtime.checkpoint import Checkpointer
 from langgraph_runtime.ops import Runs
-from langgraph_runtime.store import Store
 
 logger = structlog.stdlib.get_logger(__name__)
 
@@ -94,7 +94,7 @@ async def astream_state(
 get_graph(
 config["configurable"]["graph_id"],
 config,
-store=Store(),
+store=(await api_store.get_store()),
 checkpointer=None if temporary else Checkpointer(conn),
 )
 )
@@ -119,7 +119,9 @@ async def astream_state(
 # attach node counter
 is_remote_pregel = isinstance(graph, BaseRemotePregel)
 if not is_remote_pregel:
-config["configurable"]["__pregel_node_finished"] = incr_nodes
+config["configurable"]["__pregel_node_finished"] = functools.partial(
+incr_nodes, graph_id=_get_graph_id(run)
+)
 
 # attach run_id to config
 # for attempts beyond the first, use a fresh, unique run_id
@@ -263,10 +265,10 @@ async def astream_state(
 yield mode, chunk
 # --- end shared logic with astream_events ---
 if is_remote_pregel:
-# increament the remote runs
+# increment the remote runs
 try:
 nodes_executed = await graph.fetch_nodes_executed()
-incr_nodes(None, incr=nodes_executed)
+incr_nodes(graph_id=graph.graph_id, incr=nodes_executed)
 except Exception as e:
 logger.warning(f"Failed to fetch nodes executed for {graph.graph_id}: {e}")
 
@@ -301,7 +303,7 @@ def get_feedback_urls(run_id: str, feedback_keys: list[str]) -> dict[str, str]:
 return {key: token.url for key, token in zip(feedback_keys, tokens, strict=False)}
 
 
-@lru_cache(maxsize=1)
+@functools.lru_cache(maxsize=1)
 def get_langsmith_client() -> langsmith.Client:
 return langsmith.Client()
 
@@ -315,3 +317,11 @@ EXPECTED_ERRORS = (
 ValidationError,
 ValidationErrorLegacy,
 )
+
+
+def _get_graph_id(run: Run) -> str | None:
+try:
+return run["kwargs"]["config"]["configurable"]["graph_id"]
+except Exception:
+logger.info(f"Failed to get graph_id from run {run['run_id']}")
+return "Unknown"
langgraph_api/worker.py CHANGED
@@ -69,7 +69,7 @@ async def worker(
 ) -> WorkerResult:
 run_id = run["run_id"]
 if attempt == 1:
-incr_runs()
+incr_runs(graph_id=_get_graph_id(run))
 checkpoint: CheckpointPayload | None = None
 exception: Exception | None = None
 status: str | None = None
{langgraph_api-0.2.27.dist-info → langgraph_api-0.2.28.dist-info}/METADATA CHANGED
@@ -1,35 +1,29 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.27
-Summary:
+Version: 0.2.28
+Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
-Author: Nuno Campos
-Author-email: nuno@langchain.dev
+License-File: LICENSE
 Requires-Python: >=3.11
-Classifier: License :: Other/Proprietary License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
-Requires-Dist: cloudpickle (>=3.0.0,<4.0.0)
-Requires-Dist: cryptography (>=42.0.0,<45.0)
-Requires-Dist: httpx (>=0.25.0)
-Requires-Dist: jsonschema-rs (>=0.20.0,<0.30)
-Requires-Dist: langchain-core (>=0.2.38) ; python_version < "4.0"
-Requires-Dist: langgraph (>=0.3.27) ; python_version < "4.0"
-Requires-Dist: langgraph-checkpoint (>=2.0.23) ; python_version < "4.0"
-Requires-Dist: langgraph-runtime-inmem (>=0.0.9,<0.1)
-Requires-Dist: langgraph-sdk (>=0.1.66,<0.2.0) ; python_version < "4.0"
-Requires-Dist: langsmith (>=0.1.63)
-Requires-Dist: orjson (>=3.9.7)
-Requires-Dist: pyjwt (>=2.9.0,<3.0.0)
-Requires-Dist: sse-starlette (>=2.1.0,<2.2.0)
-Requires-Dist: starlette (>=0.38.6)
-Requires-Dist: structlog (>=24.1.0,<26)
-Requires-Dist: tenacity (>=8.0.0)
-Requires-Dist: truststore (>=0.1)
-Requires-Dist: uvicorn (>=0.26.0)
-Requires-Dist: watchfiles (>=0.13)
+Requires-Dist: cloudpickle>=3.0.0
+Requires-Dist: cryptography<45.0,>=42.0.0
+Requires-Dist: httpx>=0.25.0
+Requires-Dist: jsonschema-rs<0.30,>=0.20.0
+Requires-Dist: langchain-core>=0.2.38
+Requires-Dist: langgraph-checkpoint>=2.0.23
+Requires-Dist: langgraph-runtime-inmem<0.2,>=0.1.0
+Requires-Dist: langgraph-sdk>=0.1.66
+Requires-Dist: langgraph>=0.3.27
+Requires-Dist: langsmith>=0.1.112
+Requires-Dist: orjson>=3.9.7
+Requires-Dist: pyjwt>=2.9.0
+Requires-Dist: sse-starlette<2.2.0,>=2.1.0
+Requires-Dist: starlette>=0.38.6
+Requires-Dist: structlog<26,>=24.1.0
+Requires-Dist: tenacity>=8.0.0
+Requires-Dist: truststore>=0.1
+Requires-Dist: uvicorn>=0.26.0
+Requires-Dist: watchfiles>=0.13
 Description-Content-Type: text/markdown
 
 # LangGraph API
@@ -132,4 +126,4 @@ Options:
 
 ## License
 
-This project is licensed under the Elastic License 2.0 - see the [LICENSE](./LICENSE) file for details.
+This project is licensed under the Elastic License 2.0 - see the [LICENSE](./LICENSE) file for details.
+ This project is licensed under the Elastic License 2.0 - see the [LICENSE](./LICENSE) file for details.