langgraph-api 0.4.40__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic.

Files changed (41)
  1. langgraph_api/__init__.py +1 -1
  2. langgraph_api/api/assistants.py +65 -61
  3. langgraph_api/api/meta.py +6 -0
  4. langgraph_api/api/threads.py +11 -7
  5. langgraph_api/auth/custom.py +29 -24
  6. langgraph_api/cli.py +2 -49
  7. langgraph_api/config.py +131 -16
  8. langgraph_api/graph.py +1 -1
  9. langgraph_api/grpc/client.py +183 -0
  10. langgraph_api/grpc/config_conversion.py +225 -0
  11. langgraph_api/grpc/generated/core_api_pb2.py +275 -0
  12. langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2.pyi +35 -40
  13. langgraph_api/grpc/generated/engine_common_pb2.py +190 -0
  14. langgraph_api/grpc/generated/engine_common_pb2.pyi +634 -0
  15. langgraph_api/grpc/generated/engine_common_pb2_grpc.py +24 -0
  16. langgraph_api/grpc/ops.py +1045 -0
  17. langgraph_api/js/build.mts +1 -1
  18. langgraph_api/js/client.http.mts +1 -1
  19. langgraph_api/js/client.mts +1 -1
  20. langgraph_api/js/package.json +12 -12
  21. langgraph_api/js/src/graph.mts +20 -0
  22. langgraph_api/js/yarn.lock +176 -234
  23. langgraph_api/metadata.py +29 -21
  24. langgraph_api/queue_entrypoint.py +2 -2
  25. langgraph_api/route.py +14 -4
  26. langgraph_api/schema.py +2 -2
  27. langgraph_api/self_hosted_metrics.py +48 -2
  28. langgraph_api/serde.py +58 -14
  29. langgraph_api/server.py +16 -2
  30. langgraph_api/worker.py +1 -1
  31. {langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/METADATA +6 -6
  32. {langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/RECORD +38 -34
  33. langgraph_api/grpc_ops/client.py +0 -80
  34. langgraph_api/grpc_ops/generated/core_api_pb2.py +0 -274
  35. langgraph_api/grpc_ops/ops.py +0 -610
  36. /langgraph_api/{grpc_ops → grpc}/__init__.py +0 -0
  37. /langgraph_api/{grpc_ops → grpc}/generated/__init__.py +0 -0
  38. /langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2_grpc.py +0 -0
  39. {langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/WHEEL +0 -0
  40. {langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/entry_points.txt +0 -0
  41. {langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/licenses/LICENSE +0 -0
langgraph_api/metadata.py CHANGED
@@ -2,6 +2,7 @@ import asyncio
 import os
 import uuid
 from datetime import UTC, datetime
+from typing import Any

 import langgraph.version
 import orjson
@@ -60,7 +61,7 @@ NODE_COUNTER = 0
 FROM_TIMESTAMP = datetime.now(UTC).isoformat()

 # Beacon endpoint for license key submissions
-BEACON_ENDPOINT = "https://api.smith.langchain.com/v1/metadata/submit"
+BEACON_ENDPOINT = "https://beacon.langchain.com/v1/beacon/metadata/submit"

 # LangChain auth endpoint for API key submissions
 LANGCHAIN_METADATA_ENDPOINT = None
@@ -123,6 +124,7 @@ async def metadata_loop() -> None:
     __version__ = None
     if not LANGGRAPH_CLOUD_LICENSE_KEY and not LANGSMITH_CONTROL_PLANE_API_KEY:
         return
+    lg_version = langgraph.version.__version__

     if (
         LANGGRAPH_CLOUD_LICENSE_KEY
@@ -135,6 +137,25 @@ async def metadata_loop() -> None:
     logger.info("Starting metadata loop")

     global RUN_COUNTER, NODE_COUNTER, FROM_TIMESTAMP
+    base_tags = _ensure_strings(
+        # Tag values must be strings.
+        {
+            "langgraph.python.version": lg_version,
+            "langgraph_api.version": __version__ or "",
+            "langgraph.platform.revision": REVISION or "",
+            "langgraph.platform.variant": VARIANT or "",
+            "langgraph.platform.host": HOST,
+            "langgraph.platform.tenant_id": TENANT_ID or "",
+            "langgraph.platform.project_id": PROJECT_ID or "",
+            "langgraph.platform.plan": PLAN,
+            # user app features
+            "user_app.uses_indexing": USES_INDEXING or "",
+            "user_app.uses_custom_app": USES_CUSTOM_APP or "",
+            "user_app.uses_custom_auth": USES_CUSTOM_AUTH or "",
+            "user_app.uses_thread_ttl": USES_THREAD_TTL or "",
+            "user_app.uses_store_ttl": USES_STORE_TTL or "",
+        }
+    )
     while True:
         # because we always read and write from coroutines in main thread
         # we don't need a lock as long as there's no awaits in this block
@@ -150,27 +171,10 @@ async def metadata_loop() -> None:
         base_payload = {
             "from_timestamp": from_timestamp,
             "to_timestamp": to_timestamp,
-            "tags": {
-                # Tag values must be strings.
-                "langgraph.python.version": langgraph.version.__version__,
-                "langgraph_api.version": __version__ or "",
-                "langgraph.platform.revision": REVISION or "",
-                "langgraph.platform.variant": VARIANT or "",
-                "langgraph.platform.host": HOST,
-                "langgraph.platform.tenant_id": TENANT_ID or "",
-                "langgraph.platform.project_id": PROJECT_ID or "",
-                "langgraph.platform.plan": PLAN,
-                # user app features
-                "user_app.uses_indexing": str(USES_INDEXING or ""),
-                "user_app.uses_custom_app": str(USES_CUSTOM_APP or ""),
-                "user_app.uses_custom_auth": str(USES_CUSTOM_AUTH),
-                "user_app.uses_thread_ttl": str(USES_THREAD_TTL),
-                "user_app.uses_store_ttl": str(USES_STORE_TTL),
-                **usage_tags,
-            },
+            "tags": base_tags | _ensure_strings(usage_tags),
             "measures": {
-                "langgraph.platform.runs": runs,
-                "langgraph.platform.nodes": nodes,
+                "langgraph.platform.runs": int(runs),
+                "langgraph.platform.nodes": int(nodes),
                 **usage_measures,
             },
             "logs": [],
@@ -238,3 +242,7 @@ async def metadata_loop() -> None:
         )

         await asyncio.sleep(INTERVAL)
+
+
+def _ensure_strings(payload: dict[str, Any]) -> dict[str, Any]:
+    return {k: "" if v is None else str(v) for k, v in payload.items()}
langgraph_api/queue_entrypoint.py CHANGED
@@ -1,8 +1,6 @@
 # ruff: noqa: E402
 import os

-from langgraph_api.api.meta import METRICS_FORMATS
-
 if not (
     (disable_truststore := os.getenv("DISABLE_TRUSTSTORE"))
     and disable_truststore.lower() == "true"
@@ -45,6 +43,8 @@ async def health_and_metrics_server():
     from starlette.responses import JSONResponse, PlainTextResponse
     from starlette.routing import Route

+    from langgraph_api.api.meta import METRICS_FORMATS
+
     port = int(os.getenv("PORT", "8080"))
     host = os.getenv("LANGGRAPH_SERVER_HOST", "0.0.0.0")

langgraph_api/route.py CHANGED
@@ -14,6 +14,7 @@ from starlette.responses import JSONResponse
 from starlette.routing import Route, compile_path, get_name
 from starlette.types import ASGIApp, Receive, Scope, Send

+from langgraph_api import config
 from langgraph_api.serde import json_dumpb
 from langgraph_api.utils import get_auth_ctx, with_user

@@ -58,10 +59,11 @@ class ApiResponse(JSONResponse):


 def _json_loads(content: bytearray, schema: SchemaType) -> typing.Any:
-    json = orjson.loads(content)
+    """Parse JSON and validate schema. Used by threadpool for large payloads."""
+    json_data = orjson.loads(content)
     if schema is not None:
-        schema.validate(json)
-    return json
+        schema.validate(json_data)
+    return json_data


 class ApiRequest(Request):
@@ -76,8 +78,16 @@ class ApiRequest(Request):
    async def json(self, schema: SchemaType = None) -> typing.Any:
        if not hasattr(self, "_json"):
            body = await self.body()
+
+            # Hybrid approach for optimal performance:
+            # - Small payloads: parse directly (fast, no queueing/thread pool limitations)
+            # - Large payloads: use dedicated thread pool (safer, doesn't block event loop)
            try:
-                self._json = await run_in_threadpool(_json_loads, body, schema)
+                self._json = (
+                    await run_in_threadpool(_json_loads, body, schema)
+                    if len(body) > config.JSON_THREAD_POOL_MINIMUM_SIZE_BYTES
+                    else _json_loads(body, schema)
+                )
            except orjson.JSONDecodeError as e:
                raise HTTPException(
                    status_code=422, detail="Invalid JSON in request body"
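
A minimal standalone sketch (not part of the diff above) of the size-gated parsing pattern route.py now uses; the 100 kB threshold below is an arbitrary stand-in for config.JSON_THREAD_POOL_MINIMUM_SIZE_BYTES, whose actual default is not shown in this diff.

import orjson
from starlette.concurrency import run_in_threadpool

THRESHOLD_BYTES = 100_000  # stand-in for config.JSON_THREAD_POOL_MINIMUM_SIZE_BYTES

async def parse_body(body: bytes):
    if len(body) > THRESHOLD_BYTES:
        # Large payloads: parse off the event loop in a worker thread
        return await run_in_threadpool(orjson.loads, body)
    # Small payloads: parse inline and skip the thread-pool round trip
    return orjson.loads(body)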
langgraph_api/schema.py CHANGED
@@ -249,8 +249,8 @@ class ThreadUpdateResponse(TypedDict):
 class QueueStats(TypedDict):
     n_pending: int
     n_running: int
-    max_age_secs: datetime | None
-    med_age_secs: datetime | None
+    pending_runs_wait_time_max_secs: float | None
+    pending_runs_wait_time_med_secs: float | None


 # Canonical field sets for select= validation and type aliases for ops
langgraph_api/self_hosted_metrics.py CHANGED
@@ -109,6 +109,20 @@ def initialize_self_hosted_metrics():
         callbacks=[_get_running_runs_callback],
     )

+    meter.create_observable_gauge(
+        name="lg_api_pending_runs_wait_time_max",
+        description="The maximum time a run has been pending, in seconds",
+        unit="s",
+        callbacks=[_get_pending_runs_wait_time_max_callback],
+    )
+
+    meter.create_observable_gauge(
+        name="lg_api_pending_runs_wait_time_med",
+        description="The median pending wait time across runs, in seconds",
+        unit="s",
+        callbacks=[_get_pending_runs_wait_time_med_callback],
+    )
+
     if config.N_JOBS_PER_WORKER > 0:
         meter.create_observable_gauge(
             name="lg_api_workers_max",
@@ -232,14 +246,24 @@ def _get_queue_stats():
                return await Runs.stats(conn)
        except Exception as e:
            logger.warning("Failed to get queue stats from database", exc_info=e)
-            return {"n_pending": 0, "n_running": 0}
+            return {
+                "n_pending": 0,
+                "n_running": 0,
+                "pending_runs_wait_time_max_secs": 0,
+                "pending_runs_wait_time_med_secs": 0,
+            }

    try:
        future = lg_asyncio.run_coroutine_threadsafe(_fetch_queue_stats())
        return future.result(timeout=5)
    except Exception as e:
        logger.warning("Failed to get queue stats", exc_info=e)
-        return {"n_pending": 0, "n_running": 0}
+        return {
+            "n_pending": 0,
+            "n_running": 0,
+            "pending_runs_wait_time_max_secs": 0,
+            "pending_runs_wait_time_med_secs": 0,
+        }


 def _get_pool_stats():
@@ -280,6 +304,28 @@ def _get_running_runs_callback(options: CallbackOptions):
         return [Observation(0, attributes=_customer_attributes)]


+def _get_pending_runs_wait_time_max_callback(options: CallbackOptions):
+    try:
+        stats = _get_queue_stats()
+        value = stats.get("pending_runs_wait_time_max_secs")
+        value = 0 if value is None else value
+        return [Observation(value, attributes=_customer_attributes)]
+    except Exception as e:
+        logger.warning("Failed to get max pending wait time", exc_info=e)
+        return [Observation(0, attributes=_customer_attributes)]
+
+
+def _get_pending_runs_wait_time_med_callback(options: CallbackOptions):
+    try:
+        stats = _get_queue_stats()
+        value = stats.get("pending_runs_wait_time_med_secs")
+        value = 0 if value is None else value
+        return [Observation(value, attributes=_customer_attributes)]
+    except Exception as e:
+        logger.warning("Failed to get median pending wait time", exc_info=e)
+        return [Observation(0, attributes=_customer_attributes)]
+
+
 def _get_workers_max_callback(options: CallbackOptions):
     try:
         metrics_data = get_metrics()
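
For context, a self-contained sketch (not from the package) of the OpenTelemetry pattern the two new gauges follow: an observable gauge whose callback is polled at each metrics collection and returns Observation values. In langgraph_api the value comes from the queue stats; a constant stands in below.

from opentelemetry import metrics
from opentelemetry.metrics import CallbackOptions, Observation

meter = metrics.get_meter("example")

def _pending_wait_time_max(options: CallbackOptions):
    # Real code derives this from Runs.stats(); 12.5 is a placeholder value.
    return [Observation(12.5, attributes={"queue": "default"})]

meter.create_observable_gauge(
    name="lg_api_pending_runs_wait_time_max",
    description="The maximum time a run has been pending, in seconds",
    unit="s",
    callbacks=[_pending_wait_time_max],
)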
langgraph_api/serde.py CHANGED
@@ -3,7 +3,7 @@ import re
 import uuid
 from base64 import b64encode
 from collections import deque
-from collections.abc import Mapping
+from collections.abc import Callable, Mapping
 from datetime import timedelta, timezone
 from decimal import Decimal
 from ipaddress import (
@@ -16,7 +16,7 @@ from ipaddress import (
 )
 from pathlib import Path
 from re import Pattern
-from typing import Any, NamedTuple, cast
+from typing import Any, Literal, NamedTuple, cast
 from zoneinfo import ZoneInfo

 import cloudpickle
@@ -113,16 +113,24 @@ _option = orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_NON_STR_KEYS
 _SURROGATE_RE = re.compile(r"[\ud800-\udfff]")


-def _strip_surr(s: str) -> str:
-    return s if _SURROGATE_RE.search(s) is None else _SURROGATE_RE.sub("", s)
+def _replace_surr(s: str) -> str:
+    return s if _SURROGATE_RE.search(s) is None else _SURROGATE_RE.sub("?", s)


 def _sanitise(o: Any) -> Any:
     if isinstance(o, str):
-        return _strip_surr(o)
+        return _replace_surr(o)
     if isinstance(o, Mapping):
         return {_sanitise(k): _sanitise(v) for k, v in o.items()}
     if isinstance(o, list | tuple | set):
+        if (
+            isinstance(o, tuple)
+            and hasattr(o, "_asdict")
+            and callable(o._asdict)
+            and hasattr(o, "_fields")
+            and isinstance(o._fields, tuple)
+        ):  # named tuple
+            return {f: _sanitise(ov) for f, ov in zip(o._fields, o, strict=True)}
         ctor = list if isinstance(o, list) else type(o)
         return ctor(_sanitise(x) for x in o)
     return o
@@ -158,18 +166,46 @@ async def ajson_loads(content: bytes | Fragment) -> Any:


 class Serializer(JsonPlusSerializer):
+    def __init__(
+        self,
+        __unpack_ext_hook__: Callable[[int, bytes], Any] | None = None,
+        pickle_fallback: bool | None = None,
+    ):
+        from langgraph_api.config import SERDE
+
+        allowed_json_modules: list[tuple[str, ...]] | Literal[True] | None = None
+        if SERDE and "allowed_json_modules" in SERDE:
+            allowed_ = SERDE["allowed_json_modules"]
+            if allowed_ is True:
+                allowed_json_modules = True
+            elif allowed_ is None:
+                allowed_json_modules = None
+            else:
+                allowed_json_modules = [tuple(x) for x in allowed_]
+        if pickle_fallback is None:
+            if SERDE and "pickle_fallback" in SERDE:
+                pickle_fallback = SERDE["pickle_fallback"]
+            else:
+                pickle_fallback = True
+
+        super().__init__(
+            allowed_json_modules=allowed_json_modules,
+            __unpack_ext_hook__=__unpack_ext_hook__,
+        )
+        self.pickle_fallback = pickle_fallback
+
     def dumps_typed(self, obj: Any) -> tuple[str, bytes]:
         try:
             return super().dumps_typed(obj)
         except TypeError:
             return "pickle", cloudpickle.dumps(obj)

-    def dumps(self, obj: Any) -> bytes:
-        # See comment above (in json_dumpb)
-        return super().dumps(obj).replace(rb"\\u0000", b"").replace(rb"\u0000", b"")
-
     def loads_typed(self, data: tuple[str, bytes]) -> Any:
         if data[0] == "pickle":
+            if not self.pickle_fallback:
+                raise ValueError(
+                    "Pickle fallback is disabled. Cannot deserialize pickled object."
+                )
             try:
                 return cloudpickle.loads(data[1])
             except Exception as e:
@@ -177,8 +213,16 @@ class Serializer(JsonPlusSerializer):
                    "Failed to unpickle object, replacing w None", exc_info=e
                )
                return None
-        return super().loads_typed(data)
-
-
-mpack_keys = {"method", "value"}
-SERIALIZER = Serializer()
+        try:
+            return super().loads_typed(data)
+        except Exception:
+            if data[0] == "json":
+                logger.exception(
+                    "Heads up! There was a deserialization error of an item stored using 'json'-type serialization."
+                    ' For security reasons, starting in langgraph-api version 0.5.0, we no longer serialize objects using the "json" type.'
+                    " If you would like to retain the ability to deserialize old checkpoints saved in this format, "
+                    'please set the "allowed_json_modules" option in your langgraph.json configuration to add the'
+                    " necessary module and type paths to an allow-list to be deserialized. You can alkso retain the"
+                    ' ability to insecurely deserialize custom types by setting it to "true".'
+                )
+            raise
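
For reference, a hedged sketch of the shape the new Serializer.__init__ expects from langgraph_api.config.SERDE. Only the two key names are confirmed by the diff above; how this maps onto a langgraph.json section, and the module/type path shown, are illustrative assumptions.

# Hypothetical SERDE mapping; key names taken from the diff, values are examples.
SERDE = {
    # True = trust any module (insecure), None = disallow, or an allow-list of
    # module/type paths that may be revived from "json"-type checkpoints.
    "allowed_json_modules": [("my_package", "models", "MyModel")],
    # Whether unknown payloads may fall back to cloudpickle on load.
    "pickle_fallback": True,
}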
langgraph_api/server.py CHANGED
@@ -151,6 +151,9 @@ custom_middleware = (
 auth_before_custom_middleware = (
     config.HTTP_CONFIG and config.HTTP_CONFIG.get("middleware_order") == "auth_first"
 )
+enable_auth_on_custom_routes = config.HTTP_CONFIG and config.HTTP_CONFIG.get(
+    "enable_custom_route_auth"
+)
 # Custom middleware to be applied at the route/mount level, not globally (app level).
 route_level_custom_middleware = (
     custom_middleware if auth_before_custom_middleware else []
@@ -186,11 +189,22 @@ if user_router:
     # custom middleware before *or* after authentication middleware,
     # depending on the `middleware_order` config.
     user_app = apply_middleware(
-        routes=app.routes, middleware=route_level_custom_middleware
+        routes=app.routes,
+        middleware=(
+            middleware_for_protected_routes if enable_auth_on_custom_routes else []
+        )
+        + route_level_custom_middleware,
     )
     app.user_middleware = global_middleware
 else:
-    user_app = app.routes
+    user_app = (
+        apply_middleware(
+            routes=app.routes,
+            middleware=middleware_for_protected_routes,
+        )
+        if enable_auth_on_custom_routes
+        else app.routes
+    )
     app.user_middleware = custom_middleware + global_middleware

 app.router.routes = (
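
A hedged sketch of the HTTP_CONFIG values the new server.py branches check; the key names come from the diff above, while how they are spelled in langgraph.json is not shown in this diff.

# Hypothetical HTTP_CONFIG mapping; key names taken from the diff, values are examples.
HTTP_CONFIG = {
    "middleware_order": "auth_first",     # run auth middleware before custom middleware
    "enable_custom_route_auth": True,     # also wrap custom/user routes in auth middleware
}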
langgraph_api/worker.py CHANGED
@@ -269,7 +269,7 @@ async def worker(
        elif isinstance(exception, TimeoutError):
            status = "timeout"
            await logger.awarning(
-                "Background run timed out",
+                "Background run timed out. To increase the timeout, set the BG_JOB_TIMEOUT_SECS environment variable (integer, defaults to 3600).",
                **log_info,
            )
            if not temporary:
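
The new log message points at BG_JOB_TIMEOUT_SECS; a minimal sketch of supplying it follows. The integer parsing shown is an assumption; only the variable name and the 3600-second default come from the message itself.

import os

# e.g. export BG_JOB_TIMEOUT_SECS=7200 before starting the server
BG_JOB_TIMEOUT_SECS = int(os.environ.get("BG_JOB_TIMEOUT_SECS", "3600"))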
{langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/METADATA CHANGED
@@ -1,21 +1,21 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.4.40
-Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
+Version: 0.5.6
+Author-email: Will Fu-Hinthorn <will@langchain.dev>, Josh Rogers <josh@langchain.dev>, Parker Rule <parker@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
 Requires-Python: >=3.11
 Requires-Dist: cloudpickle>=3.0.0
 Requires-Dist: cryptography<45.0,>=42.0.0
-Requires-Dist: grpcio-tools<2.0.0,>=1.75.0
+Requires-Dist: grpcio-tools==1.75.1
 Requires-Dist: grpcio<2.0.0,>=1.75.0
 Requires-Dist: httpx>=0.25.0
 Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
-Requires-Dist: langgraph-checkpoint>=2.0.23
-Requires-Dist: langgraph-runtime-inmem<0.15.0,>=0.14.0
+Requires-Dist: langgraph-checkpoint<4,>=3.0.1
+Requires-Dist: langgraph-runtime-inmem<0.18.0,>=0.17.0
 Requires-Dist: langgraph-sdk>=0.2.0
-Requires-Dist: langgraph>=0.4.0
+Requires-Dist: langgraph<2,>=0.4.10
 Requires-Dist: langsmith>=0.3.45
 Requires-Dist: opentelemetry-api>=1.37.0
 Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0
{langgraph_api-0.4.40.dist-info → langgraph_api-0.5.6.dist-info}/RECORD CHANGED
@@ -1,27 +1,27 @@
-langgraph_api/__init__.py,sha256=pha_-Rd-dI-gM_t5o_SGFnaO84vxXW0uH3Bs10X8-DE,23
+langgraph_api/__init__.py,sha256=CMH34Gt1AqO7z_TqRj94XwohGoVCf8aes0djkqm45mk,22
 langgraph_api/asgi_transport.py,sha256=XtiLOu4WWsd-xizagBLzT5xUkxc9ZG9YqwvETBPjBFE,5161
 langgraph_api/asyncio.py,sha256=FEEkLm_N-15cbElo4vQ309MkDKBZuRqAYV8VJ1DocNw,9860
-langgraph_api/cli.py,sha256=Yl2GOChDChsoJcYAk8HhkuR1YKTy8rmbYlS5doqWBpk,19671
+langgraph_api/cli.py,sha256=aEI2pfztEEziIwUk2imiLkNVK1LapMp_3dxvcar1org,18341
 langgraph_api/command.py,sha256=Bh-rvuTLwdHCqFWryCjB1M8oWxPBwRBUjMNj_04KPxM,852
-langgraph_api/config.py,sha256=gO25XRPc19Room51P3FewE54pSFvfwuVDtvyjkDSzEs,13251
+langgraph_api/config.py,sha256=eWt3GhpW2CX7BIrensQCYGVXa-vXgVXiwWTd1hmJuqU,17144
 langgraph_api/cron_scheduler.py,sha256=25wYzEQrhPEivZrAPYOmzLPDOQa-aFogU37mTXc9TJk,2566
 langgraph_api/errors.py,sha256=zlnl3xXIwVG0oGNKKpXf1an9Rn_SBDHSyhe53hU6aLw,1858
 langgraph_api/executor_entrypoint.py,sha256=CaX813ygtf9CpOaBkfkQXJAHjFtmlScCkrOvTDmu4Aw,750
 langgraph_api/feature_flags.py,sha256=taZRhukeBV8r62EmEo92rxfBwYhIw56-P_UvSzQLzt8,576
-langgraph_api/graph.py,sha256=YDNncFFnjOjX_ylHDVY3Z4Ehj62zyHFJPaiRCkLAZus,25285
+langgraph_api/graph.py,sha256=e5_LHHxwJf5QuVgPv2POsrGAcSS4swHdVxz4IBV5p-c,25281
 langgraph_api/http.py,sha256=fyK-H-0UfNy_BzuVW3aWWGvhRavmGAVMkDwDArryJ_4,5659
 langgraph_api/http_metrics.py,sha256=vw3UT9uj9qgxQ_DwJq77HGZqh6LHSjyxylWhqkf2jAw,5095
 langgraph_api/http_metrics_utils.py,sha256=sjxF7SYGTzY0Wz_G0dzatsYNnWr31S6ujej4JmBG2yo,866
 langgraph_api/logging.py,sha256=o5iVARqtFYKIcRrK2nk1ymcKEiVYKd_dHmhXLF2khFI,6090
-langgraph_api/metadata.py,sha256=Z54bd-uf51qo3KR7_jxETbalp6vD9mWGe-UBeMw1AP4,8412
+langgraph_api/metadata.py,sha256=Ah5x5TB8O1VAypzDa1UTrsptS1hjy9z-PuNF8WYl3VM,8597
 langgraph_api/patch.py,sha256=J0MmcfpZG15SUVaVcI0Z4x_c0-0rbbT7Pwh9fDAQOpA,1566
-langgraph_api/queue_entrypoint.py,sha256=z3ZUBl3CpnMm0KFPqCuGvSohPAmYQbhAdyRizSJSClM,8481
-langgraph_api/route.py,sha256=EBhELuJ1He-ZYcAnR5YTImcIeDtWthDae5CHELBxPkM,5056
-langgraph_api/schema.py,sha256=spZ_XPT4AMJfw2YatsdnMZZLzgB9Sm3YR8n0SlgGdJ8,8480
+langgraph_api/queue_entrypoint.py,sha256=VtelUvo_WB1GplliLCetHeTjLC8DCVYQkqIhI8pL2fo,8485
+langgraph_api/route.py,sha256=wh2vMKksTpXJRQ_rLLrFXBSlG608fSMJguZATSWu0Y8,5593
+langgraph_api/schema.py,sha256=vqCw9OE6cerlEK1WH1xxtHfhi-unLH4Z8Z8G0S_wCb0,8512
 langgraph_api/self_hosted_logs.py,sha256=9ljOz3KH3O1SwsD7eTKnreyJ80NbeR7nj7SuxBlrmCc,4422
-langgraph_api/self_hosted_metrics.py,sha256=3FFezxjU0Vs-bsH39f4Dcwn7fporTLHV9REQ3UQ315A,14004
-langgraph_api/serde.py,sha256=Jkww6ixP5o2YZmnXtM7ihuAYC6YSuNDNPvE-8ILoqVo,5499
-langgraph_api/server.py,sha256=h4qeDzcEYF8NFCrTkpL1gsXXFbd7P2JN_wVpmy1Fgwg,9658
+langgraph_api/self_hosted_metrics.py,sha256=u1HstgccqJbtUEwTle-89CS9T2rbHv86lA0OBknxnNQ,15760
+langgraph_api/serde.py,sha256=iFq_7CI7OHvUdVKx19MxJvYeorAqaDBRSu8KvzywnVY,7663
+langgraph_api/server.py,sha256=PExNHgem0tY_KkRFiFzj8m8Np6TrP4M0XJsEw6O2SAU,10112
 langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
 langgraph_api/state.py,sha256=AjkLbUQakIwK7oGzJ8oqubazRsXxG3vDMnRa0s0mzDM,4716
 langgraph_api/store.py,sha256=NIoNZojs6NbtG3VLBPQEFNttvp7XPkHAfjbQ3gY7aLY,4701
@@ -30,50 +30,54 @@ langgraph_api/thread_ttl.py,sha256=KyHnvD0e1p1cV4Z_ZvKNVzDztuI2RBCUsUO2V7GlOSw,1
 langgraph_api/traceblock.py,sha256=Qq5CUdefnMDaRDnyvBSWGBClEj-f3oO7NbH6fedxOSE,630
 langgraph_api/validation.py,sha256=-ZJy-HY3Qs6dJ4J67m1eDhIF0oA-P57VrsUXl0Vy-Bc,5381
 langgraph_api/webhook.py,sha256=SvSM1rdnNtiH4q3JQYmAqJUk2Sable5xAcwOLuRhtlo,1723
-langgraph_api/worker.py,sha256=HHgf590xElF7v02lgn0lG0iK2v2sENMjdx7TVFCvYXY,15399
+langgraph_api/worker.py,sha256=DAEFhnlSV_2N91JiSd6QmnDCEKe5gYXc7s6v3sa3RTA,15503
 langgraph_api/api/__init__.py,sha256=wrnxz_204b2Vhv4-N0WpiPf-ZpDDlmIQkbh-TiXPnOo,5997
 langgraph_api/api/a2a.py,sha256=HIHZkLnIcM1u1FJti-L2NH-h1I9BZ_d-QW9z3gFonn8,53995
-langgraph_api/api/assistants.py,sha256=tRJse7Gr2BTeTZPljL05UvGkFiULpA-6hy03nBx9PF4,18177
+langgraph_api/api/assistants.py,sha256=4JRZGImv1hnuQu26RdH1GTkU_UV169xjGSSKCmv67fY,17977
 langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
-langgraph_api/api/meta.py,sha256=_jG61UKs0J_alsCDgIwCAx1rX5pYuUwKrmOEpWnzR1I,4817
+langgraph_api/api/meta.py,sha256=EMT2wzn4O45GmG8J3uI3e0ef20Kaio3k2TRwzKTM5IE,5610
 langgraph_api/api/openapi.py,sha256=If-z1ckXt-Yu5bwQytK1LWyX_T7G46UtLfixgEP8hwc,11959
 langgraph_api/api/runs.py,sha256=keHlFu1iy-l1IICJHc6AKrSUoQA-LZi6FYsja7la9Xw,25436
 langgraph_api/api/store.py,sha256=xGcPFx4v-VxlK6HRU9uCjzCQ0v66cvc3o_PB5_g7n0Q,5550
-langgraph_api/api/threads.py,sha256=5-ZEcs48bL3vot_yCt3ImuA9hzg93LxuAd_DXd2xj4Y,12915
+langgraph_api/api/threads.py,sha256=16YkaHN_bZ131lenPFNqtKswTiXXg-IpP0AHp0_zHzc,13116
 langgraph_api/api/ui.py,sha256=_genglTUy5BMHlL0lkQypX524yFv6Z5fraIvnrxp7yE,2639
 langgraph_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/auth/custom.py,sha256=psETw_GpLWClBbd_ESVPRLUz9GLQ0_XNsuUDSVbtZy0,22522
+langgraph_api/auth/custom.py,sha256=JzKS1x3iNqSyL9rC1QeaMycP3d04zuXsTdVbXM668uw,22849
 langgraph_api/auth/middleware.py,sha256=jDA4t41DUoAArEY_PNoXesIUBJ0nGhh85QzRdn5EPD0,1916
 langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
 langgraph_api/auth/studio_user.py,sha256=fojJpexdIZYI1w3awiqOLSwMUiK_M_3p4mlfQI0o-BE,454
 langgraph_api/auth/langsmith/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/auth/langsmith/backend.py,sha256=JVf8-q1IvB5EeiLJge3cOtPvDg6qHzK_4cR-R8hPXXQ,3753
 langgraph_api/auth/langsmith/client.py,sha256=79kwCVeHU64nsHsxWipfZhf44lM6vfs2nlfTxlJF6LU,4142
-langgraph_api/grpc_ops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/grpc_ops/client.py,sha256=VB740C9QMhJJrpAEjsADmasN-uGd0apGYtuv_ho0Rl8,2452
-langgraph_api/grpc_ops/ops.py,sha256=VFmFIgXZmE3Xi1tGx-eZrqls6qMG0w5a2Ym7w2Wm9Iw,19733
-langgraph_api/grpc_ops/generated/__init__.py,sha256=dRiB_iGscPKdMpuLp9ueLwAmIfRaNjNXC64ABtb4cg8,135
-langgraph_api/grpc_ops/generated/core_api_pb2.py,sha256=fY01nY_MwlujYiyZcNMvR2DGEnyIkYbZ59p6mNjQKZE,42149
-langgraph_api/grpc_ops/generated/core_api_pb2.pyi,sha256=-vJ6C020K9Kt44mH7-S9sy0QBLMXJsyB3T9AKIH1s4g,49184
-langgraph_api/grpc_ops/generated/core_api_pb2_grpc.py,sha256=Qav2DuCMUSmR8nP4-fVtUBbY0Vc42jqjCs3L4LdIl-0,52467
+langgraph_api/grpc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+langgraph_api/grpc/client.py,sha256=Qr07JVaJrMr3jWQKFHngyC3gqsj-VNLzahbnpj1vDO8,5753
+langgraph_api/grpc/config_conversion.py,sha256=IYQdA6atMFNyMXMpCnYrTUkT4Er4YdrS-LOSw8qgVcg,7560
+langgraph_api/grpc/ops.py,sha256=7_Pp__lq3bpBU2McMtg7-Pv6881igvHfCys4Eg48YDc,32922
+langgraph_api/grpc/generated/__init__.py,sha256=dRiB_iGscPKdMpuLp9ueLwAmIfRaNjNXC64ABtb4cg8,135
+langgraph_api/grpc/generated/core_api_pb2.py,sha256=3NwuQd9_BvXE8sfoDR7iUwWs8V4QChGugbeVL7h9pGk,42064
+langgraph_api/grpc/generated/core_api_pb2.pyi,sha256=nNg9_t8N6dvfUJ-l45vUPxgGpN1AcUZTUYJoqFP-7Tg,49507
+langgraph_api/grpc/generated/core_api_pb2_grpc.py,sha256=Qav2DuCMUSmR8nP4-fVtUBbY0Vc42jqjCs3L4LdIl-0,52467
+langgraph_api/grpc/generated/engine_common_pb2.py,sha256=NlwfuIO7nz27hfN1Oba-7k8GVoJX8VqM7-Jlk3j4oNE,24531
+langgraph_api/grpc/generated/engine_common_pb2.pyi,sha256=bsJumLkaYk3rtOwv5U7j4qquldd8zKONt5ew9x9MBgs,32805
+langgraph_api/grpc/generated/engine_common_pb2_grpc.py,sha256=ChVXQ2OvT6i5OsWWvS-Cn2ldyXDbaPP1LwWmZfU3ya8,894
 langgraph_api/js/.gitignore,sha256=l5yI6G_V6F1600I1IjiUKn87f4uYIrBAYU1MOyBBhg4,59
 langgraph_api/js/.prettierrc,sha256=0es3ovvyNIqIw81rPQsdt1zCQcOdBqyR_DMbFE4Ifms,19
 langgraph_api/js/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/js/base.py,sha256=CJihwc51MwOVkis80f8zudRa1fQz_5jrom4rY8trww8,1133
-langgraph_api/js/build.mts,sha256=wguMiExRjJYpnxol_IxNHuC65CnJFsasQhZiIVSZZq8,3377
-langgraph_api/js/client.http.mts,sha256=ZnikriJdcRSkBmUTBFMpLB3GKgK4xbiSqtUE-l9nqeM,4880
-langgraph_api/js/client.mts,sha256=gDvYiW7Qfl4re2YhZ5oNqtuvffnW_Sf7DK5aUbKB3vw,32330
+langgraph_api/js/build.mts,sha256=-LVN4xxh5tY0JvJFZKT8vE6uT-O4oXjQlgCp9NwmVnQ,3380
+langgraph_api/js/client.http.mts,sha256=FeVM53vduTPCyMPaYs__kmB3iWcz0k0om811DG0JvH0,4883
+langgraph_api/js/client.mts,sha256=8T5wp_114c2wGPfktY77StTnejhYL3ZWBmLwaUvp5XU,32333
 langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
 langgraph_api/js/global.d.ts,sha256=j4GhgtQSZ5_cHzjSPcHgMJ8tfBThxrH-pUOrrJGteOU,196
-langgraph_api/js/package.json,sha256=TLyPB9pZyZ1KQXC08NvbBbf8X5dKAF8WXc5cyjFtWZE,1335
+langgraph_api/js/package.json,sha256=V0QYOBlL7oHw2XOefDsoTJ8F1f6ApSQc-sU0JKixpf8,1330
 langgraph_api/js/remote.py,sha256=gBk273R7esmXg8aR6InxasNFc5E6Qju2bv2DhmmGJyU,38676
 langgraph_api/js/schema.py,sha256=M4fLtr50O1jck8H1hm_0W4cZOGYGdkrB7riLyCes4oY,438
 langgraph_api/js/sse.py,sha256=hHkbncnYnXNIbHhAWneGWYkHp4UhhhGB7-MYtDrY264,4141
 langgraph_api/js/traceblock.mts,sha256=QtGSN5VpzmGqDfbArrGXkMiONY94pMQ5CgzetT_bKYg,761
 langgraph_api/js/tsconfig.json,sha256=imCYqVnqFpaBoZPx8k1nO4slHIWBFsSlmCYhO73cpBs,341
 langgraph_api/js/ui.py,sha256=l9regrvKIxLOjH5SIYE2nhr8QCKLK1Q_1pZgxdL71X4,2488
-langgraph_api/js/yarn.lock,sha256=FCizZGxfI4SVoeAZWbezvonYBXsuvlWMUHX-1yACFz8,84352
-langgraph_api/js/src/graph.mts,sha256=9zTQNdtanI_CFnOwNRoamoCVHHQHGbNlbm91aRxDeOc,2675
+langgraph_api/js/yarn.lock,sha256=rgbpcHxgQ9jk3SkPAJiDsYKJKUcZ8M-43jO25AKzNj0,81970
+langgraph_api/js/src/graph.mts,sha256=etZd27NaoVevyitJ-LAUue0HeR7V3F2YNeSGwWHm13s,3417
 langgraph_api/js/src/load.hooks.mjs,sha256=xNVHq75W0Lk6MUKl1pQYrx-wtQ8_neiUyI6SO-k0ecM,2235
 langgraph_api/js/src/preload.mjs,sha256=8m3bYkf9iZLCQzKAYAdU8snxUwAG3dVLwGvAjfGfgIc,959
 langgraph_api/js/src/utils/files.mts,sha256=nU09Y8lN8SYsg0x2ffmbIW8LEDBl-SWkmxsoXunFU0M,219
@@ -110,8 +114,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=Oi2tU1b8PsXb-6XNHafQvcZv934vLNQhBNPYXr9e2nU,172620
-langgraph_api-0.4.40.dist-info/METADATA,sha256=jkZKHqP86HksHgYEeZRc-IZwhLeB1AOvLRlSv13eNYI,4156
-langgraph_api-0.4.40.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langgraph_api-0.4.40.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
-langgraph_api-0.4.40.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
-langgraph_api-0.4.40.dist-info/RECORD,,
+langgraph_api-0.5.6.dist-info/METADATA,sha256=nfAnUIWvt7D3GyDrgbUdxIexl7LuvrjbpaXFLmBZOEg,4190
+langgraph_api-0.5.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.5.6.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.5.6.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.5.6.dist-info/RECORD,,
langgraph_api/grpc_ops/client.py DELETED
@@ -1,80 +0,0 @@
-"""gRPC client wrapper for LangGraph persistence services."""
-
-import os
-
-import structlog
-from grpc import aio  # type: ignore[import]
-
-from .generated.core_api_pb2_grpc import AdminStub, AssistantsStub
-
-logger = structlog.stdlib.get_logger(__name__)
-
-
-class GrpcClient:
-    """gRPC client for LangGraph persistence services."""
-
-    def __init__(
-        self,
-        server_address: str | None = None,
-    ):
-        """Initialize the gRPC client.
-
-        Args:
-            server_address: The gRPC server address (default: localhost:50051)
-        """
-        self.server_address = server_address or os.getenv(
-            "GRPC_SERVER_ADDRESS", "localhost:50051"
-        )
-        self._channel: aio.Channel | None = None
-        self._assistants_stub: AssistantsStub | None = None
-        self._admin_stub: AdminStub | None = None
-
-    async def __aenter__(self):
-        """Async context manager entry."""
-        await self.connect()
-        return self
-
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        """Async context manager exit."""
-        await self.close()
-
-    async def connect(self):
-        """Connect to the gRPC server."""
-        if self._channel is not None:
-            return
-
-        self._channel = aio.insecure_channel(self.server_address)
-
-        self._assistants_stub = AssistantsStub(self._channel)
-        self._admin_stub = AdminStub(self._channel)
-
-        await logger.adebug(
-            "Connected to gRPC server", server_address=self.server_address
-        )
-
-    async def close(self):
-        """Close the gRPC connection."""
-        if self._channel is not None:
-            await self._channel.close()
-            self._channel = None
-            self._assistants_stub = None
-            self._admin_stub = None
-            await logger.adebug("Closed gRPC connection")
-
-    @property
-    def assistants(self) -> AssistantsStub:
-        """Get the assistants service stub."""
-        if self._assistants_stub is None:
-            raise RuntimeError(
-                "Client not connected. Use async context manager or call connect() first."
-            )
-        return self._assistants_stub
-
-    @property
-    def admin(self) -> AdminStub:
-        """Get the admin service stub."""
-        if self._admin_stub is None:
-            raise RuntimeError(
-                "Client not connected. Use async context manager or call connect() first."
-            )
-        return self._admin_stub
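
For readers of the removed module, a minimal usage sketch based solely on the deleted code above; the server address and the placement of RPC calls are illustrative, and the generated stub methods are not shown in this diff.

import asyncio

from langgraph_api.grpc_ops.client import GrpcClient  # module removed in 0.5.6

async def main() -> None:
    # The context manager connects on entry and closes the channel on exit.
    async with GrpcClient("localhost:50051") as client:
        assistants_stub = client.assistants  # AssistantsStub bound to the open channel
        admin_stub = client.admin            # AdminStub on the same channel
        # ...invoke generated RPC methods on the stubs here...

asyncio.run(main())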