langgraph-api 0.2.135__py3-none-any.whl → 0.2.137__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic; consult the package registry's advisory page for details.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.135"
1
+ __version__ = "0.2.137"
@@ -82,9 +82,12 @@ def _get_configurable_jsonschema(graph: Pregel) -> dict:
82
82
  json_schema["properties"].pop(key, None)
83
83
  # The type name of the configurable type is not preserved.
84
84
  # We'll add it back to the schema if we can.
85
- if hasattr(graph, "config_type") and graph.config_type is not None:
86
- if hasattr(graph.config_type, "__name__"):
87
- json_schema["title"] = graph.config_type.__name__
85
+ if (
86
+ hasattr(graph, "config_type")
87
+ and graph.config_type is not None
88
+ and hasattr(graph.config_type, "__name__")
89
+ ):
90
+ json_schema["title"] = graph.config_type.__name__
88
91
  return json_schema
89
92
  # If the schema does not have a configurable field, return an empty schema.
90
93
  return {}
@@ -263,18 +266,16 @@ async def get_assistant_graph(
263
266
  drawable_graph = await graph.fetch_graph(xray=xray)
264
267
  json_graph = drawable_graph.to_json()
265
268
  for node in json_graph.get("nodes", []):
266
- if data := node.get("data"):
267
- if isinstance(data, dict):
268
- data.pop("id", None)
269
+ if (data := node.get("data")) and isinstance(data, dict):
270
+ data.pop("id", None)
269
271
  return ApiResponse(json_graph)
270
272
 
271
273
  try:
272
274
  drawable_graph = await graph.aget_graph(xray=xray)
273
275
  json_graph = drawable_graph.to_json()
274
276
  for node in json_graph.get("nodes", []):
275
- if data := node.get("data"):
276
- if isinstance(data, dict):
277
- data.pop("id", None)
277
+ if (data := node.get("data")) and isinstance(data, dict):
278
+ data.pop("id", None)
278
279
  return ApiResponse(json_graph)
279
280
  except NotImplementedError:
280
281
  raise HTTPException(
langgraph_api/api/runs.py CHANGED
@@ -212,9 +212,11 @@ async def wait_run(request: ApiRequest):
212
212
  )
213
213
  ) as stream:
214
214
  async for mode, chunk, _ in stream:
215
- if mode == b"values":
216
- vchunk = chunk
217
- elif mode == b"updates" and b"__interrupt__" in chunk:
215
+ if (
216
+ mode == b"values"
217
+ or mode == b"updates"
218
+ and b"__interrupt__" in chunk
219
+ ):
218
220
  vchunk = chunk
219
221
  elif mode == b"error":
220
222
  vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
@@ -295,9 +297,11 @@ async def wait_run_stateless(request: ApiRequest):
295
297
  )
296
298
  ) as stream:
297
299
  async for mode, chunk, _ in stream:
298
- if mode == b"values":
299
- vchunk = chunk
300
- elif mode == b"updates" and b"__interrupt__" in chunk:
300
+ if (
301
+ mode == b"values"
302
+ or mode == b"updates"
303
+ and b"__interrupt__" in chunk
304
+ ):
301
305
  vchunk = chunk
302
306
  elif mode == b"error":
303
307
  vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
@@ -134,6 +134,7 @@ async def list_namespaces(request: ApiRequest):
134
134
  payload = await request.json(StoreListNamespacesRequest)
135
135
  prefix = tuple(payload["prefix"]) if payload.get("prefix") else None
136
136
  suffix = tuple(payload["suffix"]) if payload.get("suffix") else None
137
+ err = None
137
138
  if prefix and (err := _validate_namespace(prefix)):
138
139
  return err
139
140
  if suffix and (err := _validate_namespace(suffix)):
langgraph_api/asyncio.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import asyncio
2
2
  import concurrent.futures
3
3
  from collections.abc import AsyncIterator, Coroutine
4
- from contextlib import AbstractAsyncContextManager
4
+ from contextlib import AbstractAsyncContextManager, suppress
5
5
  from functools import partial
6
6
  from typing import Any, Generic, TypeVar
7
7
 
@@ -26,10 +26,8 @@ def get_event_loop() -> asyncio.AbstractEventLoop:
26
26
 
27
27
 
28
28
  async def sleep_if_not_done(delay: float, done: asyncio.Event) -> None:
29
- try:
29
+ with suppress(TimeoutError):
30
30
  await asyncio.wait_for(done.wait(), delay)
31
- except TimeoutError:
32
- pass
33
31
 
34
32
 
35
33
  class ValueEvent(asyncio.Event):
@@ -95,14 +93,13 @@ PENDING_TASKS = set()
95
93
 
96
94
 
97
95
  def _create_task_done_callback(
98
- ignore_exceptions: tuple[Exception, ...],
96
+ ignore_exceptions: tuple[type[Exception], ...],
99
97
  task: asyncio.Task | asyncio.Future,
100
98
  ) -> None:
101
99
  PENDING_TASKS.discard(task)
102
100
  try:
103
- if exc := task.exception():
104
- if not isinstance(exc, ignore_exceptions):
105
- logger.exception("asyncio.task failed", exc_info=exc)
101
+ if (exc := task.exception()) and not isinstance(exc, ignore_exceptions):
102
+ logger.exception("asyncio.task failed", exc_info=exc)
106
103
  except asyncio.CancelledError:
107
104
  pass
108
105
 
@@ -176,16 +173,13 @@ class SimpleTaskGroup(AbstractAsyncContextManager["SimpleTaskGroup"]):
176
173
  self.taskgroup_name = f" {taskgroup_name} " if taskgroup_name else ""
177
174
 
178
175
  def _create_task_done_callback(
179
- self, ignore_exceptions: tuple[Exception, ...], task: asyncio.Task
176
+ self, ignore_exceptions: tuple[type[Exception], ...], task: asyncio.Task
180
177
  ) -> None:
181
- try:
178
+ with suppress(AttributeError):
182
179
  self.tasks.remove(task)
183
- except AttributeError:
184
- pass
185
180
  try:
186
- if exc := task.exception():
187
- if not isinstance(exc, ignore_exceptions):
188
- logger.exception("asyncio.task failed in task group", exc_info=exc)
181
+ if (exc := task.exception()) and not isinstance(exc, ignore_exceptions):
182
+ logger.exception("asyncio.task failed in task group", exc_info=exc)
189
183
  except asyncio.CancelledError:
190
184
  pass
191
185
 
@@ -286,13 +280,8 @@ class AsyncQueue(Generic[T], asyncio.Queue[T]):
286
280
  await getter
287
281
  except:
288
282
  getter.cancel() # Just in case getter is not done yet.
289
- try:
290
- # Clean self._getters from canceled getters.
283
+ with suppress(ValueError):
291
284
  self._getters.remove(getter)
292
- except ValueError:
293
- # The getter could be removed from self._getters by a
294
- # previous put_nowait call.
295
- pass
296
285
  if not self.empty() and not getter.cancelled():
297
286
  # We were woken up by put_nowait(), but can't take
298
287
  # the call. Wake up the next in line.
@@ -239,14 +239,11 @@ def _get_custom_auth_middleware(
239
239
 
240
240
  @functools.lru_cache(maxsize=1)
241
241
  def _get_auth_instance(path: str | None = None) -> Auth | Literal["js"] | None:
242
- if path is not None:
243
- auth_instance = _load_auth_obj(path)
244
- else:
245
- auth_instance = None
242
+ auth_instance = _load_auth_obj(path) if path is not None else None
246
243
 
247
244
  if auth_instance == "js":
248
245
  return auth_instance
249
-
246
+ deps = None
250
247
  if auth_instance is not None and (
251
248
  deps := _get_dependencies(auth_instance._authenticate_handler)
252
249
  ):
@@ -525,6 +522,12 @@ class ProxyUser(BaseUser):
525
522
  """Proxy any other attributes to the underlying user object."""
526
523
  return getattr(self._user, name)
527
524
 
525
+ def __iter__(self):
526
+ return iter(self._user)
527
+
528
+ def __len__(self):
529
+ return len(self._user)
530
+
528
531
  def __str__(self) -> str:
529
532
  return f"{self._user}"
530
533
 
@@ -86,9 +86,10 @@ class LangsmithAuthBackend(AuthenticationBackend):
86
86
  # If tenant id verification is disabled, the bearer token requests
87
87
  # are not required to match the tenant id. Api key requests are
88
88
  # always required to match the tenant id.
89
- if LANGSMITH_AUTH_VERIFY_TENANT_ID or conn.headers.get("x-api-key"):
90
- if auth_dict["tenant_id"] != LANGSMITH_TENANT_ID:
91
- raise AuthenticationError("Invalid tenant ID")
89
+ if (
90
+ LANGSMITH_AUTH_VERIFY_TENANT_ID or conn.headers.get("x-api-key")
91
+ ) and auth_dict["tenant_id"] != LANGSMITH_TENANT_ID:
92
+ raise AuthenticationError("Invalid tenant ID")
92
93
 
93
94
  credentials = AuthCredentials(["authenticated"])
94
95
  user = StudioUser(auth_dict.get("user_id"), is_authenticated=True)
@@ -1,5 +1,5 @@
1
1
  from collections.abc import AsyncGenerator
2
- from contextlib import asynccontextmanager
2
+ from contextlib import asynccontextmanager, suppress
3
3
  from typing import Any
4
4
 
5
5
  import httpx
@@ -17,11 +17,10 @@ _client: "JsonHttpClient"
17
17
  def is_retriable_error(exception: BaseException) -> bool:
18
18
  if isinstance(exception, httpx.TransportError):
19
19
  return True
20
- if isinstance(exception, httpx.HTTPStatusError):
21
- if exception.response.status_code > 499:
22
- return True
23
-
24
- return False
20
+ return (
21
+ isinstance(exception, httpx.HTTPStatusError)
22
+ and exception.response.status_code > 499
23
+ )
25
24
 
26
25
 
27
26
  retry_httpx = retry(
@@ -106,10 +105,8 @@ def create_client() -> JsonHttpClient:
106
105
  async def close_auth_client() -> None:
107
106
  """Close the auth http client."""
108
107
  global _client
109
- try:
108
+ with suppress(NameError):
110
109
  await _client.client.aclose()
111
- except NameError:
112
- pass
113
110
 
114
111
 
115
112
  async def initialize_auth_client() -> None:
@@ -132,10 +129,7 @@ async def auth_client() -> AsyncGenerator[JsonHttpClient, None]:
132
129
  await client.client.aclose()
133
130
  else:
134
131
  try:
135
- if not _client.client.is_closed:
136
- found = True
137
- else:
138
- found = False
132
+ found = bool(not _client.client.is_closed)
139
133
  except NameError:
140
134
  found = False
141
135
  if found:
langgraph_api/config.py CHANGED
@@ -242,9 +242,12 @@ CORS_CONFIG: CorsConfig | None = env("CORS_CONFIG", cast=_parse_json, default=No
242
242
  }
243
243
  }
244
244
  """
245
- if CORS_CONFIG is not None and CORS_ALLOW_ORIGINS != "*":
246
- if CORS_CONFIG.get("allow_origins") is None:
247
- CORS_CONFIG["allow_origins"] = CORS_ALLOW_ORIGINS
245
+ if (
246
+ CORS_CONFIG is not None
247
+ and CORS_ALLOW_ORIGINS != "*"
248
+ and CORS_CONFIG.get("allow_origins") is None
249
+ ):
250
+ CORS_CONFIG["allow_origins"] = CORS_ALLOW_ORIGINS
248
251
 
249
252
  # queue
250
253
 
@@ -371,7 +374,7 @@ API_VARIANT = env("LANGSMITH_LANGGRAPH_API_VARIANT", cast=str, default="")
371
374
  # UI
372
375
  UI_USE_BUNDLER = env("LANGGRAPH_UI_BUNDLER", cast=bool, default=False)
373
376
  IS_QUEUE_ENTRYPOINT = False
374
-
377
+ ref_sha = None
375
378
  if not os.getenv("LANGCHAIN_REVISION_ID") and (
376
379
  ref_sha := os.getenv("LANGSMITH_LANGGRAPH_GIT_REF_SHA")
377
380
  ):
@@ -46,12 +46,11 @@ async def cron_scheduler():
46
46
  cron["cron_id"],
47
47
  )
48
48
  )
49
- except Exception as e:
50
- logger.error(
49
+ except Exception:
50
+ logger.exception(
51
51
  "Error scheduling cron run cron_id={}".format(
52
52
  cron["cron_id"]
53
- ),
54
- exc_info=e,
53
+ )
55
54
  )
56
55
  next_run_date = await run_in_executor(
57
56
  None, next_cron_date, cron["schedule"], cron["now"]
@@ -63,6 +62,6 @@ async def cron_scheduler():
63
62
  await asyncio.sleep(SLEEP_TIME)
64
63
  except asyncio.CancelledError:
65
64
  raise
66
- except Exception as e:
67
- logger.error("Error in cron_scheduler", exc_info=e)
65
+ except Exception:
66
+ logger.exception("Error in cron_scheduler")
68
67
  await asyncio.sleep(SLEEP_TIME + random())
langgraph_api/http.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import contextlib
2
3
 
3
4
  import httpx
4
5
  from tenacity import (
@@ -65,10 +66,8 @@ class JsonHttpClient:
65
66
  res.raise_for_status()
66
67
  finally:
67
68
  # We don't need the response body, so we close the response
68
- try:
69
+ with contextlib.suppress(UnboundLocalError):
69
70
  await res.aclose()
70
- except UnboundLocalError:
71
- pass
72
71
 
73
72
 
74
73
  _http_client: JsonHttpClient
@@ -173,10 +172,7 @@ async def http_request(
173
172
 
174
173
  content = None
175
174
  if body is not None:
176
- if isinstance(body, str):
177
- content = body.encode("utf-8")
178
- else:
179
- content = body
175
+ content = body.encode("utf-8") if isinstance(body, str) else body
180
176
  elif json is not None:
181
177
  content = json_dumpb(json)
182
178
 
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import contextlib
2
3
  import json
3
4
  import logging
4
5
  import os
@@ -7,7 +8,6 @@ import shutil
7
8
  import ssl
8
9
  from collections import deque
9
10
  from collections.abc import AsyncIterator, Callable
10
- from contextlib import AbstractContextManager
11
11
  from typing import Any, Literal, Self, cast
12
12
 
13
13
  import certifi
@@ -57,11 +57,10 @@ GRAPH_PORT = 5556
57
57
  GRAPH_HTTP_PORT = 5557
58
58
  SSL = ssl.create_default_context(cafile=certifi.where())
59
59
 
60
- if port := int(os.getenv("PORT", "8080")):
61
- if port in (GRAPH_PORT, REMOTE_PORT):
62
- raise ValueError(
63
- f"PORT={port} is a reserved port for the JS worker. Please choose a different port."
64
- )
60
+ if (port := int(os.getenv("PORT", "8080"))) and port in (GRAPH_PORT, REMOTE_PORT):
61
+ raise ValueError(
62
+ f"PORT={port} is a reserved port for the JS worker. Please choose a different port."
63
+ )
65
64
 
66
65
  _client = httpx.AsyncClient(
67
66
  base_url=f"http://localhost:{GRAPH_PORT}",
@@ -379,6 +378,7 @@ async def run_js_process(paths_str: str | None, watch: bool = False):
379
378
  if False
380
379
  else ("tsx", "--import", client_preload_file, client_file)
381
380
  )
381
+ process = None
382
382
  try:
383
383
  process = await asyncio.create_subprocess_exec(
384
384
  *args,
@@ -395,11 +395,10 @@ async def run_js_process(paths_str: str | None, watch: bool = False):
395
395
  raise Exception(f"JS process exited with code {code}")
396
396
  except asyncio.CancelledError:
397
397
  logger.info("Terminating JS graphs process")
398
- try:
399
- process.terminate()
400
- await process.wait()
401
- except (UnboundLocalError, ProcessLookupError):
402
- pass
398
+ if process is not None:
399
+ with contextlib.suppress(ProcessLookupError):
400
+ process.terminate()
401
+ await process.wait()
403
402
  raise
404
403
  except Exception:
405
404
  if attempt >= 3:
@@ -424,6 +423,7 @@ async def run_js_http_process(
424
423
  client_file = os.path.join(os.path.dirname(__file__), "client.http.mts")
425
424
  args = ("tsx", "watch", client_file) if watch else ("tsx", client_file)
426
425
  pid = None
426
+ process = None
427
427
  try:
428
428
  process = await asyncio.create_subprocess_exec(
429
429
  *args,
@@ -445,11 +445,10 @@ async def run_js_http_process(
445
445
 
446
446
  except asyncio.CancelledError:
447
447
  logger.info("Shutting down JS HTTP process [%d]", pid or -1)
448
- try:
449
- process.terminate()
450
- await process.wait()
451
- except (UnboundLocalError, ProcessLookupError):
452
- pass
448
+ if process is not None:
449
+ with contextlib.suppress(ProcessLookupError):
450
+ process.terminate()
451
+ await process.wait()
453
452
  raise
454
453
  except Exception:
455
454
  if attempt >= 3:
@@ -766,7 +765,7 @@ async def run_remote_checkpointer():
766
765
  await server.serve()
767
766
 
768
767
 
769
- class DisableHttpxLoggingContextManager(AbstractContextManager):
768
+ class DisableHttpxLoggingContextManager(contextlib.AbstractContextManager):
770
769
  """
771
770
  Disable HTTP/1.1 200 OK logs spamming stdout.
772
771
  """
@@ -891,11 +890,10 @@ class CustomJsAuthBackend(AuthenticationBackend):
891
890
  cache_key = None
892
891
  if self.cache_keys:
893
892
  cache_key = tuple((k, headers[k]) for k in self.cache_keys if k in headers)
894
- if cache_key:
895
- if self.ttl_cache is not None:
896
- cached = self.ttl_cache.get(cache_key)
897
- if cached:
898
- return cached
893
+ if cache_key and self.ttl_cache is not None:
894
+ cached = self.ttl_cache.get(cache_key)
895
+ if cached:
896
+ return cached
899
897
 
900
898
  res = await _client.post("/auth/authenticate", headers=headers)
901
899
  data = res.json()
@@ -961,11 +959,14 @@ async def handle_js_auth_event(
961
959
  # mutate metadata in value if applicable
962
960
  # we need to preserve the identity of the object, so cannot create a new
963
961
  # dictionary, otherwise the changes will not persist
964
- if isinstance(value, dict) and (updated_value := response.get("value")):
965
- if isinstance(value.get("metadata"), dict) and (
966
- metadata := updated_value.get("metadata")
967
- ):
968
- value["metadata"].update(metadata)
962
+ metadata = None
963
+ if (
964
+ isinstance(value, dict)
965
+ and (updated_value := response.get("value"))
966
+ and isinstance(value.get("metadata"), dict)
967
+ and (metadata := updated_value.get("metadata"))
968
+ ):
969
+ value["metadata"].update(metadata)
969
970
 
970
971
  return filters
971
972
 
langgraph_api/js/sse.py CHANGED
@@ -1,5 +1,6 @@
1
1
  """Adapted from httpx_sse to split lines on \n, \r, \r\n per the SSE spec."""
2
2
 
3
+ import contextlib
3
4
  from collections.abc import AsyncIterator
4
5
 
5
6
  import httpx
@@ -119,10 +120,8 @@ class SSEDecoder:
119
120
  else:
120
121
  self._last_event_id = value.decode()
121
122
  elif fieldname == b"retry":
122
- try:
123
+ with contextlib.suppress(TypeError, ValueError):
123
124
  self._retry = int(value)
124
- except (TypeError, ValueError):
125
- pass
126
125
  else:
127
126
  pass # Field is ignored.
128
127
 
langgraph_api/js/ui.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import contextlib
2
3
  import os
3
4
  import shutil
4
5
  import sys
@@ -56,6 +57,7 @@ async def _start_ui_bundler_process():
56
57
  os.mkdir(UI_ROOT_DIR)
57
58
 
58
59
  pid = None
60
+ process = None
59
61
  try:
60
62
  process = await asyncio.create_subprocess_exec(
61
63
  npx_path,
@@ -74,11 +76,10 @@ async def _start_ui_bundler_process():
74
76
 
75
77
  except asyncio.CancelledError:
76
78
  logger.info("Shutting down UI bundler process [%d]", pid or -1)
77
- try:
78
- process.terminate()
79
- await process.wait()
80
- except (UnboundLocalError, ProcessLookupError):
81
- pass
79
+ if process is not None:
80
+ with contextlib.suppress(ProcessLookupError):
81
+ process.terminate()
82
+ await process.wait()
82
83
  raise
83
84
 
84
85
 
langgraph_api/logging.py CHANGED
@@ -16,8 +16,16 @@ LOG_JSON = log_env("LOG_JSON", cast=bool, default=False)
16
16
  LOG_COLOR = log_env("LOG_COLOR", cast=bool, default=True)
17
17
  LOG_LEVEL = log_env("LOG_LEVEL", cast=str, default="INFO")
18
18
 
19
+ logger = logging.getLogger()
19
20
  logging.getLogger().setLevel(LOG_LEVEL.upper())
20
21
  logging.getLogger("psycopg").setLevel(logging.WARNING)
22
+ if hasattr(logger, "isEnabledFor"):
23
+ LOG_LEVEL_DEBUG = logger.isEnabledFor(logging.DEBUG)
24
+ elif hasattr(logger, "is_enabled_for"):
25
+ LOG_LEVEL_DEBUG = logger.is_enabled_for(logging.DEBUG)
26
+ else:
27
+ LOG_LEVEL_DEBUG = False
28
+ del logger
21
29
 
22
30
  worker_config = contextvars.ContextVar[dict[str, typing.Any] | None](
23
31
  "worker_config", default=None
@@ -86,6 +94,7 @@ class AddLoggingContext:
86
94
  ) -> EventDict:
87
95
  if (ctx := worker_config.get()) is not None:
88
96
  event_dict.update(ctx)
97
+ lgnode = None
89
98
  if (
90
99
  self.cvar is not None
91
100
  and (conf := self.cvar.get())
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import contextlib
2
3
  import time
3
4
  import urllib.parse
4
5
  import uuid
@@ -381,19 +382,18 @@ async def create_valid_run(
381
382
  else None
382
383
  ),
383
384
  run_put_ms=int((time.time() - put_time_start) * 1_000),
385
+ checkpoint_id=str(checkpoint_id),
384
386
  )
385
387
  # inserted, proceed
386
388
  if multitask_strategy in ("interrupt", "rollback") and inflight_runs:
387
- try:
389
+ with contextlib.suppress(HTTPException):
390
+ # if we can't find the inflight runs again, we can proceeed
388
391
  await Runs.cancel(
389
392
  conn,
390
393
  [run["run_id"] for run in inflight_runs],
391
394
  thread_id=thread_id_,
392
395
  action=multitask_strategy,
393
396
  )
394
- except HTTPException:
395
- # if we can't find the inflight runs again, we can proceeed
396
- pass
397
397
  return first
398
398
  elif multitask_strategy == "reject":
399
399
  raise HTTPException(
langgraph_api/serde.py CHANGED
@@ -84,11 +84,10 @@ def default(obj):
84
84
  return obj.total_seconds()
85
85
  elif isinstance(obj, Decimal):
86
86
  return decimal_encoder(obj)
87
- elif isinstance(obj, uuid.UUID):
88
- return str(obj)
89
87
  elif isinstance( # noqa: UP038
90
88
  obj,
91
89
  (
90
+ uuid.UUID,
92
91
  IPv4Address,
93
92
  IPv4Interface,
94
93
  IPv4Network,
langgraph_api/store.py CHANGED
@@ -69,6 +69,7 @@ async def _yield_store(value: Any):
69
69
 
70
70
  async def collect_store_from_env() -> None:
71
71
  global CUSTOM_STORE
72
+ store_path = None
72
73
  if not config.STORE_CONFIG or not (store_path := config.STORE_CONFIG.get("path")):
73
74
  return
74
75
  await logger.ainfo(
langgraph_api/stream.py CHANGED
@@ -14,6 +14,7 @@ from langchain_core.messages import (
14
14
  convert_to_messages,
15
15
  message_chunk_to_message,
16
16
  )
17
+ from langchain_core.runnables import RunnableConfig
17
18
  from langgraph.errors import (
18
19
  EmptyChannelError,
19
20
  EmptyInputError,
@@ -107,7 +108,7 @@ def _preprocess_debug_checkpoint(
107
108
  **payload,
108
109
  "checkpoint": runnable_config_to_checkpoint(payload["config"]),
109
110
  "parent_checkpoint": runnable_config_to_checkpoint(
110
- payload["parent_config"] if "parent_config" in payload else None
111
+ payload.get("parent_config", None)
111
112
  ),
112
113
  "tasks": [_preproces_debug_checkpoint_task(t) for t in payload["tasks"]],
113
114
  }
@@ -124,7 +125,7 @@ async def astream_state(
124
125
  attempt: int,
125
126
  done: ValueEvent,
126
127
  *,
127
- on_checkpoint: Callable[[CheckpointPayload], None] = lambda _: None,
128
+ on_checkpoint: Callable[[CheckpointPayload | None], None] = lambda _: None,
128
129
  on_task_result: Callable[[TaskResultPayload], None] = lambda _: None,
129
130
  ) -> AnyStream:
130
131
  """Stream messages from the runnable."""
@@ -136,11 +137,12 @@ async def astream_state(
136
137
  subgraphs = kwargs.get("subgraphs", False)
137
138
  temporary = kwargs.pop("temporary", False)
138
139
  context = kwargs.pop("context", None)
139
- config = kwargs.pop("config")
140
+ config = cast(RunnableConfig, kwargs.pop("config"))
141
+ configurable = config["configurable"]
140
142
  stack = AsyncExitStack()
141
143
  graph = await stack.enter_async_context(
142
144
  get_graph(
143
- config["configurable"]["graph_id"],
145
+ configurable["graph_id"],
144
146
  config,
145
147
  store=(await api_store.get_store()),
146
148
  checkpointer=None if temporary else Checkpointer(),
@@ -171,6 +173,8 @@ async def astream_state(
171
173
  if "updates" not in stream_modes_set:
172
174
  stream_modes_set.add("updates")
173
175
  only_interrupt_updates = True
176
+ else:
177
+ only_interrupt_updates = False
174
178
  # attach attempt metadata
175
179
  config["metadata"]["run_attempt"] = attempt
176
180
  # attach langgraph metadata
@@ -182,7 +186,7 @@ async def astream_state(
182
186
  # attach node counter
183
187
  is_remote_pregel = isinstance(graph, BaseRemotePregel)
184
188
  if not is_remote_pregel:
185
- config["configurable"]["__pregel_node_finished"] = incr_nodes
189
+ configurable["__pregel_node_finished"] = incr_nodes
186
190
 
187
191
  # attach run_id to config
188
192
  # for attempts beyond the first, use a fresh, unique run_id
@@ -195,9 +199,9 @@ async def astream_state(
195
199
  yield "metadata", {"run_id": run_id, "attempt": attempt}
196
200
 
197
201
  # is a langsmith tracing project is specified, additionally pass that in to tracing context
198
- if ls_project := config["configurable"].get("__langsmith_project__"):
202
+ if ls_project := configurable.get("__langsmith_project__"):
199
203
  updates = None
200
- if example_id := config["configurable"].get("__langsmith_example_id__"):
204
+ if example_id := configurable.get("__langsmith_example_id__"):
201
205
  updates = {"reference_example_id": example_id}
202
206
 
203
207
  await stack.enter_async_context(
@@ -237,13 +241,18 @@ async def astream_state(
237
241
  event = cast(dict, event)
238
242
  if event.get("tags") and "langsmith:hidden" in event["tags"]:
239
243
  continue
240
- if "messages" in stream_mode and isinstance(graph, BaseRemotePregel):
241
- if event["event"] == "on_custom_event" and event["name"] in (
244
+ if (
245
+ "messages" in stream_mode
246
+ and isinstance(graph, BaseRemotePregel)
247
+ and event["event"] == "on_custom_event"
248
+ and event["name"]
249
+ in (
242
250
  "messages/complete",
243
251
  "messages/partial",
244
252
  "messages/metadata",
245
- ):
246
- yield event["name"], event["data"]
253
+ )
254
+ ):
255
+ yield event["name"], event["data"]
247
256
  # TODO support messages-tuple for js graphs
248
257
  if event["event"] == "on_chain_stream" and event["run_id"] == run_id:
249
258
  if subgraphs:
langgraph_api/worker.py CHANGED
@@ -118,8 +118,10 @@ async def worker(
118
118
  resumable=resumable,
119
119
  )
120
120
 
121
- def on_checkpoint(checkpoint_arg: CheckpointPayload):
121
+ def on_checkpoint(checkpoint_arg: CheckpointPayload | None):
122
122
  nonlocal checkpoint
123
+ if checkpoint_arg is None:
124
+ logger.warning("Null checkpoint received")
123
125
  checkpoint = checkpoint_arg
124
126
 
125
127
  def on_task_result(task_result: TaskResultPayload):
@@ -140,9 +142,8 @@ async def worker(
140
142
  await consume(stream, run_id, resumable, stream_modes)
141
143
  except Exception as e:
142
144
  if not isinstance(e, UserRollback | UserInterrupt):
143
- logger.error(
145
+ logger.exception(
144
146
  f"Run encountered an error in graph: {type(e)}({e})",
145
- exc_info=e,
146
147
  )
147
148
  # TimeoutError is a special case where we rely on asyncio.wait_for to timeout runs
148
149
  # Convert user TimeoutErrors to a custom class so we can distinguish and later convert back
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: langgraph-api
3
- Version: 0.2.135
3
+ Version: 0.2.137
4
4
  Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
5
5
  License: Elastic-2.0
6
6
  License-File: LICENSE
@@ -1,51 +1,51 @@
1
- langgraph_api/__init__.py,sha256=EI8dPBQX_KEje0C_yiwGFf1oJ65gVj2GXRmDyafg6MM,24
1
+ langgraph_api/__init__.py,sha256=Ma7kyjCpQ7p4EHYWYV7s3pmyAmbmTKBpxJUymCIyQNo,24
2
2
  langgraph_api/asgi_transport.py,sha256=XtiLOu4WWsd-xizagBLzT5xUkxc9ZG9YqwvETBPjBFE,5161
3
- langgraph_api/asyncio.py,sha256=l4fVoYIcczMqC2Wrj4LTk50nKV29AXwweiehOwaeC4Y,9754
3
+ langgraph_api/asyncio.py,sha256=mZ7G32JjrGxrlH4OMy7AKlBQo5bZt4Sm2rlrBcU-Vj8,9483
4
4
  langgraph_api/cli.py,sha256=-ruIeKi1imvS6GriOfRDZY-waV4SbWiJ0BEFAciPVYI,16330
5
5
  langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
6
- langgraph_api/config.py,sha256=LxWOteBq-RNyr0Dc3Tm7HwoXrn1aDdvgnKlDsXCDFMg,12111
7
- langgraph_api/cron_scheduler.py,sha256=CiwZ-U4gDOdG9zl9dlr7mH50USUgNB2Fvb8YTKVRBN4,2625
6
+ langgraph_api/config.py,sha256=9wXEcxvVfPQjiR7jugomwTJ3vs5d-YoYYfgLDjRc3EQ,12133
7
+ langgraph_api/cron_scheduler.py,sha256=25wYzEQrhPEivZrAPYOmzLPDOQa-aFogU37mTXc9TJk,2566
8
8
  langgraph_api/errors.py,sha256=zlnl3xXIwVG0oGNKKpXf1an9Rn_SBDHSyhe53hU6aLw,1858
9
9
  langgraph_api/executor_entrypoint.py,sha256=ClMyM9TB9oPisQzHqixA77Lnj_QGUg55MtQx-xku4o8,671
10
10
  langgraph_api/feature_flags.py,sha256=GjwmNjfg0Jhs3OzR2VbK2WgrRy3o5l8ibIYiUtQkDPA,363
11
11
  langgraph_api/graph.py,sha256=HTjJNQadrdi1tzJYNJ_iPIR6-zqC4-hj6YTD6zGQHYA,25072
12
- langgraph_api/http.py,sha256=xCeyqm9Vafx_8OaUfwlIMPZTspJQzivgcJqTc4wweaE,5704
12
+ langgraph_api/http.py,sha256=fyK-H-0UfNy_BzuVW3aWWGvhRavmGAVMkDwDArryJ_4,5659
13
13
  langgraph_api/http_metrics.py,sha256=MU9ccXt7aBb0AJ2SWEjwtbtbJEWmeqSdx7-CI51e32o,5594
14
- langgraph_api/logging.py,sha256=v7TOQt_YuZ_lTQ4rp_9hE6pLtSKxObkuFxyAdHW0y5c,4862
14
+ langgraph_api/logging.py,sha256=ZZ95dDdWDayIbH1bgwNfn0U3CQ8kDoAvDFBDACna4-A,5150
15
15
  langgraph_api/metadata.py,sha256=fVsbwxVitAj4LGVYpCcadYeIFANEaNtcx6LBxQLcTqg,6949
16
16
  langgraph_api/patch.py,sha256=iLwSd9ZWoVj6MxozMyGyMvWWbE9RIP5eZX1dpCBSlSU,1480
17
17
  langgraph_api/queue_entrypoint.py,sha256=KDLpQtBu3amZTbNHS-RGFLR0DphuVQN6kUZm3ZGLe9g,5991
18
18
  langgraph_api/route.py,sha256=EBhELuJ1He-ZYcAnR5YTImcIeDtWthDae5CHELBxPkM,5056
19
19
  langgraph_api/schema.py,sha256=6gabS4_1IeRWV5lyuDV-2i__8brXl89elAlmD5BmEII,8370
20
- langgraph_api/serde.py,sha256=5F4xMTRY3kNwpdkAzM48KzxFEUmVD1I3CaVWzp_syT8,6067
20
+ langgraph_api/serde.py,sha256=3GvelKhySjlXaNqpg2GyUxU6-NEkvif7WlMF9if_EgU,6029
21
21
  langgraph_api/server.py,sha256=uCAqPgCLJ6ckslLs0i_dacSR8mzuR0Y6PkkJYk0O3bE,7196
22
22
  langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
23
23
  langgraph_api/state.py,sha256=5RTOShiFVnkx-o6t99_x63CGwXw_8Eb-dSTpYirP8ro,4683
24
- langgraph_api/store.py,sha256=_UwEzGXKMFvpnyz1DUeOmfpy2w3WhPAtAJzIh7VTRBY,4679
25
- langgraph_api/stream.py,sha256=P82M1yVbn1N20ZRSLb6_F1wbkfQLVU1OGEHF2ES-Nvg,18199
24
+ langgraph_api/store.py,sha256=NIoNZojs6NbtG3VLBPQEFNttvp7XPkHAfjbQ3gY7aLY,4701
25
+ langgraph_api/stream.py,sha256=iEApgVxJU9v58J5oKSuNe_c7ThpWf0bprgIDHUs8IzA,18397
26
26
  langgraph_api/thread_ttl.py,sha256=7H3gFlWcUiODPoaEzcwB0LR61uvcuyjD0ew_4BztB7k,1902
27
27
  langgraph_api/traceblock.py,sha256=Qq5CUdefnMDaRDnyvBSWGBClEj-f3oO7NbH6fedxOSE,630
28
28
  langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
29
  langgraph_api/validation.py,sha256=86jftgOsMa7tkeshBw6imYe7zyUXPoVuf5Voh6dFiR8,5285
30
30
  langgraph_api/webhook.py,sha256=SvSM1rdnNtiH4q3JQYmAqJUk2Sable5xAcwOLuRhtlo,1723
31
- langgraph_api/worker.py,sha256=0ztx8AbggDdEjnW40Fai85S2jVGtFcNLU1kGWdN_w24,15198
31
+ langgraph_api/worker.py,sha256=HVGyGVEYcXG-iKVgoBdFgANGxPjSs57JRl5OB4ra4nw,15267
32
32
  langgraph_api/api/__init__.py,sha256=WHy6oNLWtH1K7AxmmsU9RD-Vm6WP-Ov16xS8Ey9YCmQ,6090
33
- langgraph_api/api/assistants.py,sha256=ffXBUTTs6bBxDISuOs1KVfcjBJuaS8R_j5S6Omzp1i4,16848
33
+ langgraph_api/api/assistants.py,sha256=5gVvU58Y1-EftBhCHGbEaOi_7cqGMKWhOt_GVfBC0Gg,16836
34
34
  langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
35
35
  langgraph_api/api/meta.py,sha256=w88TK1Wu4xOhgCfs04LBfL4pZkWhUW6QRwwAWdFby5A,4245
36
36
  langgraph_api/api/openapi.py,sha256=If-z1ckXt-Yu5bwQytK1LWyX_T7G46UtLfixgEP8hwc,11959
37
- langgraph_api/api/runs.py,sha256=Y52LiXsEtMOF05WhgK00g0CsYrqUUcWxVaUVCsoujtM,21760
38
- langgraph_api/api/store.py,sha256=TSeMiuMfrifmEnEbL0aObC2DPeseLlmZvAMaMzPgG3Y,5535
37
+ langgraph_api/api/runs.py,sha256=AiohGTFLjWCb-oTXoNDvPMod4v6RS_ivlieoiqDmtQM,21812
38
+ langgraph_api/api/store.py,sha256=xGcPFx4v-VxlK6HRU9uCjzCQ0v66cvc3o_PB5_g7n0Q,5550
39
39
  langgraph_api/api/threads.py,sha256=Ap5zUcYqK5GJqwEc-q4QY6qCkmbLxfMmEvQZm0MCFxk,10427
40
40
  langgraph_api/api/ui.py,sha256=_genglTUy5BMHlL0lkQypX524yFv6Z5fraIvnrxp7yE,2639
41
41
  langgraph_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
- langgraph_api/auth/custom.py,sha256=b2NOPqBFUQiFkwlfFg4agZo3YfskTZMJyolv52suCeI,22433
42
+ langgraph_api/auth/custom.py,sha256=psETw_GpLWClBbd_ESVPRLUz9GLQ0_XNsuUDSVbtZy0,22522
43
43
  langgraph_api/auth/middleware.py,sha256=jDA4t41DUoAArEY_PNoXesIUBJ0nGhh85QzRdn5EPD0,1916
44
44
  langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
45
45
  langgraph_api/auth/studio_user.py,sha256=fojJpexdIZYI1w3awiqOLSwMUiK_M_3p4mlfQI0o-BE,454
46
46
  langgraph_api/auth/langsmith/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
- langgraph_api/auth/langsmith/backend.py,sha256=36nQnVb9VtNvSnLiNYWAI9o9H74I-mSN2X-FrWoj0QA,3646
48
- langgraph_api/auth/langsmith/client.py,sha256=-KyZSTyeiMhupkPfr--nlm_ELR1ZkjM-h61eGcMG5E0,4002
47
+ langgraph_api/auth/langsmith/backend.py,sha256=060NPoZ82J1Y23hr3XgZnxyzhQ5lZngBWPcUy0QntjY,3658
48
+ langgraph_api/auth/langsmith/client.py,sha256=Kn9503en1tmlNtkbvqRxYSRCOUrWaVpqvxyLLb1cgzY,3908
49
49
  langgraph_api/js/.gitignore,sha256=l5yI6G_V6F1600I1IjiUKn87f4uYIrBAYU1MOyBBhg4,59
50
50
  langgraph_api/js/.prettierrc,sha256=0es3ovvyNIqIw81rPQsdt1zCQcOdBqyR_DMbFE4Ifms,19
51
51
  langgraph_api/js/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -56,12 +56,12 @@ langgraph_api/js/client.mts,sha256=gDvYiW7Qfl4re2YhZ5oNqtuvffnW_Sf7DK5aUbKB3vw,3
56
56
  langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
57
57
  langgraph_api/js/global.d.ts,sha256=j4GhgtQSZ5_cHzjSPcHgMJ8tfBThxrH-pUOrrJGteOU,196
58
58
  langgraph_api/js/package.json,sha256=syy2fEcmTxGQVfz4P9MUTgoTxHr1MUcA1rDXemAig2U,1335
59
- langgraph_api/js/remote.py,sha256=x2gO12HBriCb4bFXf4lt6uqDgoOKFihy95kHFSBz7bA,38374
59
+ langgraph_api/js/remote.py,sha256=aeszJ0HGbcL9oExyeWYHb0xLV75U3KgVSxjm3ZK_a48,38403
60
60
  langgraph_api/js/schema.py,sha256=M4fLtr50O1jck8H1hm_0W4cZOGYGdkrB7riLyCes4oY,438
61
- langgraph_api/js/sse.py,sha256=tVcAGVz5jOKWsESxoqm0Nk1B9yP2A7cRcVDNnR1bUv4,4144
61
+ langgraph_api/js/sse.py,sha256=hHkbncnYnXNIbHhAWneGWYkHp4UhhhGB7-MYtDrY264,4141
62
62
  langgraph_api/js/traceblock.mts,sha256=QtGSN5VpzmGqDfbArrGXkMiONY94pMQ5CgzetT_bKYg,761
63
63
  langgraph_api/js/tsconfig.json,sha256=imCYqVnqFpaBoZPx8k1nO4slHIWBFsSlmCYhO73cpBs,341
64
- langgraph_api/js/ui.py,sha256=XNT8iBcyT8XmbIqSQUWd-j_00HsaWB2vRTVabwFBkik,2439
64
+ langgraph_api/js/ui.py,sha256=l9regrvKIxLOjH5SIYE2nhr8QCKLK1Q_1pZgxdL71X4,2488
65
65
  langgraph_api/js/yarn.lock,sha256=M-XjLAvW6cz56lc-IwNPbjLw8KNIKVS_k-haRP4QmRE,84904
66
66
  langgraph_api/js/src/graph.mts,sha256=9zTQNdtanI_CFnOwNRoamoCVHHQHGbNlbm91aRxDeOc,2675
67
67
  langgraph_api/js/src/load.hooks.mjs,sha256=xNVHq75W0Lk6MUKl1pQYrx-wtQ8_neiUyI6SO-k0ecM,2235
@@ -75,7 +75,7 @@ langgraph_api/middleware/http_logger.py,sha256=2LABfhzTAUtqT8nf1ACy8cYXteatkwraB
75
75
  langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV7_aLBPw7_EgeqI,2111
76
76
  langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIiuoHeZpR9g,1242
77
77
  langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
78
- langgraph_api/models/run.py,sha256=p5F7npi9TFcMUyyn81_InljCg8LE8jKoFSWVl4XtbZ4,15434
78
+ langgraph_api/models/run.py,sha256=q99y57RqUgZgw5kNkUeezo5E9yazg-4-uxKhJR2agag,15479
79
79
  langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
80
80
  langgraph_api/utils/__init__.py,sha256=kj3uCnO2Md9EEhabm331Tg4Jx9qXcxbACMh2T2P-FYw,5028
81
81
  langgraph_api/utils/cache.py,sha256=SrtIWYibbrNeZzLXLUGBFhJPkMVNQnVxR5giiYGHEfI,1810
@@ -97,8 +97,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
97
97
  LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
98
98
  logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
99
99
  openapi.json,sha256=h1LbSeGqr2Oor6vO8d3m67XJ1lHhVYVyt2ULvyhf_Ks,160215
100
- langgraph_api-0.2.135.dist-info/METADATA,sha256=rQuOdto6JZPKYdzkN_TJF1rK2xjaiXnQ_Pck-Aa1seQ,3892
101
- langgraph_api-0.2.135.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
102
- langgraph_api-0.2.135.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
103
- langgraph_api-0.2.135.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
104
- langgraph_api-0.2.135.dist-info/RECORD,,
100
+ langgraph_api-0.2.137.dist-info/METADATA,sha256=TF23VjTlddkNdgJewtd8X-EQxYt2wY2w_jF_etcntSU,3892
101
+ langgraph_api-0.2.137.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
102
+ langgraph_api-0.2.137.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
103
+ langgraph_api-0.2.137.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
104
+ langgraph_api-0.2.137.dist-info/RECORD,,