langgraph-api 0.2.48__py3-none-any.whl → 0.2.54__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.2.48"
+__version__ = "0.2.54"
langgraph_api/asyncio.py CHANGED
@@ -78,10 +78,13 @@ async def wait_if_not_done(coro: Coroutine[Any, Any, T], done: ValueEvent) -> T:
         return await coro_task
     except asyncio.CancelledError as e:
         if e.args and asyncio.isfuture(e.args[-1]):
+            fut = e.args[-1]
             await logger.ainfo(
-                "Awaiting future upon cancellation", task=str(e.args[-1])
+                "Awaiting future upon cancellation.",
+                task=str(fut),
             )
-            await e.args[-1]
+            await fut
+            await logger.ainfo("Done awaiting.", task=str(fut))
         if e.args and isinstance(e.args[0], Exception):
             raise e.args[0] from None
         raise
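The hunk above refines how wait_if_not_done drains a future that arrives via CancelledError.args: it now logs both before and after awaiting it. A minimal, self-contained sketch of that pattern (drain_on_cancel is a hypothetical name; plain prints stand in for the structlog calls):

    import asyncio

    async def drain_on_cancel(coro_task: asyncio.Task):
        try:
            return await coro_task
        except asyncio.CancelledError as e:
            # A cooperating coroutine may attach a cleanup future as the last arg.
            if e.args and asyncio.isfuture(e.args[-1]):
                fut = e.args[-1]
                print("Awaiting future upon cancellation.", fut)
                await fut
                print("Done awaiting.", fut)
            raise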
langgraph_api/config.py CHANGED
@@ -1,3 +1,4 @@
+import os
 from os import environ, getenv
 from typing import Literal, TypedDict
 
@@ -149,6 +150,11 @@ STATS_INTERVAL_SECS = env("STATS_INTERVAL_SECS", cast=int, default=60)
 DATABASE_URI = env("DATABASE_URI", cast=str, default=getenv("POSTGRES_URI", undefined))
 MIGRATIONS_PATH = env("MIGRATIONS_PATH", cast=str, default="/storage/migrations")
 POSTGRES_POOL_MAX_SIZE = env("LANGGRAPH_POSTGRES_POOL_MAX_SIZE", cast=int, default=150)
+RESUMABLE_STREAM_TTL_SECONDS = env(
+    "RESUMABLE_STREAM_TTL_SECONDS",
+    cast=int,
+    default=3600,  # 1 hour
+)
 
 
 def _get_encryption_key(key_str: str | None):
@@ -239,7 +245,9 @@ BG_JOB_INTERVAL = 30  # seconds
 BG_JOB_MAX_RETRIES = 3
 BG_JOB_ISOLATED_LOOPS = env("BG_JOB_ISOLATED_LOOPS", cast=bool, default=False)
 BG_JOB_SHUTDOWN_GRACE_PERIOD_SECS = env(
-    "BG_JOB_SHUTDOWN_GRACE_PERIOD_SECS", cast=int, default=3600
+    "BG_JOB_SHUTDOWN_GRACE_PERIOD_SECS",
+    cast=int,
+    default=180,  # 3 minutes
 )
 MAX_STREAM_CHUNK_SIZE_BYTES = env(
     "MAX_STREAM_CHUNK_SIZE_BYTES", cast=int, default=1024 * 1024 * 128
@@ -353,3 +361,10 @@ API_VARIANT = env("LANGSMITH_LANGGRAPH_API_VARIANT", cast=str, default="")
 
 # UI
 UI_USE_BUNDLER = env("LANGGRAPH_UI_BUNDLER", cast=bool, default=False)
+
+if not os.getenv("LANGCHAIN_REVISION_ID") and (
+    ref_sha := os.getenv("LANGSMITH_LANGGRAPH_GIT_REF_SHA")
+):
+    # This is respected by the langsmith SDK env inference
+    # https://github.com/langchain-ai/langsmith-sdk/blob/1b93e4c13b8369d92db891ae3babc3e2254f0e56/python/langsmith/env/_runtime_env.py#L190
+    os.environ["LANGCHAIN_REVISION_ID"] = ref_sha
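The new RESUMABLE_STREAM_TTL_SECONDS setting and the shorter BG_JOB_SHUTDOWN_GRACE_PERIOD_SECS default (180 s instead of 3600 s) are both read through the module's env(...) helper, which is not shown in this diff. The stand-in below only illustrates the cast/default semantics the calls assume, so the values can be overridden from the environment:

    import os
    from typing import Callable, TypeVar

    T = TypeVar("T")

    def env(name: str, *, cast: Callable[[str], T], default: T) -> T:
        # Hypothetical stand-in: read the variable, cast it, else use the default.
        raw = os.getenv(name)
        return default if raw is None else cast(raw)

    # e.g. RESUMABLE_STREAM_TTL_SECONDS=7200 extends resumable-stream retention to 2 hours
    RESUMABLE_STREAM_TTL_SECONDS = env("RESUMABLE_STREAM_TTL_SECONDS", cast=int, default=3600)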
langgraph_api/graph.py CHANGED
@@ -117,7 +117,6 @@ async def get_graph(
     """Return the runnable."""
     assert_graph_exists(graph_id)
     value = GRAPHS[graph_id]
-    token = None
     if graph_id in FACTORY_ACCEPTS_CONFIG:
         config = ensure_config(config)
         if store is not None and not config["configurable"].get(CONFIG_KEY_STORE):
@@ -126,7 +125,7 @@ async def get_graph(
             CONFIG_KEY_CHECKPOINTER
         ):
             config["configurable"][CONFIG_KEY_CHECKPOINTER] = checkpointer
-        token = var_child_runnable_config.set(config)
+        var_child_runnable_config.set(config)
         value = value(config) if FACTORY_ACCEPTS_CONFIG[graph_id] else value()
     try:
         async with _generate_graph(value) as graph_obj:
@@ -147,8 +146,7 @@ async def get_graph(
                 update["config"] = config
             yield graph_obj.copy(update=update)
     finally:
-        if token is not None:
-            var_child_runnable_config.reset(token)
+        var_child_runnable_config.set(None)
 
 
 def graph_exists(graph_id: str) -> bool:
@@ -159,7 +157,10 @@ def graph_exists(graph_id: str) -> bool:
 def assert_graph_exists(graph_id: str) -> None:
     """Assert that a graph exists."""
     if not graph_exists(graph_id):
-        raise HTTPException(status_code=404, detail=f"Graph '{graph_id}' not found")
+        raise HTTPException(
+            status_code=404,
+            detail=f"Graph '{graph_id}' not found. Expected one of: {sorted(GRAPHS.keys())}",
+        )
 
 
 def get_assistant_id(assistant_id: str) -> str:
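Two behavioral notes on this hunk: the factory config is now pushed into var_child_runnable_config with a bare set() and cleared with set(None) in the finally block (rather than reset(token)), and the 404 detail now lists the registered graph ids. A small sketch of the set/clear pattern, using a plain ContextVar rather than the library's:

    from contextvars import ContextVar

    var_config: ContextVar[dict | None] = ContextVar("var_config", default=None)

    def current_config() -> dict | None:
        return var_config.get()

    def run_with_config(config: dict):
        var_config.set(config)       # new style: set unconditionally...
        try:
            return current_config()
        finally:
            var_config.set(None)     # ...then clear, instead of reset(token)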
langgraph_api/metadata.py CHANGED
@@ -1,5 +1,6 @@
 import asyncio
 import os
+import uuid
 from datetime import UTC, datetime
 
 import langgraph.version
@@ -26,6 +27,20 @@ REVISION = os.getenv("LANGSMITH_LANGGRAPH_API_REVISION")
 VARIANT = os.getenv("LANGSMITH_LANGGRAPH_API_VARIANT")
 PROJECT_ID = os.getenv("LANGSMITH_HOST_PROJECT_ID")
 TENANT_ID = os.getenv("LANGSMITH_TENANT_ID")
+if PROJECT_ID:
+    try:
+        uuid.UUID(PROJECT_ID)
+    except ValueError:
+        raise ValueError(
+            f"Invalid project ID: {PROJECT_ID}. Must be a valid UUID"
+        ) from None
+if TENANT_ID:
+    try:
+        uuid.UUID(TENANT_ID)
+    except ValueError:
+        raise ValueError(
+            f"Invalid tenant ID: {TENANT_ID}. Must be a valid UUID"
+        ) from None
 if VARIANT == "cloud":
     HOST = "saas"
 elif PROJECT_ID:
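The added block fails fast at import time if LANGSMITH_HOST_PROJECT_ID or LANGSMITH_TENANT_ID is set to something that is not a UUID. A compact sketch of the same check, factored into a helper (validate_uuid_env is a hypothetical name used only for illustration):

    import os
    import uuid

    def validate_uuid_env(var_name: str, label: str) -> str | None:
        value = os.getenv(var_name)
        if value:
            try:
                uuid.UUID(value)
            except ValueError:
                # Same failure mode as the diff: clear message, chained traceback suppressed.
                raise ValueError(f"Invalid {label}: {value}. Must be a valid UUID") from None
        return value

    PROJECT_ID = validate_uuid_env("LANGSMITH_HOST_PROJECT_ID", "project ID")
    TENANT_ID = validate_uuid_env("LANGSMITH_TENANT_ID", "tenant ID")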
langgraph_api/middleware/http_logger.py CHANGED
@@ -5,8 +5,6 @@ import structlog
 from starlette.requests import ClientDisconnect
 from starlette.types import Message, Receive, Scope, Send
 
-from langgraph_api.logging import LOG_JSON
-
 asgi = structlog.stdlib.get_logger("asgi")
 
 PATHS_IGNORE = {"/ok", "/metrics"}
@@ -28,7 +26,7 @@ class AccessLoggerMiddleware:
         self.debug_enabled = False
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
-        if scope["type"] != "http" or (LOG_JSON and scope.get("path") in PATHS_IGNORE):
+        if scope["type"] != "http" or scope.get("path") in PATHS_IGNORE:
            return await self.app(scope, receive, send)  # pragma: no cover
 
         loop = asyncio.get_event_loop()
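With LOG_JSON removed from the condition, requests to /ok and /metrics now bypass the access logger unconditionally, not only when JSON logging is enabled. A stripped-down sketch of the same short-circuit (SkipNoisyPathsMiddleware is a hypothetical name; the real middleware also measures timings and emits structlog events):

    PATHS_IGNORE = {"/ok", "/metrics"}

    class SkipNoisyPathsMiddleware:
        def __init__(self, app):
            self.app = app

        async def __call__(self, scope, receive, send):
            # Health checks and metrics scrapes pass straight through without logging.
            if scope["type"] != "http" or scope.get("path") in PATHS_IGNORE:
                return await self.app(scope, receive, send)
            # ... access logging would wrap the call here ...
            return await self.app(scope, receive, send)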
langgraph_api/middleware/request_id.py CHANGED
@@ -21,6 +21,8 @@ class RequestIdMiddleware:
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send):
         if scope["type"] == "http" and self.pattern.match(scope["path"]):
+            from langgraph_api.logging import set_logging_context
+
             request_id = next(
                 (h[1] for h in scope["headers"] if h[0] == b"x-request-id"),
                 None,
@@ -29,4 +31,5 @@ class RequestIdMiddleware:
                 request_id = str(uuid.uuid4()).encode()
                 scope["headers"].append((b"x-request-id", request_id))
             scope["request_start_time_ms"] = int(time.time() * 1000)
+            set_logging_context({"request_id": request_id.decode()})
         await self.app(scope, receive, send)
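The new set_logging_context call binds the request id to the logging context as soon as it is resolved, so downstream log lines can carry it. A self-contained sketch of that flow with a contextvar-backed context (the set_logging_context implementation below is an assumption; only its name comes from the diff):

    import time
    import uuid
    from contextvars import ContextVar

    _logging_context: ContextVar[dict] = ContextVar("_logging_context", default={})

    def set_logging_context(values: dict) -> None:
        _logging_context.set({**_logging_context.get(), **values})

    class RequestIdMiddleware:
        def __init__(self, app):
            self.app = app

        async def __call__(self, scope, receive, send):
            if scope["type"] == "http":
                # Reuse an incoming x-request-id header or mint a new one.
                headers = dict(scope["headers"])
                request_id = headers.get(b"x-request-id")
                if request_id is None:
                    request_id = str(uuid.uuid4()).encode()
                    scope["headers"].append((b"x-request-id", request_id))
                scope["request_start_time_ms"] = int(time.time() * 1000)
                set_logging_context({"request_id": request_id.decode()})
            await self.app(scope, receive, send)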
langgraph_api/queue_entrypoint.py CHANGED
@@ -56,6 +56,9 @@ async def healthcheck_server():
 
 
 async def entrypoint():
+    from langgraph_api import logging as lg_logging
+
+    lg_logging.set_logging_context({"entrypoint": "python-queue"})
     tasks: set[asyncio.Task] = set()
     # start simple http server for health checks
     tasks.add(asyncio.create_task(healthcheck_server()))
@@ -65,7 +68,7 @@ async def entrypoint():
     try:
         await asyncio.gather(*tasks)
     except asyncio.CancelledError:
-        pass
+        await logger.awarning("Queue entrypoint cancelled", exc_info=True)
 
 
 if __name__ == "__main__":
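Two small hardenings here: the queue entrypoint tags its logging context up front, and cancellation is logged as a warning instead of being silently swallowed. A minimal sketch of the cancellation-logging change, with the stdlib logger standing in for structlog's awarning:

    import asyncio
    import logging

    logger = logging.getLogger("queue")

    async def entrypoint():
        # Placeholder task set; the real entrypoint starts the health-check server and queue workers.
        tasks = {asyncio.create_task(asyncio.sleep(3600))}
        try:
            await asyncio.gather(*tasks)
        except asyncio.CancelledError:
            logger.warning("Queue entrypoint cancelled", exc_info=True)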
langgraph_api/stream.py CHANGED
@@ -24,6 +24,7 @@ from langsmith.utils import get_tracer_project
 from pydantic import ValidationError
 from pydantic.v1 import ValidationError as ValidationErrorLegacy
 
+from langgraph_api import __version__
 from langgraph_api import store as api_store
 from langgraph_api.asyncio import ValueEvent, wait_if_not_done
 from langgraph_api.command import map_cmd
@@ -118,6 +119,7 @@ async def astream_state(
     config["metadata"]["run_attempt"] = attempt
     # attach langgraph metadata
     config["metadata"]["langgraph_version"] = langgraph.version.__version__
+    config["metadata"]["langgraph_api_version"] = __version__
     config["metadata"]["langgraph_plan"] = PLAN
     config["metadata"]["langgraph_host"] = HOST
     config["metadata"]["langgraph_api_url"] = USER_API_URL
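After this change every run's metadata carries the langgraph-api server version alongside the langgraph library version, which makes traces easier to correlate with deployments. The effect, sketched with a bare config dict (assumes the langgraph and langgraph-api packages are importable):

    import langgraph.version
    from langgraph_api import __version__

    config = {"metadata": {}}
    config["metadata"]["langgraph_version"] = langgraph.version.__version__
    config["metadata"]["langgraph_api_version"] = __version__
    print(config["metadata"])  # e.g. {'langgraph_version': '...', 'langgraph_api_version': '0.2.54'}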
langgraph_api/worker.py CHANGED
@@ -178,17 +178,21 @@ async def worker(
                 else None
             ),
         )
-    except Exception as ee:
-        # Note we don't handle asyncio.CancelledError here, as we want to
-        # let it bubble up and rollback db transaction, thus marking the run
-        # as available to be picked up by another worker
+    except (Exception, asyncio.CancelledError) as ee:
         exception = ee
+    except BaseException as eee:
+        await logger.aerror(
+            "Bubbling failed background run",
+            run_id=str(run_id),
+            exception_type=str(type(eee)),
+            exception=str(eee),
+        )
+        raise
 
     # handle exceptions and set status
     async with connect() as conn:
         if exception is None:
             status = "success"
-            await Runs.set_status(conn, run_id, "success")
             # If a stateful run succeeded but no checkpoint was returned, likely
             # there was a retriable exception that resumed right at the end
             if checkpoint is None and not temporary:
@@ -209,6 +213,9 @@ async def worker(
                 run_id=str(run_id),
                 run_attempt=attempt,
             )
+            await Threads.set_joint_status(
+                conn, run["thread_id"], run_id, status, checkpoint=checkpoint
+            )
         elif isinstance(exception, TimeoutError):
             status = "timeout"
             run_ended_at = datetime.now(UTC).isoformat()
@@ -226,13 +233,17 @@ async def worker(
                     else None
                 ),
             )
-            await Runs.set_status(conn, run_id, "timeout")
+            await Threads.set_joint_status(
+                conn, run["thread_id"], run_id, status, checkpoint=checkpoint
+            )
         elif isinstance(exception, UserRollback):
             status = "rollback"
             run_ended_at_dt = datetime.now(UTC)
             run_ended_at = run_ended_at_dt.isoformat()
             try:
-                await Runs.delete(conn, run_id, thread_id=run["thread_id"])
+                await Threads.set_joint_status(
+                    conn, run["thread_id"], run_id, status, checkpoint=checkpoint
+                )
                 await logger.ainfo(
                     "Background run rolled back",
                     run_id=str(run_id),
@@ -280,13 +291,15 @@ async def worker(
                     else None
                 ),
             )
-            await Runs.set_status(conn, run_id, "interrupted")
+            await Threads.set_joint_status(
+                conn, run["thread_id"], run_id, status, checkpoint, exception
+            )
         elif isinstance(exception, RETRIABLE_EXCEPTIONS):
             status = "retry"
             run_ended_at_dt = datetime.now(UTC)
             run_ended_at = run_ended_at_dt.isoformat()
             await logger.awarning(
-                f"Background run failed, will retry. Exception: {exception}",
+                f"Background run failed, will retry. Exception: {type(exception)}({exception})",
                 exc_info=True,
                 run_id=str(run_id),
                 run_attempt=attempt,
@@ -295,13 +308,14 @@ async def worker(
                 run_ended_at=run_ended_at,
                 run_exec_ms=ms(run_ended_at_dt, run_started_at),
             )
+            # Don't update thread status yet.
             await Runs.set_status(conn, run_id, "pending")
         else:
             status = "error"
             run_ended_at_dt = datetime.now(UTC)
             run_ended_at = run_ended_at_dt.isoformat()
             await logger.aexception(
-                f"Background run failed. Exception: {exception}",
+                f"Background run failed. Exception: {type(exception)}({exception})",
                 exc_info=not isinstance(exception, RemoteException),
                 run_id=str(run_id),
                 run_attempt=attempt,
@@ -310,7 +324,9 @@ async def worker(
                 run_ended_at=run_ended_at,
                 run_exec_ms=ms(run_ended_at_dt, run_started_at),
             )
-            await Runs.set_status(conn, run_id, "error")
+            await Threads.set_joint_status(
+                conn, run["thread_id"], run_id, status, checkpoint, exception
+            )
 
         # delete or set status of thread
         if not isinstance(exception, RETRIABLE_EXCEPTIONS):
@@ -331,6 +347,7 @@ async def worker(
                 raise
 
         if isinstance(exception, RETRIABLE_EXCEPTIONS):
+            await logger.awarning("RETRYING", exc_info=exception)
             # re-raise so Runs.enter knows not to mark as done
             # Runs.enter will catch the exception, but what triggers the retry
             # is setting the status to "pending"
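The thrust of the worker changes: terminal outcomes (success, timeout, rollback, interrupted, error) now update the run and its thread together via Threads.set_joint_status, while retriable failures still only flip the run back to "pending" and leave the thread untouched until the retry resolves; CancelledError is now handled like any other exception instead of bubbling up to roll back the transaction. A deliberately simplified sketch of that status routing (classify_outcome is a hypothetical helper, and the rollback/interrupt branches are folded into the generic terminal case):

    def classify_outcome(
        exception, retriable_exceptions: tuple[type[BaseException], ...]
    ) -> tuple[str, bool]:
        """Return (status, update_thread_jointly)."""
        if exception is None:
            return "success", True
        if isinstance(exception, TimeoutError):
            return "timeout", True
        if isinstance(exception, retriable_exceptions):
            return "retry", False   # run is reset to "pending"; thread status is deferred
        return "error", True        # joint run + thread update, as with the other terminal states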
langgraph_api-0.2.48.dist-info/METADATA → langgraph_api-0.2.54.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.48
+Version: 0.2.54
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -11,7 +11,7 @@ Requires-Dist: httpx>=0.25.0
 Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
 Requires-Dist: langgraph-checkpoint>=2.0.23
-Requires-Dist: langgraph-runtime-inmem<0.3,>=0.2.0
+Requires-Dist: langgraph-runtime-inmem<0.4,>=0.3.0
 Requires-Dist: langgraph-sdk>=0.1.66
 Requires-Dist: langgraph>=0.3.27
 Requires-Dist: langsmith>=0.3.45
langgraph_api-0.2.48.dist-info/RECORD → langgraph_api-0.2.54.dist-info/RECORD CHANGED
@@ -1,17 +1,17 @@
-langgraph_api/__init__.py,sha256=thmEir0DEg2DFeSxHPQuT-AAqrUZNBg_iYmSBYkzc4c,23
+langgraph_api/__init__.py,sha256=oNewG1bC0xUC2UKGSu-zMR1_CZwFYXnhulfuiJ3G4hw,23
 langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
-langgraph_api/asyncio.py,sha256=nelZwKL7iOjM5GHj1rVjiPE7igUIKLNKtc-3urxmlfo,9250
+langgraph_api/asyncio.py,sha256=Odnc6mAJIGF3eFWT8Xcrg2Zam7FwzXkfCWEHaXfrzQQ,9371
 langgraph_api/cli.py,sha256=9Ou3tGDDY_VVLt5DFle8UviJdpI4ZigC5hElYvq2-To,14519
 langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
-langgraph_api/config.py,sha256=pFIZb4t2Vo7HbX0ZMjUNDR7Q7Bpj-stp_TmSmI026yo,11115
+langgraph_api/config.py,sha256=do03SoO93rfL7PKuxviLZbYuVlzlZJayXnodkO-nxv0,11623
 langgraph_api/cron_scheduler.py,sha256=i87j4pJrcsmsqMKeKUs69gaAjrGaSM3pM3jnXdN5JDQ,2630
 langgraph_api/errors.py,sha256=Bu_i5drgNTyJcLiyrwVE_6-XrSU50BHf9TDpttki9wQ,1690
-langgraph_api/graph.py,sha256=MPm8DvInBQsq2em45c2YD5bW6T_G1LlDkAuWq-19gCQ,23240
+langgraph_api/graph.py,sha256=JUJgmYF4Bfj3zKH9DZ2c4fgCOaZZ4QA3rbeC5Gtp4Bg,23255
 langgraph_api/http.py,sha256=gYbxxjY8aLnsXeJymcJ7G7Nj_yToOGpPYQqmZ1_ggfA,5240
 langgraph_api/logging.py,sha256=1BXELwUBY8gZeOWCYElbBu_GyHLM2jjlDVJznlekqvQ,4268
-langgraph_api/metadata.py,sha256=2sz9ECnbnQtgqN6ptDkRmymaVKfQPgaX-JuDJDJB47c,4254
+langgraph_api/metadata.py,sha256=Gx0b6YszLRjdWLDVN8OcVgC_YYQG_nQitPfUfgQx1w8,4648
 langgraph_api/patch.py,sha256=Dgs0PXHytekX4SUL6KsjjN0hHcOtGLvv1GRGbh6PswU,1408
-langgraph_api/queue_entrypoint.py,sha256=_41ZveMDdn9bapjA7Ik9FG3r4hyIwXESUM5F1PdlieE,2309
+langgraph_api/queue_entrypoint.py,sha256=dzEOqW4AHGjDKMlsNYBMVjHfQM7pm_49LImfa5G5ySM,2491
 langgraph_api/route.py,sha256=4VBkJMeusfiZtLzyUaKm1HwLHTq0g15y2CRiRhM6xyA,4773
 langgraph_api/schema.py,sha256=2711t4PIBk5dky4gmMndrTRC9CVvAgH47C9FKDxhkBo,5444
 langgraph_api/serde.py,sha256=8fQXg7T7RVUqj_jgOoSOJrWVpQDW0qJKjAjSsEhPHo4,4803
@@ -19,12 +19,12 @@ langgraph_api/server.py,sha256=Z_VL-kIphybTRDWBIqHMfRhgCmAFyTRqAGlgnHQF0Zg,6973
 langgraph_api/sse.py,sha256=F7swfjKBDrlUmXZ_dWuDVHtp-3o1Cpjq1lwp0bJD-nw,4223
 langgraph_api/state.py,sha256=8jx4IoTCOjTJuwzuXJKKFwo1VseHjNnw_CCq4x1SW14,2284
 langgraph_api/store.py,sha256=_xGhdwEIMoY1_hIy_oWwxZp4Y7FH833BNJfgFIhT80E,4640
-langgraph_api/stream.py,sha256=Rb9mIeG7nnGtKJhImzkNlE3c0g6C0yM6bbYXQs5GOHU,13560
+langgraph_api/stream.py,sha256=UoJzqCzWhTVildxOlU87mX4NtZSUhtl1RGDeMFRYyNI,13660
 langgraph_api/thread_ttl.py,sha256=-Ox8NFHqUH3wGNdEKMIfAXUubY5WGifIgCaJ7npqLgw,1762
 langgraph_api/utils.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
 langgraph_api/validation.py,sha256=zMuKmwUEBjBgFMwAaeLZmatwGVijKv2sOYtYg7gfRtc,4950
 langgraph_api/webhook.py,sha256=1ncwO0rIZcj-Df9sxSnFEzd1gP1bfS4okeZQS8NSRoE,1382
-langgraph_api/worker.py,sha256=CWXSc4LHQLtNPJgCJC2khw6jb3DNKXnqQ4oB8-UyNSY,15163
+langgraph_api/worker.py,sha256=FgTYpOA4Unt3xcqWpOKPqSZpFqND5GcqLk6O0fi4MHU,15812
 langgraph_api/api/__init__.py,sha256=YVzpbn5IQotvuuLG9fhS9QMrxXfP4s4EpEMG0n4q3Nw,5625
 langgraph_api/api/assistants.py,sha256=6IPVKQBlI95-Z4nYdqBY9st9oynGJAocL67cwnDaZCk,15744
 langgraph_api/api/mcp.py,sha256=RvRYgANqRzNQzSmgjNkq4RlKTtoEJYil04ot9lsmEtE,14352
@@ -66,9 +66,9 @@ langgraph_api/js/src/utils/importMap.mts,sha256=pX4TGOyUpuuWF82kXcxcv3-8mgusRezO
 langgraph_api/js/src/utils/pythonSchemas.mts,sha256=98IW7Z_VP7L_CHNRMb3_MsiV3BgLE2JsWQY_PQcRR3o,685
 langgraph_api/js/src/utils/serde.mts,sha256=D9o6MwTgwPezC_DEmsWS5NnLPnjPMVWIb1I1D4QPEPo,743
 langgraph_api/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/middleware/http_logger.py,sha256=aj4mdisRobFePkD3Iy6-w_Mujwx4TQRaEhPvSd6HgLk,3284
+langgraph_api/middleware/http_logger.py,sha256=c1kLzngi_gpSV1V1O5kViIipkH9vDCmrZs8XPXfwPBs,3225
 langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV7_aLBPw7_EgeqI,2111
-langgraph_api/middleware/request_id.py,sha256=fmtp0jLqoSqXTD39d9PW7jVlIvGqGurqgteMCeexRvY,1106
+langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIiuoHeZpR9g,1242
 langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/models/run.py,sha256=j1s9KRfFXgjKUudB9z7IVJ34Klo85PPeaVFtmWHhEdo,14514
 langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
@@ -86,8 +86,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=wrJup7sCRlZXTRagjzGZ7474U1wma4ZzYTkkninrT6M,141875
-langgraph_api-0.2.48.dist-info/METADATA,sha256=_LjPeqovDHH7euM-CZMwN6ywYfiyGq8mkk3RX_TIDKo,3891
-langgraph_api-0.2.48.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langgraph_api-0.2.48.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
-langgraph_api-0.2.48.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
-langgraph_api-0.2.48.dist-info/RECORD,,
+langgraph_api-0.2.54.dist-info/METADATA,sha256=fL0RwEb1lBCBPnjK0nwECORd51dAbiuDMRuz6YLW_Xk,3891
+langgraph_api-0.2.54.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.2.54.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.2.54.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.2.54.dist-info/RECORD,,