langgraph-api 0.0.34__py3-none-any.whl → 0.0.36__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- langgraph_api/api/runs.py +42 -0
- langgraph_api/api/threads.py +1 -0
- langgraph_api/auth/custom.py +6 -13
- langgraph_api/cli.py +11 -3
- langgraph_api/config.py +23 -0
- langgraph_api/lifespan.py +8 -2
- langgraph_api/server.py +10 -1
- langgraph_api/thread_ttl.py +46 -0
- langgraph_api/validation.py +1 -0
- {langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/METADATA +1 -1
- {langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/RECORD +17 -16
- langgraph_storage/ops.py +126 -61
- langgraph_storage/store.py +3 -0
- openapi.json +115 -0
- {langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/LICENSE +0 -0
- {langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/WHEEL +0 -0
- {langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/entry_points.txt +0 -0
langgraph_api/api/runs.py
CHANGED
@@ -1,8 +1,10 @@
 import asyncio
 from collections.abc import AsyncIterator
+from typing import Literal
 
 import orjson
 from langgraph.checkpoint.base.id import uuid6
+from starlette.exceptions import HTTPException
 from starlette.responses import Response, StreamingResponse
 
 from langgraph_api import config
@@ -17,6 +19,7 @@ from langgraph_api.validation import (
     RunBatchCreate,
     RunCreateStateful,
     RunCreateStateless,
+    RunsCancel,
 )
 from langgraph_license.validation import plus_features_enabled
 from langgraph_storage.database import connect
@@ -394,6 +397,44 @@ async def cancel_run(
     return Response(status_code=204 if wait else 202)
 
 
+@retry_db
+async def cancel_runs(
+    request: ApiRequest,
+):
+    """Cancel a run."""
+    body = await request.json(RunsCancel)
+    status = body.get("status")
+    if status:
+        status = status.lower()
+        if status not in ("pending", "running", "all"):
+            raise HTTPException(
+                status_code=422,
+                detail="Invalid status: must be 'pending', 'running', or 'all'",
+            )
+        thread_id = None
+        run_ids = None
+    else:
+        thread_id = body.get("thread_id")
+        run_ids = body.get("run_ids")
+        validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+        for rid in run_ids:
+            validate_uuid(rid, "Invalid run ID: must be a UUID")
+    action_str = request.query_params.get("action", "interrupt")
+    action: Literal["interrupt", "rollback"] = (
+        action_str if action_str in ("interrupt", "rollback") else "interrupt"
+    )
+
+    async with connect() as conn:
+        await Runs.cancel(
+            conn,
+            run_ids,
+            action=action,
+            thread_id=thread_id,
+            status=status,
+        )
+    return Response(status_code=204)
+
+
 @retry_db
 async def delete_run(request: ApiRequest):
     """Delete a run by ID."""
@@ -486,6 +527,7 @@ runs_routes = [
     ApiRoute("/runs/wait", wait_run_stateless, methods=["POST"]),
     ApiRoute("/runs", create_stateless_run, methods=["POST"]),
     ApiRoute("/runs/batch", create_stateless_run_batch, methods=["POST"]),
+    ApiRoute("/runs/cancel", cancel_runs, methods=["POST"]),
     (
         ApiRoute("/runs/crons", create_cron, methods=["POST"])
         if config.FF_CRONS_ENABLED and plus_features_enabled()
langgraph_api/api/threads.py
CHANGED
langgraph_api/auth/custom.py
CHANGED
@@ -176,21 +176,14 @@ class CustomAuthBackend(AuthenticationBackend):
         except (AuthenticationError, HTTPException):
             raise
         except Auth.exceptions.HTTPException as e:
-
-
-
-
-
-        except AssertionError as e:
-            raise AuthenticationError(str(e)) from None
+            if e.status_code == 401 or e.status_code == 403:
+                raise AuthenticationError(e.detail) from None
+            else:
+                await logger.aerror("Error authenticating request", exc_info=e)
+                raise
         except Exception as e:
             await logger.aerror("Error authenticating request", exc_info=e)
-
-            detail = getattr(e, "detail", "Unauthorized")
-            headers = getattr(e, "headers", None)
-            raise HTTPException(
-                status_code=status_code, detail=detail, headers=headers
-            ) from None
+            raise e
 
 
 def _get_custom_auth_middleware(
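In practice this means a custom auth handler that raises `Auth.exceptions.HTTPException` with a 401 or 403 now surfaces as a regular authentication failure, while any other status code is logged and re-raised. A rough sketch of a handler relying on that behavior (the decorator usage follows the langgraph_sdk Auth API; the token check itself is illustrative):

from langgraph_sdk import Auth

auth = Auth()

@auth.authenticate
async def authenticate(authorization: str | None) -> Auth.types.MinimalUserDict:
    if not authorization or not authorization.startswith("Bearer "):
        # With the change above, this 401 is converted into an
        # AuthenticationError by CustomAuthBackend; other status codes
        # are logged and re-raised unchanged.
        raise Auth.exceptions.HTTPException(status_code=401, detail="Missing bearer token")
    return {"identity": "user-123"}  # placeholder identity
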
langgraph_api/cli.py
CHANGED
@@ -178,8 +178,7 @@ def run_server(
         logger.info("Debugger attached. Starting server...")
 
     local_url = f"http://{host}:{port}"
-
-    with patch_environment(
+    to_patch = dict(
         MIGRATIONS_PATH="__inmem",
         DATABASE_URI=":memory:",
         REDIS_URI="fake",
@@ -192,7 +191,16 @@ def run_server(
         LANGGRAPH_API_URL=local_url,
         # See https://developer.chrome.com/blog/private-network-access-update-2024-03
         ALLOW_PRIVATE_NETWORK="true",
-
+    )
+    if env_vars is not None:
+        # Don't overwrite.
+        for k, v in env_vars.items():
+            if k in to_patch:
+                logger.debug(f"Skipping loaded env var {k}={v}")
+                continue
+            to_patch[k] = v
+    with patch_environment(
+        **to_patch,
     ):
         studio_origin = studio_url or _get_ls_origin() or "https://smith.langchain.com"
         full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}"
langgraph_api/config.py
CHANGED
@@ -39,6 +39,12 @@ class HttpConfig(TypedDict, total=False):
     """Disable /mcp routes"""
 
 
+class ThreadTTLConfig(TypedDict, total=False):
+    strategy: Literal["delete"]
+    default_ttl: float | None
+    sweep_interval_minutes: int | None
+
+
 class IndexConfig(TypedDict, total=False):
     """Configuration for indexing documents for semantic search in the store."""
 
@@ -204,6 +210,23 @@ BG_JOB_MAX_RETRIES = 3
 BG_JOB_ISOLATED_LOOPS = env("BG_JOB_ISOLATED_LOOPS", cast=bool, default=False)
 
 
+def _parse_thread_ttl(value: str | None) -> ThreadTTLConfig | None:
+    if not value:
+        return None
+    if str(value).strip().startswith("{"):
+        return _parse_json(value.strip())
+    return {
+        "strategy": "delete",
+        # We permit float values mainly for testing purposes
+        "default_ttl": float(value),
+        "sweep_interval_minutes": 5.1,
+    }
+
+
+THREAD_TTL: ThreadTTLConfig | None = env(
+    "LANGGRAPH_THREAD_TTL", cast=_parse_thread_ttl, default=None
+)
+
 N_JOBS_PER_WORKER = env("N_JOBS_PER_WORKER", cast=int, default=10)
 BG_JOB_TIMEOUT_SECS = env("BG_JOB_TIMEOUT_SECS", cast=float, default=3600)
 FF_CRONS_ENABLED = env("FF_CRONS_ENABLED", cast=bool, default=True)
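Going by `_parse_thread_ttl` above, `LANGGRAPH_THREAD_TTL` accepts either a bare number (treated as the default TTL with the 'delete' strategy and the built-in sweep interval) or a JSON object matching `ThreadTTLConfig`. An illustrative sketch of the two forms (values are made up; the TTL appears to be in minutes, matching the OpenAPI `ttl` field further below):

import json
import os

# Simple form: a bare number becomes default_ttl with strategy="delete".
os.environ["LANGGRAPH_THREAD_TTL"] = "10080"  # roughly one week, in minutes

# Explicit form: a JSON object matching ThreadTTLConfig.
os.environ["LANGGRAPH_THREAD_TTL"] = json.dumps(
    {"strategy": "delete", "default_ttl": 10080, "sweep_interval_minutes": 5}
)
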
langgraph_api/lifespan.py
CHANGED
@@ -2,6 +2,8 @@ import asyncio
 from contextlib import asynccontextmanager
 
 import structlog
+from langchain_core.runnables.config import var_child_runnable_config
+from langgraph.constants import CONF, CONFIG_KEY_STORE
 from starlette.applications import Starlette
 
 import langgraph_api.config as config
@@ -10,6 +12,7 @@ from langgraph_api.cron_scheduler import cron_scheduler
 from langgraph_api.graph import collect_graphs_from_env, stop_remote_graphs
 from langgraph_api.http import start_http_client, stop_http_client
 from langgraph_api.metadata import metadata_loop
+from langgraph_api.thread_ttl import thread_ttl_sweep_loop
 from langgraph_license.validation import get_license_status, plus_features_enabled
 from langgraph_storage.database import start_pool, stop_pool
 from langgraph_storage.queue import queue
@@ -54,8 +57,11 @@ async def lifespan(
                 and plus_features_enabled()
             ):
                 tg.create_task(cron_scheduler())
-
-
+            store = Store()
+            tg.create_task(Store().start_ttl_sweeper())
+            tg.create_task(thread_ttl_sweep_loop())
+            var_child_runnable_config.set({CONF: {CONFIG_KEY_STORE: store}})
+
             yield
     finally:
         await stop_remote_graphs()
langgraph_api/server.py
CHANGED
@@ -16,7 +16,7 @@ from starlette.middleware.cors import CORSMiddleware
 from langgraph_api.api.openapi import set_custom_spec
 
 import langgraph_api.config as config
-from langgraph_api.api import routes, user_router
+from langgraph_api.api import routes, meta_routes, user_router
 from langgraph_api.errors import (
     overloaded_error_handler,
     validation_error_handler,
@@ -92,6 +92,15 @@ def update_openapi_spec(app):
     if user_router:
         # Merge routes
         app = user_router
+
+        meta_route_paths = [route.path for route in meta_routes]
+        custom_route_paths = [
+            route.path
+            for route in user_router.router.routes
+            if route.path not in meta_route_paths
+        ]
+        logger.info(f"Custom route paths: {custom_route_paths}")
+
     update_openapi_spec(app)
     for route in routes:
         if route.path in ("/docs", "/openapi.json"):
langgraph_api/thread_ttl.py
ADDED
@@ -0,0 +1,46 @@
+"""Sweeping logic for cleaning up expired threads and checkpoints."""
+
+import asyncio
+
+import structlog
+
+from langgraph_api.config import THREAD_TTL
+from langgraph_storage.database import connect
+
+logger = structlog.stdlib.get_logger(__name__)
+
+
+async def thread_ttl_sweep_loop():
+    """Periodically delete threads based on TTL configuration.
+
+    Currently implements the 'delete' strategy, which deletes entire threads
+    that have been inactive for longer than their configured TTL period.
+    """
+    # Use the same interval as store TTL sweep
+    thread_ttl_config = THREAD_TTL or {}
+    strategy = thread_ttl_config.get("strategy", "delete")
+    if strategy != "delete":
+        raise NotImplementedError(
+            f"Unrecognized thread deletion strategy: {strategy}." " Expected 'delete'."
+        )
+    sweep_interval_minutes = thread_ttl_config.get("sweep_interval_minutes", 5)
+    await logger.ainfo(
+        "Starting thread TTL sweeper",
+        interval_minutes=sweep_interval_minutes,
+    )
+
+    from langgraph_storage.ops import Threads
+
+    while True:
+        await asyncio.sleep(sweep_interval_minutes * 60)
+        try:
+            async with connect() as conn:
+                threads_processed, threads_deleted = await Threads.sweep_ttl(conn)
+                if threads_processed > 0:
+                    await logger.ainfo(
+                        "Thread TTL sweep completed",
+                        threads_processed=threads_processed,
+                        threads_deleted=threads_deleted,
+                    )
+        except Exception as exc:
+            logger.exception("Thread TTL sweep iteration failed", exc_info=exc)
langgraph_api/validation.py
CHANGED
@@ -113,6 +113,7 @@ RunCreateStateful = jsonschema_rs.validator_for(
         },
     }
 )
+RunsCancel = jsonschema_rs.validator_for(openapi["components"]["schemas"]["RunsCancel"])
 CronCreate = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronCreate"])
 CronSearch = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronSearch"])
 
{langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/RECORD
CHANGED
@@ -5,22 +5,22 @@ langgraph_api/api/assistants.py,sha256=nU6tnbgdr_6Utlq0A9nw2a6xxpUM_DNuCFI42_Kcs
 langgraph_api/api/mcp.py,sha256=dpKT9DgIoLERTmYZ4sSOPyHbfGbm7hCyb2MrMS_ol18,13593
 langgraph_api/api/meta.py,sha256=ifJ_Ki0Qf2DYbmY6OKlqKhLGxbt55gm0lEqH1A0cJbw,2790
 langgraph_api/api/openapi.py,sha256=f9gfmWN2AMKNUpLCpSgZuw_aeOF9jCXPdOtFT5PaTWM,10960
-langgraph_api/api/runs.py,sha256=
+langgraph_api/api/runs.py,sha256=uijgtrw_FSf1lZHsx8pM-CIj_ur2O88Y7ys-CJZ4SNQ,17988
 langgraph_api/api/store.py,sha256=VzAJVOwO0IxosBB7km5TTf2rhlWGyPkVz_LpvbxetVY,5437
-langgraph_api/api/threads.py,sha256=
+langgraph_api/api/threads.py,sha256=kvv8pmRoUIvPFEuAhJpMC6qMv7Xo5itrzb5EzJFyWMg,8966
 langgraph_api/api/ui.py,sha256=LiOZVewKOPbKEykCm30hCEaOA7vuS_Ti5hB32EEy4vw,2082
 langgraph_api/asyncio.py,sha256=hVuAxWTHoUyNqTzcIEKTkAvh7HFrdGK6WIDowDxORAE,8397
 langgraph_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/auth/custom.py,sha256=
+langgraph_api/auth/custom.py,sha256=Gays0bYwZmRQA8-LSmQSNp1zS2_NG_xybG2cW79YweU,20890
 langgraph_api/auth/langsmith/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/auth/langsmith/backend.py,sha256=InScaL-HYCnxYEauhxU198gRZV9pJn9SzzBoR9Edn7g,2654
 langgraph_api/auth/langsmith/client.py,sha256=eKchvAom7hdkUXauD8vHNceBDDUijrFgdTV8bKd7x4Q,3998
 langgraph_api/auth/middleware.py,sha256=jU8aDSIZHdzCGdifejRF7ndHkSjBtqIHcBwFIuUdHEA,1875
 langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
 langgraph_api/auth/studio_user.py,sha256=FzFQRROKDlA9JjtBuwyZvk6Mbwno5M9RVYjDO6FU3F8,186
-langgraph_api/cli.py,sha256=
+langgraph_api/cli.py,sha256=cSRKZzaxSbfDQO3-2YWAo2NgeE5IK0mabYWt4m5q-dU,12047
 langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
-langgraph_api/config.py,sha256=
+langgraph_api/config.py,sha256=MwTyJUr2wwZdUbL0gIvol_cCGi24Py9DvBnSozE1ai8,9342
 langgraph_api/cron_scheduler.py,sha256=9yzbbGxzNgJdIg4ZT7yu2oTwT_wRuPxD1c2sbbd52xs,2630
 langgraph_api/errors.py,sha256=Bu_i5drgNTyJcLiyrwVE_6-XrSU50BHf9TDpttki9wQ,1690
 langgraph_api/graph.py,sha256=5Lo9FghptdPIFJoeTDkLjoIQLuNy49RWtAHJmzOCrZI,17209
@@ -60,7 +60,7 @@ langgraph_api/js/tests/graphs/yarn.lock,sha256=i2AAIgXA3XBLM8-oU45wgUefCSG-Tne4g
 langgraph_api/js/tests/parser.test.mts,sha256=dEC8KTqKygeb1u39ZvpPqCT4HtfPD947nLmITt2buxA,27883
 langgraph_api/js/tests/utils.mts,sha256=2kTybJ3O7Yfe1q3ehDouqV54ibXkNzsPZ_wBZLJvY-4,421
 langgraph_api/js/yarn.lock,sha256=W89dVYZMThcec08lJMcYnvEEnQK7VM5cPglvwpIdRv0,82773
-langgraph_api/lifespan.py,sha256=
+langgraph_api/lifespan.py,sha256=0F4Xn6IC4wUfvBWbm6KCJ-jiF0S7iWDOHJYJZ-A3o3s,2739
 langgraph_api/logging.py,sha256=JJIzbNIgLCN6ClQ3tA-Mm5ffuBGvpRDSZsEvnIlsuu4,3693
 langgraph_api/metadata.py,sha256=5Mu3MUtUc-iIocU3X2SZDoGIqnUmTdT3517MhP94npI,3495
 langgraph_api/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -73,12 +73,13 @@ langgraph_api/queue_entrypoint.py,sha256=4xICUxXarNV8DhnaqAMhVi3xCmyVKCL3J5NzHxP
 langgraph_api/route.py,sha256=fM4qYCGbmH0a3_cV8uKocb1sLklehxO6HhdRXqLK6OM,4421
 langgraph_api/schema.py,sha256=hNbg6ep2wiGBBtBJVNBgMYA8uC33AfaqhRXXVUY_At8,5361
 langgraph_api/serde.py,sha256=HfaIBNfdKg2W_mIpw_er5irG3GlIiiVqkqfbDgnYHpg,3671
-langgraph_api/server.py,sha256=
+langgraph_api/server.py,sha256=bnXOOYztQmqR-QVpEFoRWB5Fzd33PuEIlwBK2R7W8NE,4849
 langgraph_api/sse.py,sha256=2wNodCOP2eg7a9mpSu0S3FQ0CHk2BBV_vv0UtIgJIcc,4034
 langgraph_api/state.py,sha256=8jx4IoTCOjTJuwzuXJKKFwo1VseHjNnw_CCq4x1SW14,2284
 langgraph_api/stream.py,sha256=lhjnom-T8GbquUZry-KSkajnqYjElaIERhPiXPtpw1E,11354
+langgraph_api/thread_ttl.py,sha256=ubkWMymTR7p9nGWd60-WKEOQ20ZgIkWB6WGstQUmRS4,1658
 langgraph_api/utils.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
-langgraph_api/validation.py,sha256=
+langgraph_api/validation.py,sha256=Qo3EkSRXzIRe3GRuqRWbElTcUXRMXMyA1w0VbMvdwME,4934
 langgraph_api/webhook.py,sha256=1ncwO0rIZcj-Df9sxSnFEzd1gP1bfS4okeZQS8NSRoE,1382
 langgraph_api/worker.py,sha256=zlkk2yg1tOBQHYwN-WHjc9NJgd3CAvsl8ET0-7tqQIk,9838
 langgraph_license/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -88,15 +89,15 @@ langgraph_storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
 langgraph_storage/checkpoint.py,sha256=X6YHrCwIPVE0iEQy16GhleyyHUdXdCkC-c-Q1SW-rAM,3884
 langgraph_storage/database.py,sha256=I0AgFeJ-NSTT34vxKxQBUf1z2syFP0S8QpKCqTixrzY,5652
 langgraph_storage/inmem_stream.py,sha256=8bxkILIuFpr7P7RQ37SQAxrpRKvmbHdRB_nbfFiomlk,3263
-langgraph_storage/ops.py,sha256=
+langgraph_storage/ops.py,sha256=S0QHbnquEXvAM63NQMgqtyTjm-8nlq0pOvgJgFD38Z8,75312
 langgraph_storage/queue.py,sha256=UDgsUTtUMfBSRDrQ8Onis-FJO4n7KTsX6sdpbY8Hs0A,5055
 langgraph_storage/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,763
-langgraph_storage/store.py,sha256=
+langgraph_storage/store.py,sha256=8QSM0gm6ZV40antWw0YHfnU71swZRsozjd85R-MSrjg,3100
 langgraph_storage/ttl_dict.py,sha256=FlpEY8EANeXWKo_G5nmIotPquABZGyIJyk6HD9u6vqY,1533
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
-openapi.json,sha256
-langgraph_api-0.0.
-langgraph_api-0.0.
-langgraph_api-0.0.
-langgraph_api-0.0.
-langgraph_api-0.0.
+openapi.json,sha256=-25y3NRQ88e_944UXo76Goa34HJhC7pj6I9tjYUwvuE,131492
+langgraph_api-0.0.36.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.0.36.dist-info/METADATA,sha256=T5nIH34PPCdvY8DcFJWKDgrrNutgpgwXGmv52fX_2AI,4027
+langgraph_api-0.0.36.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+langgraph_api-0.0.36.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
+langgraph_api-0.0.36.dist-info/RECORD,,
langgraph_storage/ops.py
CHANGED
@@ -24,6 +24,7 @@ from starlette.exceptions import HTTPException
 from langgraph_api.asyncio import SimpleTaskGroup, ValueEvent, create_task
 from langgraph_api.auth.custom import handle_event
 from langgraph_api.command import map_cmd
+from langgraph_api.config import ThreadTTLConfig
 from langgraph_api.errors import UserInterrupt, UserRollback
 from langgraph_api.graph import get_graph
 from langgraph_api.schema import (
@@ -672,6 +673,7 @@ class Threads(Authenticated):
         *,
         metadata: MetadataInput,
         if_exists: OnConflictBehavior,
+        ttl: ThreadTTLConfig | None = None,
         ctx: Auth.types.BaseAuthContext | None = None,
     ) -> AsyncIterator[Thread]:
         """Insert or update a thread."""
@@ -961,6 +963,16 @@ class Threads(Authenticated):
 
         return row_generator()
 
+    @staticmethod
+    async def sweep_ttl(
+        conn: InMemConnectionProto,
+        *,
+        limit: int | None = None,
+        batch_size: int = 100,
+    ) -> tuple[int, int]:
+        # Not implemented for inmem server
+        return (0, 0)
+
 class State(Authenticated):
     # We will treat this like a runs resource for now.
     resource = "threads"
@@ -1453,6 +1465,7 @@ class Runs(Authenticated):
             ),
             created_at=datetime.now(UTC),
             updated_at=datetime.now(UTC),
+            values=b"",
         )
         await logger.ainfo("Creating thread", thread_id=thread_id)
         conn.store["threads"].append(thread)
@@ -1669,88 +1682,140 @@
     @staticmethod
     async def cancel(
         conn: InMemConnectionProto,
-        run_ids: Sequence[UUID],
+        run_ids: Sequence[UUID] | None = None,
         *,
         action: Literal["interrupt", "rollback"] = "interrupt",
-        thread_id: UUID,
+        thread_id: UUID | None = None,
+        status: Literal["pending", "running", "all"] | None = None,
         ctx: Auth.types.BaseAuthContext | None = None,
     ) -> None:
-        """
-
-
-
-
-
-
-
-
-
+        """
+        Cancel runs in memory. Must provide either:
+          1) thread_id + run_ids, or
+          2) status in {"pending", "running", "all"}.
+
+        Steps:
+          - Validate arguments (one usage pattern or the other).
+          - Auth check: 'update' event via handle_event().
+          - Gather runs matching either the (thread_id, run_ids) set or the given status.
+          - For each run found:
+            * Send a cancellation message through the stream manager.
+            * If 'pending', set to 'interrupted' or delete (if action='rollback' and not actively queued).
+            * If 'running', the worker will pick up the message.
+            * Otherwise, log a warning for non-cancelable states.
+          - 404 if no runs are found or authorized.
+        """
+        # 1. Validate arguments
+        if status is not None:
+            # If status is set, user must NOT specify thread_id or run_ids
+            if thread_id is not None or run_ids is not None:
+                raise HTTPException(
+                    status_code=422,
+                    detail="Cannot specify 'thread_id' or 'run_ids' when using 'status'",
+                )
+        else:
+            # If status is not set, user must specify both thread_id and run_ids
+            if thread_id is None or run_ids is None:
+                raise HTTPException(
+                    status_code=422,
+                    detail="Must provide either a status or both 'thread_id' and 'run_ids'",
+                )
+
+        # Convert and normalize inputs
+        if run_ids is not None:
+            run_ids = [_ensure_uuid(rid) for rid in run_ids]
+        if thread_id is not None:
+            thread_id = _ensure_uuid(thread_id)
+
         filters = await Runs.handle_event(
             ctx,
             "update",
             Auth.types.ThreadsUpdate(
-                thread_id=thread_id,
+                thread_id=thread_id,  # type: ignore
                 action=action,
-                metadata={
+                metadata={
+                    "run_ids": run_ids,
+                    "status": status,
+                },
             ),
         )
+
+        status_list: tuple[str, ...] = ()
+        if status is not None:
+            if status == "all":
+                status_list = ("pending", "running")
+            elif status in ("pending", "running"):
+                status_list = (status,)
+            else:
+                raise ValueError(f"Unsupported status: {status}")
+
+        def is_run_match(r: dict) -> bool:
+            """
+            Check whether a run in `conn.store["runs"]` meets the selection criteria.
+            """
+            if status_list:
+                return r["status"] in status_list
+            else:
+                return r["thread_id"] == thread_id and r["run_id"] in run_ids  # type: ignore
+
+        candidate_runs = [r for r in conn.store["runs"] if is_run_match(r)]
+
+        if filters:
+            # If a run is found but not authorized by the thread filters, skip it
+            thread = (
+                await Threads._get_with_filters(conn, thread_id, filters)
+                if thread_id
+                else None
+            )
+            # If there's no matching thread, no runs are authorized.
+            if thread_id and not thread:
+                candidate_runs = []
+            # Otherwise, we might trust that `_get_with_filters` is the only constraint
+            # on thread. If your filters also apply to runs, you might do more checks here.
+
+        if not candidate_runs:
+            raise HTTPException(status_code=404, detail="No runs found to cancel.")
+
         stream_manager = get_stream_manager()
-        found_runs = []
         coros = []
-        for
-
-
-
-
-                if r["run_id"] == run_id and r["thread_id"] == thread_id
-            ),
-            None,
+        for run in candidate_runs:
+            run_id = run["run_id"]
+            control_message = Message(
+                topic=f"run:{run_id}:control".encode(),
+                data=action.encode(),
             )
-
-
-
-            if not thread:
-                continue
-            found_runs.append(run)
-            # Send cancellation message through stream manager
-            control_message = Message(
-                topic=f"run:{run_id}:control".encode(),
-                data=action.encode(),
-            )
-            queues = stream_manager.get_queues(run_id)
-            coros.append(stream_manager.put(run_id, control_message))
-
-            # Update status for pending runs
-            if run["status"] in ("pending", "running"):
-                if queues or action != "rollback":
-                    run["status"] = "interrupted"
-                    run["updated_at"] = datetime.now(tz=UTC)
-                else:
-                    await logger.ainfo(
-                        "Eagerly deleting unscheduled run with rollback action",
-                        run_id=run_id,
-                        thread_id=thread_id,
-                    )
-                    coros.append(Runs.delete(conn, run_id, thread_id=thread_id))
+            coros.append(stream_manager.put(run_id, control_message))
+
+            queues = stream_manager.get_queues(run_id)
 
+            if run["status"] in ("pending", "running"):
+                if queues or action != "rollback":
+                    run["status"] = "interrupted"
+                    run["updated_at"] = datetime.now(tz=UTC)
                 else:
-                    await logger.
-                        "
-                        run_id=run_id,
+                    await logger.ainfo(
+                        "Eagerly deleting pending run with rollback action",
+                        run_id=str(run_id),
                         status=run["status"],
                     )
+                    coros.append(Runs.delete(conn, run_id, thread_id=run["thread_id"]))
+            else:
+                await logger.awarning(
+                    "Attempted to cancel non-pending run.",
+                    run_id=str(run_id),
+                    status=run["status"],
+                )
+
         if coros:
             await asyncio.gather(*coros)
-
-
-
-
-
-
-
-
-            raise HTTPException(status_code=404, detail="Run not found")
+
+        await logger.ainfo(
+            "Cancelled runs",
+            run_ids=[str(r["run_id"]) for r in candidate_runs],
+            thread_id=str(thread_id) if thread_id else None,
+            status=status,
+            action=action,
+        )
 
     @staticmethod
     async def search(
langgraph_storage/store.py
CHANGED
@@ -62,6 +62,9 @@ class BatchedStore(AsyncBatchedBaseStore):
     async def abatch(self, ops: Iterable[Op]) -> list[Result]:
         return await self._store.abatch(ops)
 
+    async def start_ttl_sweeper(self) -> asyncio.Task[None]:
+        return await self._store.start_ttl_sweeper()
+
     def close(self) -> None:
         self._store.close()
 
openapi.json
CHANGED
@@ -2268,6 +2268,68 @@
       }
     }
   },
+  "/runs/cancel": {
+    "post": {
+      "tags": [
+        "Thread Runs"
+      ],
+      "summary": "Cancel Runs",
+      "description": "Cancel one or more runs. Can cancel runs by thread ID and run IDs, or by status filter.",
+      "operationId": "cancel_runs_post",
+      "parameters": [
+        {
+          "description": "Action to take when cancelling the run. Possible values are `interrupt` or `rollback`. `interrupt` will simply cancel the run. `rollback` will cancel the run and delete the run and associated checkpoints afterwards.",
+          "required": false,
+          "schema": {
+            "type": "string",
+            "enum": [
+              "interrupt",
+              "rollback"
+            ],
+            "title": "Action",
+            "default": "interrupt"
+          },
+          "name": "action",
+          "in": "query"
+        }
+      ],
+      "requestBody": {
+        "content": {
+          "application/json": {
+            "schema": {
+              "$ref": "#/components/schemas/RunsCancel"
+            }
+          }
+        },
+        "required": true
+      },
+      "responses": {
+        "204": {
+          "description": "Success - Runs cancelled"
+        },
+        "404": {
+          "description": "Not Found",
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/ErrorResponse"
+              }
+            }
+          }
+        },
+        "422": {
+          "description": "Validation Error",
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/ErrorResponse"
+              }
+            }
+          }
+        }
+      }
+    }
+  },
   "/runs/wait": {
     "post": {
       "tags": [
@@ -3959,6 +4021,23 @@
       "description": "How to handle duplicate creation. Must be either 'raise' (raise error if duplicate), or 'do_nothing' (return existing thread).",
       "default": "raise"
     },
+    "ttl": {
+      "type": "object",
+      "title": "TTL",
+      "description": "The time-to-live for the thread.",
+      "properties": {
+        "strategy": {
+          "type": "string",
+          "enum": ["delete"],
+          "description": "The TTL strategy. 'delete' removes the entire thread.",
+          "default": "delete"
+        },
+        "ttl": {
+          "type": "number",
+          "description": "The time-to-live in minutes from now until thread should be swept."
+        }
+      }
+    },
     "supersteps": {
       "type": "array",
       "items": {
@@ -4398,6 +4477,42 @@
     },
     "description": "Represents a single document or data entry in the graph's Store. Items are used to store cross-thread memories."
   },
+  "RunsCancel": {
+    "type": "object",
+    "title": "RunsCancel",
+    "description": "Payload for cancelling runs.",
+    "properties": {
+      "status": {
+        "type": "string",
+        "enum": ["pending", "running", "all"],
+        "title": "Status",
+        "description": "Filter runs by status to cancel. Must be one of 'pending', 'running', or 'all'."
+      },
+      "thread_id": {
+        "type": "string",
+        "format": "uuid",
+        "title": "Thread Id",
+        "description": "The ID of the thread containing runs to cancel."
+      },
+      "run_ids": {
+        "type": "array",
+        "items": {
+          "type": "string",
+          "format": "uuid"
+        },
+        "title": "Run Ids",
+        "description": "List of run IDs to cancel."
+      }
+    },
+    "oneOf": [
+      {
+        "required": ["status"]
+      },
+      {
+        "required": ["thread_id", "run_ids"]
+      }
+    ]
+  },
   "SearchItemsResponse": {
     "type": "object",
     "required": [
{langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/LICENSE
File without changes
{langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/WHEEL
File without changes
{langgraph_api-0.0.34.dist-info → langgraph_api-0.0.36.dist-info}/entry_points.txt
File without changes