langgraph-api 0.2.45__tar.gz → 0.2.48__tar.gz
This diff shows the changes between two publicly released versions of the package, exactly as they appear in the public registry they were published to. It is provided for informational purposes only.
Potentially problematic release: this version of langgraph-api has been flagged as potentially problematic; review the changes below before upgrading.
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/PKG-INFO +1 -1
- langgraph_api-0.2.48/langgraph_api/__init__.py +1 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/assistants.py +35 -35
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/meta.py +3 -1
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/runs.py +10 -1
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/remote.py +25 -33
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/stream.py +2 -5
- langgraph_api-0.2.48/langgraph_api/worker.py +366 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/openapi.json +394 -106
- langgraph_api-0.2.45/langgraph_api/__init__.py +0 -1
- langgraph_api-0.2.45/langgraph_api/worker.py +0 -374
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/.gitignore +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/LICENSE +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/Makefile +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/README.md +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/benchmark/weather.js +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/constraints.txt +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/forbidden.txt +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/healthcheck.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/mcp.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/openapi.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/store.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/threads.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/api/ui.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/asgi_transport.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/asyncio.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/custom.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/langsmith/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/langsmith/backend.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/langsmith/client.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/middleware.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/noop.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/auth/studio_user.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/cli.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/command.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/config.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/cron_scheduler.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/errors.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/graph.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/http.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/.gitignore +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/.prettierrc +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/base.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/build.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/client.http.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/client.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/errors.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/global.d.ts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/package.json +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/schema.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/graph.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/load.hooks.mjs +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/preload.mjs +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/utils/files.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/utils/importMap.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/src/utils/serde.mts +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/sse.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/tsconfig.json +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/ui.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/js/yarn.lock +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/logging.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/metadata.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/middleware/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/middleware/http_logger.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/middleware/private_network.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/middleware/request_id.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/models/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/models/run.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/patch.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/queue_entrypoint.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/route.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/schema.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/serde.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/server.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/sse.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/state.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/store.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/thread_ttl.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/tunneling/cloudflare.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/utils.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/validation.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_api/webhook.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_license/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_license/validation.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/__init__.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/checkpoint.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/database.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/lifespan.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/metrics.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/ops.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/queue.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/retry.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/langgraph_runtime/store.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/logging.json +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/pyproject.toml +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/scripts/create_license.py +0 -0
- {langgraph_api-0.2.45 → langgraph_api-0.2.48}/uv.lock +0 -0
--- /dev/null
+++ langgraph_api-0.2.48/langgraph_api/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.2.48"
--- langgraph_api-0.2.45/langgraph_api/api/assistants.py
+++ langgraph_api-0.2.48/langgraph_api/api/assistants.py
@@ -189,40 +189,40 @@ async def get_assistant_graph(
     async with connect() as conn:
         assistant_ = await Assistants.get(conn, assistant_id)
         assistant = await fetchone(assistant_)
-    … (26 removed lines; their content is not shown in this diff view)
+    config = await ajson_loads(assistant["config"])
+    async with get_graph(
+        assistant["graph_id"],
+        config,
+        checkpointer=Checkpointer(),
+        store=(await api_store.get_store()),
+    ) as graph:
+        xray: bool | int = False
+        xray_query = request.query_params.get("xray")
+        if xray_query:
+            if xray_query in ("true", "True"):
+                xray = True
+            elif xray_query in ("false", "False"):
+                xray = False
+            else:
+                try:
+                    xray = int(xray_query)
+                except ValueError:
+                    raise HTTPException(422, detail="Invalid xray value") from None
+
+                if xray <= 0:
+                    raise HTTPException(422, detail="Invalid xray value") from None
+
+        if isinstance(graph, BaseRemotePregel):
+            drawable_graph = await graph.fetch_graph(xray=xray)
+            return ApiResponse(drawable_graph.to_json())
 
-        … (7 removed lines; their content is not shown in this diff view)
+        try:
+            drawable_graph = await graph.aget_graph(xray=xray)
+            return ApiResponse(drawable_graph.to_json())
+        except NotImplementedError:
+            raise HTTPException(
+                422, detail="The graph does not support visualization"
+            ) from None
 
 
 @retry_db
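Reviewer note: the rewritten `get_assistant_graph` validates the `xray` query parameter itself, accepting only the literal strings `true`/`True`/`false`/`False` or a positive integer depth. A self-contained sketch of that validation (the `parse_xray` helper is hypothetical, for illustration only):

```python
from starlette.exceptions import HTTPException


def parse_xray(raw: str | None) -> bool | int:
    """Illustrative mirror of the validation added above."""
    if not raw:
        return False
    if raw in ("true", "True"):
        return True
    if raw in ("false", "False"):
        return False
    try:
        depth = int(raw)
    except ValueError:
        raise HTTPException(422, detail="Invalid xray value") from None
    if depth <= 0:
        raise HTTPException(422, detail="Invalid xray value") from None
    return depth


# e.g. GET /assistants/{assistant_id}/graph?xray=2  ->  parse_xray("2") == 2
```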
@@ -239,7 +239,7 @@ async def get_assistant_subgraphs(
     async with get_graph(
         assistant["graph_id"],
         config,
-        checkpointer=Checkpointer(
+        checkpointer=Checkpointer(),
         store=(await api_store.get_store()),
     ) as graph:
         namespace = request.path_params.get("namespace")
@@ -285,7 +285,7 @@ async def get_assistant_schemas(
     async with get_graph(
         assistant["graph_id"],
         config,
-        checkpointer=Checkpointer(
+        checkpointer=Checkpointer(),
         store=(await api_store.get_store()),
     ) as graph:
         if isinstance(graph, BaseRemotePregel):
--- langgraph_api-0.2.45/langgraph_api/api/meta.py
+++ langgraph_api-0.2.48/langgraph_api/api/meta.py
@@ -2,7 +2,7 @@ import os
 
 from starlette.responses import JSONResponse, PlainTextResponse
 
-from langgraph_api import config, metadata
+from langgraph_api import __version__, config, metadata
 from langgraph_api.route import ApiRequest
 from langgraph_license.validation import plus_features_enabled
 from langgraph_runtime.database import connect, pool_stats
@@ -16,10 +16,12 @@ async def meta_info(request: ApiRequest):
     plus = plus_features_enabled()
     return JSONResponse(
         {
+            "version": __version__,
             "flags": {
                 "assistants": True,
                 "crons": plus and config.FF_CRONS_ENABLED,
                 "langsmith": bool(config.LANGSMITH_API_KEY) and bool(config.TRACING),
+                "langsmith_tracing_replicas": True,
             },
             "host": {
                 "kind": metadata.HOST,
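Reviewer note: with the new `version` key and `langsmith_tracing_replicas` flag, the `/info` payload should look roughly like the dict below. The values are illustrative only; they depend on the deployment's configuration and license.

```python
# Illustrative shape of the JSONResponse body returned by GET /info in 0.2.48.
info = {
    "version": "0.2.48",
    "flags": {
        "assistants": True,
        "crons": False,                      # plus and config.FF_CRONS_ENABLED
        "langsmith": True,                   # bool(LANGSMITH_API_KEY) and bool(TRACING)
        "langsmith_tracing_replicas": True,
    },
    "host": {
        "kind": "self-hosted",               # metadata.HOST; value is an assumption
        # ... remaining host fields unchanged from 0.2.45
    },
}
```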
--- langgraph_api-0.2.45/langgraph_api/api/runs.py
+++ langgraph_api-0.2.48/langgraph_api/api/runs.py
@@ -222,6 +222,9 @@ async def wait_run(request: ApiRequest):
         stream = asyncio.create_task(consume())
         while True:
             try:
+                if stream.done():
+                    # raise stream exception if any
+                    stream.result()
                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
                 break
             except TimeoutError:
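Reviewer note: the `stream.done()` guard exists so that an exception raised inside the `consume()` task surfaces immediately instead of the handler looping on 5-second timeouts against an event that will never be set. The pattern in isolation (a hedged sketch, not the handler itself):

```python
import asyncio


async def wait_for_last_chunk(consumer: asyncio.Task, last_chunk: asyncio.Event) -> None:
    """Illustrative: re-raise a failed consumer's exception instead of waiting forever."""
    while True:
        try:
            if consumer.done():
                consumer.result()  # raises here if the consumer task failed
            await asyncio.wait_for(last_chunk.wait(), timeout=5)
            break
        except TimeoutError:
            continue  # consumer still running; keep waiting for the final chunk
```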
@@ -270,7 +273,10 @@ async def wait_run_stateless(request: ApiRequest):
         vchunk: bytes | None = None
         async with aclosing(
             Runs.Stream.join(
-                run["run_id"],
+                run["run_id"],
+                thread_id=run["thread_id"],
+                stream_mode=await sub,
+                ignore_404=True,
             )
         ) as stream:
             async for mode, chunk, _ in stream:
@@ -290,6 +296,9 @@ async def wait_run_stateless(request: ApiRequest):
         stream = asyncio.create_task(consume())
         while True:
             try:
+                if stream.done():
+                    # raise stream exception if any
+                    stream.result()
                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
                 break
             except TimeoutError:
--- langgraph_api-0.2.45/langgraph_api/js/remote.py
+++ langgraph_api-0.2.48/langgraph_api/js/remote.py
@@ -48,7 +48,6 @@ from langgraph_api.js.sse import SSEDecoder, aiter_lines_raw
 from langgraph_api.route import ApiResponse
 from langgraph_api.schema import Config
 from langgraph_api.serde import json_dumpb
-from langgraph_api.utils import AsyncConnectionProto
 
 logger = structlog.stdlib.get_logger(__name__)
 
@@ -469,10 +468,10 @@ class PassthroughSerialiser(SerializerProtocol):
         return orjson.loads(payload)
 
 
-def _get_passthrough_checkpointer(
+def _get_passthrough_checkpointer():
     from langgraph_runtime.checkpoint import Checkpointer
 
-    checkpointer = Checkpointer(
+    checkpointer = Checkpointer()
     # This checkpointer does not attempt to revive LC-objects.
     # Instead, it will pass through the JSON values as-is.
     checkpointer.serde = PassthroughSerialiser()
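Reviewer note: `_get_passthrough_checkpointer` now builds the `Checkpointer` with no connection argument and swaps its serde for `PassthroughSerialiser`, so checkpoint payloads crossing the JS boundary stay plain JSON. A rough sketch of the passthrough idea (illustrative only; the real class implements langgraph's `SerializerProtocol`, whose full body is not shown in this diff):

```python
import orjson


class PassthroughJson:
    """Illustrative stand-in: round-trip checkpoint payloads as raw JSON bytes,
    without reviving LangChain objects."""

    def dumps(self, obj: object) -> bytes:
        return orjson.dumps(obj)

    def loads(self, payload: bytes) -> object:
        return orjson.loads(payload)
```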
@@ -487,53 +486,46 @@ async def _get_passthrough_store():
 # Setup a HTTP server on top of CHECKPOINTER_SOCKET unix socket
 # used by `client.mts` to communicate with the Python checkpointer
 async def run_remote_checkpointer():
-    from langgraph_runtime.database import connect
-
     async def checkpointer_list(payload: dict):
         """Search checkpoints"""
 
         result = []
-        … (7 removed lines; their content is not shown in this diff view)
-        )
-            result.append(item)
+        checkpointer = _get_passthrough_checkpointer()
+        async for item in checkpointer.alist(
+            config=payload.get("config"),
+            limit=int(payload.get("limit") or 10),
+            before=payload.get("before"),
+            filter=payload.get("filter"),
+        ):
+            result.append(item)
 
         return result
 
     async def checkpointer_put(payload: dict):
         """Put the new checkpoint metadata"""
 
-        … (7 removed lines; their content is not shown in this diff view)
-        )
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aput(
+            payload["config"],
+            payload["checkpoint"],
+            payload["metadata"],
+            payload.get("new_versions", {}),
+        )
 
     async def checkpointer_get_tuple(payload: dict):
         """Get actual checkpoint values (reads)"""
-        … (2 removed lines; their content is not shown in this diff view)
-        checkpointer = _get_passthrough_checkpointer(conn)
-        return await checkpointer.aget_tuple(config=payload["config"])
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aget_tuple(config=payload["config"])
 
     async def checkpointer_put_writes(payload: dict):
         """Put actual checkpoint values (writes)"""
 
-        … (6 removed lines; their content is not shown in this diff view)
-        )
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aput_writes(
+            payload["config"],
+            payload["writes"],
+            payload["taskId"],
+        )
 
     async def store_batch(payload: dict):
         """Batch operations on the store"""
--- langgraph_api-0.2.45/langgraph_api/stream.py
+++ langgraph_api-0.2.48/langgraph_api/stream.py
@@ -32,7 +32,6 @@ from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.metadata import HOST, PLAN, USER_API_URL, incr_nodes
 from langgraph_api.schema import Run, StreamMode
 from langgraph_api.serde import json_dumpb
-from langgraph_api.utils import AsyncConnectionProto
 from langgraph_runtime.checkpoint import Checkpointer
 from langgraph_runtime.ops import Runs
 
@@ -79,8 +78,6 @@ async def async_tracing_context(*args, **kwargs):
 
 
 async def astream_state(
-    stack: AsyncExitStack,
-    conn: AsyncConnectionProto,
     run: Run,
     attempt: int,
     done: ValueEvent,
@@ -90,7 +87,6 @@ async def astream_state(
 ) -> AnyStream:
     """Stream messages from the runnable."""
     run_id = str(run["run_id"])
-    await stack.enter_async_context(conn.pipeline())
     # extract args from run
     kwargs = run["kwargs"].copy()
     kwargs.pop("webhook", None)
@@ -98,12 +94,13 @@
     subgraphs = kwargs.get("subgraphs", False)
     temporary = kwargs.pop("temporary", False)
     config = kwargs.pop("config")
+    stack = AsyncExitStack()
     graph = await stack.enter_async_context(
         get_graph(
             config["configurable"]["graph_id"],
             config,
             store=(await api_store.get_store()),
-            checkpointer=None if temporary else Checkpointer(
+            checkpointer=None if temporary else Checkpointer(),
         )
     )
     input = kwargs.pop("input")
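Reviewer note: `astream_state` no longer receives a `stack` or `conn` from the caller; it creates its own `AsyncExitStack` and enters the graph context inside it. Reduced to a minimal sketch (names other than `AsyncExitStack` are hypothetical):

```python
from contextlib import AsyncExitStack


async def stream_with_owned_stack(open_graph):
    """Illustrative: the generator owns its exit stack rather than a caller-provided one."""
    stack = AsyncExitStack()
    try:
        graph = await stack.enter_async_context(open_graph())
        yield graph  # placeholder for the real streaming loop over graph events
    finally:
        await stack.aclose()
```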
--- /dev/null
+++ langgraph_api-0.2.48/langgraph_api/worker.py
@@ -0,0 +1,366 @@
+import asyncio
+import time
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+from datetime import UTC, datetime
+from typing import TypedDict, cast
+
+import structlog
+from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload
+from starlette.exceptions import HTTPException
+
+import langgraph_api.logging as lg_logging
+from langgraph_api.auth.custom import SimpleUser, normalize_user
+from langgraph_api.config import (
+    BG_JOB_ISOLATED_LOOPS,
+    BG_JOB_MAX_RETRIES,
+    BG_JOB_TIMEOUT_SECS,
+)
+from langgraph_api.errors import UserInterrupt, UserRollback
+from langgraph_api.js.errors import RemoteException
+from langgraph_api.metadata import incr_runs
+from langgraph_api.schema import Run
+from langgraph_api.state import state_snapshot_to_thread_state
+from langgraph_api.stream import astream_state, consume
+from langgraph_api.utils import with_user
+from langgraph_runtime.database import connect
+from langgraph_runtime.ops import Runs, Threads
+from langgraph_runtime.retry import RETRIABLE_EXCEPTIONS
+
+logger = structlog.stdlib.get_logger(__name__)
+
+
+class WorkerResult(TypedDict):
+    checkpoint: CheckpointPayload | None
+    status: str | None
+    exception: Exception | None
+    run: Run
+    webhook: str | None
+    run_started_at: str
+    run_ended_at: str | None
+
+
+@asynccontextmanager
+async def set_auth_ctx_for_run(
+    run_kwargs: dict, user_id: str | None = None
+) -> AsyncGenerator[None, None]:
+    # user_id is a fallback.
+    try:
+        user = run_kwargs["config"]["configurable"]["langgraph_auth_user"]
+        permissions = run_kwargs["config"]["configurable"]["langgraph_auth_permissions"]
+        user = normalize_user(user)
+    except Exception:
+        user = SimpleUser(user_id) if user_id is not None else None
+        permissions = None
+    if user is not None:
+        async with with_user(user, permissions):
+            yield None
+    else:
+        yield None
+
+
+async def worker(
+    run: Run,
+    attempt: int,
+    main_loop: asyncio.AbstractEventLoop,
+) -> WorkerResult:
+    run_id = run["run_id"]
+    if attempt == 1:
+        incr_runs()
+    checkpoint: CheckpointPayload | None = None
+    exception: Exception | None = None
+    status: str | None = None
+    webhook = run["kwargs"].get("webhook", None)
+    request_created_at: int | None = run["kwargs"]["config"]["configurable"].get(
+        "__request_start_time_ms__"
+    )
+    after_seconds = run["kwargs"]["config"]["configurable"].get("__after_seconds__", 0)
+    run_ended_at: str | None = None
+    run_started_at = datetime.now(UTC)
+    # Note that "created_at" is inclusive of the `after_seconds`
+    run_creation_ms = (
+        int(
+            ((run["created_at"].timestamp() - after_seconds) * 1_000)
+            - request_created_at
+        )
+        if request_created_at is not None
+        else None
+    )
+    temporary = run["kwargs"].get("temporary", False)
+    resumable = run["kwargs"].get("resumable", False)
+    run_created_at = run["created_at"].isoformat()
+    lg_logging.set_logging_context(
+        {
+            "run_id": str(run_id),
+            "run_attempt": attempt,
+            "thread_id": str(run.get("thread_id")),
+            "assistant_id": str(run.get("assistant_id")),
+            "graph_id": _get_graph_id(run),
+            "request_id": _get_request_id(run),
+        }
+    )
+    run_stream_started_at = datetime.now(UTC)
+    await logger.ainfo(
+        "Starting background run",
+        run_started_at=run_started_at.isoformat(),
+        run_creation_ms=run_creation_ms,
+        run_queue_ms=ms(run_started_at, run["created_at"]),
+        run_stream_start_ms=ms(run_stream_started_at, run_started_at),
+    )
+
+    def on_checkpoint(checkpoint_arg: CheckpointPayload):
+        nonlocal checkpoint
+        checkpoint = checkpoint_arg
+
+    def on_task_result(task_result: TaskResultPayload):
+        if checkpoint is not None:
+            for task in checkpoint["tasks"]:
+                if task["id"] == task_result["id"]:
+                    task.update(task_result)
+                    break
+
+    async with Runs.enter(run_id, main_loop) as done:
+        # attempt the run
+        try:
+            if attempt > BG_JOB_MAX_RETRIES:
+                await logger.aerror(
+                    "Run exceeded max attempts",
+                    run_id=run["run_id"],
+                    run_completed_in_ms=(
+                        int((time.time() * 1_000) - request_created_at)
+                        if request_created_at is not None
+                        else None
+                    ),
+                )
+
+                error_message = (
+                    f"Run {run['run_id']} exceeded max attempts ({BG_JOB_MAX_RETRIES}).\n\n"
+                    "This may happen if your code blocks the event loop with synchronous I/O bound calls (network requests, database queries, etc.).\n\n"
+                    "If that is the case, your issues may be resolved by converting synchronous operations to async (e.g., use aiohttp instead of requests).\n\n"
+                )
+
+                if not BG_JOB_ISOLATED_LOOPS:
+                    error_message += (
+                        "Also consider setting BG_JOB_ISOLATED_LOOPS=true in your environment. This will isolate I/O-bound operations to avoid"
+                        " blocking the main API server.\n\n"
+                        "See: https://langchain-ai.github.io/langgraph/cloud/reference/env_var/#bg_job_isolated_loops\n\n"
+                    )
+
+                raise RuntimeError(error_message)
+            async with set_auth_ctx_for_run(run["kwargs"]):
+                if temporary:
+                    stream = astream_state(cast(Run, run), attempt, done)
+                else:
+                    stream = astream_state(
+                        cast(Run, run),
+                        attempt,
+                        done,
+                        on_checkpoint=on_checkpoint,
+                        on_task_result=on_task_result,
+                    )
+                await asyncio.wait_for(
+                    consume(stream, run_id, resumable),
+                    BG_JOB_TIMEOUT_SECS,
+                )
+            run_ended_at_dt = datetime.now(UTC)
+            run_ended_at = run_ended_at_dt.isoformat()
+            await logger.ainfo(
+                "Background run succeeded",
+                run_id=str(run_id),
+                run_attempt=attempt,
+                run_created_at=run_created_at,
+                run_started_at=run_started_at.isoformat(),
+                run_ended_at=run_ended_at,
+                run_exec_ms=ms(run_ended_at_dt, run_started_at),
+                run_completed_in_ms=(
+                    int((run_ended_at_dt.timestamp() * 1_000) - request_created_at)
+                    if request_created_at is not None
+                    else None
+                ),
+            )
+        except Exception as ee:
+            # Note we don't handle asyncio.CancelledError here, as we want to
+            # let it bubble up and rollback db transaction, thus marking the run
+            # as available to be picked up by another worker
+            exception = ee
+
+        # handle exceptions and set status
+        async with connect() as conn:
+            if exception is None:
+                status = "success"
+                await Runs.set_status(conn, run_id, "success")
+                # If a stateful run succeeded but no checkpoint was returned, likely
+                # there was a retriable exception that resumed right at the end
+                if checkpoint is None and not temporary:
+                    await logger.ainfo(
+                        "Fetching missing checkpoint for webhook",
+                        run_id=str(run_id),
+                        run_attempt=attempt,
+                    )
+                    try:
+                        state_snapshot = await Threads.State.get(
+                            conn, run["kwargs"]["config"]
+                        )
+                        checkpoint = state_snapshot_to_thread_state(state_snapshot)
+                    except Exception:
+                        await logger.aerror(
+                            "Failed to fetch missing checkpoint for webhook. Continuing...",
+                            exc_info=True,
+                            run_id=str(run_id),
+                            run_attempt=attempt,
+                        )
+            elif isinstance(exception, TimeoutError):
+                status = "timeout"
+                run_ended_at = datetime.now(UTC).isoformat()
+                await logger.awarning(
+                    "Background run timed out",
+                    run_id=str(run_id),
+                    run_attempt=attempt,
+                    run_created_at=run_created_at,
+                    run_started_at=run_started_at.isoformat(),
+                    run_ended_at=run_ended_at,
+                    run_exec_ms=ms(datetime.now(UTC), run_started_at),
+                    run_completed_in_ms=(
+                        int((run_ended_at_dt.timestamp() * 1_000) - request_created_at)
+                        if request_created_at is not None
+                        else None
+                    ),
+                )
+                await Runs.set_status(conn, run_id, "timeout")
+            elif isinstance(exception, UserRollback):
+                status = "rollback"
+                run_ended_at_dt = datetime.now(UTC)
+                run_ended_at = run_ended_at_dt.isoformat()
+                try:
+                    await Runs.delete(conn, run_id, thread_id=run["thread_id"])
+                    await logger.ainfo(
+                        "Background run rolled back",
+                        run_id=str(run_id),
+                        run_attempt=attempt,
+                        run_created_at=run_created_at,
+                        run_started_at=run_started_at.isoformat(),
+                        run_ended_at=run_ended_at,
+                        run_exec_ms=ms(run_ended_at_dt, run_started_at),
+                        run_completed_in_ms=(
+                            int(
+                                (run_ended_at_dt.timestamp() * 1_000)
+                                - request_created_at
+                            )
+                            if request_created_at is not None
+                            else None
+                        ),
+                    )
+                except HTTPException as e:
+                    if e.status_code == 404:
+                        await logger.ainfo(
+                            "Ignoring rollback error for missing run",
+                            run_id=str(run_id),
+                            run_attempt=attempt,
+                            run_created_at=run_created_at,
+                        )
+                    else:
+                        raise
+
+                checkpoint = None  # reset the checkpoint
+            elif isinstance(exception, UserInterrupt):
+                status = "interrupted"
+                run_ended_at_dt = datetime.now(UTC)
+                run_ended_at = run_ended_at_dt.isoformat()
+                await logger.ainfo(
+                    "Background run interrupted",
+                    run_id=str(run_id),
+                    run_attempt=attempt,
+                    run_created_at=run_created_at,
+                    run_started_at=run_started_at.isoformat(),
+                    run_ended_at=run_ended_at,
+                    run_exec_ms=ms(run_ended_at_dt, run_started_at),
+                    run_completed_in_ms=(
+                        int((run_ended_at_dt.timestamp() * 1_000) - request_created_at)
+                        if request_created_at is not None
+                        else None
+                    ),
+                )
+                await Runs.set_status(conn, run_id, "interrupted")
+            elif isinstance(exception, RETRIABLE_EXCEPTIONS):
+                status = "retry"
+                run_ended_at_dt = datetime.now(UTC)
+                run_ended_at = run_ended_at_dt.isoformat()
+                await logger.awarning(
+                    f"Background run failed, will retry. Exception: {exception}",
+                    exc_info=True,
+                    run_id=str(run_id),
+                    run_attempt=attempt,
+                    run_created_at=run_created_at,
+                    run_started_at=run_started_at.isoformat(),
+                    run_ended_at=run_ended_at,
+                    run_exec_ms=ms(run_ended_at_dt, run_started_at),
+                )
+                await Runs.set_status(conn, run_id, "pending")
+            else:
+                status = "error"
+                run_ended_at_dt = datetime.now(UTC)
+                run_ended_at = run_ended_at_dt.isoformat()
+                await logger.aexception(
+                    f"Background run failed. Exception: {exception}",
+                    exc_info=not isinstance(exception, RemoteException),
+                    run_id=str(run_id),
+                    run_attempt=attempt,
+                    run_created_at=run_created_at,
+                    run_started_at=run_started_at.isoformat(),
+                    run_ended_at=run_ended_at,
+                    run_exec_ms=ms(run_ended_at_dt, run_started_at),
+                )
+                await Runs.set_status(conn, run_id, "error")
+
+            # delete or set status of thread
+            if not isinstance(exception, RETRIABLE_EXCEPTIONS):
+                if temporary:
+                    await Threads.delete(conn, run["thread_id"])
+                else:
+                    try:
+                        await Threads.set_status(
+                            conn, run["thread_id"], checkpoint, exception
+                        )
+                    except HTTPException as e:
+                        if e.status_code == 404:
+                            await logger.ainfo(
+                                "Ignoring set_status error for missing thread",
+                                exc=str(e),
+                            )
+                        else:
+                            raise
+
+        if isinstance(exception, RETRIABLE_EXCEPTIONS):
+            # re-raise so Runs.enter knows not to mark as done
+            # Runs.enter will catch the exception, but what triggers the retry
+            # is setting the status to "pending"
+            raise exception
+
+    return WorkerResult(
+        checkpoint=checkpoint,
+        status=status,
+        exception=exception,
+        run=run,
+        webhook=webhook,
+        run_started_at=run_started_at.isoformat(),
+        run_ended_at=run_ended_at,
+    )
+
+
+def ms(after: datetime, before: datetime) -> int:
+    return int((after - before).total_seconds() * 1000)
+
+
+def _get_request_id(run: Run) -> str | None:
+    try:
+        return run["kwargs"]["config"]["configurable"]["langgraph_request_id"]
+    except Exception:
+        return None
+
+
+def _get_graph_id(run: Run) -> str | None:
+    try:
+        return run["kwargs"]["config"]["configurable"]["graph_id"]
+    except Exception:
+        logger.info(f"Failed to get graph_id from run {run['run_id']}")
+        return "Unknown"