langgraph-api 0.0.26__py3-none-any.whl → 0.0.28rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- langgraph_api/api/__init__.py +2 -0
- langgraph_api/api/assistants.py +43 -13
- langgraph_api/api/meta.py +2 -1
- langgraph_api/api/runs.py +14 -1
- langgraph_api/api/ui.py +68 -0
- langgraph_api/asyncio.py +43 -4
- langgraph_api/auth/middleware.py +2 -2
- langgraph_api/cli.py +72 -57
- langgraph_api/config.py +23 -1
- langgraph_api/cron_scheduler.py +1 -1
- langgraph_api/graph.py +5 -0
- langgraph_api/http.py +24 -7
- langgraph_api/js/.gitignore +2 -0
- langgraph_api/js/build.mts +49 -3
- langgraph_api/js/client.mts +84 -40
- langgraph_api/js/global.d.ts +1 -0
- langgraph_api/js/package.json +15 -7
- langgraph_api/js/remote.py +662 -16
- langgraph_api/js/src/graph.mts +5 -4
- langgraph_api/js/sse.py +138 -0
- langgraph_api/js/tests/api.test.mts +28 -0
- langgraph_api/js/tests/compose-postgres.yml +2 -2
- langgraph_api/js/tests/graphs/agent.css +1 -0
- langgraph_api/js/tests/graphs/agent.ui.tsx +10 -0
- langgraph_api/js/tests/graphs/package.json +2 -2
- langgraph_api/js/tests/graphs/yarn.lock +13 -13
- langgraph_api/js/yarn.lock +710 -1187
- langgraph_api/lifespan.py +15 -5
- langgraph_api/logging.py +9 -0
- langgraph_api/metadata.py +5 -1
- langgraph_api/middleware/http_logger.py +1 -1
- langgraph_api/patch.py +2 -0
- langgraph_api/queue_entrypoint.py +63 -0
- langgraph_api/schema.py +2 -0
- langgraph_api/stream.py +1 -0
- langgraph_api/webhook.py +42 -0
- langgraph_api/{queue.py → worker.py} +52 -166
- {langgraph_api-0.0.26.dist-info → langgraph_api-0.0.28rc1.dist-info}/METADATA +8 -8
- {langgraph_api-0.0.26.dist-info → langgraph_api-0.0.28rc1.dist-info}/RECORD +49 -46
- langgraph_storage/database.py +8 -22
- langgraph_storage/inmem_stream.py +108 -0
- langgraph_storage/ops.py +80 -57
- langgraph_storage/queue.py +126 -103
- langgraph_storage/retry.py +5 -1
- langgraph_storage/store.py +5 -1
- openapi.json +3 -3
- langgraph_api/js/client.new.mts +0 -861
- langgraph_api/js/remote_new.py +0 -694
- langgraph_api/js/remote_old.py +0 -667
- langgraph_api/js/server_sent_events.py +0 -126
- {langgraph_api-0.0.26.dist-info → langgraph_api-0.0.28rc1.dist-info}/LICENSE +0 -0
- {langgraph_api-0.0.26.dist-info → langgraph_api-0.0.28rc1.dist-info}/WHEEL +0 -0
- {langgraph_api-0.0.26.dist-info → langgraph_api-0.0.28rc1.dist-info}/entry_points.txt +0 -0
langgraph_storage/queue.py
CHANGED
@@ -1,108 +1,131 @@
 import asyncio
-import logging
-from collections import defaultdict
-from dataclasses import dataclass
-from uuid import UUID
 
[… old lines 7–43 removed; their content is not shown in this diff view …]
-    async def put(self, run_id: UUID, message: Message) -> None:
-        topic = message.topic.decode()
-        if "control" in topic:
-            self.control_queues[run_id].append(message)
-        queues = self.queues.get(run_id, [])
-        coros = [queue.put(message) for queue in queues]
-        results = await asyncio.gather(*coros, return_exceptions=True)
-        for result in results:
-            if isinstance(result, Exception):
-                logger.exception(f"Failed to put message in queue: {result}")
-
-    async def add_queue(self, run_id: UUID) -> asyncio.Queue:
-        queue = ContextQueue()
-        self.queues[run_id].append(queue)
-        for control_msg in self.control_queues[run_id]:
-            try:
-                await queue.put(control_msg)
-            except Exception:
+import structlog
+
+from langgraph_api.config import (
+    BG_JOB_HEARTBEAT,
+    N_JOBS_PER_WORKER,
+    STATS_INTERVAL_SECS,
+)
+from langgraph_api.graph import is_js_graph
+from langgraph_api.schema import Run
+from langgraph_api.webhook import call_webhook
+from langgraph_api.worker import WorkerResult, worker
+from langgraph_storage.database import connect
+from langgraph_storage.ops import Runs
+
+logger = structlog.stdlib.get_logger(__name__)
+
+WORKERS: set[asyncio.Task] = set()
+SHUTDOWN_GRACE_PERIOD_SECS = 5
+
+
+async def queue():
+    concurrency = N_JOBS_PER_WORKER
+    loop = asyncio.get_running_loop()
+    last_stats_secs: int | None = None
+    last_sweep_secs: int | None = None
+    semaphore = asyncio.Semaphore(concurrency)
+    WEBHOOKS: set[asyncio.Task] = set()
+
+    def cleanup(task: asyncio.Task):
+        WORKERS.remove(task)
+        semaphore.release()
+        try:
+            if task.cancelled():
+                return
+            exc = task.exception()
+            if exc and not isinstance(exc, asyncio.CancelledError):
                 logger.exception(
-                    f"
+                    f"Background worker failed for task {task}", exc_info=exc
                 )
[… old lines 65–82 removed; their content is not shown in this diff view …]
-    global stream_manager
-    stream_manager = StreamManager()
-
-
-async def stop_queue() -> None:
-    """Clean up the queue system.
-    Clear all queues and stored control messages."""
-    global stream_manager
-
-    # Send 'done' message to all active queues before clearing
-    for run_id in list(stream_manager.queues.keys()):
-        control_message = Message(topic=f"run:{run_id}:control".encode(), data=b"done")
-        for queue in stream_manager.queues[run_id]:
+                return
+            result: WorkerResult | None = task.result()
+            if result and result["webhook"]:
+                hook_task = loop.create_task(
+                    call_webhook(result),
+                    name=f"webhook-{result['run']['run_id']}",
+                )
+                WEBHOOKS.add(hook_task)
+                hook_task.add_done_callback(WEBHOOKS.remove)
+        except asyncio.CancelledError:
+            pass
+        except Exception as exc:
+            logger.exception("Background worker cleanup failed", exc_info=exc)
+
+    await logger.ainfo(f"Starting {concurrency} background workers")
+    try:
+        run: Run | None = None
+        while True:
             try:
[… old lines 97–108 removed; their content is not shown in this diff view …]
+                # check if we need to sweep runs
+                do_sweep = (
+                    last_sweep_secs is None
+                    or loop.time() - last_sweep_secs > BG_JOB_HEARTBEAT * 2
+                )
+                # check if we need to update stats
+                if calc_stats := (
+                    last_stats_secs is None
+                    or loop.time() - last_stats_secs > STATS_INTERVAL_SECS
+                ):
+                    last_stats_secs = loop.time()
+                    active = len(WORKERS)
+                    await logger.ainfo(
+                        "Worker stats",
+                        max=concurrency,
+                        available=concurrency - active,
+                        active=active,
+                    )
+                # wait for semaphore to respect concurrency
+                await semaphore.acquire()
+                # skip the wait, if 1st time, or got a run last time
+                wait = run is None and last_stats_secs is not None
+                # try to get a run, handle it
+                run = None
+                async for run, attempt in Runs.next(wait=wait, limit=1):
+                    graph_id = (
+                        run["kwargs"]
+                        .get("config", {})
+                        .get("configurable", {})
+                        .get("graph_id")
+                    )
+
+                    if graph_id and is_js_graph(graph_id):
+                        task_name = f"js-run-{run['run_id']}-attempt-{attempt}"
+                    else:
+                        task_name = f"run-{run['run_id']}-attempt-{attempt}"
+                    task = asyncio.create_task(
+                        worker(run, attempt, loop),
+                        name=task_name,
+                    )
+                    task.add_done_callback(cleanup)
+                    WORKERS.add(task)
+                else:
+                    semaphore.release()
+                # run stats and sweep if needed
+                if calc_stats or do_sweep:
+                    async with connect() as conn:
+                        # update stats if needed
+                        if calc_stats:
+                            stats = await Runs.stats(conn)
+                            await logger.ainfo("Queue stats", **stats)
+                        # sweep runs if needed
+                        if do_sweep:
+                            last_sweep_secs = loop.time()
+                            run_ids = await Runs.sweep(conn)
+                            logger.info("Sweeped runs", run_ids=run_ids)
+            except Exception as exc:
+                # keep trying to run the scheduler indefinitely
+                logger.exception("Background worker scheduler failed", exc_info=exc)
+                semaphore.release()
+                await exit.aclose()
+    finally:
+        logger.info("Shutting down background workers")
+        for task in WORKERS:
+            task.cancel()
+        for task in WEBHOOKS:
+            task.cancel()
+        await asyncio.wait_for(
+            asyncio.gather(*WORKERS, *WEBHOOKS, return_exceptions=True),
+            SHUTDOWN_GRACE_PERIOD_SECS,
+        )
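The new queue loop above follows a common asyncio pattern: a semaphore caps the number of in-flight runs, each run is handled by its own task, and a done-callback frees the slot and inspects the outcome. The following is a minimal, self-contained sketch of that pattern only; fetch_next_job and handle_job are hypothetical stand-ins for Runs.next() and worker(), and the concurrency value is illustrative, not the package's configured default.

import asyncio
import itertools

CONCURRENCY = 4                 # illustrative cap, akin to N_JOBS_PER_WORKER
_counter = itertools.count()


async def fetch_next_job() -> int | None:
    """Hypothetical stand-in for Runs.next(): return a job id, or None when idle."""
    await asyncio.sleep(0.1)
    job_id = next(_counter)
    return job_id if job_id < 10 else None


async def handle_job(job_id: int) -> None:
    """Hypothetical stand-in for worker(): do the actual work for one run."""
    await asyncio.sleep(0.5)
    print(f"finished job {job_id}")


async def scheduler() -> None:
    semaphore = asyncio.Semaphore(CONCURRENCY)
    tasks: set[asyncio.Task] = set()

    def cleanup(task: asyncio.Task) -> None:
        # free the concurrency slot and surface failures, mirroring cleanup() above
        tasks.discard(task)
        semaphore.release()
        if not task.cancelled() and task.exception():
            print(f"job failed: {task.exception()!r}")

    while True:
        await semaphore.acquire()          # wait for a free slot
        job = await fetch_next_job()
        if job is None:                    # nothing to do: give the slot back and stop
            semaphore.release()
            break
        task = asyncio.create_task(handle_job(job), name=f"run-{job}")
        task.add_done_callback(cleanup)
        tasks.add(task)

    # graceful shutdown: wait for in-flight jobs, as the finally block above does
    await asyncio.gather(*tasks, return_exceptions=True)


if __name__ == "__main__":
    asyncio.run(scheduler())

The done-callback is what keeps the loop honest: the slot is released only when a run actually finishes, whether it succeeded, failed, or was cancelled.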
langgraph_storage/retry.py
CHANGED
@@ -7,7 +7,11 @@ P = ParamSpec("P")
 T = TypeVar("T")
 
 
[… old line 10 removed; its content is not shown in this diff view …]
+class RetryableException(Exception):
+    pass
+
+
+RETRIABLE_EXCEPTIONS: tuple[type[BaseException], ...] = (RetryableException,)
 OVERLOADED_EXCEPTIONS: tuple[type[BaseException], ...] = ()
 
 
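The retry wrapper that consumes RETRIABLE_EXCEPTIONS is not shown in this diff, but the tuple-of-exception-types idiom is usually used as an except filter. A generic sketch of that idiom follows; with_retries, flaky, and the backoff values are illustrative and not part of langgraph-api.

import asyncio
from collections.abc import Awaitable, Callable
from typing import TypeVar

T = TypeVar("T")


class RetryableException(Exception):
    pass


RETRIABLE_EXCEPTIONS: tuple[type[BaseException], ...] = (RetryableException,)


async def with_retries(
    fn: Callable[[], Awaitable[T]],
    attempts: int = 3,
    backoff: float = 0.5,
) -> T:
    for attempt in range(1, attempts + 1):
        try:
            return await fn()
        except RETRIABLE_EXCEPTIONS:
            if attempt == attempts:
                raise                                  # out of attempts: propagate
            await asyncio.sleep(backoff * attempt)     # simple linear backoff


async def flaky() -> str:
    # hypothetical operation that signals "safe to retry" by raising RetryableException
    raise RetryableException("transient failure")


# asyncio.run(with_retries(flaky))  # would retry twice, then re-raise

Exposing a dedicated RetryableException lets callers opt a failure into the retry path explicitly instead of retrying on every exception type.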
langgraph_storage/store.py
CHANGED
@@ -1,4 +1,5 @@
 import os
+import threading
 from collections import defaultdict
 from collections.abc import Iterable
 from typing import Any
@@ -65,6 +66,7 @@ _STORE_FILE = os.path.join(".langgraph_api", "store.pckl")
 _VECTOR_FILE = os.path.join(".langgraph_api", "store.vectors.pckl")
 os.makedirs(".langgraph_api", exist_ok=True)
 STORE = DiskBackedInMemStore()
+BATCHED_STORE = threading.local()
 
 
 def set_store_config(config: dict) -> None:
@@ -77,4 +79,6 @@ def set_store_config(config: dict) -> None:
 
 
 def Store(*args: Any, **kwargs: Any) -> DiskBackedInMemStore:
[… old line 80 removed; its content is not shown in this diff view …]
+    if not hasattr(BATCHED_STORE, "store"):
+        BATCHED_STORE.store = BatchedStore(STORE)
+    return BATCHED_STORE.store
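Store() now hands each thread its own lazily created wrapper around the shared store via threading.local(). A minimal sketch of that pattern, with a hypothetical BatchedStore placeholder standing in for the real wrapper class:

import threading

STORE = {"shared": "base store"}          # stands in for DiskBackedInMemStore()
BATCHED_STORE = threading.local()


class BatchedStore:
    """Hypothetical per-thread wrapper over the shared base store."""

    def __init__(self, base) -> None:
        self.base = base


def Store() -> BatchedStore:
    # first call on a thread creates the wrapper; later calls on that thread reuse it
    if not hasattr(BATCHED_STORE, "store"):
        BATCHED_STORE.store = BatchedStore(STORE)
    return BATCHED_STORE.store


assert Store() is Store()                  # same object within one thread

The thread-local keeps one wrapper per thread while all wrappers share the same underlying STORE, so per-thread state in the wrapper never has to be synchronized across threads.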
openapi.json
CHANGED
@@ -2962,7 +2962,7 @@
 "type": "string",
 "maxLength": 65536,
 "minLength": 1,
-"format": "uri",
+"format": "uri-reference",
 "title": "Webhook",
 "description": "Webhook to call after LangGraph API call is done."
 },
@@ -3380,7 +3380,7 @@
 "type": "string",
 "maxLength": 65536,
 "minLength": 1,
-"format": "uri",
+"format": "uri-reference",
 "title": "Webhook",
 "description": "Webhook to call after LangGraph API call is done."
 },
@@ -3607,7 +3607,7 @@
 "type": "string",
 "maxLength": 65536,
 "minLength": 1,
-"format": "uri",
+"format": "uri-reference",
 "title": "Webhook",
 "description": "Webhook to call after LangGraph API call is done."
 },
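The only change in openapi.json relaxes the webhook field from "format": "uri" to "format": "uri-reference"; per JSON Schema and RFC 3986, a URI reference may also be relative, i.e. have no scheme or host. A small illustration with made-up webhook values (these are not endpoints defined by langgraph-api):

from urllib.parse import urlparse

absolute = urlparse("https://example.com/hooks/run-done")    # valid under "uri"
relative = urlparse("/hooks/run-done")                        # only valid under "uri-reference"

print(absolute.scheme, absolute.netloc)   # https example.com
print(relative.scheme, relative.netloc)   # empty strings: no scheme/host, a relative reference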