langgraph-api 0.0.27__tar.gz → 0.0.28rc1__tar.gz
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/PKG-INFO +2 -2
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/__init__.py +2 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/assistants.py +43 -13
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/meta.py +1 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/runs.py +14 -1
- langgraph_api-0.0.28rc1/langgraph_api/api/ui.py +68 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/asyncio.py +43 -4
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/middleware.py +2 -2
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/config.py +14 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/cron_scheduler.py +1 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/graph.py +5 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/http.py +24 -7
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/.gitignore +2 -0
- langgraph_api-0.0.28rc1/langgraph_api/js/build.mts +98 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/client.mts +67 -31
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/global.d.ts +1 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/package.json +11 -5
- langgraph_api-0.0.27/langgraph_api/js/remote_old.py → langgraph_api-0.0.28rc1/langgraph_api/js/remote.py +48 -54
- langgraph_api-0.0.28rc1/langgraph_api/js/sse.py +138 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/api.test.mts +28 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/compose-postgres.yml +2 -2
- langgraph_api-0.0.28rc1/langgraph_api/js/tests/graphs/agent.css +1 -0
- langgraph_api-0.0.28rc1/langgraph_api/js/tests/graphs/agent.ui.tsx +10 -0
- langgraph_api-0.0.28rc1/langgraph_api/js/tests/graphs/package.json +7 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/yarn.lock +13 -13
- langgraph_api-0.0.28rc1/langgraph_api/js/yarn.lock +1692 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/lifespan.py +15 -5
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/logging.py +9 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/metadata.py +5 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/middleware/http_logger.py +1 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/patch.py +2 -0
- langgraph_api-0.0.28rc1/langgraph_api/queue_entrypoint.py +63 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/schema.py +2 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/stream.py +1 -0
- langgraph_api-0.0.28rc1/langgraph_api/webhook.py +42 -0
- langgraph_api-0.0.27/langgraph_api/queue.py → langgraph_api-0.0.28rc1/langgraph_api/worker.py +52 -166
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/database.py +8 -22
- langgraph_api-0.0.27/langgraph_storage/queue.py → langgraph_api-0.0.28rc1/langgraph_storage/inmem_stream.py +2 -2
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/ops.py +80 -57
- langgraph_api-0.0.28rc1/langgraph_storage/queue.py +131 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/retry.py +5 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/store.py +5 -1
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/openapi.json +3 -3
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/pyproject.toml +2 -2
- langgraph_api-0.0.27/langgraph_api/js/build.mts +0 -55
- langgraph_api-0.0.27/langgraph_api/js/client.new.mts +0 -875
- langgraph_api-0.0.27/langgraph_api/js/remote.py +0 -18
- langgraph_api-0.0.27/langgraph_api/js/remote_new.py +0 -694
- langgraph_api-0.0.27/langgraph_api/js/server_sent_events.py +0 -126
- langgraph_api-0.0.27/langgraph_api/js/tests/graphs/package.json +0 -7
- langgraph_api-0.0.27/langgraph_api/js/yarn.lock +0 -2174
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/LICENSE +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/README.md +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/openapi.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/store.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/threads.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/custom.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/langsmith/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/langsmith/backend.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/langsmith/client.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/noop.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/studio_user.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/cli.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/errors.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/base.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/errors.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/schema.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/graph.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/hooks.mjs +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/parser/parser.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/parser/parser.worker.mjs +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/schema/types.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/schema/types.template.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/utils/importMap.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/src/utils/serde.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/.gitignore +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/agent.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/delay.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/error.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/langgraph.json +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/nested.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/graphs/weather.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/parser.test.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/js/tests/utils.mts +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/middleware/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/middleware/private_network.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/models/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/models/run.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/route.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/serde.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/server.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/sse.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/state.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/utils.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/validation.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_license/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_license/middleware.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_license/validation.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/__init__.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/checkpoint.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_storage/ttl_dict.py +0 -0
- {langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/logging.json +0 -0
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langgraph-api
-Version: 0.0.27
+Version: 0.0.28rc1
 Summary:
 License: Elastic-2.0
 Author: Nuno Campos
@@ -22,7 +22,7 @@ Requires-Dist: orjson (>=3.9.7)
 Requires-Dist: pyjwt (>=2.9.0,<3.0.0)
 Requires-Dist: sse-starlette (>=2.1.0,<2.2.0)
 Requires-Dist: starlette (>=0.38.6)
-Requires-Dist: structlog (>=…
+Requires-Dist: structlog (>=24.1.0,<26)
 Requires-Dist: tenacity (>=8.0.0)
 Requires-Dist: uvicorn (>=0.26.0)
 Requires-Dist: watchfiles (>=0.13)
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/__init__.py

@@ -15,6 +15,7 @@ from langgraph_api.api.openapi import get_openapi_spec
 from langgraph_api.api.runs import runs_routes
 from langgraph_api.api.store import store_routes
 from langgraph_api.api.threads import threads_routes
+from langgraph_api.api.ui import ui_routes
 from langgraph_api.auth.middleware import auth_middleware
 from langgraph_api.config import HTTP_CONFIG, MIGRATIONS_PATH
 from langgraph_api.graph import js_bg_tasks
@@ -68,6 +69,7 @@ else:
     protected_routes.extend(runs_routes)
     protected_routes.extend(threads_routes)
     protected_routes.extend(store_routes)
+    protected_routes.extend(ui_routes)

 routes: list[BaseRoute] = []
 user_router = None
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/assistants.py

@@ -4,6 +4,7 @@ from uuid import uuid4
 import structlog
 from langchain_core.runnables.utils import create_model
 from langgraph.pregel import Pregel
+from pydantic import TypeAdapter
 from starlette.exceptions import HTTPException
 from starlette.responses import Response
 from starlette.routing import BaseRoute
@@ -27,6 +28,38 @@ from langgraph_storage.retry import retry_db
 logger = structlog.stdlib.get_logger(__name__)


+def _get_configurable_jsonschema(graph: Pregel) -> dict:
+    """Get the JSON schema for the configurable part of the graph.
+
+    Important: we only return the `configurable` part of the schema.
+
+    The default get_config_schema method returns the entire schema (RunnableConfig),
+    which includes other root keys like "max_concurrency", which we
+    do not want to expose.
+
+    Args:
+        graph: The graph to get the schema for.
+
+    Returns:
+        The JSON schema for the configurable part of the graph.
+    """
+    # Otherwise, use the config_schema method.
+    config_schema = graph.config_schema()
+
+    if "configurable" in config_schema.__fields__:
+        configurable = TypeAdapter(config_schema.__fields__["configurable"].annotation)
+        json_schema = configurable.json_schema()
+        # The type name of the configurable type is not preserved.
+        # We'll add it back to the schema if we can.
+        if hasattr(graph, "config_type") and graph.config_type is not None:
+            if hasattr(graph.config_type, "__name__"):
+                json_schema["title"] = graph.config_type.__name__
+        return json_schema
+
+    # If the schema does not have a configurable field, return an empty schema.
+    return {}
+
+
 def _state_jsonschema(graph: Pregel) -> dict | None:
     fields: dict = {}
     for k in graph.stream_channels_list:
@@ -56,11 +89,7 @@ def _graph_schemas(graph: Pregel) -> dict:
     output_schema = None
     state_schema = _state_jsonschema(graph)
     try:
-        config_schema = (
-            graph.config_schema().__fields__["configurable"].annotation.schema()
-            if "configurable" in graph.config_schema().__fields__
-            else {}
-        )
+        config_schema = _get_configurable_jsonschema(graph)
     except Exception as e:
         logger.warning(
             f"Failed to get config schema for graph {graph.name} with error: `{str(e)}`"
@@ -227,14 +256,14 @@ async def get_assistant_schemas(
     )

     try:
-        input_schema = graph.…
+        input_schema = graph.get_input_jsonschema()
     except Exception as e:
         logger.warning(
             f"Failed to get input schema for graph {graph.name} with error: `{str(e)}`"
         )
         input_schema = None
     try:
-        output_schema = graph.…
+        output_schema = graph.get_output_jsonschema()
     except Exception as e:
         logger.warning(
             f"Failed to get output schema for graph {graph.name} with error: `{str(e)}`"
@@ -243,11 +272,7 @@ async def get_assistant_schemas(

     state_schema = _state_jsonschema(graph)
     try:
-        config_schema = (
-            graph.config_schema().__fields__["configurable"].annotation.schema()
-            if "configurable" in graph.config_schema().__fields__
-            else {}
-        )
+        config_schema = _get_configurable_jsonschema(graph)
     except Exception as e:
         config_schema = None
         logger.warning(
@@ -309,7 +334,12 @@ async def get_assistant_versions(request: ApiRequest) -> ApiResponse:
         limit=payload.get("limit") or 10,
         offset=payload.get("offset") or 0,
     )
-    …
+    assistants = [assistant async for assistant in assistants_iter]
+    if not assistants:
+        raise HTTPException(
+            status_code=404, detail=f"Assistant {assistant_id} not found"
+        )
+    return ApiResponse(assistants)


 @retry_db
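For context, a minimal sketch of what the new _get_configurable_jsonschema helper produces; the Configuration TypedDict and its fields below are hypothetical, not part of the package:

import json
from typing import TypedDict

from pydantic import TypeAdapter


class Configuration(TypedDict, total=False):
    # Hypothetical configurable type, for illustration only.
    model_name: str
    temperature: float


# Mirrors the helper: only the `configurable` annotation is converted, so root
# RunnableConfig keys such as "max_concurrency" never reach the exposed schema.
schema = TypeAdapter(Configuration).json_schema()
schema["title"] = Configuration.__name__  # re-attach the type name, as the helper does
print(json.dumps(schema, indent=2))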
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/meta.py

@@ -3,11 +3,11 @@ import os
 from starlette.responses import JSONResponse, PlainTextResponse

 from langgraph_api import config
-from langgraph_api.queue import WORKERS
 from langgraph_api.route import ApiRequest
 from langgraph_license.validation import plus_features_enabled
 from langgraph_storage.database import connect, pool_stats
 from langgraph_storage.ops import Runs
+from langgraph_storage.queue import WORKERS

 METRICS_FORMATS = {"prometheus", "json"}
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/api/runs.py

@@ -184,7 +184,16 @@ async def wait_run(request: ApiRequest):
             vchunk = chunk
         elif mode == b"error":
             vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+    if vchunk is not None:
         last_chunk.set(vchunk)
+    else:
+        async with connect() as conn:
+            thread_iter = await Threads.get(conn, thread_id)
+            try:
+                thread = await anext(thread_iter)
+                last_chunk.set(thread["values"])
+            except StopAsyncIteration:
+                last_chunk.set(b"{}")

     # keep the connection open by sending whitespace every 5 seconds
     # leading whitespace will be ignored by json parsers
@@ -244,7 +253,11 @@ async def wait_run_stateless(request: ApiRequest):
             vchunk = chunk
         elif mode == b"error":
             vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+    if vchunk is not None:
         last_chunk.set(vchunk)
+    else:
+        # we can't fetch the thread (it was deleted), so just return empty values
+        last_chunk.set(b"{}")

     # keep the connection open by sending whitespace every 5 seconds
     # leading whitespace will be ignored by json parsers
@@ -359,7 +372,7 @@ async def cancel_run(
     run_id = request.path_params["run_id"]
     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
     validate_uuid(run_id, "Invalid run ID: must be a UUID")
-    wait_str = request.query_params.get("wait", …
+    wait_str = request.query_params.get("wait", "false")
     wait = wait_str.lower() in {"true", "yes", "1"}
     action_str = request.query_params.get("action", "interrupt")
     action = action_str if action_str in {"interrupt", "rollback"} else "interrupt"
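The practical effect of the wait_run change: if the run finishes without emitting a values or error chunk, the endpoint now falls back to the thread's current values instead of returning nothing. A hedged sketch of calling the wait endpoint from a client; the port, assistant id, and input payload are illustrative, not prescribed by this diff:

import asyncio

import httpx


async def wait_for_run() -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:2024") as client:
        # Create a thread, then POST /threads/{thread_id}/runs/wait, which
        # blocks until the run finishes and returns its final values (or, with
        # this change, the thread's current values if no chunk was produced).
        thread = (await client.post("/threads", json={})).json()
        resp = await client.post(
            f"/threads/{thread['thread_id']}/runs/wait",
            json={"assistant_id": "agent", "input": {"messages": []}},  # illustrative
            timeout=None,
        )
        resp.raise_for_status()
        return resp.json()


print(asyncio.run(wait_for_run()))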
langgraph_api-0.0.28rc1/langgraph_api/api/ui.py

@@ -0,0 +1,68 @@
+import json
+import os
+from functools import lru_cache
+from pathlib import Path
+from typing import TypedDict
+
+from orjson import loads
+from starlette.responses import Response
+from starlette.routing import BaseRoute, Mount
+from starlette.staticfiles import StaticFiles
+
+from langgraph_api.route import ApiRequest, ApiRoute
+
+# Get path to built UI assets
+UI_DIR = Path(os.path.dirname(__file__)).parent / "js" / "ui"
+SCHEMAS_FILE = Path(os.path.dirname(__file__)).parent / "js" / "client.ui.schemas.json"
+
+
+class UiSchema(TypedDict):
+    name: str
+    assets: list[str]
+
+
+@lru_cache(maxsize=1)
+def load_ui_schemas() -> dict[str, UiSchema]:
+    """Load and cache UI schema mappings from JSON file."""
+    if not SCHEMAS_FILE.exists():
+        return {}
+
+    with open(SCHEMAS_FILE) as f:
+        return loads(f.read())
+
+
+async def handle_ui(request: ApiRequest) -> Response:
+    """Serve UI HTML with appropriate script/style tags."""
+    graph_id = request.path_params["graph_id"]
+    host = request.headers.get("host")
+    message = await request.json(schema=None)
+
+    # Load UI file paths from schema
+    schemas = load_ui_schemas()
+
+    if graph_id not in schemas:
+        return Response(f"UI not found for graph '{graph_id}'", status_code=404)
+
+    result = []
+    for filepath in schemas[graph_id]["assets"]:
+        basename = os.path.basename(filepath)
+        ext = os.path.splitext(basename)[1]
+
+        if ext == ".css":
+            result.append(
+                f'<link rel="stylesheet" href="//{host}/ui/{graph_id}/{basename}" />'
+            )
+        elif ext == ".js":
+            result.append(
+                f'<script src="//{host}/ui/{graph_id}/{basename}" '
+                f'onload=\'__LGUI_{graph_id}.render({json.dumps(message["name"])}, "{{{{shadowRootId}}}}")\'>'
+                '</script>'
+            )
+
+    return Response(content="\n".join(result), headers={"Content-Type": "text/html"})
+
+
+ui_routes: list[BaseRoute] = [
+    ApiRoute("/ui/{graph_id}", handle_ui, methods=["POST"]),
+    Mount("/ui", StaticFiles(directory=UI_DIR, check_dir=False)),
+]
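A hedged sketch of calling the new endpoint from a client; the graph id, component name, and port are illustrative, and the payload shape is inferred from the handler above (it reads message["name"]):

import asyncio

import httpx


async def fetch_ui_tags() -> str:
    async with httpx.AsyncClient(base_url="http://localhost:2024") as client:
        # POST /ui/{graph_id} returns <link>/<script> tags for the graph's
        # built UI assets; the asset files themselves are mounted under /ui.
        resp = await client.post("/ui/agent", json={"name": "weather"})
        resp.raise_for_status()
        return resp.text


print(asyncio.run(fetch_ui_tags()))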
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/asyncio.py

@@ -82,7 +82,7 @@ def _create_task_done_callback(
     try:
         if exc := task.exception():
             if not isinstance(exc, ignore_exceptions):
-                logger.exception("…
+                logger.exception("asyncio.task failed", exc_info=exc)
     except asyncio.CancelledError:
         pass

@@ -107,11 +107,18 @@ class SimpleTaskGroup(AbstractAsyncContextManager["SimpleTaskGroup"]):
     tasks: set[asyncio.Task]

     def __init__(
-        self,
+        self,
+        *coros: Coroutine[Any, Any, T],
+        cancel: bool = False,
+        wait: bool = True,
+        taskset: set[asyncio.Task] | None = None,
     ) -> None:
-        self.tasks = set()
+        self.tasks = taskset if taskset is not None else set()
         self.cancel = cancel
         self.wait = wait
+        if taskset:
+            for task in tuple(taskset):
+                task.add_done_callback(partial(self._create_task_done_callback, ()))
         for coro in coros:
             self.create_task(coro)

@@ -125,7 +132,7 @@ class SimpleTaskGroup(AbstractAsyncContextManager["SimpleTaskGroup"]):
     try:
         if exc := task.exception():
             if not isinstance(exc, ignore_exceptions):
-                logger.exception("…
+                logger.exception("asyncio.task failed in task group", exc_info=exc)
     except asyncio.CancelledError:
         pass

@@ -204,3 +211,35 @@ async def aclosing_aiter(aiter: AsyncIterator[T]) -> AsyncIterator[T]:
     async with aclosing(aiter):
         async for item in aiter:
             yield item
+
+
+class AsyncQueue(Generic[T], asyncio.Queue[T]):
+    """Async unbounded FIFO queue with a wait() method.
+
+    Subclassed from asyncio.Queue, adding a wait() method."""
+
+    async def wait(self) -> None:
+        """If queue is empty, wait until an item is available.
+
+        Copied from Queue.get(), removing the call to .get_nowait(),
+        ie. this doesn't consume the item, just waits for it.
+        """
+        while self.empty():
+            getter = self._get_loop().create_future()
+            self._getters.append(getter)
+            try:
+                await getter
+            except:
+                getter.cancel()  # Just in case getter is not done yet.
+                try:
+                    # Clean self._getters from canceled getters.
+                    self._getters.remove(getter)
+                except ValueError:
+                    # The getter could be removed from self._getters by a
+                    # previous put_nowait call.
+                    pass
+                if not self.empty() and not getter.cancelled():
+                    # We were woken up by put_nowait(), but can't take
+                    # the call. Wake up the next in line.
+                    self._wakeup_next(self._getters)
+                raise
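A small self-contained sketch of the new AsyncQueue.wait() semantics: it parks until an item exists but, unlike get(), leaves the item in the queue. The import path simply assumes the module shown in the hunk above:

import asyncio

from langgraph_api.asyncio import AsyncQueue  # module shown in the hunk above


async def main() -> None:
    queue: AsyncQueue[str] = AsyncQueue()

    async def producer() -> None:
        await asyncio.sleep(0.1)
        queue.put_nowait("hello")

    asyncio.create_task(producer())

    await queue.wait()         # returns once an item is available...
    assert queue.qsize() == 1  # ...but the item has not been consumed
    print(await queue.get())   # "hello" is only dequeued here


asyncio.run(main())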
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/auth/middleware.py

@@ -14,12 +14,12 @@ logger = structlog.stdlib.get_logger(__name__)


 def get_auth_backend():
-    logger.info(f"Using auth of type={LANGGRAPH_AUTH_TYPE}")
     if LANGGRAPH_AUTH:
         from langgraph_api.auth.custom import get_custom_auth_middleware

+        logger.info("Using auth of type=custom")
         return get_custom_auth_middleware()
-
+    logger.info(f"Using auth of type={LANGGRAPH_AUTH_TYPE}")
     if LANGGRAPH_AUTH_TYPE == "langsmith":
         from langgraph_api.auth.langsmith.backend import LangsmithAuthBackend
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/config.py

@@ -59,6 +59,7 @@ MIGRATIONS_PATH = env("MIGRATIONS_PATH", cast=str, default="/storage/migrations"
 REDIS_URI = env("REDIS_URI", cast=str)
 REDIS_CLUSTER = env("REDIS_CLUSTER", cast=bool, default=False)
 REDIS_MAX_CONNECTIONS = env("REDIS_MAX_CONNECTIONS", cast=int, default=500)
+REDIS_CONNECT_TIMEOUT = env("REDIS_CONNECT_TIMEOUT", cast=float, default=10.0)

 # server
 ALLOW_PRIVATE_NETWORK = env("ALLOW_PRIVATE_NETWORK", cast=bool, default=False)
@@ -68,6 +69,7 @@ See https://developer.chrome.com/blog/private-network-access-update-2024-03
 """

 HTTP_CONFIG: HttpConfig | None = env("LANGGRAPH_HTTP", cast=_parse_json, default=None)
+STORE_CONFIG: dict | None = env("LANGGRAPH_STORE", cast=_parse_json, default=None)
 CORS_ALLOW_ORIGINS = env("CORS_ALLOW_ORIGINS", cast=CommaSeparatedStrings, default="*")
 if HTTP_CONFIG and HTTP_CONFIG.get("cors"):
     CORS_CONFIG = HTTP_CONFIG["cors"]
@@ -119,11 +121,13 @@ if CORS_CONFIG is not None and CORS_ALLOW_ORIGINS != "*":

 BG_JOB_HEARTBEAT = 120  # seconds
 BG_JOB_INTERVAL = 30  # seconds
+BG_JOB_MAX_RETRIES = 3
+BG_JOB_ISOLATED_LOOPS = env("BG_JOB_ISOLATED_LOOPS", cast=bool, default=False)
+

 N_JOBS_PER_WORKER = env("N_JOBS_PER_WORKER", cast=int, default=10)
 BG_JOB_TIMEOUT_SECS = env("BG_JOB_TIMEOUT_SECS", cast=float, default=3600)
 FF_CRONS_ENABLED = env("FF_CRONS_ENABLED", cast=bool, default=True)
-FF_JS_ZEROMQ_ENABLED = env("FF_JS_ZEROMQ_ENABLED", cast=bool, default=False)

 # auth

@@ -182,3 +186,12 @@ TRACING = (

 if getenv("LANGSMITH_LANGGRAPH_API_VARIANT") == "licensed" and LANGSMITH_API_KEY:
     environ["LANGSMITH_LANGGRAPH_API_VARIANT"] = "local"
+
+
+# Metrics.
+USES_INDEXING = (
+    STORE_CONFIG
+    and STORE_CONFIG.get("index")
+    and STORE_CONFIG.get("index").get("embed")
+)
+USES_CUSTOM_APP = HTTP_CONFIG and HTTP_CONFIG.get("app")
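The new USES_INDEXING flag is truthy only when LANGGRAPH_STORE carries a nested index.embed setting. A hedged sketch of an environment value that would switch it on; the embed string and dims are placeholders, not values prescribed by this diff:

import json
import os

# Placeholder LANGGRAPH_STORE value; only the index.embed nesting is taken
# from the config.py logic above.
os.environ["LANGGRAPH_STORE"] = json.dumps(
    {"index": {"embed": "openai:text-embedding-3-small", "dims": 1536}}
)

store_config = json.loads(os.environ["LANGGRAPH_STORE"])
uses_indexing = bool(
    store_config
    and store_config.get("index")
    and store_config.get("index").get("embed")
)
print(uses_indexing)  # True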
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/cron_scheduler.py

@@ -5,8 +5,8 @@ import structlog
 from langchain_core.runnables.config import run_in_executor

 from langgraph_api.models.run import create_valid_run
-from langgraph_api.queue import set_auth_ctx_for_run
 from langgraph_api.utils import next_cron_date
+from langgraph_api.worker import set_auth_ctx_for_run
 from langgraph_storage.database import connect
 from langgraph_storage.ops import Crons
 from langgraph_storage.retry import retry_db
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/graph.py

@@ -77,6 +77,11 @@ async def _generate_graph(value: Any) -> AsyncIterator[Any]:
     yield value


+def is_js_graph(graph_id: str) -> bool:
+    """Return whether a graph is a JS graph."""
+    return graph_id in GRAPHS and isinstance(GRAPHS[graph_id], BaseRemotePregel)
+
+
 @asynccontextmanager
 async def get_graph(
     graph_id: str,
{langgraph_api-0.0.27 → langgraph_api-0.0.28rc1}/langgraph_api/http.py

@@ -45,13 +45,15 @@ class JsonHttpClient:
                 path,
                 content=json_dumpb(json) if json else content,
                 headers=request_headers,
-                timeout=…
+                timeout=(
+                    httpx.Timeout(
+                        request_timeout or connect_timeout,
+                        connect=connect_timeout,
+                        read=request_timeout,
+                    )
+                    if connect_timeout or request_timeout
+                    else None
+                ),
                 params=params,
             ),
             # httpx timeout controls are additive for each operation
@@ -70,6 +72,7 @@ class JsonHttpClient:


 _http_client: JsonHttpClient
+_loopback_client: JsonHttpClient = None


 async def start_http_client() -> None:
@@ -96,6 +99,20 @@ def get_http_client() -> JsonHttpClient:
     return _http_client


+def get_loopback_client() -> JsonHttpClient:
+    global _loopback_client
+    if _loopback_client is None:
+        from langgraph_api.server import app  # type: ignore
+
+        _loopback_client = JsonHttpClient(
+            client=httpx.AsyncClient(
+                base_url="http://api",
+                transport=httpx.ASGITransport(app, root_path="/noauth"),
+            ),
+        )
+    return _loopback_client
+
+
 def is_retriable_error(exception: Exception) -> bool:
     if isinstance(exception, httpx.HTTPError):
         return (
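get_loopback_client routes requests to the ASGI app in-process via httpx.ASGITransport, so no socket is involved. A minimal standalone sketch of that pattern; the toy Starlette app is illustrative, not the langgraph_api server:

import asyncio

import httpx
from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.routing import Route


async def ok(request):
    return JSONResponse({"ok": True})


app = Starlette(routes=[Route("/ok", ok)])


async def main() -> None:
    # "http://api" is a dummy base URL; requests never leave the process,
    # mirroring how get_loopback_client() wires its AsyncClient above.
    transport = httpx.ASGITransport(app)
    async with httpx.AsyncClient(transport=transport, base_url="http://api") as client:
        resp = await client.get("/ok")
        print(resp.json())  # {'ok': True}


asyncio.run(main())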
langgraph_api-0.0.28rc1/langgraph_api/js/build.mts

@@ -0,0 +1,98 @@
+/// <reference types="./global.d.ts" />
+
+import { z } from "zod";
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import {
+  filterValidGraphSpecs,
+  GraphSchema,
+  resolveGraph,
+  runGraphSchemaWorker,
+} from "./src/graph.mts";
+import { build } from "@langchain/langgraph-api/ui/bundler";
+
+const __dirname = new URL(".", import.meta.url).pathname;
+
+async function main() {
+  const specs = filterValidGraphSpecs(
+    z.record(z.string()).parse(JSON.parse(process.env.LANGSERVE_GRAPHS))
+  );
+
+  const GRAPH_SCHEMAS: Record<string, Record<string, GraphSchema> | false> = {};
+
+  try {
+    await Promise.all(
+      specs.map(async ([graphId, rawSpec]) => {
+        console.info(`[${graphId}]: Checking for source file existence`);
+        const { resolved, ...spec } = await resolveGraph(rawSpec, {
+          onlyFilePresence: true,
+        });
+
+        try {
+          console.info(`[${graphId}]: Extracting schema`);
+          GRAPH_SCHEMAS[graphId] = await runGraphSchemaWorker(spec, {
+            timeoutMs: 120_000,
+          });
+        } catch (error) {
+          console.error(`[${graphId}]: Error extracting schema: ${error}`);
+          GRAPH_SCHEMAS[graphId] = false;
+        }
+      })
+    );
+
+    await fs.writeFile(
+      path.resolve(__dirname, "client.schemas.json"),
+      JSON.stringify(GRAPH_SCHEMAS),
+      { encoding: "utf-8" }
+    );
+  } catch (error) {
+    console.error(`Error resolving graphs: ${error}`);
+    process.exit(1);
+  }
+
+  const uiSpecs = z
+    .record(z.string())
+    .parse(JSON.parse(process.env.LANGGRAPH_UI || "{}"));
+
+  if (Object.keys(uiSpecs).length > 0) {
+    try {
+      const schemas: Record<string, { assets: string[]; name: string }> = {};
+      await Promise.all(
+        Object.entries(uiSpecs).map(async ([graphId, uiUserPath]) => {
+          console.info(`[${graphId}]: Building UI`);
+          const userPath = path.resolve(process.cwd(), uiUserPath);
+          const files = await build(graphId, userPath);
+          await Promise.all([
+            ...files.map(async (item) => {
+              const folder = path.resolve(__dirname, "ui", graphId);
+              const source = path.resolve(folder, item.basename);
+
+              await fs.mkdir(path.dirname(source), { recursive: true });
+              await fs.writeFile(source, item.contents);
+
+              schemas[graphId] ??= { assets: [], name: graphId };
+
+              const relative = path.relative(
+                path.resolve(__dirname, "ui", graphId),
+                source
+              );
+
+              schemas[graphId].assets.push(relative);
+            }),
+          ]);
+        })
+      );
+
+      await fs.writeFile(
+        path.resolve(__dirname, "client.ui.schemas.json"),
+        JSON.stringify(schemas),
+        { encoding: "utf-8" }
+      );
+    } catch (error) {
+      console.error(`Error building UI: ${error}`);
+      process.exit(1);
+    }
+  }
+}
+
+main();