langgraph-api 0.2.129__py3-none-any.whl → 0.2.132__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/assistants.py +6 -5
- langgraph_api/api/meta.py +3 -1
- langgraph_api/api/openapi.py +1 -1
- langgraph_api/api/runs.py +13 -10
- langgraph_api/api/ui.py +2 -0
- langgraph_api/asgi_transport.py +2 -2
- langgraph_api/asyncio.py +10 -8
- langgraph_api/auth/custom.py +9 -4
- langgraph_api/auth/langsmith/client.py +1 -1
- langgraph_api/cli.py +5 -4
- langgraph_api/config.py +2 -0
- langgraph_api/executor_entrypoint.py +23 -0
- langgraph_api/graph.py +25 -9
- langgraph_api/http.py +10 -7
- langgraph_api/http_metrics.py +4 -1
- langgraph_api/js/base.py +0 -3
- langgraph_api/js/build.mts +11 -2
- langgraph_api/js/client.http.mts +2 -0
- langgraph_api/js/client.mts +15 -11
- langgraph_api/js/remote.py +22 -12
- langgraph_api/js/src/preload.mjs +9 -1
- langgraph_api/js/src/utils/files.mts +5 -2
- langgraph_api/js/sse.py +1 -1
- langgraph_api/logging.py +3 -3
- langgraph_api/middleware/http_logger.py +4 -3
- langgraph_api/models/run.py +20 -15
- langgraph_api/patch.py +2 -2
- langgraph_api/queue_entrypoint.py +33 -18
- langgraph_api/route.py +7 -1
- langgraph_api/schema.py +20 -1
- langgraph_api/serde.py +32 -5
- langgraph_api/server.py +5 -3
- langgraph_api/state.py +8 -8
- langgraph_api/store.py +1 -1
- langgraph_api/stream.py +35 -20
- langgraph_api/traceblock.py +1 -1
- langgraph_api/utils/__init__.py +21 -5
- langgraph_api/utils/config.py +13 -4
- langgraph_api/utils/future.py +1 -1
- langgraph_api/utils/headers.py +22 -5
- langgraph_api/utils/uuids.py +87 -0
- langgraph_api/webhook.py +20 -20
- langgraph_api/worker.py +36 -9
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/METADATA +2 -2
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/RECORD +50 -48
- openapi.json +2 -2
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/licenses/LICENSE +0 -0
langgraph_api/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.129"
+__version__ = "0.2.132"
langgraph_api/api/assistants.py
CHANGED
@@ -61,7 +61,8 @@ def _get_configurable_jsonschema(graph: Pregel) -> dict:
     in favor of graph.get_context_jsonschema().
     """
     # Otherwise, use the config_schema method.
-
+    # TODO: Remove this when we no longer support langgraph < 0.6
+    config_schema = graph.config_schema()  # type: ignore[deprecated]
     model_fields = getattr(config_schema, "model_fields", None) or getattr(
         config_schema, "__fields__", None
     )
@@ -87,11 +88,11 @@ def _state_jsonschema(graph: Pregel) -> dict | None:
     for k in graph.stream_channels_list:
         v = graph.channels[k]
         try:
-            create_model(k, __root__=(v.UpdateType, None)).
+            create_model(k, __root__=(v.UpdateType, None)).model_json_schema()
             fields[k] = (v.UpdateType, None)
         except Exception:
             fields[k] = (Any, None)
-    return create_model(graph.get_name("State"), **fields).
+    return create_model(graph.get_name("State"), **fields).model_json_schema()


 def _graph_schemas(graph: Pregel) -> dict:
@@ -132,7 +133,7 @@ def _graph_schemas(graph: Pregel) -> dict:
         logger.warning(
             f"Failed to get context schema for graph {graph.name} with error: `{str(e)}`"
         )
-        context_schema = graph.config_schema()
+        context_schema = graph.config_schema()  # type: ignore[deprecated]
     else:
         context_schema = None

@@ -366,7 +367,7 @@ async def patch_assistant(


 @retry_db
-async def delete_assistant(request: ApiRequest) ->
+async def delete_assistant(request: ApiRequest) -> Response:
     """Delete an assistant by ID."""
     assistant_id = request.path_params["assistant_id"]
     validate_uuid(assistant_id, "Invalid assistant ID: must be a UUID")
langgraph_api/api/meta.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import cast
+
 import langgraph.version
 from starlette.responses import JSONResponse, PlainTextResponse

@@ -43,7 +45,7 @@ async def meta_metrics(request: ApiRequest):

     # collect stats
     metrics = get_metrics()
-    worker_metrics = metrics["workers"]
+    worker_metrics = cast(dict[str, int], metrics["workers"])
     workers_max = worker_metrics["max"]
     workers_active = worker_metrics["active"]
     workers_available = worker_metrics["available"]
langgraph_api/api/openapi.py
CHANGED
langgraph_api/api/runs.py
CHANGED
@@ -1,9 +1,8 @@
 import asyncio
 from collections.abc import AsyncIterator
-from typing import Literal
+from typing import Literal, cast

 import orjson
-from langgraph.checkpoint.base.id import uuid6
 from starlette.exceptions import HTTPException
 from starlette.responses import Response, StreamingResponse

@@ -12,7 +11,7 @@ from langgraph_api.asyncio import ValueEvent, aclosing
 from langgraph_api.models.run import create_valid_run
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
 from langgraph_api.sse import EventSourceResponse
-from langgraph_api.utils import fetchone, get_pagination_headers, validate_uuid
+from langgraph_api.utils import fetchone, get_pagination_headers, uuid7, validate_uuid
 from langgraph_api.validation import (
     CronCreate,
     CronSearch,
@@ -92,7 +91,7 @@ async def stream_run(
     thread_id = request.path_params["thread_id"]
     payload = await request.json(RunCreateStateful)
     on_disconnect = payload.get("on_disconnect", "continue")
-    run_id =
+    run_id = uuid7()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

     try:
@@ -132,7 +131,7 @@ async def stream_run_stateless(
     """Create a stateless run."""
     payload = await request.json(RunCreateStateless)
     on_disconnect = payload.get("on_disconnect", "continue")
-    run_id =
+    run_id = uuid7()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

     try:
@@ -173,7 +172,7 @@ async def wait_run(request: ApiRequest):
     thread_id = request.path_params["thread_id"]
     payload = await request.json(RunCreateStateful)
     on_disconnect = payload.get("on_disconnect", "continue")
-    run_id =
+    run_id = uuid7()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

     try:
@@ -255,7 +254,7 @@ async def wait_run_stateless(request: ApiRequest):
     """Create a stateless run, wait for the output."""
     payload = await request.json(RunCreateStateless)
     on_disconnect = payload.get("on_disconnect", "continue")
-    run_id =
+    run_id = uuid7()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

     try:
@@ -425,7 +424,10 @@ async def cancel_run(
     wait_str = request.query_params.get("wait", "false")
     wait = wait_str.lower() in {"true", "yes", "1"}
     action_str = request.query_params.get("action", "interrupt")
-    action =
+    action = cast(
+        Literal["interrupt", "rollback"],
+        action_str if action_str in {"interrupt", "rollback"} else "interrupt",
+    )

     async with connect() as conn:
         await Runs.cancel(
@@ -471,8 +473,9 @@ async def cancel_runs(
     for rid in run_ids:
         validate_uuid(rid, "Invalid run ID: must be a UUID")
     action_str = request.query_params.get("action", "interrupt")
-    action
-
+    action = cast(
+        Literal["interrupt", "rollback"],
+        action_str if action_str in ("interrupt", "rollback") else "interrupt",
+    )

     async with connect() as conn:
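Note: the run endpoints above now mint run IDs with a `uuid7()` helper imported from `langgraph_api.utils` (backed by the new `langgraph_api/utils/uuids.py`), replacing `uuid6` from `langgraph.checkpoint.base.id`; UUIDv7 values sort by creation time. Below is a minimal sketch of a standard-library UUIDv7 generator following the RFC 9562 layout. It is illustrative only and is not the package's implementation.

import os
import time
import uuid


def uuid7_sketch() -> uuid.UUID:
    # 48-bit Unix timestamp in milliseconds followed by random bits,
    # with the version and variant fields set per RFC 9562.
    unix_ms = time.time_ns() // 1_000_000
    raw = bytearray(unix_ms.to_bytes(6, "big") + os.urandom(10))
    raw[6] = (raw[6] & 0x0F) | 0x70  # version 7 in the high nibble of byte 6
    raw[8] = (raw[8] & 0x3F) | 0x80  # RFC variant bits in byte 8
    return uuid.UUID(bytes=bytes(raw))


print(uuid7_sketch())  # IDs generated later sort after earlier ones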
langgraph_api/api/ui.py
CHANGED
@@ -56,6 +56,8 @@ async def handle_ui(request: ApiRequest) -> Response:

     # Use http:// protocol if accessing a localhost service
     def is_host(needle: str) -> bool:
+        if not isinstance(host, str):
+            return False
         return host.startswith(needle + ":") or host == needle

     protocol = "http:" if is_host("localhost") or is_host("127.0.0.1") else ""
langgraph_api/asgi_transport.py
CHANGED
@@ -13,7 +13,7 @@ from httpx import AsyncByteStream, Request, Response
 if typing.TYPE_CHECKING:  # pragma: no cover
     import asyncio

-    import trio
+    import trio  # type: ignore[unresolved-import]

     Event = asyncio.Event | trio.Event

@@ -37,7 +37,7 @@ def is_running_trio() -> bool:

 def create_event() -> Event:
     if is_running_trio():
-        import trio
+        import trio  # type: ignore[unresolved-import]

         return trio.Event()

langgraph_api/asyncio.py
CHANGED
@@ -119,7 +119,7 @@ def create_task(

 def run_coroutine_threadsafe(
     coro: Coroutine[Any, Any, T], ignore_exceptions: tuple[type[Exception], ...] = ()
-) -> concurrent.futures.Future[T | None]:
+) -> concurrent.futures.Future[T] | concurrent.futures.Future[None]:
     if _MAIN_LOOP is None:
         raise RuntimeError("No event loop set")
     future = asyncio.run_coroutine_threadsafe(coro, _MAIN_LOOP)
@@ -226,7 +226,7 @@ def to_aiter(*args: T) -> AsyncIterator[T]:
 V = TypeVar("V")


-class aclosing(Generic[V], AbstractAsyncContextManager):
+class aclosing(Generic[V], AbstractAsyncContextManager[V]):
     """Async context manager for safely finalizing an asynchronously cleaned-up
     resource such as an async generator, calling its ``aclose()`` method.

@@ -255,14 +255,16 @@ class aclosing(Generic[V], AbstractAsyncContextManager):
         await self.thing.aclose()


-async def aclosing_aiter(
-
-
-
+async def aclosing_aiter(
+    aiterator: AsyncIterator[T],
+) -> AsyncIterator[T]:
+    if hasattr(aiterator, "__aenter__"):
+        async with aiterator:  # type: ignore[invalid-context-manager]
+            async for item in aiterator:
                 yield item
     else:
-        async with aclosing(
-        async for item in
+        async with aclosing(aiterator):
+            async for item in aiterator:
                 yield item


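Note: `aclosing_aiter` now prefers the iterator's own async context manager (when it defines `__aenter__`) and only otherwise wraps it so `aclose()` still runs. A usage sketch of that pattern with the standard library's `contextlib.aclosing`; the names below are illustrative, not the package's internals.

import asyncio
from collections.abc import AsyncIterator
from contextlib import aclosing


async def consume(aiterator: AsyncIterator[int]) -> list[int]:
    items: list[int] = []
    if hasattr(aiterator, "__aenter__"):
        # The iterator manages its own setup/teardown.
        async with aiterator:
            async for item in aiterator:
                items.append(item)
    else:
        # Fall back to guaranteeing aclose() runs, even if iteration stops early.
        async with aclosing(aiterator):
            async for item in aiterator:
                items.append(item)
    return items


async def numbers() -> AsyncIterator[int]:
    for i in range(3):
        yield i


print(asyncio.run(consume(numbers())))  # [0, 1, 2]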
langgraph_api/auth/custom.py
CHANGED
@@ -251,14 +251,15 @@ def _get_auth_instance(path: str | None = None) -> Auth | Literal["js"] | None:
         deps := _get_dependencies(auth_instance._authenticate_handler)
     ):
         auth_instance._authenticate_handler = _solve_fastapi_dependencies(
-            auth_instance._authenticate_handler,
+            auth_instance._authenticate_handler,  # type: ignore[invalid-argument-type]
+            deps,
         )
     logger.info(f"Loaded auth instance from path {path}: {auth_instance}")
     return auth_instance


 def _extract_arguments_from_scope(
-    scope:
+    scope: Mapping[str, Any],
     param_names: set[str],
     request: Request | None = None,
     response: Response | None = None,
@@ -283,7 +284,11 @@ def _extract_arguments_from_scope(
     if "path" in param_names:
         args["path"] = scope["path"]
     if "query_params" in param_names:
-
+        query_params = scope.get("query_string")
+        if query_params:
+            args["query_params"] = QueryParams(query_params)
+        else:
+            args["query_params"] = QueryParams()
     if "headers" in param_names:
         args["headers"] = dict(scope.get("headers", {}))
     if "authorization" in param_names:
@@ -595,7 +600,7 @@ def _load_auth_obj(path: str) -> Auth | Literal["js"]:
            raise ValueError(f"Could not load file: {module_name}")
        module = importlib.util.module_from_spec(modspec)
        sys.modules[modname] = module
-        modspec.loader.exec_module(module)
+        modspec.loader.exec_module(module)  # type: ignore[possibly-unbound-attribute]
    else:
        # Load from Python module
        module = importlib.import_module(module_name)
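Note: custom auth handlers that declare a `query_params` parameter now receive it parsed from the raw ASGI `query_string`. A small sketch of that parsing step using Starlette's `QueryParams`, with an illustrative scope; it mirrors the pattern in the hunk above rather than the package's exact code.

from starlette.datastructures import QueryParams

# Hypothetical ASGI scope fragment, for illustration only.
scope = {"type": "http", "path": "/runs", "query_string": b"action=interrupt&wait=1"}

query_string = scope.get("query_string")
query_params = QueryParams(query_string) if query_string else QueryParams()
print(query_params.get("action"), query_params.get("wait"))  # interrupt 1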
langgraph_api/auth/langsmith/client.py
CHANGED
@@ -14,7 +14,7 @@ from langgraph_api.config import LANGSMITH_AUTH_ENDPOINT
 _client: "JsonHttpClient"


-def is_retriable_error(exception:
+def is_retriable_error(exception: BaseException) -> bool:
     if isinstance(exception, httpx.TransportError):
         return True
     if isinstance(exception, httpx.HTTPStatusError):
langgraph_api/cli.py
CHANGED
@@ -204,7 +204,7 @@ def run_server(
     mount_prefix = os.environ.get("LANGGRAPH_MOUNT_PREFIX")
     if isinstance(env, str | pathlib.Path):
         try:
-            from dotenv.main import DotEnv
+            from dotenv.main import DotEnv  # type: ignore[unresolved-import]

             env_vars = DotEnv(dotenv_path=env).dict() or {}
             logger.debug(f"Loaded environment variables from {env}: {sorted(env_vars)}")
@@ -216,7 +216,7 @@ def run_server(

     if debug_port is not None:
         try:
-            import debugpy
+            import debugpy  # type: ignore[unresolved-import]
         except ImportError:
             logger.warning("debugpy is not installed. Debugging will not be available.")
             logger.info("To enable debugging, install debugpy: pip install debugpy")
@@ -301,6 +301,7 @@ def run_server(
     def _open_browser():
         nonlocal studio_origin, full_studio_url
         import time
+        import urllib.error
         import urllib.request
         import webbrowser
         from concurrent.futures import ThreadPoolExecutor
@@ -377,8 +378,8 @@ For production use, please use LangGraph Platform.
         reload=reload,
         env_file=env_file,
         access_log=False,
-        reload_includes=reload_includes,
-        reload_excludes=reload_excludes,
+        reload_includes=list(reload_includes) if reload_includes else None,
+        reload_excludes=list(reload_excludes) if reload_excludes else None,
         log_config={
             "version": 1,
             "incremental": False,
langgraph_api/config.py
CHANGED
@@ -51,6 +51,7 @@ class HttpConfig(TypedDict, total=False):
     mount_prefix: str
     """Prefix for mounted routes. E.g., "/my-deployment/api"."""
     configurable_headers: ConfigurableHeaders | None
+    logging_headers: ConfigurableHeaders | None


 class ThreadTTLConfig(TypedDict, total=False):
@@ -286,6 +287,7 @@ if THREAD_TTL is None and CHECKPOINTER_CONFIG is not None:
 N_JOBS_PER_WORKER = env("N_JOBS_PER_WORKER", cast=int, default=10)
 BG_JOB_TIMEOUT_SECS = env("BG_JOB_TIMEOUT_SECS", cast=float, default=3600)
 FF_CRONS_ENABLED = env("FF_CRONS_ENABLED", cast=bool, default=True)
+FF_RICH_THREADS = env("FF_RICH_THREADS", cast=bool, default=True)

 # auth

langgraph_api/executor_entrypoint.py
ADDED
@@ -0,0 +1,23 @@
+import argparse
+import asyncio
+import json
+import logging.config
+import pathlib
+
+from langgraph_api.queue_entrypoint import main
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument("--grpc-port", type=int, default=50051)
+    args = parser.parse_args()
+    with open(pathlib.Path(__file__).parent.parent / "logging.json") as file:
+        loaded_config = json.load(file)
+    logging.config.dictConfig(loaded_config)
+    try:
+        import uvloop  # type: ignore[unresolved-import]
+
+        uvloop.install()
+    except ImportError:
+        pass
+    asyncio.run(main(grpc_port=args.grpc_port, entrypoint_name="python-executor"))
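Note: the new entrypoint reuses `main` from `langgraph_api.queue_entrypoint` and exposes a `--grpc-port` flag. Below is a hedged sketch of launching it as a module in a subprocess; the module path and flag come from the hunk above, while running it this way (and the chosen port) is an assumption, not documented usage.

import subprocess
import sys

# Spawn the executor entrypoint as a separate process (illustrative only).
proc = subprocess.run(
    [sys.executable, "-m", "langgraph_api.executor_entrypoint", "--grpc-port", "50051"],
    check=False,
)
print("executor exited with code", proc.returncode)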
langgraph_api/graph.py
CHANGED
@@ -9,7 +9,7 @@ import warnings
 from collections.abc import AsyncIterator, Callable
 from contextlib import asynccontextmanager
 from itertools import filterfalse
-from typing import TYPE_CHECKING, Any, NamedTuple, cast
+from typing import TYPE_CHECKING, Any, NamedTuple, TypeGuard, cast
 from uuid import UUID, uuid5

 import orjson
@@ -35,10 +35,10 @@ logger = structlog.stdlib.get_logger(__name__)

 GraphFactoryFromConfig = Callable[[Config], Pregel | StateGraph]
 GraphFactory = Callable[[], Pregel | StateGraph]
-GraphValue = Pregel | GraphFactory
+GraphValue = Pregel | GraphFactory | GraphFactoryFromConfig


-GRAPHS: dict[str,
+GRAPHS: dict[str, GraphValue] = {}
 NAMESPACE_GRAPH = UUID("6ba7b821-9dad-11d1-80b4-00c04fd430c8")
 FACTORY_ACCEPTS_CONFIG: dict[str, bool] = {}

@@ -110,11 +110,23 @@ async def _generate_graph(value: Any) -> AsyncIterator[Any]:
     yield value


-def is_js_graph(graph_id: str) ->
+def is_js_graph(graph_id: str) -> TypeGuard[BaseRemotePregel]:
     """Return whether a graph is a JS graph."""
     return graph_id in GRAPHS and isinstance(GRAPHS[graph_id], BaseRemotePregel)


+def is_factory(
+    value: GraphValue, graph_id: str
+) -> TypeGuard[GraphFactoryFromConfig | GraphFactory]:
+    return graph_id in FACTORY_ACCEPTS_CONFIG
+
+
+def factory_accepts_config(
+    value: GraphValue, graph_id: str
+) -> TypeGuard[GraphFactoryFromConfig]:
+    return FACTORY_ACCEPTS_CONFIG.get(graph_id, False)
+
+
 @asynccontextmanager
 async def get_graph(
     graph_id: str,
@@ -128,7 +140,7 @@ async def get_graph(

     assert_graph_exists(graph_id)
     value = GRAPHS[graph_id]
-    if graph_id
+    if is_factory(value, graph_id):
         config = lg_config.ensure_config(config)

         if store is not None:
@@ -139,6 +151,8 @@ async def get_graph(
             runtime = config["configurable"].get(CONFIG_KEY_RUNTIME)
             if runtime is None:
                 patched_runtime = Runtime(store=store)
+            elif isinstance(runtime, dict):
+                patched_runtime = Runtime(**(runtime | {"store": store}))
             elif runtime.store is None:
                 patched_runtime = cast(Runtime, runtime).override(store=store)
             else:
@@ -156,7 +170,7 @@ async def get_graph(
         ):
             config["configurable"][CONFIG_KEY_CHECKPOINTER] = checkpointer
         var_child_runnable_config.set(config)
-        value = value(config) if
+        value = value(config) if factory_accepts_config(value, graph_id) else value()
     try:
         async with _generate_graph(value) as graph_obj:
             if isinstance(graph_obj, StateGraph):
@@ -451,7 +465,7 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
             raise ValueError(f"Could not find python file for graph: {spec}")
         module = importlib.util.module_from_spec(modspec)
         sys.modules[modname] = module
-        modspec.loader.exec_module(module)
+        modspec.loader.exec_module(module)  # type: ignore[possibly-unbound-attribute]
     except ImportError as e:
         e.add_note(f"Could not import python module for graph:\n{spec}")
         if config.API_VARIANT == "local_dev":
@@ -565,7 +579,9 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
 @functools.lru_cache(maxsize=1)
 def _get_init_embeddings() -> Callable[[str, ...], "Embeddings"] | None:
     try:
-        from langchain.embeddings import
+        from langchain.embeddings import (  # type: ignore[unresolved-import]
+            init_embeddings,
+        )

         return init_embeddings
     except ImportError:
@@ -606,7 +622,7 @@ def resolve_embeddings(index_config: dict) -> "Embeddings":
             raise ValueError(f"Could not find embeddings file: {module_name}")
         module = importlib.util.module_from_spec(modspec)
         sys.modules[modname] = module
-        modspec.loader.exec_module(module)
+        modspec.loader.exec_module(module)  # type: ignore[possibly-unbound-attribute]
     else:
         # Load from Python module
         module = importlib.import_module(module_name)
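Note: `is_factory` and `factory_accepts_config` above are `TypeGuard` helpers that let a type checker narrow a registry value that may be either a built graph or a factory callable. A self-contained sketch of the same narrowing pattern, using hypothetical stand-in names rather than the package's classes:

from collections.abc import Callable
from typing import TypeGuard


class Graph:  # stand-in for a built graph object
    pass


GraphFactory = Callable[[], Graph]
GraphValue = Graph | GraphFactory

REGISTRY: dict[str, GraphValue] = {"built": Graph(), "lazy": lambda: Graph()}
IS_FACTORY: dict[str, bool] = {"lazy": True}


def is_factory(value: GraphValue, graph_id: str) -> TypeGuard[GraphFactory]:
    # The guard consults a side table, just like the registry pattern above.
    return IS_FACTORY.get(graph_id, False)


def get_graph(graph_id: str) -> Graph:
    value = REGISTRY[graph_id]
    if is_factory(value, graph_id):
        return value()  # narrowed to GraphFactory, so calling it type-checks
    return value


print(type(get_graph("built")).__name__, type(get_graph("lazy")).__name__)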
langgraph_api/http.py
CHANGED
@@ -72,7 +72,7 @@ class JsonHttpClient:


 _http_client: JsonHttpClient
-_loopback_client: JsonHttpClient = None
+_loopback_client: JsonHttpClient | None = None


 async def start_http_client() -> None:
@@ -113,16 +113,16 @@ def get_loopback_client() -> JsonHttpClient:
     return _loopback_client


-def is_retriable_error(exception:
+def is_retriable_error(exception: BaseException) -> bool:
     # httpx error hierarchy: https://www.python-httpx.org/exceptions/
     # Retry all timeout related errors
     if isinstance(exception, httpx.TimeoutException | httpx.NetworkError):
         return True
     # Seems to just apply to HttpStatusError but doesn't hurt to check all
     if isinstance(exception, httpx.HTTPError):
-
-
-        or
+        response = getattr(exception, "response", None)
+        return response is not None and (
+            response.status_code >= 500 or response.status_code == 429
         )
     return False

@@ -149,7 +149,7 @@ async def http_request(
     request_timeout: float | None = 30,
     raise_error: bool = True,
     client: JsonHttpClient | None = None,
-) ->
+) -> None:
     """Make an HTTP request with retries.

     Args:
@@ -173,7 +173,10 @@ async def http_request(

     content = None
     if body is not None:
-
+        if isinstance(body, str):
+            content = body.encode("utf-8")
+        else:
+            content = body
     elif json is not None:
         content = json_dumpb(json)

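Note: `is_retriable_error` now treats HTTP 429 responses as retriable alongside timeouts, network errors, and 5xx statuses. A standalone sketch of driving a simple retry loop with such a predicate and httpx; this illustrates the policy, not the package's `http_request` wrapper:

import asyncio

import httpx


def is_retriable(exc: BaseException) -> bool:
    if isinstance(exc, httpx.TimeoutException | httpx.NetworkError):
        return True
    if isinstance(exc, httpx.HTTPStatusError):
        return exc.response.status_code >= 500 or exc.response.status_code == 429
    return False


async def fetch_with_retries(url: str, attempts: int = 3) -> httpx.Response:
    async with httpx.AsyncClient() as client:
        for attempt in range(1, attempts + 1):
            try:
                response = await client.get(url)
                response.raise_for_status()
                return response
            except httpx.HTTPError as exc:
                if attempt == attempts or not is_retriable(exc):
                    raise
                await asyncio.sleep(2**attempt)  # crude exponential backoff
    raise RuntimeError("unreachable")


# asyncio.run(fetch_with_retries("https://example.com"))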
langgraph_api/http_metrics.py
CHANGED
@@ -98,7 +98,10 @@ class HTTPMetricsCollector:
         hist_data["count"] += 1

     def get_metrics(
-        self,
+        self,
+        project_id: str | None,
+        revision_id: str | None,
+        format: str = "prometheus",
     ) -> dict | list[str]:
         if format == "json":
             return {
langgraph_api/js/base.py
CHANGED
langgraph_api/js/build.mts
CHANGED
@@ -17,7 +17,14 @@ const __dirname = new URL(".", import.meta.url).pathname;

 async function main() {
   const specs = Object.entries(
-    z
+    z
+      .record(
+        z.union([
+          z.string(),
+          z.object({ path: z.string(), description: z.string().nullish() }),
+        ]),
+      )
+      .parse(JSON.parse(process.env.LANGSERVE_GRAPHS)),
   ).filter(([_, spec]) => filterValidExportPath(spec));

   let GRAPH_SCHEMAS: Record<string, Record<string, GraphSchema> | false> = {};
@@ -49,7 +56,9 @@ async function main() {
   await Promise.all(
     specs.map(async ([graphId, rawSpec]) => {
       console.info(`[${graphId}]: Checking for source file existence`);
-      const
+      const importPath =
+        typeof rawSpec === "string" ? rawSpec : rawSpec.path;
+      const { resolved, ...spec } = await resolveGraph(importPath, {
         onlyFilePresence: true,
       });

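Note: the zod schema above now accepts each LANGSERVE_GRAPHS entry as either a plain export-path string or an object with `path` and an optional `description`. A hedged sketch of what such a value might look like and how the path can be extracted; the example paths and description are illustrative, not taken from the package:

import json
import os

# Illustrative configuration: one string entry and one object entry.
os.environ["LANGSERVE_GRAPHS"] = json.dumps(
    {
        "agent": "./src/agent.ts:graph",
        "researcher": {
            "path": "./src/researcher.ts:graph",
            "description": "Multi-step research assistant",
        },
    }
)

specs = json.loads(os.environ["LANGSERVE_GRAPHS"])
for graph_id, spec in specs.items():
    path = spec if isinstance(spec, str) else spec["path"]
    print(graph_id, "->", path)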
langgraph_api/js/client.http.mts
CHANGED
langgraph_api/js/client.mts
CHANGED
@@ -116,6 +116,7 @@ let GRAPH_OPTIONS: {
 let nodesExecuted = 0;
 function incrementNodes() {
   nodesExecuted++;
+  logger.debug(`Incremented nodes executed to ${nodesExecuted}`);
 }

 const version = await (async () => {
@@ -949,6 +950,8 @@ async function* getStateHistoryRequest(
 const __dirname = new URL(".", import.meta.url).pathname;

 async function main() {
+  logger.info("Starting graph loop", { pid: process.pid });
+
   const app = new Hono();

   GRAPH_OPTIONS = {
@@ -958,7 +961,12 @@ async function main() {

   const specs = Object.entries(
     z
-      .record(
+      .record(
+        z.union([
+          z.string(),
+          z.object({ path: z.string(), description: z.string().nullish() }),
+        ]),
+      )
       .parse(JSON.parse(process.env.LANGSERVE_GRAPHS ?? "{}")),
   ).filter(([_, spec]) => filterValidExportPath(spec));

@@ -977,7 +985,8 @@ async function main() {
   await Promise.all(
     specs.map(async ([graphId, rawSpec]) => {
       logger.info(`Resolving graph ${graphId}`);
-      const
+      const importPath = typeof rawSpec === "string" ? rawSpec : rawSpec.path;
+      const { resolved, ...spec } = await resolveGraph(importPath);

       GRAPH_RESOLVED[graphId] = resolved;
       GRAPH_SPEC[graphId] = spec;
@@ -1029,15 +1038,7 @@ async function main() {
       getStateHistoryRequest,
     ),
   );
-
-    "/:graphId/getNodesExecuted",
-    zValidator("json", GetNodesExecutedPayload),
-    handleInvoke(
-      "getNodesExecuted",
-      GetNodesExecutedPayload,
-      getNodesExecutedRequest,
-    ),
-  );
+
   app.post(
     "/:graphId/getNodesExecuted",
     zValidator("json", GetNodesExecutedPayload),
@@ -1147,6 +1148,9 @@ async function getNodesExecutedRequest(
 ) {
   const value = nodesExecuted;
   nodesExecuted = 0;
+  logger.debug(
+    `Returning ${value} nodes executed. Reset nodes executed to ${nodesExecuted}.`,
+  );
   return { nodesExecuted: value };
 }
 patchFetch();
|