langgraph-api 0.5.4__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/__init__.py +93 -27
- langgraph_api/api/a2a.py +36 -32
- langgraph_api/api/assistants.py +114 -26
- langgraph_api/api/mcp.py +3 -3
- langgraph_api/api/meta.py +15 -2
- langgraph_api/api/openapi.py +27 -17
- langgraph_api/api/profile.py +108 -0
- langgraph_api/api/runs.py +114 -57
- langgraph_api/api/store.py +19 -2
- langgraph_api/api/threads.py +133 -10
- langgraph_api/asgi_transport.py +14 -9
- langgraph_api/auth/custom.py +23 -13
- langgraph_api/cli.py +86 -41
- langgraph_api/command.py +2 -2
- langgraph_api/config/__init__.py +532 -0
- langgraph_api/config/_parse.py +58 -0
- langgraph_api/config/schemas.py +431 -0
- langgraph_api/cron_scheduler.py +17 -1
- langgraph_api/encryption/__init__.py +15 -0
- langgraph_api/encryption/aes_json.py +158 -0
- langgraph_api/encryption/context.py +35 -0
- langgraph_api/encryption/custom.py +280 -0
- langgraph_api/encryption/middleware.py +632 -0
- langgraph_api/encryption/shared.py +63 -0
- langgraph_api/errors.py +12 -1
- langgraph_api/executor_entrypoint.py +11 -6
- langgraph_api/feature_flags.py +19 -0
- langgraph_api/graph.py +163 -64
- langgraph_api/{grpc_ops → grpc}/client.py +142 -12
- langgraph_api/{grpc_ops → grpc}/config_conversion.py +16 -10
- langgraph_api/grpc/generated/__init__.py +29 -0
- langgraph_api/grpc/generated/checkpointer_pb2.py +63 -0
- langgraph_api/grpc/generated/checkpointer_pb2.pyi +99 -0
- langgraph_api/grpc/generated/checkpointer_pb2_grpc.py +329 -0
- langgraph_api/grpc/generated/core_api_pb2.py +216 -0
- langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2.pyi +292 -372
- langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2_grpc.py +252 -31
- langgraph_api/grpc/generated/engine_common_pb2.py +219 -0
- langgraph_api/{grpc_ops → grpc}/generated/engine_common_pb2.pyi +178 -104
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.pyi +12 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_durability_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_durability_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_durability_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.pyi +22 -0
- langgraph_api/grpc/generated/enum_run_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.pyi +28 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/errors_pb2.py +39 -0
- langgraph_api/grpc/generated/errors_pb2.pyi +21 -0
- langgraph_api/grpc/generated/errors_pb2_grpc.py +24 -0
- langgraph_api/grpc/ops/__init__.py +370 -0
- langgraph_api/grpc/ops/assistants.py +424 -0
- langgraph_api/grpc/ops/runs.py +792 -0
- langgraph_api/grpc/ops/threads.py +1013 -0
- langgraph_api/http.py +16 -5
- langgraph_api/js/client.mts +1 -4
- langgraph_api/js/package.json +28 -27
- langgraph_api/js/remote.py +39 -17
- langgraph_api/js/sse.py +2 -2
- langgraph_api/js/ui.py +1 -1
- langgraph_api/js/yarn.lock +1139 -869
- langgraph_api/metadata.py +29 -3
- langgraph_api/middleware/http_logger.py +1 -1
- langgraph_api/middleware/private_network.py +7 -7
- langgraph_api/models/run.py +44 -26
- langgraph_api/otel_context.py +205 -0
- langgraph_api/patch.py +2 -2
- langgraph_api/queue_entrypoint.py +34 -35
- langgraph_api/route.py +33 -1
- langgraph_api/schema.py +84 -9
- langgraph_api/self_hosted_logs.py +2 -2
- langgraph_api/self_hosted_metrics.py +73 -3
- langgraph_api/serde.py +16 -4
- langgraph_api/server.py +33 -31
- langgraph_api/state.py +3 -2
- langgraph_api/store.py +25 -16
- langgraph_api/stream.py +20 -16
- langgraph_api/thread_ttl.py +28 -13
- langgraph_api/timing/__init__.py +25 -0
- langgraph_api/timing/profiler.py +200 -0
- langgraph_api/timing/timer.py +318 -0
- langgraph_api/utils/__init__.py +53 -8
- langgraph_api/utils/config.py +2 -1
- langgraph_api/utils/future.py +10 -6
- langgraph_api/utils/uuids.py +29 -62
- langgraph_api/validation.py +6 -0
- langgraph_api/webhook.py +120 -6
- langgraph_api/worker.py +54 -24
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/METADATA +8 -6
- langgraph_api-0.7.3.dist-info/RECORD +168 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/WHEEL +1 -1
- langgraph_runtime/__init__.py +1 -0
- langgraph_runtime/routes.py +11 -0
- logging.json +1 -3
- openapi.json +635 -537
- langgraph_api/config.py +0 -523
- langgraph_api/grpc_ops/generated/__init__.py +0 -5
- langgraph_api/grpc_ops/generated/core_api_pb2.py +0 -275
- langgraph_api/grpc_ops/generated/engine_common_pb2.py +0 -194
- langgraph_api/grpc_ops/ops.py +0 -1045
- langgraph_api-0.5.4.dist-info/RECORD +0 -121
- /langgraph_api/{grpc_ops → grpc}/__init__.py +0 -0
- /langgraph_api/{grpc_ops → grpc}/generated/engine_common_pb2_grpc.py +0 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
# ruff: noqa: E402
|
|
4
|
+
import time
|
|
5
|
+
|
|
6
|
+
# Capture the time when this module is first imported (early in server startup)
|
|
7
|
+
_PROCESS_START_TIME = time.monotonic()
|
|
8
|
+
|
|
9
|
+
import contextlib
|
|
10
|
+
import functools
|
|
11
|
+
import inspect
|
|
12
|
+
import logging
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from typing import TYPE_CHECKING, Any, Generic, ParamSpec, TypeVar, overload
|
|
15
|
+
|
|
16
|
+
import structlog
|
|
17
|
+
|
|
18
|
+
if TYPE_CHECKING:
|
|
19
|
+
from collections.abc import Callable
|
|
20
|
+
from contextlib import AbstractAsyncContextManager
|
|
21
|
+
|
|
22
|
+
from starlette.applications import Starlette
|
|
23
|
+
|
|
24
|
+
logger = structlog.stdlib.get_logger(__name__)
|
|
25
|
+
|
|
26
|
+
P = ParamSpec("P")
|
|
27
|
+
R = TypeVar("R")
|
|
28
|
+
T = TypeVar("T", covariant=True)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass(frozen=True)
class TimerConfig(Generic[P]):
    """Immutable bundle of logging options shared by the timing helpers in this module."""

    message: str = "Function timing"  # base log message; may contain {field} placeholders filled from log data
    metadata_fn: Callable[P, dict[str, Any]] | None = None  # derives extra log fields from the timed call's args
    warn_threshold_secs: float | None = None  # elapsed strictly greater than this logs at WARNING
    warn_message: str | None = None  # replaces `message` when the warn threshold is exceeded
    error_threshold_secs: float | None = None  # elapsed strictly greater than this logs at ERROR
    error_message: str | None = None  # replaces `message` when the error threshold is exceeded
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@overload
def timer(_func: Callable[P, R], /, **kwargs) -> Callable[P, R]: ...
@overload
def timer(
    _func: None = None, /, **kwargs
) -> Callable[[Callable[P, R]], Callable[P, R]]: ...


def timer(
    _func: Callable[P, R] | None = None,
    /,
    message: str = "Function timing",
    metadata_fn: Callable[P, dict[str, Any]] | None = None,
    warn_threshold_secs: float | None = None,
    warn_message: str | None = None,
    error_threshold_secs: float | None = None,
    error_message: str | None = None,
):
    """
    Decorator for sync *and* async callables.

    Usable bare (``@timer``) or with options (``@timer(message=...)``); the
    wrapped callable's timing is logged via ``_log_timing`` on every call,
    including calls that raise.
    """
    cfg = TimerConfig[P](
        message=message,
        metadata_fn=metadata_fn,
        warn_threshold_secs=warn_threshold_secs,
        warn_message=warn_message,
        error_threshold_secs=error_threshold_secs,
        error_message=error_message,
    )

    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        def report(
            started: float,
            failure: BaseException | None,
            call_args: tuple,
            call_kwargs: dict,
        ) -> None:
            # Shared logging tail for both the sync and async wrappers.
            _log_timing(
                name=func.__qualname__,
                elapsed=time.perf_counter() - started,
                cfg=cfg,  # type: ignore[arg-type]
                args=call_args,
                kwargs=call_kwargs,
                exc=failure,
            )

        if inspect.iscoroutinefunction(func):

            @functools.wraps(func)
            async def awrapper(*args: P.args, **kwargs: P.kwargs) -> R:
                started = time.perf_counter()
                failure: BaseException | None = None
                try:
                    return await func(*args, **kwargs)  # type: ignore[misc]
                except BaseException as e:
                    failure = e
                    raise
                finally:
                    report(started, failure, args, kwargs)

            return awrapper  # type: ignore[return-value]

        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            started = time.perf_counter()
            failure: BaseException | None = None
            try:
                return func(*args, **kwargs)
            except BaseException as e:
                failure = e
                raise
            finally:
                report(started, failure, args, kwargs)

        return wrapper

    return decorator if _func is None else decorator(_func)
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
class aenter_timed(Generic[T]):
    """
    Wraps an async context manager and logs the time spent in *its __aenter__*.
    __aexit__ is delegated without additional timing.
    """

    def __init__(
        self,
        inner: AbstractAsyncContextManager[T],
        *,
        name: str,
        cfg: TimerConfig[Any],
        extra: dict[str, Any] | None = None,
    ) -> None:
        # Wrapped CM plus the identifiers/options used when logging its entry time.
        self._inner = inner
        self._name = name
        self._cfg = cfg
        self._extra = extra or {}

    async def __aenter__(self) -> T:
        started = time.perf_counter()
        failure: BaseException | None = None
        try:
            return await self._inner.__aenter__()
        except BaseException as e:
            failure = e
            raise
        finally:
            # Log regardless of whether entering succeeded; a failed enter is
            # recorded with the captured exception.
            _log_timing(
                name=self._name,
                elapsed=time.perf_counter() - started,
                cfg=self._cfg,
                exc=failure,
                extra=self._extra,
            )

    async def __aexit__(self, exc_type, exc, tb) -> bool | None:
        # Pure delegation — exit is intentionally not timed.
        return await self._inner.__aexit__(exc_type, exc, tb)
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def time_aenter(
    cm: AbstractAsyncContextManager[T],
    *,
    name: str,
    message: str,
    warn_threshold_secs: float | None = None,
    warn_message: str | None = None,
    error_threshold_secs: float | None = None,
    error_message: str | None = None,
    extra: dict[str, Any] | None = None,
) -> aenter_timed[T]:
    """
    Convenience helper to wrap any async CM and time only its __aenter__.
    """
    # Build the config inline; metadata_fn is unused for context managers.
    return aenter_timed(
        cm,
        name=name,
        cfg=TimerConfig[Any](
            message=message,
            metadata_fn=None,
            warn_threshold_secs=warn_threshold_secs,
            warn_message=warn_message,
            error_threshold_secs=error_threshold_secs,
            error_message=error_message,
        ),
        extra=extra,
    )
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def wrap_lifespan_context_aenter(
    lifespan_ctx: Callable[[Any], AbstractAsyncContextManager[Any]],
    *,
    name: str = "user_router.lifespan",
    message: str = "Entered lifespan context",
    warn_threshold_secs: float | None = 10,
    warn_message: str | None = (
        "User lifespan startup exceeded expected time. "
        "Slow work done at entry time within lifespan context can delay readiness, "
        "reduce scale-out capacity, and may cause deployments to be marked unhealthy."
    ),
    error_threshold_secs: float | None = 30,
    error_message: str | None = None,
) -> Callable[[Any], AbstractAsyncContextManager[Any]]:
    """Wrap a lifespan-context factory so that entering the returned context
    manager is timed and logged with the given thresholds."""

    @functools.wraps(lifespan_ctx)
    def timed_factory(app: Any) -> AbstractAsyncContextManager[Any]:
        # Each call produces a fresh timed wrapper around a fresh context.
        return time_aenter(
            lifespan_ctx(app),
            name=name,
            message=message,
            warn_threshold_secs=warn_threshold_secs,
            warn_message=warn_message,
            error_threshold_secs=error_threshold_secs,
            error_message=error_message,
        )

    return timed_factory
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
LP = ParamSpec("LP")
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def combine_lifespans(
    *lifespans: Callable[[Starlette], AbstractAsyncContextManager] | None,
) -> Callable[[Starlette], AbstractAsyncContextManager]:
    """Compose several optional lifespan factories into a single lifespan.

    Non-None lifespans are entered in the given order on startup and exited
    in reverse order on shutdown; total startup time is logged once all have
    been entered.
    """

    @contextlib.asynccontextmanager
    async def combined_lifespan(app):
        async with contextlib.AsyncExitStack() as stack:
            for factory in lifespans:
                if factory is None:
                    continue
                await stack.enter_async_context(factory(app))
            startup_secs = get_startup_elapsed()
            logger.info(
                f"Application started up in {startup_secs:2.3f}s",
                elapsed=startup_secs,
            )
            yield

    return combined_lifespan
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def get_startup_elapsed() -> float:
    """Return elapsed seconds since the process started (module import time)."""
    now = time.monotonic()
    return now - _PROCESS_START_TIME
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def _log_timing(
    *,
    name: str,
    elapsed: float,
    cfg: TimerConfig[Any],
    args: tuple[Any, ...] = (),
    kwargs: dict[str, Any] | None = None,
    exc: BaseException | None = None,
    extra: dict[str, Any] | None = None,
) -> None:
    """Emit one structured log entry for a timed call.

    Builds the log payload from ``extra``, optional per-call metadata from
    ``cfg.metadata_fn``, and the captured exception (if any), picks a log
    level/message from the configured thresholds, and logs via the module
    logger.
    """
    # Imported lazily to avoid a circular import at module load time.
    from langgraph_api import config

    kwargs = kwargs or {}

    log_data: dict[str, Any] = {
        "name": name,
        "elapsed_seconds": elapsed,
    }

    if extra:
        log_data.update(extra)

    if cfg.metadata_fn is not None:
        try:
            md = cfg.metadata_fn(*args, **kwargs)  # type: ignore[misc]
            if not isinstance(md, dict):
                raise TypeError("metadata_fn must return a dict")
            log_data.update(md)
        except Exception as meta_exc:
            # Metadata is best-effort; never let it break the timed call's logging.
            log_data["metadata_error"] = repr(meta_exc)

    if exc is not None:
        log_data["exception"] = repr(exc)

    level, msg = _pick_level_and_message(
        elapsed=elapsed,
        message=cfg.message,
        warn_threshold_secs=cfg.warn_threshold_secs,
        warn_message=cfg.warn_message,
        error_threshold_secs=cfg.error_threshold_secs,
        error_message=cfg.error_message,
    )

    # Allow {graph_id} etc. A placeholder missing from log_data, or stray
    # braces in a configured message, must not raise from the logging path,
    # so fall back to the unformatted message on formatting errors.
    try:
        msg = msg.format(**log_data)
    except (KeyError, IndexError, ValueError):
        pass

    # Add profiler hint if we hit warn/error threshold and profiling isn't enabled
    if level >= logging.WARNING and not config.FF_PROFILE_IMPORTS:
        msg = (
            f"{msg}\n"
            f" To get detailed profiling of slow operations, set FF_PROFILE_IMPORTS=true"
        )

    logger.log(level, msg, **log_data)
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def _pick_level_and_message(
|
|
297
|
+
*,
|
|
298
|
+
elapsed: float,
|
|
299
|
+
message: str,
|
|
300
|
+
warn_threshold_secs: float | None,
|
|
301
|
+
warn_message: str | None,
|
|
302
|
+
error_threshold_secs: float | None,
|
|
303
|
+
error_message: str | None,
|
|
304
|
+
) -> tuple[int, str]:
|
|
305
|
+
level = logging.INFO
|
|
306
|
+
msg = message
|
|
307
|
+
|
|
308
|
+
if warn_threshold_secs is not None and elapsed > warn_threshold_secs:
|
|
309
|
+
level = logging.WARNING
|
|
310
|
+
if warn_message is not None:
|
|
311
|
+
msg = warn_message
|
|
312
|
+
|
|
313
|
+
if error_threshold_secs is not None and elapsed > error_threshold_secs:
|
|
314
|
+
level = logging.ERROR
|
|
315
|
+
if error_message is not None:
|
|
316
|
+
msg = error_message
|
|
317
|
+
|
|
318
|
+
return level, msg
|
langgraph_api/utils/__init__.py
CHANGED
|
@@ -7,6 +7,7 @@ from datetime import datetime
|
|
|
7
7
|
from typing import Any, Protocol, TypeAlias, TypeVar, cast
|
|
8
8
|
|
|
9
9
|
import structlog
|
|
10
|
+
from langchain_core.runnables import RunnableConfig
|
|
10
11
|
from langgraph_sdk import Auth
|
|
11
12
|
from starlette.authentication import AuthCredentials, BaseUser
|
|
12
13
|
from starlette.exceptions import HTTPException
|
|
@@ -36,7 +37,7 @@ async def with_user(
|
|
|
36
37
|
if current is None:
|
|
37
38
|
return
|
|
38
39
|
set_auth_ctx(
|
|
39
|
-
cast(BaseUser, current.user), AuthCredentials(scopes=current.permissions)
|
|
40
|
+
cast("BaseUser", current.user), AuthCredentials(scopes=current.permissions)
|
|
40
41
|
)
|
|
41
42
|
|
|
42
43
|
|
|
@@ -60,6 +61,42 @@ def get_auth_ctx() -> Auth.types.BaseAuthContext | None:
|
|
|
60
61
|
return AuthContext.get()
|
|
61
62
|
|
|
62
63
|
|
|
64
|
+
def get_user_id(user: BaseUser | None) -> str | None:
    """Best-effort identifier for *user*.

    Prefers ``identity``, falls back to ``display_name``; returns ``None``
    when the user is ``None`` or neither accessor is implemented.
    """
    if user is None:
        return None
    for attr in ("identity", "display_name"):
        try:
            return getattr(user, attr)
        except NotImplementedError:
            continue
    return None
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def merge_auth(
    config: RunnableConfig,
    ctx: Auth.types.BaseAuthContext | None = None,
) -> RunnableConfig:
    """Inject auth context into config's configurable dict.

    If ctx is not provided, attempts to get it from the current context.
    """
    auth_ctx = ctx if ctx is not None else get_auth_ctx()
    if auth_ctx is None:
        return config

    # NOTE: setdefault also materializes "configurable" on the input config,
    # matching the pre-existing behavior callers may rely on.
    base_configurable = config.setdefault("configurable", {})
    user = cast("BaseUser | None", auth_ctx.user)
    merged_configurable = {
        **base_configurable,
        "langgraph_auth_user": user,
        "langgraph_auth_user_id": get_user_id(user),
        "langgraph_auth_permissions": auth_ctx.permissions,
    }
    return {**config, "configurable": merged_configurable}
|
|
98
|
+
|
|
99
|
+
|
|
63
100
|
class AsyncCursorProto(Protocol):
|
|
64
101
|
async def fetchone(self) -> Row: ...
|
|
65
102
|
|
|
@@ -139,9 +176,17 @@ class SchemaGenerator(BaseSchemaGenerator):
|
|
|
139
176
|
for endpoint in endpoints_info:
|
|
140
177
|
try:
|
|
141
178
|
parsed = self.parse_docstring(endpoint.func)
|
|
142
|
-
except
|
|
143
|
-
|
|
144
|
-
|
|
179
|
+
except Exception as exc:
|
|
180
|
+
docstring = getattr(endpoint.func, "__doc__", None) or ""
|
|
181
|
+
logger.warning(
|
|
182
|
+
"Unable to parse docstring from OpenAPI schema for route %s (%s): %s\n\nUsing as description",
|
|
183
|
+
endpoint.path,
|
|
184
|
+
endpoint.func.__qualname__,
|
|
185
|
+
exc,
|
|
186
|
+
exc_info=exc,
|
|
187
|
+
docstring=docstring,
|
|
188
|
+
)
|
|
189
|
+
parsed = {"description": docstring}
|
|
145
190
|
|
|
146
191
|
if endpoint.path not in schema["paths"]:
|
|
147
192
|
schema["paths"][endpoint.path] = {}
|
|
@@ -186,14 +231,14 @@ def validate_select_columns(
|
|
|
186
231
|
|
|
187
232
|
|
|
188
233
|
__all__ = [
|
|
234
|
+
"AsyncConnectionProto",
|
|
189
235
|
"AsyncCursorProto",
|
|
190
236
|
"AsyncPipelineProto",
|
|
191
|
-
"AsyncConnectionProto",
|
|
192
|
-
"fetchone",
|
|
193
|
-
"validate_uuid",
|
|
194
|
-
"next_cron_date",
|
|
195
237
|
"SchemaGenerator",
|
|
238
|
+
"fetchone",
|
|
196
239
|
"get_pagination_headers",
|
|
240
|
+
"next_cron_date",
|
|
197
241
|
"uuid7",
|
|
198
242
|
"validate_select_columns",
|
|
243
|
+
"validate_uuid",
|
|
199
244
|
]
|
langgraph_api/utils/config.py
CHANGED
|
@@ -4,7 +4,6 @@ import asyncio
|
|
|
4
4
|
import functools
|
|
5
5
|
import typing
|
|
6
6
|
from collections import ChainMap
|
|
7
|
-
from concurrent.futures import Executor
|
|
8
7
|
from contextvars import copy_context
|
|
9
8
|
from os import getenv
|
|
10
9
|
from typing import Any, ParamSpec, TypeVar
|
|
@@ -13,6 +12,8 @@ from langgraph.constants import CONF
|
|
|
13
12
|
from typing_extensions import TypedDict
|
|
14
13
|
|
|
15
14
|
if typing.TYPE_CHECKING:
|
|
15
|
+
from concurrent.futures import Executor
|
|
16
|
+
|
|
16
17
|
from langchain_core.runnables import RunnableConfig
|
|
17
18
|
|
|
18
19
|
try:
|
langgraph_api/utils/future.py
CHANGED
|
@@ -2,12 +2,14 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
4
|
import concurrent.futures
|
|
5
|
-
import contextvars
|
|
6
5
|
import inspect
|
|
7
6
|
import sys
|
|
8
7
|
import types
|
|
9
|
-
from
|
|
10
|
-
|
|
8
|
+
from typing import TYPE_CHECKING, TypeVar, cast
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
import contextvars
|
|
12
|
+
from collections.abc import Awaitable, Coroutine, Generator
|
|
11
13
|
|
|
12
14
|
T = TypeVar("T")
|
|
13
15
|
AnyFuture = asyncio.Future | concurrent.futures.Future
|
|
@@ -45,7 +47,8 @@ def _set_concurrent_future_state(
|
|
|
45
47
|
source: AnyFuture,
|
|
46
48
|
) -> None:
|
|
47
49
|
"""Copy state from a future to a concurrent.futures.Future."""
|
|
48
|
-
|
|
50
|
+
if not source.done():
|
|
51
|
+
raise ValueError("Future is not done")
|
|
49
52
|
if source.cancelled():
|
|
50
53
|
concurrent.cancel()
|
|
51
54
|
if not concurrent.set_running_or_notify_cancel():
|
|
@@ -65,7 +68,8 @@ def _copy_future_state(source: AnyFuture, dest: asyncio.Future) -> None:
|
|
|
65
68
|
"""
|
|
66
69
|
if dest.done():
|
|
67
70
|
return
|
|
68
|
-
|
|
71
|
+
if not source.done():
|
|
72
|
+
raise ValueError("Future is not done")
|
|
69
73
|
if dest.cancelled():
|
|
70
74
|
return
|
|
71
75
|
if source.cancelled():
|
|
@@ -152,7 +156,7 @@ def _ensure_future(
|
|
|
152
156
|
if not asyncio.iscoroutine(coro_or_future):
|
|
153
157
|
if inspect.isawaitable(coro_or_future):
|
|
154
158
|
coro_or_future = cast(
|
|
155
|
-
Coroutine[None, None, T], _wrap_awaitable(coro_or_future)
|
|
159
|
+
"Coroutine[None, None, T]", _wrap_awaitable(coro_or_future)
|
|
156
160
|
)
|
|
157
161
|
called_wrap_awaitable = True
|
|
158
162
|
else:
|
langgraph_api/utils/uuids.py
CHANGED
|
@@ -1,32 +1,35 @@
|
|
|
1
|
-
|
|
2
|
-
import time
|
|
3
|
-
from uuid import UUID, SafeUUID
|
|
1
|
+
"""UUID utility functions.
|
|
4
2
|
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
This module exports a uuid7 function to generate monotonic, time-ordered UUIDs
|
|
4
|
+
for tracing and similar operations.
|
|
5
|
+
"""
|
|
8
6
|
|
|
7
|
+
from __future__ import annotations
|
|
9
8
|
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
# 42-bit counter with MSB set to 0
|
|
13
|
-
counter = (rand >> 32) & 0x1FF_FFFF_FFFF
|
|
14
|
-
# 32-bit random data
|
|
15
|
-
tail = rand & 0xFFFF_FFFF
|
|
16
|
-
return counter, tail
|
|
9
|
+
import typing
|
|
10
|
+
from uuid import UUID
|
|
17
11
|
|
|
12
|
+
from uuid_utils.compat import uuid7 as _uuid_utils_uuid7
|
|
18
13
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
object.__setattr__(uid, "int", value)
|
|
22
|
-
object.__setattr__(uid, "is_safe", SafeUUID.unknown)
|
|
23
|
-
return uid
|
|
14
|
+
if typing.TYPE_CHECKING:
|
|
15
|
+
from uuid import UUID
|
|
24
16
|
|
|
17
|
+
_NANOS_PER_SECOND: typing.Final = 1_000_000_000
|
|
25
18
|
|
|
26
|
-
|
|
27
|
-
|
|
19
|
+
|
|
20
|
+
def _to_timestamp_and_nanos(nanoseconds: int) -> tuple[int, int]:
|
|
21
|
+
"""Split a nanosecond timestamp into seconds and remaining nanoseconds."""
|
|
22
|
+
seconds, nanos = divmod(nanoseconds, _NANOS_PER_SECOND)
|
|
23
|
+
return seconds, nanos
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def uuid7(nanoseconds: int | None = None) -> UUID:
    """Generate a UUID from a Unix timestamp in nanoseconds and random bits.

    UUIDv7 objects feature monotonicity within a millisecond.

    Args:
        nanoseconds: Optional ns timestamp. If not provided, uses current time.
    """
    # Delegate to uuid_utils, which implements the UUIDv7 bit layout
    # (48-bit ms timestamp | version | counter_hi | variant | counter_lo | random)
    # and handles counter advancement/reset within a millisecond.
    if nanoseconds is None:
        return _uuid_utils_uuid7()
    seconds, nanos = _to_timestamp_and_nanos(nanoseconds)
    return _uuid_utils_uuid7(timestamp=seconds, nanos=nanos)
|
|
75
52
|
|
|
76
|
-
int_uuid_7 = unix_ts_ms << 80
|
|
77
|
-
int_uuid_7 |= counter_hi << 64
|
|
78
|
-
int_uuid_7 |= counter_lo << 32
|
|
79
|
-
int_uuid_7 |= tail
|
|
80
|
-
# by construction, the variant and version bits are already cleared
|
|
81
|
-
int_uuid_7 |= _RFC_4122_VERSION_7_FLAGS
|
|
82
|
-
res = _from_int(int_uuid_7)
|
|
83
53
|
|
|
84
|
-
|
|
85
|
-
_last_timestamp_v7 = timestamp_ms
|
|
86
|
-
_last_counter_v7 = counter
|
|
87
|
-
return res
|
|
54
|
+
__all__ = ["uuid7"]
|
langgraph_api/validation.py
CHANGED
|
@@ -52,6 +52,9 @@ ThreadCreate = jsonschema_rs.validator_for(
|
|
|
52
52
|
ThreadPatch = jsonschema_rs.validator_for(
|
|
53
53
|
openapi["components"]["schemas"]["ThreadPatch"]
|
|
54
54
|
)
|
|
55
|
+
# Request-body validator for the thread prune endpoint, compiled from the
# ThreadPruneRequest schema in the bundled openapi spec.
ThreadPruneRequest = jsonschema_rs.validator_for(
    openapi["components"]["schemas"]["ThreadPruneRequest"]
)
|
|
55
58
|
ThreadStateUpdate = jsonschema_rs.validator_for(
|
|
56
59
|
{
|
|
57
60
|
**openapi["components"]["schemas"]["ThreadStateUpdate"],
|
|
@@ -124,6 +127,9 @@ RunCreateStateful = jsonschema_rs.validator_for(
|
|
|
124
127
|
)
|
|
125
128
|
RunsCancel = jsonschema_rs.validator_for(openapi["components"]["schemas"]["RunsCancel"])
|
|
126
129
|
CronCreate = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronCreate"])
|
|
130
|
+
# Request-body validator for creating a cron on an existing thread, compiled
# from the ThreadCronCreate schema in the bundled openapi spec.
ThreadCronCreate = jsonschema_rs.validator_for(
    openapi["components"]["schemas"]["ThreadCronCreate"]
)
|
|
127
133
|
CronSearch = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronSearch"])
|
|
128
134
|
CronCountRequest = jsonschema_rs.validator_for(
|
|
129
135
|
openapi["components"]["schemas"]["CronCountRequest"]
|