langgraph-api 0.2.83__py3-none-any.whl → 0.2.86__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of langgraph-api might be problematic.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/mcp.py +2 -1
- langgraph_api/api/meta.py +2 -0
- langgraph_api/api/runs.py +7 -1
- langgraph_api/auth/langsmith/backend.py +29 -3
- langgraph_api/config.py +1 -1
- langgraph_api/http.py +3 -3
- langgraph_api/utils/cache.py +58 -0
- langgraph_api/worker.py +3 -17
- {langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/METADATA +1 -1
- {langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/RECORD +15 -14
- langgraph_license/validation.py +6 -1
- {langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/licenses/LICENSE +0 -0
langgraph_api/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.83"
+__version__ = "0.2.86"

langgraph_api/api/mcp.py
CHANGED
@@ -385,11 +385,12 @@ async def handle_tools_list(
         seen_names.add(name)

         schemas = await client.assistants.get_schemas(id_, headers=request.headers)
+        description = assistant.get("description") or ""
         tools.append(
             {
                 "name": name,
                 "inputSchema": schemas.get("input_schema", {}),
-                "description":
+                "description": description,
             },
         )
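The new fallback keeps the MCP tool description a string even when an assistant has no description set. A tiny illustration with made-up assistant dicts (not taken from the package):

# Hypothetical assistant payloads, only to show the `or ""` fallback above.
with_description = {"description": "Summarizes documents"}
without_description = {"description": None}

print(with_description.get("description") or "")     # "Summarizes documents"
print(without_description.get("description") or "")  # "" rather than None
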
|
langgraph_api/api/meta.py
CHANGED
@@ -1,3 +1,4 @@
+import langgraph.version
 from starlette.responses import JSONResponse, PlainTextResponse

 from langgraph_api import __version__, config, metadata
@@ -16,6 +17,7 @@ async def meta_info(request: ApiRequest):
     return JSONResponse(
         {
             "version": __version__,
+            "langgraph_py_version": langgraph.version.__version__,
             "flags": {
                 "assistants": True,
                 "crons": plus and config.FF_CRONS_ENABLED,

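For orientation, a rough sketch of the JSON body this handler now returns; the langgraph_py_version value below is made up, and the real flags block contains more entries than shown:

# Illustrative response shape only; values are placeholders.
{
    "version": "0.2.86",              # langgraph_api.__version__
    "langgraph_py_version": "0.6.0",  # langgraph.version.__version__ (hypothetical value)
    "flags": {
        "assistants": True,
        "crons": False,               # plus and config.FF_CRONS_ENABLED
    },
}
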
langgraph_api/api/runs.py
CHANGED
@@ -172,6 +172,7 @@ async def wait_run(request: ApiRequest):
     """Create a run, wait for the output."""
     thread_id = request.path_params["thread_id"]
     payload = await request.json(RunCreateStateful)
+    on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid6()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

@@ -197,7 +198,10 @@ async def wait_run(request: ApiRequest):
     vchunk: bytes | None = None
     async with aclosing(
         Runs.Stream.join(
-            run["run_id"],
+            run["run_id"],
+            thread_id=run["thread_id"],
+            stream_mode=await sub,
+            cancel_on_disconnect=on_disconnect == "cancel",
         )
     ) as stream:
         async for mode, chunk, _ in stream:
@@ -248,6 +252,7 @@ async def wait_run(request: ApiRequest):
 async def wait_run_stateless(request: ApiRequest):
     """Create a stateless run, wait for the output."""
     payload = await request.json(RunCreateStateless)
+    on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid6()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))

@@ -277,6 +282,7 @@ async def wait_run_stateless(request: ApiRequest):
             thread_id=run["thread_id"],
             stream_mode=await sub,
             ignore_404=True,
+            cancel_on_disconnect=on_disconnect == "cancel",
         )
     ) as stream:
         async for mode, chunk, _ in stream:

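With this change, both wait endpoints read an on_disconnect field from the run payload: "cancel" maps to cancel_on_disconnect=True, and omitting the field keeps the default "continue". A hedged sketch of a request body that would exercise the cancel path; the assistant_id and input values are placeholders, not taken from the package:

# Hypothetical wait-run payload; only "on_disconnect" is the field added above.
payload = {
    "assistant_id": "agent",   # placeholder
    "input": {"messages": [{"role": "user", "content": "hi"}]},  # placeholder
    "on_disconnect": "cancel", # -> cancel_on_disconnect=True in Runs.Stream.join
}
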
langgraph_api/auth/langsmith/backend.py
CHANGED
@@ -14,6 +14,7 @@ from langgraph_api.config import (
     LANGSMITH_AUTH_VERIFY_TENANT_ID,
     LANGSMITH_TENANT_ID,
 )
+from langgraph_api.utils.cache import LRUCache


 class AuthDict(TypedDict):
@@ -23,7 +24,22 @@ class AuthDict(TypedDict):
     user_email: NotRequired[str]


+class AuthCacheEntry(TypedDict):
+    credentials: AuthCredentials
+    user: StudioUser
+
+
 class LangsmithAuthBackend(AuthenticationBackend):
+    def __init__(self):
+        self._cache = LRUCache[AuthCacheEntry](max_size=1000, ttl=60)
+
+    def _get_cache_key(self, headers):
+        """Generate cache key from authentication headers"""
+        relevant_headers = tuple(
+            (name, value) for name, value in headers if value is not None
+        )
+        return str(hash(relevant_headers))
+
     async def authenticate(
         self, conn: HTTPConnection
     ) -> tuple[AuthCredentials, BaseUser] | None:
@@ -37,6 +53,12 @@ class LangsmithAuthBackend(AuthenticationBackend):
         ]
         if not any(h[1] for h in headers):
             raise AuthenticationError("Missing authentication headers")
+
+        # Check cache first
+        cache_key = self._get_cache_key(headers)
+        if cached_entry := self._cache.get(cache_key):
+            return cached_entry["credentials"], cached_entry["user"]
+
         async with auth_client() as auth:
             if not LANGSMITH_AUTH_VERIFY_TENANT_ID and not conn.headers.get(
                 "x-api-key"
@@ -66,6 +88,10 @@ class LangsmithAuthBackend(AuthenticationBackend):
             if auth_dict["tenant_id"] != LANGSMITH_TENANT_ID:
                 raise AuthenticationError("Invalid tenant ID")

-
-
-
+        credentials = AuthCredentials(["authenticated"])
+        user = StudioUser(auth_dict.get("user_id"), is_authenticated=True)
+
+        # Cache the result
+        self._cache.set(cache_key, AuthCacheEntry(credentials=credentials, user=user))
+
+        return credentials, user
langgraph_api/config.py
CHANGED
@@ -173,7 +173,7 @@ LANGGRAPH_AES_KEY = env("LANGGRAPH_AES_KEY", default=None, cast=_get_encryption_
 # redis
 REDIS_URI = env("REDIS_URI", cast=str)
 REDIS_CLUSTER = env("REDIS_CLUSTER", cast=bool, default=False)
-REDIS_MAX_CONNECTIONS = env("REDIS_MAX_CONNECTIONS", cast=int, default=
+REDIS_MAX_CONNECTIONS = env("REDIS_MAX_CONNECTIONS", cast=int, default=2000)
 REDIS_CONNECT_TIMEOUT = env("REDIS_CONNECT_TIMEOUT", cast=float, default=10.0)
 REDIS_MAX_IDLE_TIME = env("REDIS_MAX_IDLE_TIME", cast=float, default=120.0)
 REDIS_KEY_PREFIX = env("REDIS_KEY_PREFIX", cast=str, default="")

langgraph_api/http.py
CHANGED
@@ -120,9 +120,9 @@ def is_retriable_error(exception: Exception) -> bool:
         return True
     # Seems to just apply to HttpStatusError but doesn't hurt to check all
     if isinstance(exception, httpx.HTTPError):
-        return (
-
-
+        return getattr(exception, "response", None) is not None and (
+            exception.response.status_code >= 500
+            or exception.response.status_code == 429
         )
     return False
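A minimal sketch of how the rewritten branch behaves, assuming standard httpx exception types: the getattr guard means status codes are only inspected when a response object is actually attached, so connection-level httpx errors fall through to False in this branch. The helper below is a simplified copy of only the httpx.HTTPError branch shown above, not the full function:

import httpx

def retriable_httpx_error(exception: Exception) -> bool:
    # Simplified copy of the httpx.HTTPError branch from the diff above.
    if isinstance(exception, httpx.HTTPError):
        return getattr(exception, "response", None) is not None and (
            exception.response.status_code >= 500
            or exception.response.status_code == 429
        )
    return False

request = httpx.Request("GET", "https://example.invalid/")
response = httpx.Response(503, request=request)

print(retriable_httpx_error(
    httpx.HTTPStatusError("server error", request=request, response=response)
))  # True: 5xx (and 429) responses are treated as retriable
print(retriable_httpx_error(
    httpx.ConnectError("connection refused", request=request)
))  # False in this simplified branch: no response attached
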
langgraph_api/utils/cache.py
ADDED
@@ -0,0 +1,58 @@
+import asyncio
+import time
+from collections import OrderedDict
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+
+
+class LRUCache(Generic[T]):
+    """LRU cache with TTL support."""
+
+    def __init__(self, max_size: int = 1000, ttl: float = 60):
+        self._cache: OrderedDict[str, tuple[T, float]] = OrderedDict()
+        self._max_size = max_size if max_size > 0 else 1000
+        self._ttl = ttl
+
+    def _get_time(self) -> float:
+        """Get current time, using loop.time() if available for better performance."""
+        try:
+            return asyncio.get_event_loop().time()
+        except RuntimeError:
+            return time.monotonic()
+
+    def get(self, key: str) -> T | None:
+        """Get item from cache, returning None if expired or not found."""
+        if key not in self._cache:
+            return None
+
+        value, timestamp = self._cache[key]
+        if self._get_time() - timestamp >= self._ttl:
+            # Expired, remove and return None
+            del self._cache[key]
+            return None
+
+        # Move to end (most recently used)
+        self._cache.move_to_end(key)
+        return value
+
+    def set(self, key: str, value: T) -> None:
+        """Set item in cache, evicting old entries if needed."""
+        # Remove if already exists (to update timestamp)
+        if key in self._cache:
+            del self._cache[key]
+
+        # Evict oldest entries if needed
+        while len(self._cache) >= self._max_size:
+            self._cache.popitem(last=False)  # Remove oldest (FIFO)
+
+        # Add new entry
+        self._cache[key] = (value, self._get_time())
+
+    def size(self) -> int:
+        """Return current cache size."""
+        return len(self._cache)
+
+    def clear(self) -> None:
+        """Clear all entries from cache."""
+        self._cache.clear()
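A short usage sketch of the new LRUCache, assuming the langgraph_api.utils.cache module path recorded below; keys and values are arbitrary. Entries expire ttl seconds after being set, and once max_size is reached the least recently used entry is evicted first:

from langgraph_api.utils.cache import LRUCache

cache: LRUCache[dict] = LRUCache(max_size=2, ttl=60)
cache.set("a", {"user": "alice"})  # arbitrary example values
cache.set("b", {"user": "bob"})
cache.get("a")                     # touching "a" makes "b" the LRU entry
cache.set("c", {"user": "carol"})  # at capacity, so "b" is evicted first

assert cache.get("b") is None
assert cache.size() == 2
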
langgraph_api/worker.py
CHANGED
@@ -307,23 +307,9 @@ async def worker(
             conn, run["thread_id"], run_id, status, checkpoint, exception
         )

-        # delete
-        if not isinstance(exception, ALL_RETRIABLE_EXCEPTIONS):
-
-            await Threads.delete(conn, run["thread_id"])
-        else:
-            try:
-                await Threads.set_status(
-                    conn, run["thread_id"], checkpoint, exception
-                )
-            except HTTPException as e:
-                if e.status_code == 404:
-                    await logger.ainfo(
-                        "Ignoring set_status error for missing thread",
-                        exc=str(e),
-                    )
-                else:
-                    raise
+        # delete thread if it's temporary and we don't want to retry
+        if temporary and not isinstance(exception, ALL_RETRIABLE_EXCEPTIONS):
+            await Threads.delete(conn, run["thread_id"])

         if isinstance(exception, ALL_RETRIABLE_EXCEPTIONS):
             await logger.awarning("RETRYING", exc_info=exception)

{langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/RECORD
CHANGED
@@ -1,13 +1,13 @@
-langgraph_api/__init__.py,sha256=
+langgraph_api/__init__.py,sha256=UX0NFs_UycABtPhNbHs9Deehd2GdHX9DySKa6T9arUg,23
 langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
 langgraph_api/asyncio.py,sha256=qrYEqPRrqtGq7E7KjcMC-ALyN79HkRnmp9rM2TAw9L8,9404
 langgraph_api/cli.py,sha256=-R0fvxg4KNxTkSe7xvDZruF24UMhStJYjpAYlUx3PBk,16018
 langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
-langgraph_api/config.py,sha256
+langgraph_api/config.py,sha256=j4nohZGUGybr36BtuM72K1phOQjUBEx6_2h77BB78Vs,11822
 langgraph_api/cron_scheduler.py,sha256=CiwZ-U4gDOdG9zl9dlr7mH50USUgNB2Fvb8YTKVRBN4,2625
 langgraph_api/errors.py,sha256=zlnl3xXIwVG0oGNKKpXf1an9Rn_SBDHSyhe53hU6aLw,1858
 langgraph_api/graph.py,sha256=pw_3jVZNe0stO5-Y8kLUuC8EJ5tFqdLu9fLpwUz4Hc4,23574
-langgraph_api/http.py,sha256=
+langgraph_api/http.py,sha256=L0leP5fH4NIiFgJd1YPMnTRWqrUUYq_4m5j558UwM5E,5612
 langgraph_api/http_metrics.py,sha256=VgM45yU1FkXuI9CIOE_astxAAu2G-OJ42BRbkcos_CQ,5555
 langgraph_api/logging.py,sha256=LL2LNuMYFrqDhG_KbyKy9AoAPghcdlFj2T50zMyPddk,4182
 langgraph_api/metadata.py,sha256=lfovneEMLA5vTNa61weMkQkiZCtwo-qdwFwqNSj5qVs,6638
@@ -25,13 +25,13 @@ langgraph_api/thread_ttl.py,sha256=-Ox8NFHqUH3wGNdEKMIfAXUubY5WGifIgCaJ7npqLgw,1
 langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/validation.py,sha256=zMuKmwUEBjBgFMwAaeLZmatwGVijKv2sOYtYg7gfRtc,4950
 langgraph_api/webhook.py,sha256=VCJp4dI5E1oSJ15XP34cnPiOi8Ya8Q1BnBwVGadOpLI,1636
-langgraph_api/worker.py,sha256=
+langgraph_api/worker.py,sha256=HHgdwq79gBLFLiIwaFap_TmBigIu3Tfno_SwsjdyjGU,13675
 langgraph_api/api/__init__.py,sha256=WHy6oNLWtH1K7AxmmsU9RD-Vm6WP-Ov16xS8Ey9YCmQ,6090
 langgraph_api/api/assistants.py,sha256=w7nXjEknDVHSuP228S8ZLh4bG0nRGnSwVP9pECQOK90,16247
-langgraph_api/api/mcp.py,sha256=
-langgraph_api/api/meta.py,sha256=
+langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
+langgraph_api/api/meta.py,sha256=fmc7btbtl5KVlU_vQ3Bj4J861IjlqmjBKNtnxSV-S-Q,4198
 langgraph_api/api/openapi.py,sha256=KToI2glOEsvrhDpwdScdBnL9xoLOqkTxx5zKq2pMuKQ,11957
-langgraph_api/api/runs.py,sha256=
+langgraph_api/api/runs.py,sha256=66x7Nywqr1OoMHHlG03OGuLlrbKYbfvLJepYLg6oXeE,19975
 langgraph_api/api/store.py,sha256=TSeMiuMfrifmEnEbL0aObC2DPeseLlmZvAMaMzPgG3Y,5535
 langgraph_api/api/threads.py,sha256=ogMKmEoiycuaV3fa5kpupDohJ7fwUOfVczt6-WSK4FE,9322
 langgraph_api/api/ui.py,sha256=2nlipYV2nUGR4T9pceaAbgN1lS3-T2zPBh7Nv3j9eZQ,2479
@@ -41,7 +41,7 @@ langgraph_api/auth/middleware.py,sha256=jDA4t41DUoAArEY_PNoXesIUBJ0nGhh85QzRdn5E
 langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
 langgraph_api/auth/studio_user.py,sha256=fojJpexdIZYI1w3awiqOLSwMUiK_M_3p4mlfQI0o-BE,454
 langgraph_api/auth/langsmith/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/auth/langsmith/backend.py,sha256=
+langgraph_api/auth/langsmith/backend.py,sha256=bmTbbb4UGfO244sDRStxdB78IdQCJuX08Rhs3Bl7iag,3608
 langgraph_api/auth/langsmith/client.py,sha256=eKchvAom7hdkUXauD8vHNceBDDUijrFgdTV8bKd7x4Q,3998
 langgraph_api/js/.gitignore,sha256=l5yI6G_V6F1600I1IjiUKn87f4uYIrBAYU1MOyBBhg4,59
 langgraph_api/js/.prettierrc,sha256=0es3ovvyNIqIw81rPQsdt1zCQcOdBqyR_DMbFE4Ifms,19
@@ -74,10 +74,11 @@ langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 langgraph_api/models/run.py,sha256=j1s9KRfFXgjKUudB9z7IVJ34Klo85PPeaVFtmWHhEdo,14514
 langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
 langgraph_api/utils/__init__.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
+langgraph_api/utils/cache.py,sha256=SrtIWYibbrNeZzLXLUGBFhJPkMVNQnVxR5giiYGHEfI,1810
 langgraph_api/utils/config.py,sha256=gONI0UsoSpuR72D9lSGAmpr-_iSMDFdD4M_tiXXjmNk,3936
 langgraph_api/utils/future.py,sha256=CGhUb_Ht4_CnTuXc2kI8evEn1gnMKYN0ce9ZyUkW5G4,7251
 langgraph_license/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_license/validation.py,sha256=
+langgraph_license/validation.py,sha256=CU38RUZ5xhP1S8F_y8TNeV6OmtO-tIGjCXbXTwJjJO4,612
 langgraph_runtime/__init__.py,sha256=O4GgSmu33c-Pr8Xzxj_brcK5vkm70iNTcyxEjICFZxA,1075
 langgraph_runtime/checkpoint.py,sha256=J2ePryEyKJWGgxjs27qEHrjj87uPMX3Rqm3hLvG63uk,119
 langgraph_runtime/database.py,sha256=ANEtfm4psr19FtpVcNs5CFWHw-JhfHvIMnkaORa4QSM,117
@@ -90,8 +91,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=p5tn_cNRiFA0HN3L6JfC9Nm16Hgv-BxvAQcJymKhVWI,143296
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
+langgraph_api-0.2.86.dist-info/METADATA,sha256=kYoKk78nUSCT6BKYrjpYPBGF7GFFNODJLvxx6wToGD8,3891
+langgraph_api-0.2.86.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.2.86.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.2.86.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.2.86.dist-info/RECORD,,

langgraph_license/validation.py
CHANGED
@@ -1,5 +1,9 @@
 """Noop license middleware"""

+import structlog
+
+logger = structlog.stdlib.get_logger(__name__)
+

 async def get_license_status() -> bool:
     """Always return true"""
@@ -17,6 +21,7 @@ async def check_license_periodically(_: int = 60):
     If the license ever fails, you could decide to log,
     raise an exception, or attempt a graceful shutdown.
     """
-
+    await logger.ainfo(
         "This is a noop license middleware. No license check is performed."
     )
+    return None

{langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/WHEEL
File without changes

{langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/entry_points.txt
File without changes

{langgraph_api-0.2.83.dist-info → langgraph_api-0.2.86.dist-info}/licenses/LICENSE
File without changes