langgraph-api 0.4.20__py3-none-any.whl → 0.4.22__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry; it is provided for informational purposes only.
Note: this version of langgraph-api has been flagged as potentially problematic.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/a2a.py +395 -24
- langgraph_api/api/assistants.py +4 -4
- langgraph_api/api/runs.py +181 -117
- langgraph_api/cli.py +139 -42
- langgraph_api/command.py +1 -1
- langgraph_api/js/package.json +1 -1
- langgraph_api/js/remote.py +1 -1
- langgraph_api/js/yarn.lock +4 -4
- langgraph_api/metadata.py +37 -0
- langgraph_api/models/run.py +1 -1
- langgraph_api/patch.py +3 -1
- langgraph_api/serde.py +2 -1
- langgraph_api/server.py +4 -2
- langgraph_api/stream.py +46 -13
- {langgraph_api-0.4.20.dist-info → langgraph_api-0.4.22.dist-info}/METADATA +2 -2
- {langgraph_api-0.4.20.dist-info → langgraph_api-0.4.22.dist-info}/RECORD +20 -20
- {langgraph_api-0.4.20.dist-info → langgraph_api-0.4.22.dist-info}/WHEEL +0 -0
- {langgraph_api-0.4.20.dist-info → langgraph_api-0.4.22.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.4.20.dist-info → langgraph_api-0.4.22.dist-info}/licenses/LICENSE +0 -0
langgraph_api/api/runs.py
CHANGED
@@ -1,7 +1,7 @@
 import asyncio
-from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator, Awaitable, Callable
 from typing import Literal, cast
-from uuid import uuid4
+from uuid import UUID, uuid4
 
 import orjson
 import structlog
@@ -32,12 +32,116 @@ from langgraph_api.validation import (
 )
 from langgraph_license.validation import plus_features_enabled
 from langgraph_runtime.database import connect
-from langgraph_runtime.ops import Crons, Runs, Threads
+from langgraph_runtime.ops import Crons, Runs, StreamHandler, Threads
 from langgraph_runtime.retry import retry_db
 
 logger = structlog.stdlib.get_logger(__name__)
 
 
+_RunResultFallback = Callable[[], Awaitable[bytes]]
+
+
+def _thread_values_fallback(thread_id: UUID) -> _RunResultFallback:
+    async def fetch_thread_values() -> bytes:
+        async with connect() as conn:
+            thread_iter = await Threads.get(conn, thread_id)
+            try:
+                thread = await anext(thread_iter)
+                if thread["status"] == "error":
+                    return orjson.dumps({"__error__": orjson.Fragment(thread["error"])})
+                if thread["status"] == "interrupted":
+                    # Get an interrupt for the thread. There is the case where there are multiple interrupts for the same run and we may not show the same
+                    # interrupt, but we'll always show one. Long term we should show all of them.
+                    try:
+                        if isinstance(thread["interrupts"], dict):
+                            # Handle in memory format
+                            interrupt_map = thread["interrupts"]
+                        else:
+                            interrupt_map = orjson.loads(thread["interrupts"].buf)
+                        interrupt = [next(iter(interrupt_map.values()))[0]]
+                        return orjson.dumps({"__interrupt__": interrupt})
+                    except Exception:
+                        # No interrupt, but status is interrupted from a before/after block. Default back to values.
+                        pass
+                return cast(bytes, thread["values"])
+            except StopAsyncIteration:
+                await logger.awarning(
+                    f"No checkpoint found for thread {thread_id}",
+                    thread_id=thread_id,
+                )
+                return b"{}"
+
+    return fetch_thread_values
+
+
+def _run_result_body(
+    *,
+    run_id: UUID,
+    thread_id: UUID,
+    sub: StreamHandler,
+    cancel_on_disconnect: bool = False,
+    ignore_404: bool = False,
+    fallback: _RunResultFallback | None = None,
+    cancel_message: str | None = None,
+) -> Callable[[], AsyncIterator[bytes]]:
+    last_chunk = ValueEvent()
+
+    async def consume() -> None:
+        vchunk: bytes | None = None
+        try:
+            async for mode, chunk, _ in Runs.Stream.join(
+                run_id,
+                stream_channel=sub,
+                cancel_on_disconnect=cancel_on_disconnect,
+                thread_id=thread_id,
+                ignore_404=ignore_404,
+            ):
+                if (
+                    mode == b"values"
+                    or mode == b"updates"
+                    and b"__interrupt__" in chunk
+                ):
+                    vchunk = chunk
+                elif mode == b"error":
+                    vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+            if vchunk is not None:
+                last_chunk.set(vchunk)
+            elif fallback is not None:
+                last_chunk.set(await fallback())
+            else:
+                last_chunk.set(b"{}")
+        finally:
+            # Make sure to always clean up the pubsub
+            await sub.__aexit__(None, None, None)
+
+    # keep the connection open by sending whitespace every 5 seconds
+    # leading whitespace will be ignored by json parsers
+    async def body() -> AsyncIterator[bytes]:
+        try:
+            stream = asyncio.create_task(consume())
+            while True:
+                try:
+                    if stream.done():
+                        # raise stream exception if any
+                        stream.result()
+                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                    break
+                except TimeoutError:
+                    yield b"\n"
+        except asyncio.CancelledError:
+            if cancel_message is not None:
+                stream.cancel(cancel_message)
+            else:
+                stream.cancel()
+            await stream
+            raise
+        finally:
+            # Make sure to always clean up the pubsub
+            await sub.__aexit__(None, None, None)
+
+    return body
+
+
 @retry_db
 async def create_run(request: ApiRequest):
     """Create a run."""
@@ -218,56 +322,13 @@ async def wait_run(request: ApiRequest):
             await sub.__aexit__(None, None, None)
             raise
 
-
-
-
-
-
-
-
-            stream_channel=sub,
-            cancel_on_disconnect=on_disconnect == "cancel",
-        ):
-            if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
-                vchunk = chunk
-            elif mode == b"error":
-                vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
-        if vchunk is not None:
-            last_chunk.set(vchunk)
-        else:
-            async with connect() as conn:
-                thread_iter = await Threads.get(conn, thread_id)
-                try:
-                    thread = await anext(thread_iter)
-                    last_chunk.set(thread["values"])
-                except StopAsyncIteration:
-                    await logger.awarning(
-                        f"No checkpoint found for thread {thread_id}",
-                        thread_id=thread_id,
-                    )
-                    last_chunk.set(b"{}")
-
-    # keep the connection open by sending whitespace every 5 seconds
-    # leading whitespace will be ignored by json parsers
-    async def body() -> AsyncIterator[bytes]:
-        try:
-            stream = asyncio.create_task(consume())
-            while True:
-                try:
-                    if stream.done():
-                        # raise stream exception if any
-                        stream.result()
-                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                    break
-                except TimeoutError:
-                    yield b"\n"
-        except asyncio.CancelledError:
-            stream.cancel()
-            await stream
-            raise
-        finally:
-            # Make sure to always clean up the pubsub
-            await sub.__aexit__(None, None, None)
+    body = _run_result_body(
+        run_id=run["run_id"],
+        thread_id=run["thread_id"],
+        sub=sub,
+        cancel_on_disconnect=on_disconnect == "cancel",
+        fallback=_thread_values_fallback(thread_id),
+    )
 
     return StreamingResponse(
         body(),
@@ -305,53 +366,23 @@ async def wait_run_stateless(request: ApiRequest):
             await sub.__aexit__(None, None, None)
             raise
 
-
-
-
-
-        async for mode, chunk, _ in Runs.Stream.join(
-            run["run_id"],
+    async def stateless_fallback() -> bytes:
+        await logger.awarning(
+            "No checkpoint emitted for stateless run",
+            run_id=run["run_id"],
             thread_id=run["thread_id"],
-
-
-
-
-
-
-
-
-
-
-
-
-            await logger.awarning(
-                "No checkpoint emitted for stateless run",
-                run_id=run["run_id"],
-                thread_id=run["thread_id"],
-            )
-            last_chunk.set(b"{}")
-
-    # keep the connection open by sending whitespace every 5 seconds
-    # leading whitespace will be ignored by json parsers
-    async def body() -> AsyncIterator[bytes]:
-        try:
-            stream = asyncio.create_task(consume())
-            while True:
-                try:
-                    if stream.done():
-                        # raise stream exception if any
-                        stream.result()
-                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                    break
-                except TimeoutError:
-                    yield b"\n"
-        except asyncio.CancelledError:
-            stream.cancel("Run stream cancelled")
-            await stream
-            raise
-        finally:
-            # Make sure to always clean up the pubsub
-            await sub.__aexit__(None, None, None)
+        )
+        return b"{}"
+
+    body = _run_result_body(
+        run_id=run["run_id"],
+        thread_id=run["thread_id"],
+        sub=sub,
+        cancel_on_disconnect=on_disconnect == "cancel",
+        ignore_404=True,
+        fallback=stateless_fallback,
+        cancel_message="Run stream cancelled",
+    )
 
     return StreamingResponse(
         body(),
@@ -422,11 +453,23 @@ async def join_run(request: ApiRequest):
     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
     validate_uuid(run_id, "Invalid run ID: must be a UUID")
 
-
-
-
-
-
+    # A touch redundant, but to meet the existing signature of join, we need to throw any 404s before we enter the streaming body
+    await Runs.Stream.check_run_stream_auth(run_id, thread_id)
+    sub = await Runs.Stream.subscribe(run_id, thread_id)
+    body = _run_result_body(
+        run_id=run_id,
+        thread_id=thread_id,
+        sub=sub,
+        fallback=_thread_values_fallback(thread_id),
+    )
+
+    return StreamingResponse(
+        body(),
+        media_type="application/json",
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
     )
 
 
@@ -456,6 +499,10 @@ async def join_run_stream(request: ApiRequest):
 
     return EventSourceResponse(
         body(),
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/stream",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
     )
 
 
@@ -476,19 +523,36 @@ async def cancel_run(
         action_str if action_str in {"interrupt", "rollback"} else "interrupt",
     )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    sub = await Runs.Stream.subscribe(run_id, thread_id) if wait else None
+    try:
+        async with connect() as conn:
+            await Runs.cancel(
+                conn,
+                [run_id],
+                action=action,
+                thread_id=thread_id,
+            )
+    except Exception:
+        if sub is not None:
+            await sub.__aexit__(None, None, None)
+        raise
+    if not wait:
+        return Response(status_code=202)
+
+    body = _run_result_body(
+        run_id=run_id,
+        thread_id=thread_id,
+        sub=sub,
+    )
+
+    return StreamingResponse(
+        body(),
+        media_type="application/json",
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
+    )
 
 
 @retry_db
langgraph_api/cli.py
CHANGED
@@ -11,12 +11,17 @@ from typing import Literal
 from typing_extensions import TypedDict
 
 if typing.TYPE_CHECKING:
+    from packaging.version import Version
+
     from langgraph_api.config import HttpConfig, StoreConfig
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 
+SUPPORT_STATUS = Literal["active", "critical", "eol"]
+
+
 def _get_ls_origin() -> str | None:
     from langsmith.client import Client
     from langsmith.utils import tracing_is_enabled
@@ -121,46 +126,6 @@ class AuthConfig(TypedDict, total=False):
     cache: CacheConfig | None
 
 
-def _check_newer_version(pkg: str, timeout: float = 0.2) -> None:
-    """Log a notice if PyPI reports a newer version."""
-    import importlib.metadata as md
-    import json
-    import urllib.request
-
-    from packaging.version import Version
-
-    thread_logger = logging.getLogger("check_version")
-    if not thread_logger.handlers:
-        handler = logging.StreamHandler()
-        handler.setFormatter(logging.Formatter("%(message)s"))
-        thread_logger.addHandler(handler)
-
-    try:
-        current = Version(md.version(pkg))
-        with urllib.request.urlopen(
-            f"https://pypi.org/pypi/{pkg}/json", timeout=timeout
-        ) as resp:
-            latest_str = json.load(resp)["info"]["version"]
-        latest = Version(latest_str)
-        if latest > current:
-            thread_logger.info(
-                "🔔 A newer version of %s is available: %s → %s (pip install -U %s)",
-                pkg,
-                current,
-                latest,
-                pkg,
-            )
-
-    except Exception:
-        pass
-
-    except RuntimeError:
-        thread_logger.info(
-            f"Failed to check for newer version of {pkg}."
-            " To disable version checks, set LANGGRAPH_NO_VERSION_CHECK=true"
-        )
-
-
 def run_server(
     host: str = "127.0.0.1",
     port: int = 2024,
@@ -291,7 +256,7 @@ def run_server(
         if k in to_patch:
             logger.debug(f"Skipping loaded env var {k}={v}")
             continue
-        to_patch[k] = v
+        to_patch[k] = v  # type: ignore[invalid-assignment]
     with patch_environment(
        **to_patch,
    ):
@@ -362,8 +327,12 @@ For production use, please use LangGraph Platform.
         threading.Thread(target=_open_browser, daemon=True).start()
     nvc = os.getenv("LANGGRAPH_NO_VERSION_CHECK")
     if nvc is None or nvc.lower() not in ("true", "1"):
+        from langgraph_api import __version__
+
         threading.Thread(
-            target=_check_newer_version,
+            target=_check_newer_version,
+            args=("langgraph-api", __version__),
+            daemon=True,
         ).start()
     supported_kwargs = {
         k: v
@@ -471,5 +440,133 @@ def main():
     )
 
 
+def _check_newer_version(pkg: str, current_version: str, timeout: float = 0.5) -> None:
+    """Check PyPI for newer versions and log support status.
+
+    Critical = one minor behind on same major, OR latest minor of previous major while latest is X.0.*
+    EOL = two+ minors behind on same major, OR any previous major after X.1.*
+    """
+    import json
+    import urllib.request
+
+    from packaging.version import InvalidVersion, Version
+
+    log = logging.getLogger("version_check")
+    if not log.handlers:
+        h = logging.StreamHandler()
+        h.setFormatter(logging.Formatter("%(message)s"))
+        log.addHandler(h)
+
+    if os.getenv("LANGGRAPH_NO_VERSION_CHECK", "").lower() in ("true", "1"):
+        return
+
+    def _parse(v: str) -> Version | None:
+        try:
+            return Version(v)
+        except InvalidVersion:
+            return None
+
+    try:
+        current = Version(current_version)
+    except InvalidVersion:
+        log.info(
+            f"[version] Could not parse installed version {current_version!r}. Skipping support check."
+        )
+        return
+
+    try:
+        with urllib.request.urlopen(
+            f"https://pypi.org/pypi/{pkg}/json", timeout=timeout
+        ) as resp:
+            payload = json.load(resp)
+        latest_str = payload["info"]["version"]
+        latest = Version(latest_str)
+        releases: dict[str, list[dict]] = payload.get("releases", {})
+    except Exception:
+        log.debug("Failed to retrieve latest version info for %s", pkg)
+        return
+    prev_major_latest_minor: Version | None = None
+    if latest.major > 0:
+        pm = latest.major - 1
+        prev_major_versions = [
+            v
+            for s in releases
+            if (v := _parse(s)) is not None and not v.is_prerelease and v.major == pm
+        ]
+        if prev_major_versions:
+            prev_major_latest_minor = max(
+                prev_major_versions, key=lambda v: (v.major, v.minor, v.micro)
+            )
+
+    if latest > current and not current.is_prerelease:
+        log.info(
+            "[version] A newer version of %s is available: %s → %s (pip install -U %s)",
+            pkg,
+            current,
+            latest,
+            pkg,
+        )
+
+    level = _support_level(current, latest, prev_major_latest_minor)
+    changelog = (
+        "https://docs.langchain.com/langgraph-platform/langgraph-server-changelog"
+    )
+
+    if level == "critical":
+        # Distinguish same-major vs cross-major grace in the wording
+        if current.major == latest.major and current.minor == latest.minor - 1:
+            tail = "You are one minor version behind the latest (%d.%d.x).\n"
+        else:
+            tail = "You are on the latest minor of the previous major while a new major (%d.%d.x) just released.\n"
+        log.info(
+            "⚠️ [support] %s %s is in Critical support.\n"
+            "Only critical security and installation fixes are provided.\n"
+            + tail
+            + "Please plan an upgrade soon. See changelog: %s",
+            pkg,
+            current,
+            latest.major,
+            latest.minor,
+            changelog,
+        )
+    elif level == "eol":
+        log.info(
+            "⚠️ [support] %s %s is End of Life.\n"
+            "No bug fixes or security updates will be provided.\n"
+            "You are two or more minor versions behind the latest (%d.%d.x).\n"
+            "You should upgrade immediately. See changelog: %s",
+            pkg,
+            current,
+            latest.major,
+            latest.minor,
+            changelog,
+        )
+
+
+def _support_level(
+    cur: "Version", lat: "Version", prev_major_latest_minor: "Version | None"
+) -> SUPPORT_STATUS:
+    if cur.major > lat.major:
+        return "active"
+    if cur.major == lat.major:
+        if cur.minor == lat.minor:
+            return "active"
+        if cur.minor == lat.minor - 1:
+            return "critical"
+        if cur.minor <= lat.minor - 2:
+            return "eol"
+        return "active"
+
+    if cur.major == lat.major - 1 and lat.minor == 0:
+        if (
+            prev_major_latest_minor is not None
+            and cur.minor == prev_major_latest_minor.minor
+        ):
+            return "critical"
+        return "eol"
+
+    return "eol"
+
+
 if __name__ == "__main__":
     main()
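Illustrative only: how the support rules documented in the new _check_newer_version docstring map concrete versions to a status, assuming the private _support_level helper shown above is imported directly for demonstration.

from packaging.version import Version

from langgraph_api.cli import _support_level  # private helper from the diff above

# Same major: matching minor is active, one minor behind is critical, two or more is EOL.
assert _support_level(Version("0.4.5"), Version("0.4.22"), None) == "active"
assert _support_level(Version("0.3.9"), Version("0.4.22"), None) == "critical"
assert _support_level(Version("0.2.1"), Version("0.4.22"), None) == "eol"

# Previous major while the latest is still X.0.*: only its latest minor stays in critical support.
assert _support_level(Version("1.9.0"), Version("2.0.1"), Version("1.9.3")) == "critical"
assert _support_level(Version("1.8.0"), Version("2.0.1"), Version("1.9.3")) == "eol"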
langgraph_api/command.py
CHANGED
@@ -21,7 +21,7 @@ def map_cmd(cmd: RunCommand) -> Command:
         update=update,
         goto=(
             [
-                it if isinstance(it, str) else Send(it["node"], it["input"])
+                it if isinstance(it, str) else Send(it["node"], it["input"])  # type: ignore[non-subscriptable]
                 for it in goto
             ]
             if goto
langgraph_api/js/package.json
CHANGED
langgraph_api/js/remote.py
CHANGED
@@ -239,7 +239,7 @@ class RemotePregel(BaseRemotePregel):
                 tuple(task["path"]) if task.get("path") else tuple(),
                 # TODO: figure out how to properly deserialise errors
                 task.get("error"),
-                tuple(interrupts),
+                tuple(interrupts),  # type: ignore[arg-type]
                 state,
                 task.get("result"),
             )
langgraph_api/js/yarn.lock
CHANGED
@@ -974,10 +974,10 @@ has-flag@^4.0.0:
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
   integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
 
-hono@^4.5.4:
-  version "4.
-  resolved "https://registry.yarnpkg.com/hono/-/hono-4.
-  integrity sha512-
+hono@^4.5.4, hono@^4.9.7:
+  version "4.9.7"
+  resolved "https://registry.yarnpkg.com/hono/-/hono-4.9.7.tgz#8ac244477397de71d7d3d393fad129209b5b631e"
+  integrity sha512-t4Te6ERzIaC48W3x4hJmBwgNlLhmiEdEE5ViYb02ffw4ignHNHa5IBtPjmbKstmtKa8X6C35iWwK4HaqvrzG9w==
 
 icss-utils@^5.0.0, icss-utils@^5.1.0:
   version "5.1.0"
langgraph_api/metadata.py
CHANGED
@@ -7,6 +7,8 @@ import langgraph.version
 import orjson
 import structlog
 
+import langgraph_api.config as config
+from langgraph_api.auth.custom import get_auth_instance
 from langgraph_api.config import (
     LANGGRAPH_CLOUD_LICENSE_KEY,
     LANGSMITH_API_KEY,
@@ -17,7 +19,9 @@ from langgraph_api.config import (
     USES_STORE_TTL,
     USES_THREAD_TTL,
 )
+from langgraph_api.graph import GRAPHS, is_js_graph
 from langgraph_api.http import http_request
+from langgraph_api.js.base import is_js_path
 from langgraph_license.validation import plus_features_enabled
 
 logger = structlog.stdlib.get_logger(__name__)
@@ -72,6 +76,36 @@ if LANGSMITH_AUTH_ENDPOINT:
     )
 
 
+def _lang_usage_metadata() -> tuple[dict[str, str], dict[str, int]]:
+    js_graph_count = sum(1 for graph_id in GRAPHS if is_js_graph(graph_id))
+    py_graph_count = len(GRAPHS) - js_graph_count
+
+    auth_instance = get_auth_instance()
+    custom_auth_enabled = auth_instance is not None
+    custom_js_auth_enabled = auth_instance == "js"
+
+    js_proxy_middleware_enabled = False
+    if (
+        config.HTTP_CONFIG
+        and (app := config.HTTP_CONFIG.get("app"))
+        and isinstance(app, str)
+    ):
+        app_path = app.split(":", 1)[0]  # type: ignore[possibly-unresolved-reference]
+        js_proxy_middleware_enabled = is_js_path(app_path)
+
+    tags = {
+        "langgraph.platform.uses_custom_auth": str(custom_auth_enabled),
+        "langgraph.platform.uses_js_custom_auth": str(custom_js_auth_enabled),
+        "langgraph.platform.uses_js_proxy_middleware": str(js_proxy_middleware_enabled),
+    }
+    measures = {
+        "langgraph.platform.py_graphs": py_graph_count,
+        "langgraph.platform.js_graphs": js_graph_count,
+    }
+
+    return tags, measures
+
+
 def incr_runs(*, incr: int = 1) -> None:
     global RUN_COUNTER
     RUN_COUNTER += incr
@@ -111,6 +145,7 @@ async def metadata_loop() -> None:
         RUN_COUNTER = 0
         NODE_COUNTER = 0
         FROM_TIMESTAMP = to_timestamp
+        usage_tags, usage_measures = _lang_usage_metadata()
 
         base_payload = {
             "from_timestamp": from_timestamp,
@@ -131,10 +166,12 @@ async def metadata_loop() -> None:
                 "user_app.uses_custom_auth": str(USES_CUSTOM_AUTH),
                 "user_app.uses_thread_ttl": str(USES_THREAD_TTL),
                 "user_app.uses_store_ttl": str(USES_STORE_TTL),
+                **usage_tags,
             },
             "measures": {
                 "langgraph.platform.runs": runs,
                 "langgraph.platform.nodes": nodes,
+                **usage_measures,
            },
            "logs": [],
        }