langgraph-api 0.2.129__py3-none-any.whl → 0.2.132__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of langgraph-api might be problematic.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/assistants.py +6 -5
- langgraph_api/api/meta.py +3 -1
- langgraph_api/api/openapi.py +1 -1
- langgraph_api/api/runs.py +13 -10
- langgraph_api/api/ui.py +2 -0
- langgraph_api/asgi_transport.py +2 -2
- langgraph_api/asyncio.py +10 -8
- langgraph_api/auth/custom.py +9 -4
- langgraph_api/auth/langsmith/client.py +1 -1
- langgraph_api/cli.py +5 -4
- langgraph_api/config.py +2 -0
- langgraph_api/executor_entrypoint.py +23 -0
- langgraph_api/graph.py +25 -9
- langgraph_api/http.py +10 -7
- langgraph_api/http_metrics.py +4 -1
- langgraph_api/js/base.py +0 -3
- langgraph_api/js/build.mts +11 -2
- langgraph_api/js/client.http.mts +2 -0
- langgraph_api/js/client.mts +15 -11
- langgraph_api/js/remote.py +22 -12
- langgraph_api/js/src/preload.mjs +9 -1
- langgraph_api/js/src/utils/files.mts +5 -2
- langgraph_api/js/sse.py +1 -1
- langgraph_api/logging.py +3 -3
- langgraph_api/middleware/http_logger.py +4 -3
- langgraph_api/models/run.py +20 -15
- langgraph_api/patch.py +2 -2
- langgraph_api/queue_entrypoint.py +33 -18
- langgraph_api/route.py +7 -1
- langgraph_api/schema.py +20 -1
- langgraph_api/serde.py +32 -5
- langgraph_api/server.py +5 -3
- langgraph_api/state.py +8 -8
- langgraph_api/store.py +1 -1
- langgraph_api/stream.py +35 -20
- langgraph_api/traceblock.py +1 -1
- langgraph_api/utils/__init__.py +21 -5
- langgraph_api/utils/config.py +13 -4
- langgraph_api/utils/future.py +1 -1
- langgraph_api/utils/headers.py +22 -5
- langgraph_api/utils/uuids.py +87 -0
- langgraph_api/webhook.py +20 -20
- langgraph_api/worker.py +36 -9
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/METADATA +2 -2
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/RECORD +50 -48
- openapi.json +2 -2
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/licenses/LICENSE +0 -0
langgraph_api/stream.py
CHANGED
@@ -1,3 +1,4 @@
+import uuid
 from collections.abc import AsyncIterator, Callable
 from contextlib import AsyncExitStack, aclosing, asynccontextmanager
 from functools import lru_cache
@@ -93,21 +94,23 @@ def _preproces_debug_checkpoint_task(task: dict[str, Any]) -> dict[str, Any]:
     return task


-def _preprocess_debug_checkpoint(
+def _preprocess_debug_checkpoint(
+    payload: CheckpointPayload | None,
+) -> dict[str, Any] | None:
     from langgraph_api.state import runnable_config_to_checkpoint

     if not payload:
         return None

-    payload["checkpoint"] = runnable_config_to_checkpoint(payload["config"])
-    payload["parent_checkpoint"] = runnable_config_to_checkpoint(
-        payload["parent_config"] if "parent_config" in payload else None
-    )
-
-    payload["tasks"] = [_preproces_debug_checkpoint_task(t) for t in payload["tasks"]]
-
     # TODO: deprecate the `config`` and `parent_config`` fields
-    return
+    return {
+        **payload,
+        "checkpoint": runnable_config_to_checkpoint(payload["config"]),
+        "parent_checkpoint": runnable_config_to_checkpoint(
+            payload["parent_config"] if "parent_config" in payload else None
+        ),
+        "tasks": [_preproces_debug_checkpoint_task(t) for t in payload["tasks"]],
+    }


 @asynccontextmanager
@@ -216,7 +219,7 @@ async def astream_state(
     if use_astream_events:
         async with (
             stack,
-            aclosing(
+            aclosing(  # type: ignore[invalid-argument-type]
                 graph.astream_events(
                     input,
                     config,
@@ -231,6 +234,7 @@ async def astream_state(
                 event = await wait_if_not_done(anext(stream, sentinel), done)
                 if event is sentinel:
                     break
+                event = cast(dict, event)
                 if event.get("tags") and "langsmith:hidden" in event["tags"]:
                     continue
                 if "messages" in stream_mode and isinstance(graph, BaseRemotePregel):
@@ -251,6 +255,7 @@ async def astream_state(
                 if mode == "debug":
                     if chunk["type"] == "checkpoint":
                         checkpoint = _preprocess_debug_checkpoint(chunk["payload"])
+                        chunk["payload"] = checkpoint
                         on_checkpoint(checkpoint)
                     elif chunk["type"] == "task_result":
                         on_task_result(chunk["payload"])
@@ -261,11 +266,14 @@ async def astream_state(
                     else:
                         yield "messages", chunk
                 else:
-
+                    msg_, meta = cast(
                         tuple[BaseMessage | dict, dict[str, Any]], chunk
                     )
-
-
+                    msg = (
+                        convert_to_messages([msg_])[0]
+                        if isinstance(msg_, dict)
+                        else cast(BaseMessage, msg_)
+                    )
                     if msg.id in messages:
                         messages[msg.id] += msg
                     else:
@@ -323,14 +331,15 @@ async def astream_state(
             if event is sentinel:
                 break
             if subgraphs:
-                ns, mode, chunk = event
+                ns, mode, chunk = cast(tuple[str, str, dict[str, Any]], event)
             else:
-                mode, chunk = event
+                mode, chunk = cast(tuple[str, dict[str, Any]], event)
                 ns = None
             # --- begin shared logic with astream_events ---
             if mode == "debug":
                 if chunk["type"] == "checkpoint":
                     checkpoint = _preprocess_debug_checkpoint(chunk["payload"])
+                    chunk["payload"] = checkpoint
                     on_checkpoint(checkpoint)
                 elif chunk["type"] == "task_result":
                     on_task_result(chunk["payload"])
@@ -341,11 +350,15 @@ async def astream_state(
                 else:
                     yield "messages", chunk
             else:
-
+                msg_, meta = cast(
                     tuple[BaseMessage | dict, dict[str, Any]], chunk
                 )
-
-
+                msg = (
+                    convert_to_messages([msg_])[0]
+                    if isinstance(msg_, dict)
+                    else cast(BaseMessage, msg_)
+                )
+
                 if msg.id in messages:
                     messages[msg.id] += msg
                 else:
@@ -386,6 +399,8 @@ async def astream_state(
             incr_nodes(None, incr=nodes_executed)
         except Exception as e:
             logger.warning(f"Failed to fetch nodes executed for {graph.graph_id}: {e}")
+    else:
+        await logger.adebug("Graph is not an instance of BaseRemotePregel")

     # Get feedback URLs
     if feedback_keys:
@@ -397,7 +412,7 @@ async def astream_state(

 async def consume(
     stream: AnyStream,
-    run_id: str,
+    run_id: str | uuid.UUID,
     resumable: bool = False,
     stream_modes: set[StreamMode] | None = None,
 ) -> None:
@@ -406,7 +421,7 @@ async def consume(
         stream_modes.add("messages")
         stream_modes.add("metadata")

-    async with aclosing(stream):
+    async with aclosing(stream):  # type: ignore[invalid-argument-type]
         try:
             async for mode, payload in stream:
                 await Runs.Stream.publish(
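The key behavioral change above is that _preprocess_debug_checkpoint now builds and returns a new dict (adding checkpoint, parent_checkpoint, and preprocessed tasks) instead of mutating the payload and returning None, and both stream loops write the result back with chunk["payload"] = checkpoint. A minimal sketch of that calling pattern, using simplified stand-in helpers rather than the real langgraph_api functions:

    from typing import Any

    def preprocess_checkpoint(payload: dict[str, Any] | None) -> dict[str, Any] | None:
        # stand-in for _preprocess_debug_checkpoint: return an enriched copy, never mutate
        if not payload:
            return None
        return {
            **payload,
            "checkpoint": payload.get("config"),  # placeholder for runnable_config_to_checkpoint
            "parent_checkpoint": payload.get("parent_config"),
            "tasks": [dict(t) for t in payload.get("tasks", [])],
        }

    chunk = {"type": "checkpoint", "payload": {"config": {"configurable": {}}, "tasks": []}}
    if chunk["type"] == "checkpoint":
        checkpoint = preprocess_checkpoint(chunk["payload"])
        chunk["payload"] = checkpoint  # mirrors the new assignment in both stream loops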
langgraph_api/traceblock.py
CHANGED
langgraph_api/utils/__init__.py
CHANGED
@@ -1,9 +1,9 @@
 import contextvars
 import uuid
-from collections.abc import
+from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
 from datetime import datetime
-from typing import Any, Protocol, TypeAlias, TypeVar
+from typing import Any, Protocol, TypeAlias, TypeVar, cast

 import structlog
 from langgraph_sdk import Auth
@@ -12,6 +12,7 @@ from starlette.exceptions import HTTPException
 from starlette.schemas import BaseSchemaGenerator

 from langgraph_api.auth.custom import SimpleUser
+from langgraph_api.utils.uuids import uuid7

 logger = structlog.stdlib.get_logger(__name__)

@@ -32,7 +33,9 @@ async def with_user(
         yield
     if current is None:
         return
-    set_auth_ctx(
+    set_auth_ctx(
+        cast(BaseUser, current.user), AuthCredentials(scopes=current.permissions)
+    )


 def set_auth_ctx(
@@ -99,7 +102,7 @@ def validate_uuid(uuid_str: str, invalid_uuid_detail: str | None) -> uuid.UUID:


 def next_cron_date(schedule: str, base_time: datetime) -> datetime:
-    import croniter
+    import croniter  # type: ignore[unresolved-import]

     cron_iter = croniter.croniter(schedule, base_time)
     return cron_iter.get_next(datetime)
@@ -130,7 +133,7 @@ class SchemaGenerator(BaseSchemaGenerator):


 async def get_pagination_headers(
-    resource:
+    resource: AsyncIterator[T],
    next_offset: int | None,
    offset: int,
 ) -> tuple[list[T], dict[str, str]]:
@@ -143,3 +146,16 @@ async def get_pagination_headers(
         "X-Pagination-Next": str(next_offset),
     }
     return resources, response_headers
+
+
+__all__ = [
+    "AsyncCursorProto",
+    "AsyncPipelineProto",
+    "AsyncConnectionProto",
+    "fetchone",
+    "validate_uuid",
+    "next_cron_date",
+    "SchemaGenerator",
+    "get_pagination_headers",
+    "uuid7",
+]
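For context on the next_cron_date helper touched above (only its import line changed), the third-party croniter package computes the next fire time of a cron expression relative to a base datetime. A small usage sketch, assuming croniter is installed:

    from datetime import datetime

    import croniter  # third-party dependency, not stdlib

    def next_cron_date(schedule: str, base_time: datetime) -> datetime:
        # same body as the helper in the diff above
        cron_iter = croniter.croniter(schedule, base_time)
        return cron_iter.get_next(datetime)

    print(next_cron_date("*/15 * * * *", datetime(2024, 1, 1, 12, 3)))
    # -> 2024-01-01 12:15:00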
langgraph_api/utils/config.py
CHANGED
@@ -7,9 +7,10 @@ from collections import ChainMap
 from concurrent.futures import Executor
 from contextvars import copy_context
 from os import getenv
-from typing import Any, ParamSpec, TypeVar
+from typing import Any, ParamSpec, TypeVar

 from langgraph.constants import CONF
+from typing_extensions import TypedDict

 if typing.TYPE_CHECKING:
     from langchain_core.runnables import RunnableConfig
@@ -19,7 +20,7 @@ try:
         var_child_runnable_config,
     )
 except ImportError:
-    var_child_runnable_config = None
+    var_child_runnable_config = None  # type: ignore[invalid-assignment]

 CONFIG_KEYS = [
     "tags",
@@ -52,6 +53,14 @@ def _is_not_empty(value: Any) -> bool:
     return value is not None


+class _Config(TypedDict):
+    tags: list[str]
+    metadata: ChainMap
+    callbacks: None
+    recursion_limit: int
+    configurable: dict[str, Any]
+
+
 def ensure_config(*configs: RunnableConfig | None) -> RunnableConfig:
     """Return a config with all keys, merging any provided configs.

@@ -61,7 +70,7 @@ def ensure_config(*configs: RunnableConfig | None) -> RunnableConfig:
     Returns:
         RunnableConfig: The merged and ensured config.
     """
-    empty =
+    empty = _Config(
         tags=[],
         metadata=ChainMap(),
         callbacks=None,
@@ -84,7 +93,7 @@ def ensure_config(*configs: RunnableConfig | None) -> RunnableConfig:
     for k, v in config.items():
         if _is_not_empty(v) and k in CONFIG_KEYS:
             if k == CONF:
-                empty[k] =
+                empty[k] = v.copy()  # type: ignore
             else:
                 empty[k] = v  # type: ignore[literal-required]
     for k, v in config.items():
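The new _Config TypedDict gives the starting config a concrete type, and empty[k] = v.copy() keeps the caller's configurable dict from being mutated during the merge. A simplified, self-contained sketch of that merge pattern (not the real ensure_config; the default recursion_limit value here is an assumption):

    from collections import ChainMap
    from typing import Any

    CONFIG_KEYS = ["tags", "metadata", "callbacks", "recursion_limit", "configurable"]
    CONF = "configurable"

    def merge_configs(*configs: dict[str, Any] | None) -> dict[str, Any]:
        empty: dict[str, Any] = {
            "tags": [],
            "metadata": ChainMap(),
            "callbacks": None,
            "recursion_limit": 25,  # assumed default, for illustration only
            "configurable": {},
        }
        for config in configs:
            if not config:
                continue
            for k, v in config.items():
                if v is not None and k in CONFIG_KEYS:
                    # copy the configurable dict so later mutation stays local
                    empty[k] = v.copy() if k == CONF else v
        return empty

    print(merge_configs({"tags": ["a"]}, {"configurable": {"thread_id": "t1"}}))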
langgraph_api/utils/future.py
CHANGED
@@ -167,7 +167,7 @@ def _ensure_future(
         elif EAGER_NOT_SUPPORTED or lazy:
             return loop.create_task(coro_or_future, name=name, context=context)
         else:
-            return asyncio.eager_task_factory(
+            return asyncio.eager_task_factory(  # type: ignore[unresolved-attribute]
                 loop, coro_or_future, name=name, context=context
             )
     except RuntimeError:
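The type: ignore added above is presumably needed because asyncio.eager_task_factory only exists on Python 3.12+, so static checkers targeting older versions cannot resolve the attribute. A minimal sketch of the documented idiom on 3.12+ (independent of this package):

    import asyncio

    async def work() -> int:
        return 42

    async def main() -> None:
        loop = asyncio.get_running_loop()
        # tasks created after this run eagerly up to their first suspension point
        loop.set_task_factory(asyncio.eager_task_factory)
        print(await asyncio.create_task(work(), name="eager-demo"))

    asyncio.run(main())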
langgraph_api/utils/headers.py
CHANGED
@@ -24,15 +24,15 @@ def translate_pattern(pat: str) -> re.Pattern[str]:


 @functools.lru_cache(maxsize=1)
-def get_header_patterns(
-
-]:
+def get_header_patterns(
+    key: str,
+) -> tuple[list[re.Pattern[str]] | None, list[re.Pattern[str]] | None]:
     """Get the configured header include/exclude patterns."""
     from langgraph_api import config

     if not config.HTTP_CONFIG:
         return None, None
-    configurable = config.HTTP_CONFIG.get(
+    configurable = config.HTTP_CONFIG.get(key)
     if not configurable:
         return None, None
     header_includes = configurable.get("includes") or configurable.get("include") or []
@@ -59,8 +59,25 @@ def should_include_header(key: str) -> bool:
     Returns:
         True if the header should be included, False otherwise
     """
-    include_patterns, exclude_patterns = get_header_patterns()
+    include_patterns, exclude_patterns = get_header_patterns("configurable_headers")

+    return pattern_matches(key, include_patterns, exclude_patterns)
+
+
+@functools.lru_cache(maxsize=512)
+def should_include_header_in_logs(key: str) -> bool:
+    """Check if header should be included in logs specifically."""
+
+    include_patterns, exclude_patterns = get_header_patterns("logging_headers")
+
+    return pattern_matches(key, include_patterns, exclude_patterns)
+
+
+def pattern_matches(
+    key: str,
+    include_patterns: list[re.Pattern[str]] | None,
+    exclude_patterns: list[re.Pattern[str]] | None,
+) -> bool:
     # Handle configurable behavior
     if exclude_patterns and any(pattern.match(key) for pattern in exclude_patterns):
         return False
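The refactor above splits the include/exclude decision into a reusable pattern_matches so that the configurable_headers and new logging_headers settings share it. Only the exclude-first check is visible in the hunk; the include handling in this sketch is an assumption for illustration:

    import re

    def pattern_matches(
        key: str,
        include_patterns: list[re.Pattern[str]] | None,
        exclude_patterns: list[re.Pattern[str]] | None,
    ) -> bool:
        # excludes win first, as in the diff
        if exclude_patterns and any(p.match(key) for p in exclude_patterns):
            return False
        if include_patterns:  # assumed: must match an include when includes are set
            return any(p.match(key) for p in include_patterns)
        return True  # assumed default when nothing is configured

    includes = [re.compile(r"x-.*")]
    excludes = [re.compile(r"x-internal-.*")]
    print(pattern_matches("x-request-id", includes, excludes))      # True
    print(pattern_matches("x-internal-token", includes, excludes))  # False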
langgraph_api/utils/uuids.py
ADDED
@@ -0,0 +1,87 @@
+import os
+import time
+from uuid import UUID, SafeUUID
+
+_last_timestamp_v7 = None
+_last_counter_v7 = 0  # 42-bit counter
+_RFC_4122_VERSION_7_FLAGS = (7 << 76) | (0x8000 << 48)
+
+
+def _uuid7_get_counter_and_tail():
+    rand = int.from_bytes(os.urandom(10))
+    # 42-bit counter with MSB set to 0
+    counter = (rand >> 32) & 0x1FF_FFFF_FFFF
+    # 32-bit random data
+    tail = rand & 0xFFFF_FFFF
+    return counter, tail
+
+
+def _from_int(value: int) -> UUID:
+    uid = object.__new__(UUID)
+    object.__setattr__(uid, "int", value)
+    object.__setattr__(uid, "is_safe", SafeUUID.unknown)
+    return uid
+
+
+def uuid7():
+    """Generate a UUID from a Unix timestamp in milliseconds and random bits.
+
+    UUIDv7 objects feature monotonicity within a millisecond.
+    """
+    # --- 48 ---   -- 4 --   --- 12 ---   -- 2 --   --- 30 ---   - 32 -
+    # unix_ts_ms | version | counter_hi | variant | counter_lo | random
+    #
+    # 'counter = counter_hi | counter_lo' is a 42-bit counter constructed
+    # with Method 1 of RFC 9562, §6.2, and its MSB is set to 0.
+    #
+    # 'random' is a 32-bit random value regenerated for every new UUID.
+    #
+    # If multiple UUIDs are generated within the same millisecond, the LSB
+    # of 'counter' is incremented by 1. When overflowing, the timestamp is
+    # advanced and the counter is reset to a random 42-bit integer with MSB
+    # set to 0.
+
+    global _last_timestamp_v7
+    global _last_counter_v7
+
+    nanoseconds = time.time_ns()
+    timestamp_ms = nanoseconds // 1_000_000
+
+    if _last_timestamp_v7 is None or timestamp_ms > _last_timestamp_v7:
+        counter, tail = _uuid7_get_counter_and_tail()
+    else:
+        if timestamp_ms < _last_timestamp_v7:
+            timestamp_ms = _last_timestamp_v7 + 1
+        # advance the 42-bit counter
+        counter = _last_counter_v7 + 1
+        if counter > 0x3FF_FFFF_FFFF:
+            # advance the 48-bit timestamp
+            timestamp_ms += 1
+            counter, tail = _uuid7_get_counter_and_tail()
+        else:
+            # 32-bit random data
+            tail = int.from_bytes(os.urandom(4))
+
+    unix_ts_ms = timestamp_ms & 0xFFFF_FFFF_FFFF
+    counter_msbs = counter >> 30
+    # keep 12 counter's MSBs and clear variant bits
+    counter_hi = counter_msbs & 0x0FFF
+    # keep 30 counter's LSBs and clear version bits
+    counter_lo = counter & 0x3FFF_FFFF
+    # ensure that the tail is always a 32-bit integer (by construction,
+    # it is already the case, but future interfaces may allow the user
+    # to specify the random tail)
+    tail &= 0xFFFF_FFFF
+
+    int_uuid_7 = unix_ts_ms << 80
+    int_uuid_7 |= counter_hi << 64
+    int_uuid_7 |= counter_lo << 32
+    int_uuid_7 |= tail
+    # by construction, the variant and version bits are already cleared
+    int_uuid_7 |= _RFC_4122_VERSION_7_FLAGS
+    res = _from_int(int_uuid_7)
+
+    # defer global update until all computations are done
+    _last_timestamp_v7 = timestamp_ms
+    _last_counter_v7 = counter
+    return res
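The new module vendors a UUIDv7 generator (it appears to closely follow the CPython 3.14 uuid.uuid7 implementation of RFC 9562). Because the top 48 bits carry a millisecond timestamp followed by a per-millisecond counter, later calls produce larger values. A short usage sketch:

    from langgraph_api.utils.uuids import uuid7

    a = uuid7()
    b = uuid7()
    print(a.version)  # 7
    print(a < b)      # True: ids sort in generation order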
langgraph_api/webhook.py
CHANGED
@@ -27,23 +27,23 @@ async def call_webhook(result: "WorkerResult") -> None:
     }
     if exception := result["exception"]:
         payload["error"] = str(exception)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    webhook = result.get("webhook")
+    if webhook:
+        try:
+            if webhook.startswith("/"):
+                # Call into this own app
+                webhook_client = get_loopback_client()
+            else:
+                webhook_client = get_http_client()
+            await http_request("POST", webhook, json=payload, client=webhook_client)
+            await logger.ainfo(
+                "Background worker called webhook",
+                webhook=result["webhook"],
+                run_id=result["run"]["run_id"],
+            )
+        except Exception as exc:
+            logger.exception(
+                f"Background worker failed to call webhook {result['webhook']}",
+                exc_info=exc,
+                webhook=result["webhook"],
+            )
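The rewritten call_webhook only does work when a webhook is set, and it picks a client based on the target: relative paths ("/...") are posted back into the same deployment through the loopback client, while absolute URLs go out through the shared HTTP client. A tiny sketch of that selection rule with stand-in client objects:

    def pick_client(webhook: str, loopback_client: object, http_client: object) -> object:
        # a leading "/" means "call back into this app" rather than an external URL
        return loopback_client if webhook.startswith("/") else http_client

    print(pick_client("/internal/hook", "loopback", "external"))            # loopback
    print(pick_client("https://example.com/hook", "loopback", "external"))  # external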
langgraph_api/worker.py
CHANGED
@@ -1,9 +1,9 @@
 import asyncio
 import time
+import uuid
 from collections.abc import AsyncGenerator
 from contextlib import asynccontextmanager
 from datetime import UTC, datetime
-from typing import cast

 import structlog
 from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload
@@ -73,7 +73,7 @@ async def worker(
     if attempt == 1:
         incr_runs()
     checkpoint: CheckpointPayload | None = None
-    exception: Exception | None = None
+    exception: Exception | asyncio.CancelledError | None = None
     status: str | None = None
     webhook = run["kwargs"].get("webhook", None)
     request_created_at: int | None = run["kwargs"]["config"]["configurable"].get(
@@ -131,7 +131,10 @@ async def worker(

     # Wrap the graph execution to separate user errors from server errors
     async def wrap_user_errors(
-        stream: AnyStream,
+        stream: AnyStream,
+        run_id: str | uuid.UUID,
+        resumable: bool,
+        stream_modes: set[StreamMode],
     ):
         try:
             await consume(stream, run_id, resumable, stream_modes)
@@ -177,10 +180,10 @@ async def worker(
             raise RuntimeError(error_message)
         async with set_auth_ctx_for_run(run["kwargs"]):
             if temporary:
-                stream = astream_state(
+                stream = astream_state(run, attempt, done)
             else:
                 stream = astream_state(
-
+                    run,
                     attempt,
                     done,
                     on_checkpoint=on_checkpoint,
@@ -209,6 +212,7 @@ async def worker(

     # handle exceptions and set status
     async with connect() as conn:
+        graph_id = run["kwargs"]["config"]["configurable"]["graph_id"]
         log_info = {
             "run_id": str(run_id),
             "run_attempt": attempt,
@@ -252,7 +256,12 @@ async def worker(
                 )
                 if not temporary:
                     await Threads.set_joint_status(
-                        conn,
+                        conn,
+                        run["thread_id"],
+                        run_id,
+                        status,
+                        graph_id=graph_id,
+                        checkpoint=checkpoint,
                     )
             elif isinstance(exception, TimeoutError):
                 status = "timeout"
@@ -262,7 +271,12 @@ async def worker(
                 )
                 if not temporary:
                     await Threads.set_joint_status(
-                        conn,
+                        conn,
+                        run["thread_id"],
+                        run_id,
+                        status,
+                        graph_id=graph_id,
+                        checkpoint=checkpoint,
                     )
             elif isinstance(exception, UserRollback):
                 status = "rollback"
@@ -273,6 +287,7 @@ async def worker(
                         run["thread_id"],
                         run_id,
                         status,
+                        graph_id=graph_id,
                         checkpoint=checkpoint,
                     )
                 await logger.ainfo(
@@ -297,7 +312,13 @@ async def worker(
                 )
                 if not temporary:
                     await Threads.set_joint_status(
-                        conn,
+                        conn,
+                        run["thread_id"],
+                        run_id,
+                        status,
+                        graph_id,
+                        checkpoint,
+                        exception,
                     )
             elif isinstance(exception, ALL_RETRIABLE_EXCEPTIONS):
                 status = "retry"
@@ -322,7 +343,13 @@ async def worker(
                 )
                 if not temporary:
                     await Threads.set_joint_status(
-                        conn,
+                        conn,
+                        run["thread_id"],
+                        run_id,
+                        status,
+                        graph_id,
+                        checkpoint,
+                        exception,
                     )

         # delete thread if it's temporary and we don't want to retry
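One detail worth noting about the widened annotation exception: Exception | asyncio.CancelledError | None: since Python 3.8, CancelledError derives from BaseException rather than Exception, so a captured cancellation would not be covered by Exception | None alone:

    import asyncio

    print(issubclass(asyncio.CancelledError, Exception))      # False
    print(issubclass(asyncio.CancelledError, BaseException))  # True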
{langgraph_api-0.2.129.dist-info → langgraph_api-0.2.132.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.129
+Version: 0.2.132
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -11,7 +11,7 @@ Requires-Dist: httpx>=0.25.0
 Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
 Requires-Dist: langgraph-checkpoint>=2.0.23
-Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.
+Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.13
 Requires-Dist: langgraph-sdk>=0.2.0
 Requires-Dist: langgraph>=0.4.0
 Requires-Dist: langsmith>=0.3.45