langgraph-api 0.4.1__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/__init__.py +111 -51
- langgraph_api/api/a2a.py +1610 -0
- langgraph_api/api/assistants.py +212 -89
- langgraph_api/api/mcp.py +3 -3
- langgraph_api/api/meta.py +52 -28
- langgraph_api/api/openapi.py +27 -17
- langgraph_api/api/profile.py +108 -0
- langgraph_api/api/runs.py +342 -195
- langgraph_api/api/store.py +19 -2
- langgraph_api/api/threads.py +209 -27
- langgraph_api/asgi_transport.py +14 -9
- langgraph_api/asyncio.py +14 -4
- langgraph_api/auth/custom.py +52 -37
- langgraph_api/auth/langsmith/backend.py +4 -3
- langgraph_api/auth/langsmith/client.py +13 -8
- langgraph_api/cli.py +230 -133
- langgraph_api/command.py +5 -3
- langgraph_api/config/__init__.py +532 -0
- langgraph_api/config/_parse.py +58 -0
- langgraph_api/config/schemas.py +431 -0
- langgraph_api/cron_scheduler.py +17 -1
- langgraph_api/encryption/__init__.py +15 -0
- langgraph_api/encryption/aes_json.py +158 -0
- langgraph_api/encryption/context.py +35 -0
- langgraph_api/encryption/custom.py +280 -0
- langgraph_api/encryption/middleware.py +632 -0
- langgraph_api/encryption/shared.py +63 -0
- langgraph_api/errors.py +12 -1
- langgraph_api/executor_entrypoint.py +11 -6
- langgraph_api/feature_flags.py +29 -0
- langgraph_api/graph.py +176 -76
- langgraph_api/grpc/client.py +313 -0
- langgraph_api/grpc/config_conversion.py +231 -0
- langgraph_api/grpc/generated/__init__.py +29 -0
- langgraph_api/grpc/generated/checkpointer_pb2.py +63 -0
- langgraph_api/grpc/generated/checkpointer_pb2.pyi +99 -0
- langgraph_api/grpc/generated/checkpointer_pb2_grpc.py +329 -0
- langgraph_api/grpc/generated/core_api_pb2.py +216 -0
- langgraph_api/grpc/generated/core_api_pb2.pyi +905 -0
- langgraph_api/grpc/generated/core_api_pb2_grpc.py +1621 -0
- langgraph_api/grpc/generated/engine_common_pb2.py +219 -0
- langgraph_api/grpc/generated/engine_common_pb2.pyi +722 -0
- langgraph_api/grpc/generated/engine_common_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.pyi +12 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_durability_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_durability_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_durability_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.pyi +22 -0
- langgraph_api/grpc/generated/enum_run_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.pyi +28 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/errors_pb2.py +39 -0
- langgraph_api/grpc/generated/errors_pb2.pyi +21 -0
- langgraph_api/grpc/generated/errors_pb2_grpc.py +24 -0
- langgraph_api/grpc/ops/__init__.py +370 -0
- langgraph_api/grpc/ops/assistants.py +424 -0
- langgraph_api/grpc/ops/runs.py +792 -0
- langgraph_api/grpc/ops/threads.py +1013 -0
- langgraph_api/http.py +16 -5
- langgraph_api/http_metrics.py +15 -35
- langgraph_api/http_metrics_utils.py +38 -0
- langgraph_api/js/build.mts +1 -1
- langgraph_api/js/client.http.mts +13 -7
- langgraph_api/js/client.mts +2 -5
- langgraph_api/js/package.json +29 -28
- langgraph_api/js/remote.py +56 -30
- langgraph_api/js/src/graph.mts +20 -0
- langgraph_api/js/sse.py +2 -2
- langgraph_api/js/ui.py +1 -1
- langgraph_api/js/yarn.lock +1204 -1006
- langgraph_api/logging.py +29 -2
- langgraph_api/metadata.py +99 -28
- langgraph_api/middleware/http_logger.py +7 -2
- langgraph_api/middleware/private_network.py +7 -7
- langgraph_api/models/run.py +54 -93
- langgraph_api/otel_context.py +205 -0
- langgraph_api/patch.py +5 -3
- langgraph_api/queue_entrypoint.py +154 -65
- langgraph_api/route.py +47 -5
- langgraph_api/schema.py +88 -10
- langgraph_api/self_hosted_logs.py +124 -0
- langgraph_api/self_hosted_metrics.py +450 -0
- langgraph_api/serde.py +79 -37
- langgraph_api/server.py +138 -60
- langgraph_api/state.py +4 -3
- langgraph_api/store.py +25 -16
- langgraph_api/stream.py +80 -29
- langgraph_api/thread_ttl.py +31 -13
- langgraph_api/timing/__init__.py +25 -0
- langgraph_api/timing/profiler.py +200 -0
- langgraph_api/timing/timer.py +318 -0
- langgraph_api/utils/__init__.py +53 -8
- langgraph_api/utils/cache.py +47 -10
- langgraph_api/utils/config.py +2 -1
- langgraph_api/utils/errors.py +77 -0
- langgraph_api/utils/future.py +10 -6
- langgraph_api/utils/headers.py +76 -2
- langgraph_api/utils/retriable_client.py +74 -0
- langgraph_api/utils/stream_codec.py +315 -0
- langgraph_api/utils/uuids.py +29 -62
- langgraph_api/validation.py +9 -0
- langgraph_api/webhook.py +120 -6
- langgraph_api/worker.py +55 -24
- {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/METADATA +16 -8
- langgraph_api-0.7.3.dist-info/RECORD +168 -0
- {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/WHEEL +1 -1
- langgraph_runtime/__init__.py +1 -0
- langgraph_runtime/routes.py +11 -0
- logging.json +1 -3
- openapi.json +839 -478
- langgraph_api/config.py +0 -387
- langgraph_api/js/isolate-0x130008000-46649-46649-v8.log +0 -4430
- langgraph_api/js/isolate-0x138008000-44681-44681-v8.log +0 -4430
- langgraph_api/js/package-lock.json +0 -3308
- langgraph_api-0.4.1.dist-info/RECORD +0 -107
- /langgraph_api/{utils.py → grpc/__init__.py} +0 -0
- {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import base64
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
|
|
6
|
+
import orjson
|
|
7
|
+
import structlog
|
|
8
|
+
|
|
9
|
+
PROTOCOL_VERSION = 1
|
|
10
|
+
"""
|
|
11
|
+
---
|
|
12
|
+
Version 1:
|
|
13
|
+
Byte Offsets
|
|
14
|
+
0 1 3 5 5+N 5+N+M
|
|
15
|
+
+--------+------------------+----------------+------------------+------------------+--------------------+
|
|
16
|
+
| version| stream_id_len | event_len | stream_id | event | message |
|
|
17
|
+
+--------+------------------+----------------+------------------+------------------+--------------------+
|
|
18
|
+
1 B 2 B 2 B N B M B variable
|
|
19
|
+
|
|
20
|
+
---- Old (to be dropped soon / multiple formats)
|
|
21
|
+
Version 0 (old):
|
|
22
|
+
1) b"$:" + <stream_id> + b"$:" + <event> + b"$:" + <raw_json>
|
|
23
|
+
2) b"$:" + <stream_id> + b"$:" + <raw_json>
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
BYTE_MASK = 0xFF
|
|
27
|
+
HEADER_LEN = 5
|
|
28
|
+
logger = structlog.stdlib.get_logger(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class StreamFormatError(ValueError):
    """Raised when a stream frame fails validation.

    Covers bad protocol versions, truncated frames, and empty event names.
    Subclasses ValueError so generic callers may treat it as a value error.
    """
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass(slots=True)
|
|
36
|
+
class StreamPacket:
|
|
37
|
+
version: int
|
|
38
|
+
event: memoryview | bytes
|
|
39
|
+
message: memoryview | bytes
|
|
40
|
+
stream_id: memoryview | bytes | None
|
|
41
|
+
|
|
42
|
+
@property
|
|
43
|
+
def event_bytes(self) -> bytes:
|
|
44
|
+
return (
|
|
45
|
+
self.event.tobytes() if isinstance(self.event, memoryview) else self.event
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
@property
|
|
49
|
+
def message_bytes(self) -> bytes:
|
|
50
|
+
return (
|
|
51
|
+
self.message.tobytes()
|
|
52
|
+
if isinstance(self.message, memoryview)
|
|
53
|
+
else self.message
|
|
54
|
+
)
|
|
55
|
+
|
|
56
|
+
@property
|
|
57
|
+
def resumable(self) -> bool:
|
|
58
|
+
return self.stream_id is not None
|
|
59
|
+
|
|
60
|
+
@property
|
|
61
|
+
def stream_id_bytes(self) -> bytes | None:
|
|
62
|
+
if self.stream_id is None:
|
|
63
|
+
return None
|
|
64
|
+
if isinstance(self.stream_id, bytes):
|
|
65
|
+
return self.stream_id
|
|
66
|
+
return self.stream_id.tobytes()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class StreamCodec:
    """Codec for encoding and decoding stream packets.

    Version-1 frame layout (big-endian u16 lengths):

        byte 0      protocol version
        bytes 1-2   stream_id length (0 for non-resumable frames)
        bytes 3-5   event length
        then        stream_id (optional), event, message
    """

    __slots__ = ("_version",)

    def __init__(self, *, protocol_version: int = PROTOCOL_VERSION) -> None:
        # The version occupies a single byte on the wire.
        self._version = protocol_version & BYTE_MASK

    def encode(
        self,
        event: str,
        message: bytes,
        *,
        stream_id: str | None = None,
    ) -> bytes:
        """Serialize a single frame.

        Args:
            event: Non-empty event name (at most 65535 UTF-8 bytes).
            message: Raw payload, appended verbatim after the event.
            stream_id: Optional stream id; presence marks the frame resumable.

        Raises:
            StreamFormatError: If the event is empty or a field is too long.
        """
        if not event:
            raise StreamFormatError("event cannot be empty")
        event_bytes = event.encode("utf-8")
        if len(event_bytes) > 0xFFFF:
            raise StreamFormatError("event exceeds 65535 bytes; cannot encode")
        # NOTE: a non-empty str always encodes to non-empty UTF-8 bytes, so no
        # second emptiness check is needed here.

        if stream_id:
            # It's a resumable stream
            stream_id_bytes = stream_id.encode("utf-8")
            if len(stream_id_bytes) > 0xFFFF:
                raise StreamFormatError("stream_id exceeds 65535 bytes; cannot encode")
        else:
            stream_id_bytes = None
        stream_id_len = len(stream_id_bytes) if stream_id_bytes else 0
        event_len = len(event_bytes)
        frame = bytearray(HEADER_LEN + stream_id_len + event_len + len(message))
        frame[0] = self._version
        frame[1:3] = stream_id_len.to_bytes(2, "big")
        frame[3:5] = event_len.to_bytes(2, "big")

        cursor = HEADER_LEN
        if stream_id_bytes is not None:
            frame[cursor : cursor + stream_id_len] = stream_id_bytes
            cursor += stream_id_len

        frame[cursor : cursor + event_len] = event_bytes
        cursor += event_len
        frame[cursor:] = message
        return bytes(frame)

    def decode(self, data: bytes | bytearray | memoryview) -> StreamPacket:
        """Parse a frame into a zero-copy StreamPacket.

        Raises:
            StreamFormatError: On version mismatch, truncation, or empty event.
        """
        view = data if isinstance(data, memoryview) else memoryview(data)
        if len(view) < HEADER_LEN:
            raise StreamFormatError("frame too short")

        version = view[0]
        if version != self._version:
            raise StreamFormatError(f"unsupported protocol version: {version}")

        stream_id_len = int.from_bytes(view[1:3], "big")
        event_len = int.from_bytes(view[3:5], "big")
        if event_len == 0:
            raise StreamFormatError("event cannot be empty")
        offset = HEADER_LEN
        if stream_id_len > 0:
            # Validate before slicing: a memoryview slice past the end would
            # silently yield a short stream_id instead of failing.
            if len(view) < offset + stream_id_len:
                raise StreamFormatError("truncated stream_id payload")
            stream_id_view = view[offset : offset + stream_id_len]
            offset += stream_id_len
        else:
            # Not resumable
            stream_id_view = None
        if len(view) < offset + event_len:
            raise StreamFormatError("truncated event payload")
        event_view = view[offset : offset + event_len]
        offset += event_len
        message_view = view[offset:]
        return StreamPacket(
            version=version,
            event=event_view,
            message=message_view,
            stream_id=stream_id_view,
        )

    def decode_safe(self, data: bytes | bytearray | memoryview) -> StreamPacket | None:
        """Like decode(), but log and return None instead of raising."""
        try:
            return self.decode(data)
        except StreamFormatError as e:
            logger.warning(f"Failed to decode as version {self._version}", error=e)
            return None
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
STREAM_CODEC = StreamCodec()
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def decode_stream_message(
    data: bytes | bytearray | memoryview,
    *,
    channel: bytes | str | None = None,
) -> StreamPacket:
    """Decode a raw stream message, trying the current format first and then
    the legacy v0 formats.

    Args:
        data: Raw frame bytes.
        channel: Optional pub/sub channel name; used by legacy decoders to
            recover the event name when it is not embedded in the frame.

    Raises:
        StreamFormatError: If no decoder recognizes the payload.
    """
    if isinstance(data, memoryview):
        view = data
    elif isinstance(data, (bytes, bytearray)):
        view = memoryview(data)
    else:
        logger.warning("Unknown type for stream message", type=type(data))
        view = memoryview(bytes(data))

    # Current protocol version
    packet = STREAM_CODEC.decode_safe(view)
    if packet is not None:
        return packet
    logger.debug("Attempting to decode a v0 formatted stream message")
    # Legacy codecs. Yuck. Won't be hit unless you have stale pods running (or for a brief period during upgrade).
    # Schedule for removal in next major release.
    packet = _decode_v0_resumable_format(view, channel)
    if packet is not None:
        return packet

    # Non-resumable format.
    packet = _decode_v0_live_format(view, channel)
    if packet is not None:
        return packet
    raise StreamFormatError("failed to decode stream message")
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
_STREAMING_DELIMITER = b"$:"
|
|
188
|
+
_STREAMING_DELIMITER_LEN = len(_STREAMING_DELIMITER)
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _decode_v0_resumable_format(
    view: memoryview,
    channel: bytes | str | None = None,
) -> StreamPacket | None:
    """Decode the legacy v0 resumable formats:

    1) b"$:" + <stream_id> + b"$:" + <event> + b"$:" + <raw_json>
    2) b"$:" + <stream_id> + b"$:" + <raw_json>

    Returns None when the frame does not match either shape.
    """
    dlen = _STREAMING_DELIMITER_LEN

    # must start with "$:"
    if len(view) < dlen or view[:dlen] != _STREAMING_DELIMITER:
        return None

    # locate "$:<stream_id>$:"
    first = _find_delim(view, dlen, _STREAMING_DELIMITER)
    if first == -1:
        return None
    stream_view = view[dlen:first]

    # Shape 1: an explicit "$:<event>$:" segment follows the stream id.
    second = _find_delim(view, first + dlen, _STREAMING_DELIMITER)
    if second != -1:
        return StreamPacket(
            version=0,
            event=view[first + dlen : second],
            message=view[second + dlen :],
            stream_id=stream_view,
        )

    # Shape 2: no event segment — recover the event name from the channel.
    if isinstance(channel, str):
        chan_bytes = channel.encode("utf-8")
    else:
        chan_bytes = channel

    if chan_bytes:
        marker = b":stream:"
        idx = chan_bytes.rfind(marker)
        # Event is whatever follows the last ":stream:" marker, if present.
        event_bytes = chan_bytes if idx == -1 else chan_bytes[idx + len(marker) :]
    else:
        event_bytes = b""

    return StreamPacket(
        version=0,
        event=memoryview(event_bytes),
        message=view[first + dlen :],
        stream_id=stream_view,
    )
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def _decode_v0_live_format(
    view: memoryview, channel: bytes | str | None = None
) -> StreamPacket | None:
    """Decode the legacy v0 JSON envelope ``{"event": ..., "message": ...}``.

    Falls back to the flat format when the payload is not such an envelope.
    """
    try:
        envelope = orjson.loads(view)
    except orjson.JSONDecodeError:
        return _decode_v0_flat_format(view, channel)
    if not (
        isinstance(envelope, dict) and "event" in envelope and "message" in envelope
    ):
        return _decode_v0_flat_format(view, channel)

    raw_event = envelope.get("event")
    raw_message = envelope.get("message")

    # Normalize the event field into bytes.
    if raw_event is None:
        event_bytes = b""
    elif isinstance(raw_event, str):
        event_bytes = raw_event.encode()
    elif isinstance(raw_event, (bytes, bytearray, memoryview)):
        event_bytes = bytes(raw_event)
    else:
        event_bytes = orjson.dumps(raw_event)

    # Normalize the message field into a memoryview.
    if isinstance(raw_message, (bytes, bytearray, memoryview)):
        message_view = memoryview(bytes(raw_message))
    elif isinstance(raw_message, str):
        # Try base64 first; fall back to the raw UTF-8 bytes of the string.
        try:
            message_view = memoryview(base64.b64decode(raw_message))
        except Exception:
            message_view = memoryview(raw_message.encode())
    elif raw_message is None:
        message_view = memoryview(b"")
    else:
        message_view = memoryview(orjson.dumps(raw_message))

    return StreamPacket(
        event=event_bytes,
        message=message_view,
        stream_id=None,
        version=0,
    )
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _decode_v0_flat_format(
    view: memoryview, channel: bytes | str | None = None
) -> StreamPacket | None:
    """Decode the legacy v0 flat format: the payload is the raw message and the
    event name is derived from the pub/sub channel.

    Returns None when no channel is available to name the event.
    """
    packet = bytes(view)
    stream_id = None
    if channel is None:
        # Without a channel we cannot recover an event name.
        return None
    if packet.startswith(b"$:"):
        # Resumable prefix: b"$:" + <stream_id> + b"$:" + <payload>.
        # Split on the full "$:" delimiter. The previous split(b":", 2) used a
        # bare ":" which left a trailing "$" on the stream id, and raised
        # ValueError (tuple unpack) on frames with no second delimiter.
        head, sep, rest = packet[2:].partition(b"$:")
        if sep:
            stream_id, packet = head, rest
    channel = channel.encode("utf-8") if isinstance(channel, str) else channel
    # The event is the last ":"-separated segment of the channel name.
    channel = channel.split(b":")[-1]
    return StreamPacket(
        version=0,
        event=memoryview(channel),
        message=memoryview(packet),
        stream_id=stream_id,
    )
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def _find_delim(view: memoryview, start: int, delimiter: bytes) -> int:
|
|
308
|
+
delim_len = len(delimiter)
|
|
309
|
+
end = len(view) - delim_len
|
|
310
|
+
i = start
|
|
311
|
+
while i <= end:
|
|
312
|
+
if view[i : i + delim_len] == delimiter:
|
|
313
|
+
return i
|
|
314
|
+
i += 1
|
|
315
|
+
return -1
|
langgraph_api/utils/uuids.py
CHANGED
|
@@ -1,32 +1,35 @@
|
|
|
1
|
-
|
|
2
|
-
import time
|
|
3
|
-
from uuid import UUID, SafeUUID
|
|
1
|
+
"""UUID utility functions.
|
|
4
2
|
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
This module exports a uuid7 function to generate monotonic, time-ordered UUIDs
|
|
4
|
+
for tracing and similar operations.
|
|
5
|
+
"""
|
|
8
6
|
|
|
7
|
+
from __future__ import annotations
|
|
9
8
|
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
# 42-bit counter with MSB set to 0
|
|
13
|
-
counter = (rand >> 32) & 0x1FF_FFFF_FFFF
|
|
14
|
-
# 32-bit random data
|
|
15
|
-
tail = rand & 0xFFFF_FFFF
|
|
16
|
-
return counter, tail
|
|
9
|
+
import typing
|
|
10
|
+
from uuid import UUID
|
|
17
11
|
|
|
12
|
+
from uuid_utils.compat import uuid7 as _uuid_utils_uuid7
|
|
18
13
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
object.__setattr__(uid, "int", value)
|
|
22
|
-
object.__setattr__(uid, "is_safe", SafeUUID.unknown)
|
|
23
|
-
return uid
|
|
14
|
+
if typing.TYPE_CHECKING:
|
|
15
|
+
from uuid import UUID
|
|
24
16
|
|
|
17
|
+
_NANOS_PER_SECOND: typing.Final = 1_000_000_000
|
|
25
18
|
|
|
26
|
-
|
|
27
|
-
|
|
19
|
+
|
|
20
|
+
def _to_timestamp_and_nanos(nanoseconds: int) -> tuple[int, int]:
|
|
21
|
+
"""Split a nanosecond timestamp into seconds and remaining nanoseconds."""
|
|
22
|
+
seconds, nanos = divmod(nanoseconds, _NANOS_PER_SECOND)
|
|
23
|
+
return seconds, nanos
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def uuid7(nanoseconds: int | None = None) -> UUID:
|
|
27
|
+
"""Generate a UUID from a Unix timestamp in nanoseconds and random bits.
|
|
28
28
|
|
|
29
29
|
UUIDv7 objects feature monotonicity within a millisecond.
|
|
30
|
+
|
|
31
|
+
Args:
|
|
32
|
+
nanoseconds: Optional ns timestamp. If not provided, uses current time.
|
|
30
33
|
"""
|
|
31
34
|
# --- 48 --- -- 4 -- --- 12 --- -- 2 -- --- 30 --- - 32 -
|
|
32
35
|
# unix_ts_ms | version | counter_hi | variant | counter_lo | random
|
|
@@ -41,47 +44,11 @@ def uuid7():
|
|
|
41
44
|
# advanced and the counter is reset to a random 42-bit integer with MSB
|
|
42
45
|
# set to 0.
|
|
43
46
|
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
if _last_timestamp_v7 is None or timestamp_ms > _last_timestamp_v7:
|
|
51
|
-
counter, tail = _uuid7_get_counter_and_tail()
|
|
52
|
-
else:
|
|
53
|
-
if timestamp_ms < _last_timestamp_v7:
|
|
54
|
-
timestamp_ms = _last_timestamp_v7 + 1
|
|
55
|
-
# advance the 42-bit counter
|
|
56
|
-
counter = _last_counter_v7 + 1
|
|
57
|
-
if counter > 0x3FF_FFFF_FFFF:
|
|
58
|
-
# advance the 48-bit timestamp
|
|
59
|
-
timestamp_ms += 1
|
|
60
|
-
counter, tail = _uuid7_get_counter_and_tail()
|
|
61
|
-
else:
|
|
62
|
-
# 32-bit random data
|
|
63
|
-
tail = int.from_bytes(os.urandom(4))
|
|
64
|
-
|
|
65
|
-
unix_ts_ms = timestamp_ms & 0xFFFF_FFFF_FFFF
|
|
66
|
-
counter_msbs = counter >> 30
|
|
67
|
-
# keep 12 counter's MSBs and clear variant bits
|
|
68
|
-
counter_hi = counter_msbs & 0x0FFF
|
|
69
|
-
# keep 30 counter's LSBs and clear version bits
|
|
70
|
-
counter_lo = counter & 0x3FFF_FFFF
|
|
71
|
-
# ensure that the tail is always a 32-bit integer (by construction,
|
|
72
|
-
# it is already the case, but future interfaces may allow the user
|
|
73
|
-
# to specify the random tail)
|
|
74
|
-
tail &= 0xFFFF_FFFF
|
|
47
|
+
# For now, just delegate to the uuid_utils implementation
|
|
48
|
+
if nanoseconds is None:
|
|
49
|
+
return _uuid_utils_uuid7()
|
|
50
|
+
seconds, nanos = _to_timestamp_and_nanos(nanoseconds)
|
|
51
|
+
return _uuid_utils_uuid7(timestamp=seconds, nanos=nanos)
|
|
75
52
|
|
|
76
|
-
int_uuid_7 = unix_ts_ms << 80
|
|
77
|
-
int_uuid_7 |= counter_hi << 64
|
|
78
|
-
int_uuid_7 |= counter_lo << 32
|
|
79
|
-
int_uuid_7 |= tail
|
|
80
|
-
# by construction, the variant and version bits are already cleared
|
|
81
|
-
int_uuid_7 |= _RFC_4122_VERSION_7_FLAGS
|
|
82
|
-
res = _from_int(int_uuid_7)
|
|
83
53
|
|
|
84
|
-
|
|
85
|
-
_last_timestamp_v7 = timestamp_ms
|
|
86
|
-
_last_counter_v7 = counter
|
|
87
|
-
return res
|
|
54
|
+
__all__ = ["uuid7"]
|
langgraph_api/validation.py
CHANGED
|
@@ -8,6 +8,9 @@ with open(pathlib.Path(__file__).parent.parent / "openapi.json") as f:
|
|
|
8
8
|
|
|
9
9
|
openapi = orjson.loads(openapi_str)
|
|
10
10
|
|
|
11
|
+
ConfigValidator = jsonschema_rs.validator_for(
|
|
12
|
+
openapi["components"]["schemas"]["Config"]
|
|
13
|
+
)
|
|
11
14
|
AssistantVersionsSearchRequest = jsonschema_rs.validator_for(
|
|
12
15
|
openapi["components"]["schemas"]["AssistantVersionsSearchRequest"]
|
|
13
16
|
)
|
|
@@ -49,6 +52,9 @@ ThreadCreate = jsonschema_rs.validator_for(
|
|
|
49
52
|
ThreadPatch = jsonschema_rs.validator_for(
|
|
50
53
|
openapi["components"]["schemas"]["ThreadPatch"]
|
|
51
54
|
)
|
|
55
|
+
ThreadPruneRequest = jsonschema_rs.validator_for(
|
|
56
|
+
openapi["components"]["schemas"]["ThreadPruneRequest"]
|
|
57
|
+
)
|
|
52
58
|
ThreadStateUpdate = jsonschema_rs.validator_for(
|
|
53
59
|
{
|
|
54
60
|
**openapi["components"]["schemas"]["ThreadStateUpdate"],
|
|
@@ -121,6 +127,9 @@ RunCreateStateful = jsonschema_rs.validator_for(
|
|
|
121
127
|
)
|
|
122
128
|
RunsCancel = jsonschema_rs.validator_for(openapi["components"]["schemas"]["RunsCancel"])
|
|
123
129
|
CronCreate = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronCreate"])
|
|
130
|
+
ThreadCronCreate = jsonschema_rs.validator_for(
|
|
131
|
+
openapi["components"]["schemas"]["ThreadCronCreate"]
|
|
132
|
+
)
|
|
124
133
|
CronSearch = jsonschema_rs.validator_for(openapi["components"]["schemas"]["CronSearch"])
|
|
125
134
|
CronCountRequest = jsonschema_rs.validator_for(
|
|
126
135
|
openapi["components"]["schemas"]["CronCountRequest"]
|
langgraph_api/webhook.py
CHANGED
|
@@ -1,14 +1,117 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import ipaddress
|
|
3
|
+
import socket
|
|
1
4
|
from datetime import UTC, datetime
|
|
5
|
+
from typing import TYPE_CHECKING
|
|
6
|
+
from urllib.parse import urlparse
|
|
2
7
|
|
|
3
8
|
import structlog
|
|
9
|
+
from starlette.exceptions import HTTPException
|
|
4
10
|
|
|
5
|
-
from langgraph_api.config import HTTP_CONFIG
|
|
6
|
-
from langgraph_api.
|
|
7
|
-
from langgraph_api.
|
|
11
|
+
from langgraph_api.config import HTTP_CONFIG, WEBHOOKS_CONFIG
|
|
12
|
+
from langgraph_api.config.schemas import WebhookUrlPolicy
|
|
13
|
+
from langgraph_api.http import ensure_http_client, get_loopback_client, http_request
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
from langgraph_api.worker import WorkerResult
|
|
8
17
|
|
|
9
18
|
logger = structlog.stdlib.get_logger(__name__)
|
|
10
19
|
|
|
11
20
|
|
|
21
|
+
async def validate_webhook_url_or_raise(url: str) -> None:
    """Validate a user-provided webhook URL against configured policy.

    No-ops when WEBHOOKS_CONFIG is not set (preserves legacy behavior).

    Checks, in order: URL length, loopback (path-relative) routing, https
    requirement, port allowlist, hostname presence, domain allowlist, and
    finally DNS resolution with a private/loopback/link-local/multicast/
    reserved IP block.

    Raises:
        HTTPException: 422 when the URL violates the configured policy.
    """
    cfg = WEBHOOKS_CONFIG
    if not cfg:
        return

    policy = WebhookUrlPolicy(cfg.get("url") or {})
    allowed_domains = policy.get("allowed_domains") or []
    allowed_ports = policy.get("allowed_ports")
    max_url_length = int(policy.get("max_url_length", 4096))
    # TODO: We should flip this in the next minor release
    require_https = bool(policy.get("require_https", False))
    disable_loopback = bool(policy.get("disable_loopback", False))

    if len(url) > max_url_length:
        raise HTTPException(status_code=422, detail="Webhook URL too long")

    # Relative loopback URL (internal route). A bare "/" prefix alone is not
    # enough: "//host/path" is a scheme-relative URL pointing at an external
    # host, so it must fall through to the full validation below instead of
    # being waved through as an internal route.
    if url.startswith("/") and not url.startswith("//"):
        if disable_loopback:
            raise HTTPException(
                status_code=422, detail="Loopback webhooks are disabled"
            )
        # The other checks would fail here, so we can just return
        return

    parsed = urlparse(url)
    if require_https and parsed.scheme.lower() != "https":
        raise HTTPException(status_code=422, detail="Webhook must use https")

    # Port policy: only enforce if configured; omit default enforcement otherwise
    if allowed_ports:
        if parsed.port is not None:
            port = parsed.port
        else:
            # Scheme default when no explicit port is given.
            port = 443 if parsed.scheme == "https" else 80
        if port not in allowed_ports:
            raise HTTPException(
                status_code=422, detail=f"Webhook port {port} not allowed"
            )

    host = parsed.hostname or ""
    if not host:
        raise HTTPException(
            status_code=422, detail=f"Invalid webhook hostname '{host}'"
        )
    # Lowercase once; reused for every allowlist comparison below.
    host_lower = host.lower()

    # Domain allowlist
    if allowed_domains:
        host_allowed = False
        for pattern in allowed_domains:
            pattern = pattern.strip().lower()
            if pattern.startswith("*."):
                # Wildcard matches subdomains only, not the bare domain itself.
                if host_lower.endswith("." + pattern[2:]):
                    host_allowed = True
                    break
            elif host_lower == pattern:
                host_allowed = True
                break
        if not host_allowed:
            raise HTTPException(status_code=422, detail="Webhook domain not allowed")

    # Note we don't do default SSRF protections mainly because it would require a minor bump since it could break valid use cases.
    try:
        # getaddrinfo is blocking; run it off the event loop.
        infos = await asyncio.to_thread(socket.getaddrinfo, host, None)
    except Exception as e:
        raise HTTPException(
            status_code=422, detail="Failed to resolve webhook host"
        ) from e

    for info in infos:
        ip_str = info[4][0]
        try:
            ip = ipaddress.ip_address(ip_str)
        except ValueError:
            # Skip non-IP entries just in case
            continue
        if (
            ip.is_private
            or ip.is_loopback
            or ip.is_link_local
            or ip.is_multicast
            or ip.is_reserved
        ):
            raise HTTPException(
                status_code=422, detail="Webhook host resolves to a disallowed IP"
            )
|
|
113
|
+
|
|
114
|
+
|
|
12
115
|
async def call_webhook(result: "WorkerResult") -> None:
|
|
13
116
|
if HTTP_CONFIG and HTTP_CONFIG.get("disable_webhooks"):
|
|
14
117
|
logger.info(
|
|
@@ -30,16 +133,27 @@ async def call_webhook(result: "WorkerResult") -> None:
|
|
|
30
133
|
webhook = result.get("webhook")
|
|
31
134
|
if webhook:
|
|
32
135
|
try:
|
|
136
|
+
# We've already validated on ingestion, but you could technically have an issue if you re-deployed with a different environment
|
|
137
|
+
await validate_webhook_url_or_raise(webhook)
|
|
138
|
+
# Note: header templates should have already been evaluated against the env at load time.
|
|
139
|
+
headers = WEBHOOKS_CONFIG.get("headers") if WEBHOOKS_CONFIG else None
|
|
140
|
+
|
|
33
141
|
if webhook.startswith("/"):
|
|
34
142
|
# Call into this own app
|
|
35
143
|
webhook_client = get_loopback_client()
|
|
36
144
|
else:
|
|
37
|
-
webhook_client =
|
|
38
|
-
await http_request(
|
|
145
|
+
webhook_client = await ensure_http_client()
|
|
146
|
+
await http_request(
|
|
147
|
+
"POST",
|
|
148
|
+
webhook,
|
|
149
|
+
json=payload,
|
|
150
|
+
headers=headers,
|
|
151
|
+
client=webhook_client,
|
|
152
|
+
)
|
|
39
153
|
await logger.ainfo(
|
|
40
154
|
"Background worker called webhook",
|
|
41
155
|
webhook=result["webhook"],
|
|
42
|
-
run_id=result["run"]["run_id"],
|
|
156
|
+
run_id=str(result["run"]["run_id"]),
|
|
43
157
|
)
|
|
44
158
|
except Exception as exc:
|
|
45
159
|
logger.exception(
|