langgraph-api 0.2.109__py3-none-any.whl → 0.2.111__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic. Click here for more details.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/runs.py +9 -27
- langgraph_api/js/client.mts +4 -4
- langgraph_api/models/run.py +3 -0
- langgraph_api/stream.py +15 -2
- {langgraph_api-0.2.109.dist-info → langgraph_api-0.2.111.dist-info}/METADATA +2 -2
- {langgraph_api-0.2.109.dist-info → langgraph_api-0.2.111.dist-info}/RECORD +10 -10
- {langgraph_api-0.2.109.dist-info → langgraph_api-0.2.111.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.109.dist-info → langgraph_api-0.2.111.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.109.dist-info → langgraph_api-0.2.111.dist-info}/licenses/LICENSE +0 -0
langgraph_api/__init__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "0.2.109"
|
|
1
|
+
__version__ = "0.2.111"
|
langgraph_api/api/runs.py
CHANGED
|
@@ -116,7 +116,8 @@ async def stream_run(
|
|
|
116
116
|
run["run_id"],
|
|
117
117
|
thread_id=thread_id,
|
|
118
118
|
cancel_on_disconnect=on_disconnect == "cancel",
|
|
119
|
-
|
|
119
|
+
stream_channel=await sub,
|
|
120
|
+
stream_mode=payload.get("stream_mode", []),
|
|
120
121
|
last_event_id=None,
|
|
121
122
|
),
|
|
122
123
|
headers={
|
|
@@ -157,7 +158,8 @@ async def stream_run_stateless(
|
|
|
157
158
|
thread_id=run["thread_id"],
|
|
158
159
|
ignore_404=True,
|
|
159
160
|
cancel_on_disconnect=on_disconnect == "cancel",
|
|
160
|
-
|
|
161
|
+
stream_channel=await sub,
|
|
162
|
+
stream_mode=payload.get("stream_mode", []),
|
|
161
163
|
last_event_id=None,
|
|
162
164
|
),
|
|
163
165
|
headers={
|
|
@@ -172,17 +174,6 @@ async def wait_run(request: ApiRequest):
|
|
|
172
174
|
"""Create a run, wait for the output."""
|
|
173
175
|
thread_id = request.path_params["thread_id"]
|
|
174
176
|
payload = await request.json(RunCreateStateful)
|
|
175
|
-
|
|
176
|
-
# Ensure stream_mode always includes "values" and "updates" while respecting other modes
|
|
177
|
-
user_stream_mode = payload.get("stream_mode", ["values"])
|
|
178
|
-
if isinstance(user_stream_mode, str):
|
|
179
|
-
user_stream_mode = [user_stream_mode]
|
|
180
|
-
|
|
181
|
-
# Always include "values" and "updates" if not already present
|
|
182
|
-
required_modes = {"values", "updates"}
|
|
183
|
-
final_stream_mode = list(set(user_stream_mode) | required_modes)
|
|
184
|
-
payload["stream_mode"] = final_stream_mode
|
|
185
|
-
|
|
186
177
|
on_disconnect = payload.get("on_disconnect", "continue")
|
|
187
178
|
run_id = uuid6()
|
|
188
179
|
sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
|
|
@@ -211,7 +202,8 @@ async def wait_run(request: ApiRequest):
|
|
|
211
202
|
Runs.Stream.join(
|
|
212
203
|
run["run_id"],
|
|
213
204
|
thread_id=run["thread_id"],
|
|
214
|
-
|
|
205
|
+
stream_channel=await sub,
|
|
206
|
+
stream_mode=["updates", "values", "error"],
|
|
215
207
|
cancel_on_disconnect=on_disconnect == "cancel",
|
|
216
208
|
)
|
|
217
209
|
) as stream:
|
|
@@ -266,17 +258,6 @@ async def wait_run(request: ApiRequest):
|
|
|
266
258
|
async def wait_run_stateless(request: ApiRequest):
|
|
267
259
|
"""Create a stateless run, wait for the output."""
|
|
268
260
|
payload = await request.json(RunCreateStateless)
|
|
269
|
-
|
|
270
|
-
# Ensure stream_mode always includes "values" and "updates" while respecting other modes
|
|
271
|
-
user_stream_mode = payload.get("stream_mode", ["values"])
|
|
272
|
-
if isinstance(user_stream_mode, str):
|
|
273
|
-
user_stream_mode = [user_stream_mode]
|
|
274
|
-
|
|
275
|
-
# Always include "values" and "updates" if not already present
|
|
276
|
-
required_modes = {"values", "updates"}
|
|
277
|
-
final_stream_mode = list(set(user_stream_mode) | required_modes)
|
|
278
|
-
payload["stream_mode"] = final_stream_mode
|
|
279
|
-
|
|
280
261
|
on_disconnect = payload.get("on_disconnect", "continue")
|
|
281
262
|
run_id = uuid6()
|
|
282
263
|
sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
|
|
@@ -305,7 +286,8 @@ async def wait_run_stateless(request: ApiRequest):
|
|
|
305
286
|
Runs.Stream.join(
|
|
306
287
|
run["run_id"],
|
|
307
288
|
thread_id=run["thread_id"],
|
|
308
|
-
|
|
289
|
+
stream_channel=await sub,
|
|
290
|
+
stream_mode=["updates", "values", "error"],
|
|
309
291
|
ignore_404=True,
|
|
310
292
|
cancel_on_disconnect=on_disconnect == "cancel",
|
|
311
293
|
)
|
|
@@ -424,7 +406,7 @@ async def join_run_stream(request: ApiRequest):
|
|
|
424
406
|
cancel_on_disconnect = cancel_on_disconnect_str.lower() in {"true", "yes", "1"}
|
|
425
407
|
validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
|
|
426
408
|
validate_uuid(run_id, "Invalid run ID: must be a UUID")
|
|
427
|
-
stream_mode = request.query_params.get("stream_mode") or
|
|
409
|
+
stream_mode = request.query_params.get("stream_mode") or []
|
|
428
410
|
last_event_id = request.headers.get("last-event-id") or None
|
|
429
411
|
return EventSourceResponse(
|
|
430
412
|
Runs.Stream.join(
|
langgraph_api/js/client.mts
CHANGED
|
@@ -344,6 +344,7 @@ const handleStream = <T extends z.ZodType<any>>(
|
|
|
344
344
|
};
|
|
345
345
|
|
|
346
346
|
try {
|
|
347
|
+
timer = setTimeout(sendHeartbeat, HEARTBEAT_MS);
|
|
347
348
|
for await (const data of handler({ graph_id: graphId, ...body })) {
|
|
348
349
|
await sendSSE(name, data);
|
|
349
350
|
}
|
|
@@ -1094,14 +1095,13 @@ async function main() {
|
|
|
1094
1095
|
}
|
|
1095
1096
|
|
|
1096
1097
|
app.get("/ok", (c) => c.json({ ok: true }));
|
|
1097
|
-
|
|
1098
|
+
|
|
1098
1099
|
app.get("/debug/heapdump", async (c) => {
|
|
1099
1100
|
try {
|
|
1100
|
-
const target =
|
|
1101
|
-
`/tmp/heapdump-${Date.now()}.heapsnapshot`;
|
|
1101
|
+
const target = `/tmp/heapdump-${Date.now()}.heapsnapshot`;
|
|
1102
1102
|
await fs.mkdir(path.dirname(target), { recursive: true });
|
|
1103
1103
|
const written = writeHeapSnapshot(target);
|
|
1104
|
-
return c.json({ ok: true, written });
|
|
1104
|
+
return c.json({ ok: true, written }); // 200
|
|
1105
1105
|
} catch (error) {
|
|
1106
1106
|
if (error instanceof HTTPException) {
|
|
1107
1107
|
return c.json(serializeError(error), error.status);
|
langgraph_api/models/run.py
CHANGED
|
@@ -86,6 +86,9 @@ class RunCreateDict(TypedDict):
|
|
|
86
86
|
- "updates": Stream the state updates returned by each node.
|
|
87
87
|
- "events": Stream all events produced by sub-runs (eg. nodes, LLMs, etc.).
|
|
88
88
|
- "custom": Stream custom events produced by your nodes.
|
|
89
|
+
|
|
90
|
+
Note: __interrupt__ events are always included in the updates stream, even when "updates"
|
|
91
|
+
is not explicitly requested, to ensure interrupt events are always visible.
|
|
89
92
|
"""
|
|
90
93
|
stream_subgraphs: bool | None
|
|
91
94
|
"""Stream output from subgraphs. By default, streams only the top graph."""
|
langgraph_api/stream.py
CHANGED
|
@@ -118,6 +118,9 @@ async def astream_state(
|
|
|
118
118
|
if "messages-tuple" in stream_modes_set and not isinstance(graph, BaseRemotePregel):
|
|
119
119
|
stream_modes_set.remove("messages-tuple")
|
|
120
120
|
stream_modes_set.add("messages")
|
|
121
|
+
if "updates" not in stream_modes_set:
|
|
122
|
+
stream_modes_set.add("updates")
|
|
123
|
+
only_interrupt_updates = True
|
|
121
124
|
# attach attempt metadata
|
|
122
125
|
config["metadata"]["run_attempt"] = attempt
|
|
123
126
|
# attach langgraph metadata
|
|
@@ -226,7 +229,12 @@ async def astream_state(
|
|
|
226
229
|
),
|
|
227
230
|
[message_chunk_to_message(messages[msg.id])],
|
|
228
231
|
)
|
|
229
|
-
elif mode in stream_mode
|
|
232
|
+
elif mode in stream_mode or (
|
|
233
|
+
mode == "updates"
|
|
234
|
+
and isinstance(chunk, dict)
|
|
235
|
+
and "__interrupt__" in chunk
|
|
236
|
+
and only_interrupt_updates
|
|
237
|
+
):
|
|
230
238
|
if subgraphs and ns:
|
|
231
239
|
yield f"{mode}|{'|'.join(ns)}", chunk
|
|
232
240
|
else:
|
|
@@ -292,7 +300,12 @@ async def astream_state(
|
|
|
292
300
|
),
|
|
293
301
|
[message_chunk_to_message(messages[msg.id])],
|
|
294
302
|
)
|
|
295
|
-
elif mode in stream_mode
|
|
303
|
+
elif mode in stream_mode or (
|
|
304
|
+
mode == "updates"
|
|
305
|
+
and isinstance(chunk, dict)
|
|
306
|
+
and "__interrupt__" in chunk
|
|
307
|
+
and only_interrupt_updates
|
|
308
|
+
):
|
|
296
309
|
if subgraphs and ns:
|
|
297
310
|
yield f"{mode}|{'|'.join(ns)}", chunk
|
|
298
311
|
else:
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: langgraph-api
|
|
3
|
-
Version: 0.2.109
|
|
3
|
+
Version: 0.2.111
|
|
4
4
|
Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
|
|
5
5
|
License: Elastic-2.0
|
|
6
6
|
License-File: LICENSE
|
|
@@ -11,7 +11,7 @@ Requires-Dist: httpx>=0.25.0
|
|
|
11
11
|
Requires-Dist: jsonschema-rs<0.30,>=0.20.0
|
|
12
12
|
Requires-Dist: langchain-core>=0.3.64
|
|
13
13
|
Requires-Dist: langgraph-checkpoint>=2.0.23
|
|
14
|
-
Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.
|
|
14
|
+
Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.5
|
|
15
15
|
Requires-Dist: langgraph-sdk>=0.2.0
|
|
16
16
|
Requires-Dist: langgraph>=0.4.0
|
|
17
17
|
Requires-Dist: langsmith>=0.3.45
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
langgraph_api/__init__.py,sha256=
|
|
1
|
+
langgraph_api/__init__.py,sha256=1vHLiVVqXa26VwB4MICUFKzbta25Fuuf7oCnCAdaCCk,24
|
|
2
2
|
langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
|
|
3
3
|
langgraph_api/asyncio.py,sha256=Wv4Rwm-a-Cf6JpfgJmVuVlXQ7SlwrjbTn0eq1ux8I2Q,9652
|
|
4
4
|
langgraph_api/cli.py,sha256=xQojITwmmKSJw48Lr2regcnRPRq2FJqWlPpeyr5TgbU,16158
|
|
@@ -21,7 +21,7 @@ langgraph_api/server.py,sha256=Z_VL-kIphybTRDWBIqHMfRhgCmAFyTRqAGlgnHQF0Zg,6973
|
|
|
21
21
|
langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
|
|
22
22
|
langgraph_api/state.py,sha256=P2mCo-0bqPu2v9FSFGJtUCjPPNvv6wLUKQh8SdxAtc8,4387
|
|
23
23
|
langgraph_api/store.py,sha256=srRI0fQXNFo_RSUs4apucr4BEp_KrIseJksZXs32MlQ,4635
|
|
24
|
-
langgraph_api/stream.py,sha256=
|
|
24
|
+
langgraph_api/stream.py,sha256=rV7VpTBjqMZcmYm-EJ7DtdSQwIylmgF4toe8IPIYl9Y,14401
|
|
25
25
|
langgraph_api/thread_ttl.py,sha256=-Ox8NFHqUH3wGNdEKMIfAXUubY5WGifIgCaJ7npqLgw,1762
|
|
26
26
|
langgraph_api/traceblock.py,sha256=2aWS6TKGTcQ0G1fOtnjVrzkpeGvDsR0spDbfddEqgRU,594
|
|
27
27
|
langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -33,7 +33,7 @@ langgraph_api/api/assistants.py,sha256=e6da5aYMVAxj2rZG9RxZbBKFM0bP_f8Xbx0P5UhK8
|
|
|
33
33
|
langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
|
|
34
34
|
langgraph_api/api/meta.py,sha256=fmc7btbtl5KVlU_vQ3Bj4J861IjlqmjBKNtnxSV-S-Q,4198
|
|
35
35
|
langgraph_api/api/openapi.py,sha256=KToI2glOEsvrhDpwdScdBnL9xoLOqkTxx5zKq2pMuKQ,11957
|
|
36
|
-
langgraph_api/api/runs.py,sha256=
|
|
36
|
+
langgraph_api/api/runs.py,sha256=whjpK5Kn3nQy9g9qQK94F_rTlX4oG65WODUyj5TSOwM,20877
|
|
37
37
|
langgraph_api/api/store.py,sha256=TSeMiuMfrifmEnEbL0aObC2DPeseLlmZvAMaMzPgG3Y,5535
|
|
38
38
|
langgraph_api/api/threads.py,sha256=nQMlGnsrFD1F4S-ID_q0HZrF2GZ0Pm7aV04Sh1eYgds,9588
|
|
39
39
|
langgraph_api/api/ui.py,sha256=17QrRy2XVzP7x_0RdRw7pmSv-n1lmnb54byHCGGeNhM,2490
|
|
@@ -51,7 +51,7 @@ langgraph_api/js/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
|
|
|
51
51
|
langgraph_api/js/base.py,sha256=GORqRDbGAOQX2ygT6dMcqBDCA9tdAp8EpG4bfqUPMg4,1198
|
|
52
52
|
langgraph_api/js/build.mts,sha256=bRQo11cglDFXlLN7Y48CQPTSMLenp7MqIWuP1DkSIo0,3139
|
|
53
53
|
langgraph_api/js/client.http.mts,sha256=AGA-p8J85IcNh2oXZjDxHQ4PnQdJmt-LPcpZp6j0Cws,4687
|
|
54
|
-
langgraph_api/js/client.mts,sha256=
|
|
54
|
+
langgraph_api/js/client.mts,sha256=CEz5oOtI_mwqsvsC33S_2TQbxD6mW-AKl6shsRI5G18,32000
|
|
55
55
|
langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
|
|
56
56
|
langgraph_api/js/global.d.ts,sha256=j4GhgtQSZ5_cHzjSPcHgMJ8tfBThxrH-pUOrrJGteOU,196
|
|
57
57
|
langgraph_api/js/package.json,sha256=BpNAO88mbE-Gv4WzQfj1TLktCWGqm6XBqI892ObuOUw,1333
|
|
@@ -74,7 +74,7 @@ langgraph_api/middleware/http_logger.py,sha256=L7ZhypmQjlHBfm93GqZaqUXzu0r-ieaoO
|
|
|
74
74
|
langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV7_aLBPw7_EgeqI,2111
|
|
75
75
|
langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIiuoHeZpR9g,1242
|
|
76
76
|
langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
77
|
-
langgraph_api/models/run.py,sha256=
|
|
77
|
+
langgraph_api/models/run.py,sha256=RX2LaE1kmTruX8o8HgvqeEt5YpPGHILWByChjMZVJ58,15035
|
|
78
78
|
langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
|
|
79
79
|
langgraph_api/utils/__init__.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
|
|
80
80
|
langgraph_api/utils/cache.py,sha256=SrtIWYibbrNeZzLXLUGBFhJPkMVNQnVxR5giiYGHEfI,1810
|
|
@@ -94,8 +94,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
|
|
|
94
94
|
LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
|
|
95
95
|
logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
|
|
96
96
|
openapi.json,sha256=jyQZW5U4V15zWciiIvaDPasYZd3k1iMiQ2vkPxf3zb4,145614
|
|
97
|
-
langgraph_api-0.2.
|
|
98
|
-
langgraph_api-0.2.
|
|
99
|
-
langgraph_api-0.2.
|
|
100
|
-
langgraph_api-0.2.
|
|
101
|
-
langgraph_api-0.2.
|
|
97
|
+
langgraph_api-0.2.111.dist-info/METADATA,sha256=udH8QNksRyKI6_9ZWKNEMdzr5WM8ftswNyowGia9DrM,3890
|
|
98
|
+
langgraph_api-0.2.111.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
99
|
+
langgraph_api-0.2.111.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
|
|
100
|
+
langgraph_api-0.2.111.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
|
|
101
|
+
langgraph_api-0.2.111.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|