langgraph-api 0.2.99__py3-none-any.whl → 0.2.102__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api has been flagged as potentially problematic; consult the registry's advisory page for details.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.99"
1
+ __version__ = "0.2.102"
langgraph_api/api/runs.py CHANGED
@@ -172,6 +172,17 @@ async def wait_run(request: ApiRequest):
172
172
  """Create a run, wait for the output."""
173
173
  thread_id = request.path_params["thread_id"]
174
174
  payload = await request.json(RunCreateStateful)
175
+
176
+ # Ensure stream_mode always includes "values" and "updates" while respecting other modes
177
+ user_stream_mode = payload.get("stream_mode", ["values"])
178
+ if isinstance(user_stream_mode, str):
179
+ user_stream_mode = [user_stream_mode]
180
+
181
+ # Always include "values" and "updates" if not already present
182
+ required_modes = {"values", "updates"}
183
+ final_stream_mode = list(set(user_stream_mode) | required_modes)
184
+ payload["stream_mode"] = final_stream_mode
185
+
175
186
  on_disconnect = payload.get("on_disconnect", "continue")
176
187
  run_id = uuid6()
177
188
  sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
@@ -207,6 +218,9 @@ async def wait_run(request: ApiRequest):
207
218
  async for mode, chunk, _ in stream:
208
219
  if mode == b"values":
209
220
  vchunk = chunk
221
+ elif mode == b"updates" and b"__interrupt__" in chunk:
222
+ # Include the interrupt message in the values
223
+ vchunk = chunk
210
224
  elif mode == b"error":
211
225
  vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
212
226
  if vchunk is not None:
@@ -252,6 +266,17 @@ async def wait_run(request: ApiRequest):
252
266
  async def wait_run_stateless(request: ApiRequest):
253
267
  """Create a stateless run, wait for the output."""
254
268
  payload = await request.json(RunCreateStateless)
269
+
270
+ # Ensure stream_mode always includes "values" and "updates" while respecting other modes
271
+ user_stream_mode = payload.get("stream_mode", ["values"])
272
+ if isinstance(user_stream_mode, str):
273
+ user_stream_mode = [user_stream_mode]
274
+
275
+ # Always include "values" and "updates" if not already present
276
+ required_modes = {"values", "updates"}
277
+ final_stream_mode = list(set(user_stream_mode) | required_modes)
278
+ payload["stream_mode"] = final_stream_mode
279
+
255
280
  on_disconnect = payload.get("on_disconnect", "continue")
256
281
  run_id = uuid6()
257
282
  sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
@@ -288,6 +313,9 @@ async def wait_run_stateless(request: ApiRequest):
288
313
  async for mode, chunk, _ in stream:
289
314
  if mode == b"values":
290
315
  vchunk = chunk
316
+ elif mode == b"updates" and b"__interrupt__" in chunk:
317
+ # Include the interrupt message in the values
318
+ vchunk = chunk
291
319
  elif mode == b"error":
292
320
  vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
293
321
  if vchunk is not None:
@@ -9,6 +9,7 @@ import { HTTPException } from "hono/http-exception";
9
9
  import { fetch } from "undici";
10
10
  import pRetry from "p-retry";
11
11
  import {
12
+ getConfig,
12
13
  BaseStore,
13
14
  Item,
14
15
  Operation,
@@ -25,7 +26,6 @@ import {
25
26
  type ChannelVersions,
26
27
  type ChannelProtocol,
27
28
  } from "@langchain/langgraph-checkpoint";
28
- import { createHash } from "node:crypto";
29
29
  import * as fs from "node:fs/promises";
30
30
  import * as path from "node:path";
31
31
  import { serialiseAsDict, serializeError } from "./src/utils/serde.mjs";
@@ -58,9 +58,20 @@ import {
58
58
  import { filterValidExportPath } from "./src/utils/files.mts";
59
59
  import { patchFetch } from "./traceblock.mts";
60
60
 
61
+ const injectConfigFormatter = format((info) => {
62
+ const config = getConfig();
63
+ if (config == null) return info;
64
+
65
+ const node = config.metadata?.["langgraph_node"];
66
+ if (node != null) info.langgraph_node = node;
67
+
68
+ return info;
69
+ });
70
+
61
71
  const logger = createLogger({
62
72
  level: "debug",
63
73
  format: format.combine(
74
+ injectConfigFormatter(),
64
75
  format.errors({ stack: true }),
65
76
  format.timestamp(),
66
77
  format.json(),
@@ -90,6 +101,12 @@ const logger = createLogger({
90
101
  ],
91
102
  });
92
103
 
104
+ const GLOBAL_LOGGER = Symbol.for("langgraph.api.sdk-logger");
105
+ type GLOBAL_LOGGER = typeof GLOBAL_LOGGER;
106
+
107
+ const maybeGlobal = globalThis as unknown as { [GLOBAL_LOGGER]: typeof logger };
108
+ maybeGlobal[GLOBAL_LOGGER] = logger;
109
+
93
110
  let GRAPH_SCHEMA: Record<string, Record<string, GraphSchema> | false> = {};
94
111
  let GRAPH_OPTIONS: {
95
112
  checkpointer?: BaseCheckpointSaver<string | number>;
langgraph_api/metadata.py CHANGED
@@ -61,6 +61,11 @@ BEACON_ENDPOINT = "https://api.smith.langchain.com/v1/metadata/submit"
61
61
  # LangChain auth endpoint for API key submissions
62
62
  LANGCHAIN_METADATA_ENDPOINT = None
63
63
  if LANGSMITH_AUTH_ENDPOINT:
64
+ if "/api/v1" in LANGSMITH_AUTH_ENDPOINT:
65
+ # If the endpoint already has /api/v1 (for self-hosted control plane deployments), we assume it's the correct format
66
+ LANGCHAIN_METADATA_ENDPOINT = (
67
+ LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/metadata/submit"
68
+ )
64
69
  LANGCHAIN_METADATA_ENDPOINT = (
65
70
  LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/v1/metadata/submit"
66
71
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: langgraph-api
3
- Version: 0.2.99
3
+ Version: 0.2.102
4
4
  Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
5
5
  License: Elastic-2.0
6
6
  License-File: LICENSE
@@ -1,4 +1,4 @@
1
- langgraph_api/__init__.py,sha256=unbm6aMJnlE7ig-L8G69Amu7KIiYpnO-YG4-H6hA--c,23
1
+ langgraph_api/__init__.py,sha256=UgW99xF_ewL10OON3ND4NU5ExK_oQzyPWtjLPFIkYF4,24
2
2
  langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
3
3
  langgraph_api/asyncio.py,sha256=Wv4Rwm-a-Cf6JpfgJmVuVlXQ7SlwrjbTn0eq1ux8I2Q,9652
4
4
  langgraph_api/cli.py,sha256=-R0fvxg4KNxTkSe7xvDZruF24UMhStJYjpAYlUx3PBk,16018
@@ -11,7 +11,7 @@ langgraph_api/graph.py,sha256=Q9tRf1WBaxFBOgs_orlMAlBVJM0-cpZVduknU_fbzRM,24235
11
11
  langgraph_api/http.py,sha256=L0leP5fH4NIiFgJd1YPMnTRWqrUUYq_4m5j558UwM5E,5612
12
12
  langgraph_api/http_metrics.py,sha256=VgM45yU1FkXuI9CIOE_astxAAu2G-OJ42BRbkcos_CQ,5555
13
13
  langgraph_api/logging.py,sha256=4K1Fnq8rrGC9CqJubZtP34Y9P2zh7VXf_41q7bH3OXU,4849
14
- langgraph_api/metadata.py,sha256=lfovneEMLA5vTNa61weMkQkiZCtwo-qdwFwqNSj5qVs,6638
14
+ langgraph_api/metadata.py,sha256=rAfbsvbzHfCi4ErmWDAIsxhJi46xNbaJopFR6YygxMI,6927
15
15
  langgraph_api/patch.py,sha256=Dgs0PXHytekX4SUL6KsjjN0hHcOtGLvv1GRGbh6PswU,1408
16
16
  langgraph_api/queue_entrypoint.py,sha256=hC8j-A4cUxibusiiPJBlK0mkmChNZxNcXn5GVwL0yic,4889
17
17
  langgraph_api/route.py,sha256=4VBkJMeusfiZtLzyUaKm1HwLHTq0g15y2CRiRhM6xyA,4773
@@ -33,7 +33,7 @@ langgraph_api/api/assistants.py,sha256=w7nXjEknDVHSuP228S8ZLh4bG0nRGnSwVP9pECQOK
33
33
  langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
34
34
  langgraph_api/api/meta.py,sha256=fmc7btbtl5KVlU_vQ3Bj4J861IjlqmjBKNtnxSV-S-Q,4198
35
35
  langgraph_api/api/openapi.py,sha256=KToI2glOEsvrhDpwdScdBnL9xoLOqkTxx5zKq2pMuKQ,11957
36
- langgraph_api/api/runs.py,sha256=WlZ_gzS7S4YOBMzKOZSlJm-2Qullj61u47-3lFDK8U0,20291
36
+ langgraph_api/api/runs.py,sha256=DxmGkRnZsQgr5bmflguDKXEvY3J9Q-bt7YwbuSFAMxU,21579
37
37
  langgraph_api/api/store.py,sha256=TSeMiuMfrifmEnEbL0aObC2DPeseLlmZvAMaMzPgG3Y,5535
38
38
  langgraph_api/api/threads.py,sha256=nQMlGnsrFD1F4S-ID_q0HZrF2GZ0Pm7aV04Sh1eYgds,9588
39
39
  langgraph_api/api/ui.py,sha256=17QrRy2XVzP7x_0RdRw7pmSv-n1lmnb54byHCGGeNhM,2490
@@ -51,7 +51,7 @@ langgraph_api/js/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
51
51
  langgraph_api/js/base.py,sha256=gjY6K8avI03OrI-Hy6a311fQ_EG5r_x8hUYlc7uqxdE,534
52
52
  langgraph_api/js/build.mts,sha256=bRQo11cglDFXlLN7Y48CQPTSMLenp7MqIWuP1DkSIo0,3139
53
53
  langgraph_api/js/client.http.mts,sha256=AGA-p8J85IcNh2oXZjDxHQ4PnQdJmt-LPcpZp6j0Cws,4687
54
- langgraph_api/js/client.mts,sha256=5PjWSfNHGRNbrlFH4zlFM0lfGlvK0hBARa6qcl3gC5o,30958
54
+ langgraph_api/js/client.mts,sha256=vZpJEuF3FkkJMZGs57w00er2Hen42B7lq7hyUn3e77s,31419
55
55
  langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
56
56
  langgraph_api/js/global.d.ts,sha256=j4GhgtQSZ5_cHzjSPcHgMJ8tfBThxrH-pUOrrJGteOU,196
57
57
  langgraph_api/js/package.json,sha256=BpNAO88mbE-Gv4WzQfj1TLktCWGqm6XBqI892ObuOUw,1333
@@ -94,8 +94,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
94
94
  LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
95
95
  logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
96
96
  openapi.json,sha256=p5tn_cNRiFA0HN3L6JfC9Nm16Hgv-BxvAQcJymKhVWI,143296
97
- langgraph_api-0.2.99.dist-info/METADATA,sha256=lHUXblBD756fXt6yZ7OibMl7rOBGRIwYl4i7nxxLBeQ,3891
98
- langgraph_api-0.2.99.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
99
- langgraph_api-0.2.99.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
100
- langgraph_api-0.2.99.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
101
- langgraph_api-0.2.99.dist-info/RECORD,,
97
+ langgraph_api-0.2.102.dist-info/METADATA,sha256=ywpk3aKcnY9zMTOKZVywEJlmqtOdnKO4YVCoV7s9034,3892
98
+ langgraph_api-0.2.102.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
99
+ langgraph_api-0.2.102.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
100
+ langgraph_api-0.2.102.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
101
+ langgraph_api-0.2.102.dist-info/RECORD,,