langgraph-api 0.2.126__py3-none-any.whl → 0.2.129__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api has been flagged as possibly problematic.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.2.126"
+__version__ = "0.2.129"
langgraph_api/models/run.py CHANGED
@@ -275,6 +275,20 @@ async def create_valid_run(
     config = payload.get("config") or {}
     context = payload.get("context") or {}
     configurable = config.setdefault("configurable", {})
+
+    if configurable and context:
+        raise HTTPException(
+            status_code=400,
+            detail="Cannot specify both configurable and context. Prefer setting context alone. Context was introduced in LangGraph 0.6.0 and is the long term planned replacement for configurable.",
+        )
+
+    # Keep config and context in sync for user provided params
+    if context:
+        configurable = context.copy()
+        config["configurable"] = configurable
+    else:
+        context = configurable.copy()
+
     if checkpoint_id:
         configurable["checkpoint_id"] = str(checkpoint_id)
     if checkpoint := payload.get("checkpoint"):
@@ -300,6 +314,7 @@ async def create_valid_run(
     configurable["__after_seconds__"] = after_seconds
     put_time_start = time.time()
     if_not_exists = payload.get("if_not_exists", "reject")
+
     run_coro = Runs.put(
         conn,
         assistant_id,
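In short, create_valid_run now rejects requests that set both `context` and `config["configurable"]`, and mirrors whichever one is present into the other. A minimal standalone sketch of that precedence, with hypothetical payloads; a plain ValueError stands in for the server's HTTP 400 response:

```python
# Hypothetical sketch of the precedence enforced in create_valid_run.
def resolve_context_and_configurable(payload: dict) -> tuple[dict, dict]:
    config = payload.get("config") or {}
    context = payload.get("context") or {}
    configurable = config.setdefault("configurable", {})

    if configurable and context:
        # The server responds with HTTP 400 in this case.
        raise ValueError("Cannot specify both configurable and context.")

    if context:
        # context wins and is mirrored into config["configurable"]
        configurable = context.copy()
        config["configurable"] = configurable
    else:
        # legacy callers keep working: configurable is copied back into context
        context = configurable.copy()
    return config, context


# Example payloads (hypothetical):
config, context = resolve_context_and_configurable({"context": {"user_id": "u-1"}})
assert config["configurable"] == {"user_id": "u-1"}

try:
    resolve_context_and_configurable(
        {"config": {"configurable": {"user_id": "u-1"}}, "context": {"user_id": "u-2"}}
    )
except ValueError:
    pass  # rejected, mirroring the 400 response
```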
langgraph_api/server.py CHANGED
@@ -71,7 +71,11 @@ middleware.extend(
         allow_credentials=True,
         allow_methods=["*"],
         allow_headers=["*"],
-        expose_headers=["x-pagination-total", "x-pagination-next"],
+        expose_headers=[
+            "x-pagination-total",
+            "x-pagination-next",
+            "content-location",
+        ],
     )
     if config.CORS_CONFIG is None
     else Middleware(
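The practical effect is that browser clients can now read the Content-Location response header across origins. A sketch of the equivalent default wiring with Starlette's CORSMiddleware; only the expose_headers list comes from the diff, the allow_origins value is an assumption, and the server's user-provided CORS_CONFIG branch is omitted:

```python
# Sketch of the default CORS setup after this change, using Starlette's stock middleware.
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware

middleware = [
    Middleware(
        CORSMiddleware,
        allow_origins=["*"],  # assumption for the sketch; not shown in the diff
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
        # "content-location" is newly exposed so browsers can read it cross-origin
        expose_headers=["x-pagination-total", "x-pagination-next", "content-location"],
    )
]

app = Starlette(routes=[], middleware=middleware)
```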
langgraph_api/stream.py CHANGED
@@ -40,6 +40,42 @@ from langgraph_runtime.ops import Runs
 
 logger = structlog.stdlib.get_logger(__name__)
 
+
+async def _filter_context_by_schema(
+    context: dict[str, Any], context_schema: dict | None
+) -> dict[str, Any]:
+    """Filter context parameters based on the context schema.
+
+    Args:
+        context: The context dictionary to filter
+        context_schema: The JSON schema for valid context parameters
+
+    Returns:
+        Filtered context dictionary containing only valid parameters
+    """
+    if not context_schema or not context:
+        return context
+
+    # Extract valid properties from the schema
+    properties = context_schema.get("properties", {})
+    if not properties:
+        return context
+
+    # Filter context to only include parameters defined in the schema
+    filtered_context = {}
+    for key, value in context.items():
+        if key in properties:
+            filtered_context[key] = value
+        else:
+            await logger.adebug(
+                f"Filtering out context parameter '{key}' not found in context schema",
+                context_key=key,
+                available_keys=list(properties.keys()),
+            )
+
+    return filtered_context
+
+
 AnyStream = AsyncIterator[tuple[str, Any]]
 
 
@@ -107,6 +143,17 @@ async def astream_state(
             checkpointer=None if temporary else Checkpointer(),
         )
     )
+
+    # Filter context parameters based on context schema if available
+    if context and USE_RUNTIME_CONTEXT_API and not isinstance(graph, BaseRemotePregel):
+        try:
+            context_schema = graph.get_context_jsonschema()
+            context = await _filter_context_by_schema(context, context_schema)
+        except Exception as e:
+            await logger.adebug(
+                f"Failed to get context schema for filtering: {e}", exc_info=e
+            )
+
     input = kwargs.pop("input")
     if cmd := kwargs.pop("command"):
         input = map_cmd(cmd)
@@ -348,7 +395,17 @@ async def astream_state(
         yield "feedback", feedback_urls
 
 
-async def consume(stream: AnyStream, run_id: str, resumable: bool = False) -> None:
+async def consume(
+    stream: AnyStream,
+    run_id: str,
+    resumable: bool = False,
+    stream_modes: set[StreamMode] | None = None,
+) -> None:
+    stream_modes = stream_modes or set()
+    if "messages-tuple" in stream_modes:
+        stream_modes.add("messages")
+        stream_modes.add("metadata")
+
     async with aclosing(stream):
         try:
             async for mode, payload in stream:
@@ -356,7 +413,7 @@ async def consume(stream: AnyStream, run_id: str, resumable: bool = False) -> No
                     run_id,
                     mode,
                     await run_in_executor(None, json_dumpb, payload),
-                    resumable=resumable,
+                    resumable=resumable and mode.split("|")[0] in stream_modes,
                 )
         except Exception as e:
             if isinstance(e, ExceptionGroup):
@@ -365,7 +422,6 @@ async def consume(stream: AnyStream, run_id: str, resumable: bool = False) -> No
                 run_id,
                 "error",
                 await run_in_executor(None, json_dumpb, e),
-                resumable=resumable,
             )
             raise e
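To make the new schema-based filtering concrete, here is a rough synchronous sketch of the same logic, with a hand-written JSON schema standing in for graph.get_context_jsonschema(); the function and variable names below are illustrative only:

```python
# Hypothetical illustration of the context filtering added to stream.py,
# written synchronously and driven by a hand-written schema.
from typing import Any


def filter_context_by_schema(
    context: dict[str, Any], context_schema: dict | None
) -> dict[str, Any]:
    if not context_schema or not context:
        return context
    properties = context_schema.get("properties", {})
    if not properties:
        return context
    # Keep only keys declared in the schema; the server logs dropped keys at debug level.
    return {k: v for k, v in context.items() if k in properties}


schema = {"type": "object", "properties": {"user_id": {"type": "string"}}}
context = {"user_id": "u-1", "unrelated": 42}
assert filter_context_by_schema(context, schema) == {"user_id": "u-1"}
# With no schema available (e.g. remote graphs), context passes through untouched.
assert filter_context_by_schema(context, None) == context
```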
 
langgraph_api/worker.py CHANGED
@@ -20,7 +20,7 @@ from langgraph_api.config import (
 from langgraph_api.errors import UserInterrupt, UserRollback, UserTimeout
 from langgraph_api.js.errors import RemoteException
 from langgraph_api.metadata import incr_runs
-from langgraph_api.schema import Run
+from langgraph_api.schema import Run, StreamMode
 from langgraph_api.state import state_snapshot_to_thread_state
 from langgraph_api.stream import AnyStream, astream_state, consume
 from langgraph_api.utils import with_user
@@ -130,9 +130,11 @@ async def worker(
             break
 
         # Wrap the graph execution to separate user errors from server errors
-        async def wrap_user_errors(stream: AnyStream, run_id: str, resumable: bool):
+        async def wrap_user_errors(
+            stream: AnyStream, run_id: str, resumable: bool, stream_modes: set[StreamMode]
+        ):
             try:
-                await consume(stream, run_id, resumable)
+                await consume(stream, run_id, resumable, stream_modes)
             except Exception as e:
                 if not isinstance(e, UserRollback | UserInterrupt):
                     logger.error(
@@ -184,8 +186,11 @@ async def worker(
                 on_checkpoint=on_checkpoint,
                 on_task_result=on_task_result,
             )
+            stream_modes: set[StreamMode] = set(
+                run["kwargs"].get("stream_mode", [])
+            )
             await asyncio.wait_for(
-                wrap_user_errors(stream, run_id, resumable),
+                wrap_user_errors(stream, run_id, resumable, stream_modes),
                 BG_JOB_TIMEOUT_SECS,
             )
         except (Exception, asyncio.CancelledError) as ee:
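Taken together with the stream.py change, the worker now derives the requested stream modes from the run's kwargs, consume expands the messages-tuple alias, and a chunk is persisted for resume only if its base mode was requested. A small sketch of that flow; the helper names and example mode strings are hypothetical:

```python
# Hypothetical sketch of the stream-mode gating introduced in worker.py / stream.py.
def requested_modes(run_kwargs: dict) -> set[str]:
    modes = set(run_kwargs.get("stream_mode", []))
    if "messages-tuple" in modes:
        # messages-tuple implies the underlying messages and metadata streams
        modes.add("messages")
        modes.add("metadata")
    return modes


def is_resumable_chunk(mode: str, resumable: bool, modes: set[str]) -> bool:
    # A chunk whose mode carries a "|<namespace>" suffix is persisted for resume
    # only if its base mode was actually requested by the client.
    return resumable and mode.split("|")[0] in modes


modes = requested_modes({"stream_mode": ["messages-tuple", "values"]})
assert is_resumable_chunk("messages|agent", resumable=True, modes=modes)
assert not is_resumable_chunk("debug", resumable=True, modes=modes)
```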
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.126
+Version: 0.2.129
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -1,4 +1,4 @@
-langgraph_api/__init__.py,sha256=MJXz1iIiYzSsVkIx-r66BL7rUK8vsMX6Q89086HsRSU,24
+langgraph_api/__init__.py,sha256=HccA10414fLnmUwuQYGEKdJkoD7ewxAva8kIUEgZE6c,24
 langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
 langgraph_api/asyncio.py,sha256=Wv4Rwm-a-Cf6JpfgJmVuVlXQ7SlwrjbTn0eq1ux8I2Q,9652
 langgraph_api/cli.py,sha256=xQojITwmmKSJw48Lr2regcnRPRq2FJqWlPpeyr5TgbU,16158
@@ -17,17 +17,17 @@ langgraph_api/queue_entrypoint.py,sha256=JzJCB3iYvep-GoAHQ_-H2ZxXVgmzYfvjQsmdBRx
 langgraph_api/route.py,sha256=4VBkJMeusfiZtLzyUaKm1HwLHTq0g15y2CRiRhM6xyA,4773
 langgraph_api/schema.py,sha256=1L7g4TUJjmsaBUCSThycH11-hrdPLxB6mtc3tIHmpbM,6371
 langgraph_api/serde.py,sha256=0ALETUn582vNF-m0l_WOZGF_scL1VPA39fDkwMJQPrg,5187
-langgraph_api/server.py,sha256=KBnMFt3f9RVLVu_NqyeRc13D_Lq62Rk_2czZKEUMU5E,6994
+langgraph_api/server.py,sha256=9Y3qPixq2MuTs2tTB3CCW4cT5ueFJK7yce3GwHyi_L0,7093
 langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
 langgraph_api/state.py,sha256=P2mCo-0bqPu2v9FSFGJtUCjPPNvv6wLUKQh8SdxAtc8,4387
 langgraph_api/store.py,sha256=srRI0fQXNFo_RSUs4apucr4BEp_KrIseJksZXs32MlQ,4635
-langgraph_api/stream.py,sha256=RvO0mYEzU7XTSQz2PDvj3KzMO_T2Hpmsbwff0GoRDmI,15741
+langgraph_api/stream.py,sha256=ABYlcmNLqElA71Mn8I3cejBeSJaf5ZhdA-ymfs0uZEw,17517
 langgraph_api/thread_ttl.py,sha256=7H3gFlWcUiODPoaEzcwB0LR61uvcuyjD0ew_4BztB7k,1902
 langgraph_api/traceblock.py,sha256=2aWS6TKGTcQ0G1fOtnjVrzkpeGvDsR0spDbfddEqgRU,594
 langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/validation.py,sha256=zMuKmwUEBjBgFMwAaeLZmatwGVijKv2sOYtYg7gfRtc,4950
 langgraph_api/webhook.py,sha256=VCJp4dI5E1oSJ15XP34cnPiOi8Ya8Q1BnBwVGadOpLI,1636
-langgraph_api/worker.py,sha256=LVvjvigurlDgpNjFcbAvRH7744fE01Lirrg2ZlHtORE,14245
+langgraph_api/worker.py,sha256=0Yt3L4_D0vu0rL2l8ZeQIin-X12H6WPRQeLxYiKYMSo,14458
 langgraph_api/api/__init__.py,sha256=WHy6oNLWtH1K7AxmmsU9RD-Vm6WP-Ov16xS8Ey9YCmQ,6090
 langgraph_api/api/assistants.py,sha256=ecHaID71ReTAZF4qsJzDe5L-2T5iOL2v8p6kQVHLKFk,16009
 langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
@@ -74,7 +74,7 @@ langgraph_api/middleware/http_logger.py,sha256=tUdWuIKtDa2EkFtG_kCjw1Wkgv7gbGH3h
 langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV7_aLBPw7_EgeqI,2111
 langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIiuoHeZpR9g,1242
 langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/models/run.py,sha256=Yn1VuAAzxLMb9akOw8mLMWgx2cxPhJiIZ4-lYe75guQ,14808
+langgraph_api/models/run.py,sha256=kfoNs-PdurTsaV9tpQzaR59aRXPFYMJNJbDz4uH7Dis,15323
 langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
 langgraph_api/utils/__init__.py,sha256=EQu0PShwHhxUI_9mDFgqlAf5_y5bX8TEk723P5iby24,4161
 langgraph_api/utils/cache.py,sha256=SrtIWYibbrNeZzLXLUGBFhJPkMVNQnVxR5giiYGHEfI,1810
@@ -95,8 +95,8 @@ langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,11
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=SPCrzYpta2xTl-WE2W6qwosYdQqLeB8qpzaYEbcK44k,150725
-langgraph_api-0.2.126.dist-info/METADATA,sha256=2nFffECkS7whKzQeFOf4N-BWWgyRgAbKq1VVLqDIIA4,3890
-langgraph_api-0.2.126.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langgraph_api-0.2.126.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
-langgraph_api-0.2.126.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
-langgraph_api-0.2.126.dist-info/RECORD,,
+langgraph_api-0.2.129.dist-info/METADATA,sha256=sSSIekJ2xlZNS6V6rXhkLiMouTrMysiZAyuV8SeMhpw,3890
+langgraph_api-0.2.129.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.2.129.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.2.129.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.2.129.dist-info/RECORD,,