langgraph-api 0.2.45__py3-none-any.whl → 0.2.46__py3-none-any.whl

Potentially problematic release: this version of langgraph-api might be problematic.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.2.45"
+__version__ = "0.2.46"
@@ -189,40 +189,40 @@ async def get_assistant_graph(
     async with connect() as conn:
         assistant_ = await Assistants.get(conn, assistant_id)
         assistant = await fetchone(assistant_)
-        config = await ajson_loads(assistant["config"])
-        async with get_graph(
-            assistant["graph_id"],
-            config,
-            checkpointer=Checkpointer(conn),
-            store=(await api_store.get_store()),
-        ) as graph:
-            xray: bool | int = False
-            xray_query = request.query_params.get("xray")
-            if xray_query:
-                if xray_query in ("true", "True"):
-                    xray = True
-                elif xray_query in ("false", "False"):
-                    xray = False
-                else:
-                    try:
-                        xray = int(xray_query)
-                    except ValueError:
-                        raise HTTPException(422, detail="Invalid xray value") from None
-
-                    if xray <= 0:
-                        raise HTTPException(422, detail="Invalid xray value") from None
-
-            if isinstance(graph, BaseRemotePregel):
-                drawable_graph = await graph.fetch_graph(xray=xray)
-                return ApiResponse(drawable_graph.to_json())
+        config = await ajson_loads(assistant["config"])
+        async with get_graph(
+            assistant["graph_id"],
+            config,
+            checkpointer=Checkpointer(),
+            store=(await api_store.get_store()),
+        ) as graph:
+            xray: bool | int = False
+            xray_query = request.query_params.get("xray")
+            if xray_query:
+                if xray_query in ("true", "True"):
+                    xray = True
+                elif xray_query in ("false", "False"):
+                    xray = False
+                else:
+                    try:
+                        xray = int(xray_query)
+                    except ValueError:
+                        raise HTTPException(422, detail="Invalid xray value") from None
+
+                    if xray <= 0:
+                        raise HTTPException(422, detail="Invalid xray value") from None
+
+            if isinstance(graph, BaseRemotePregel):
+                drawable_graph = await graph.fetch_graph(xray=xray)
+                return ApiResponse(drawable_graph.to_json())
 
-            try:
-                drawable_graph = await graph.aget_graph(xray=xray)
-                return ApiResponse(drawable_graph.to_json())
-            except NotImplementedError:
-                raise HTTPException(
-                    422, detail="The graph does not support visualization"
-                ) from None
+            try:
+                drawable_graph = await graph.aget_graph(xray=xray)
+                return ApiResponse(drawable_graph.to_json())
+            except NotImplementedError:
+                raise HTTPException(
+                    422, detail="The graph does not support visualization"
+                ) from None
 
 
 @retry_db
@@ -239,7 +239,7 @@ async def get_assistant_subgraphs(
         async with get_graph(
             assistant["graph_id"],
             config,
-            checkpointer=Checkpointer(conn),
+            checkpointer=Checkpointer(),
             store=(await api_store.get_store()),
         ) as graph:
             namespace = request.path_params.get("namespace")
@@ -285,7 +285,7 @@ async def get_assistant_schemas(
         async with get_graph(
             assistant["graph_id"],
             config,
-            checkpointer=Checkpointer(conn),
+            checkpointer=Checkpointer(),
             store=(await api_store.get_store()),
         ) as graph:
             if isinstance(graph, BaseRemotePregel):
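The recurring change across these hunks is that the runtime checkpointer is now constructed without an explicit database connection (Checkpointer() instead of Checkpointer(conn)) before being handed to get_graph. This suggests the checkpointer now acquires its own connection internally; that reading is inferred from the diff alone and is not stated anywhere in the release.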
langgraph_api/api/runs.py CHANGED
@@ -222,6 +222,9 @@ async def wait_run(request: ApiRequest):
         stream = asyncio.create_task(consume())
         while True:
             try:
+                if stream.done():
+                    # raise stream exception if any
+                    stream.result()
                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
                 break
             except TimeoutError:
@@ -270,7 +273,10 @@ async def wait_run_stateless(request: ApiRequest):
         vchunk: bytes | None = None
         async with aclosing(
             Runs.Stream.join(
-                run["run_id"], thread_id=run["thread_id"], stream_mode=await sub
+                run["run_id"],
+                thread_id=run["thread_id"],
+                stream_mode=await sub,
+                ignore_404=True,
             )
         ) as stream:
             async for mode, chunk, _ in stream:
@@ -290,6 +296,9 @@ async def wait_run_stateless(request: ApiRequest):
         stream = asyncio.create_task(consume())
         while True:
             try:
+                if stream.done():
+                    # raise stream exception if any
+                    stream.result()
                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
                 break
             except TimeoutError:
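The new stream.done() check in both wait handlers surfaces failures of the background consumer task instead of letting the wait loop keep cycling on timeouts (the stateless variant additionally passes ignore_404=True to Runs.Stream.join). Below is a minimal, self-contained sketch of that wait pattern, not code from the package: consume, last_chunk, and the 5-second timeout mirror the names in the hunks above, while the failing consumer is fabricated purely for illustration. It assumes Python 3.11+, where asyncio's TimeoutError is the builtin TimeoutError.

import asyncio


async def main() -> None:
    last_chunk = asyncio.Event()

    async def consume() -> None:
        # Simulate the stream consumer dying before it ever sets last_chunk.
        raise RuntimeError("stream failed before any chunk arrived")

    stream = asyncio.create_task(consume())
    while True:
        try:
            if stream.done():
                # Re-raises the consumer's exception, if any, instead of
                # waiting forever on an event that will never be set.
                stream.result()
            await asyncio.wait_for(last_chunk.wait(), timeout=5)
            break
        except TimeoutError:
            continue


asyncio.run(main())  # propagates RuntimeError rather than spinning on timeouts

Without the done() check, the first timeout would simply loop back and wait again; with it, the consumer's error escapes the loop on the next pass.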
@@ -48,7 +48,6 @@ from langgraph_api.js.sse import SSEDecoder, aiter_lines_raw
 from langgraph_api.route import ApiResponse
 from langgraph_api.schema import Config
 from langgraph_api.serde import json_dumpb
-from langgraph_api.utils import AsyncConnectionProto
 
 logger = structlog.stdlib.get_logger(__name__)
 
@@ -469,10 +468,10 @@ class PassthroughSerialiser(SerializerProtocol):
         return orjson.loads(payload)
 
 
-def _get_passthrough_checkpointer(conn: AsyncConnectionProto):
+def _get_passthrough_checkpointer():
     from langgraph_runtime.checkpoint import Checkpointer
 
-    checkpointer = Checkpointer(conn)
+    checkpointer = Checkpointer()
     # This checkpointer does not attempt to revive LC-objects.
     # Instead, it will pass through the JSON values as-is.
     checkpointer.serde = PassthroughSerialiser()
@@ -487,53 +486,46 @@ async def _get_passthrough_store():
 # Setup a HTTP server on top of CHECKPOINTER_SOCKET unix socket
 # used by `client.mts` to communicate with the Python checkpointer
 async def run_remote_checkpointer():
-    from langgraph_runtime.database import connect
-
     async def checkpointer_list(payload: dict):
         """Search checkpoints"""
 
         result = []
-        async with connect() as conn:
-            checkpointer = _get_passthrough_checkpointer(conn)
-            async for item in checkpointer.alist(
-                config=payload.get("config"),
-                limit=int(payload.get("limit") or 10),
-                before=payload.get("before"),
-                filter=payload.get("filter"),
-            ):
-                result.append(item)
+        checkpointer = _get_passthrough_checkpointer()
+        async for item in checkpointer.alist(
+            config=payload.get("config"),
+            limit=int(payload.get("limit") or 10),
+            before=payload.get("before"),
+            filter=payload.get("filter"),
+        ):
+            result.append(item)
 
         return result
 
     async def checkpointer_put(payload: dict):
         """Put the new checkpoint metadata"""
 
-        async with connect() as conn:
-            checkpointer = _get_passthrough_checkpointer(conn)
-            return await checkpointer.aput(
-                payload["config"],
-                payload["checkpoint"],
-                payload["metadata"],
-                payload.get("new_versions", {}),
-            )
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aput(
+            payload["config"],
+            payload["checkpoint"],
+            payload["metadata"],
+            payload.get("new_versions", {}),
+        )
 
     async def checkpointer_get_tuple(payload: dict):
         """Get actual checkpoint values (reads)"""
-
-        async with connect() as conn:
-            checkpointer = _get_passthrough_checkpointer(conn)
-            return await checkpointer.aget_tuple(config=payload["config"])
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aget_tuple(config=payload["config"])
 
     async def checkpointer_put_writes(payload: dict):
         """Put actual checkpoint values (writes)"""
 
-        async with connect() as conn:
-            checkpointer = _get_passthrough_checkpointer(conn)
-            return await checkpointer.aput_writes(
-                payload["config"],
-                payload["writes"],
-                payload["taskId"],
-            )
+        checkpointer = _get_passthrough_checkpointer()
+        return await checkpointer.aput_writes(
+            payload["config"],
+            payload["writes"],
+            payload["taskId"],
+        )
 
     async def store_batch(payload: dict):
         """Batch operations on the store"""
langgraph_api/stream.py CHANGED
@@ -32,7 +32,6 @@ from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.metadata import HOST, PLAN, USER_API_URL, incr_nodes
 from langgraph_api.schema import Run, StreamMode
 from langgraph_api.serde import json_dumpb
-from langgraph_api.utils import AsyncConnectionProto
 from langgraph_runtime.checkpoint import Checkpointer
 from langgraph_runtime.ops import Runs
 
@@ -79,8 +78,6 @@ async def async_tracing_context(*args, **kwargs):
 
 
 async def astream_state(
-    stack: AsyncExitStack,
-    conn: AsyncConnectionProto,
     run: Run,
     attempt: int,
     done: ValueEvent,
@@ -90,7 +87,6 @@ async def astream_state(
 ) -> AnyStream:
     """Stream messages from the runnable."""
     run_id = str(run["run_id"])
-    await stack.enter_async_context(conn.pipeline())
     # extract args from run
     kwargs = run["kwargs"].copy()
     kwargs.pop("webhook", None)
@@ -98,12 +94,13 @@ async def astream_state(
     subgraphs = kwargs.get("subgraphs", False)
     temporary = kwargs.pop("temporary", False)
     config = kwargs.pop("config")
+    stack = AsyncExitStack()
     graph = await stack.enter_async_context(
         get_graph(
             config["configurable"]["graph_id"],
             config,
            store=(await api_store.get_store()),
-            checkpointer=None if temporary else Checkpointer(conn),
+            checkpointer=None if temporary else Checkpointer(),
        )
    )
    input = kwargs.pop("input")
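With these hunks, astream_state no longer receives a stack and conn from its caller: it creates its own AsyncExitStack, drops the conn.pipeline() context, and again builds Checkpointer() without a connection. The sketch below shows only the ownership pattern this implies; open_resource and stream_state are hypothetical names, and where the real function releases its stack is outside the hunks shown.

import asyncio
from contextlib import AsyncExitStack, asynccontextmanager


@asynccontextmanager
async def open_resource(name: str):
    # Stand-in for get_graph(...): something acquired with async cleanup.
    print(f"open {name}")
    try:
        yield name
    finally:
        print(f"close {name}")


async def stream_state() -> None:
    # The coroutine owns its own exit stack instead of borrowing the caller's.
    stack = AsyncExitStack()
    try:
        graph = await stack.enter_async_context(open_resource("graph"))
        print(f"streaming with {graph}")
    finally:
        await stack.aclose()


asyncio.run(stream_state())

Owning the stack locally keeps resource acquisition and release in one place and removes the need for callers to thread a stack and connection through the streaming entry point.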