langgraph-runtime-inmem 0.11.0__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

--- a/langgraph_runtime_inmem/__init__.py
+++ b/langgraph_runtime_inmem/__init__.py
@@ -9,7 +9,7 @@ from langgraph_runtime_inmem import (
     store,
 )
 
-__version__ = "0.11.0"
+__version__ = "0.12.1"
 __all__ = [
     "ops",
     "database",

--- a/langgraph_runtime_inmem/lifespan.py
+++ b/langgraph_runtime_inmem/lifespan.py
@@ -17,6 +17,7 @@ logger = structlog.stdlib.get_logger(__name__)
 @asynccontextmanager
 async def lifespan(
     app: Starlette | None = None,
+    cancel_event: asyncio.Event | None = None,
     taskset: set[asyncio.Task] | None = None,
     **kwargs: Any,
 ):
@@ -47,7 +48,7 @@ async def lifespan(
     try:
         async with SimpleTaskGroup(
             cancel=True,
-            taskset=taskset,
+            cancel_event=cancel_event,
             taskgroup_name="Lifespan",
         ) as tg:
             tg.create_task(metadata_loop())
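
In 0.12.1 the in-memory lifespan accepts an optional cancel_event, which it forwards to its SimpleTaskGroup in place of the previous taskset argument. Below is a minimal, hedged sketch of entering the lifespan with the new argument; standalone usage like this is an assumption (embedders normally let the API server drive the lifespan), and the exact shutdown semantics of the event are inferred from the SimpleTaskGroup change, not documented here.

    import asyncio

    from langgraph_runtime_inmem.lifespan import lifespan

    async def main() -> None:
        # Event forwarded to the lifespan's SimpleTaskGroup (replacing the old
        # taskset argument); presumably used to signal background tasks such as
        # metadata_loop() to stop.
        cancel_event = asyncio.Event()
        async with lifespan(app=None, cancel_event=cancel_event):
            ...  # serve traffic / run workers
            cancel_event.set()

    asyncio.run(main())
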

--- a/langgraph_runtime_inmem/ops.py
+++ b/langgraph_runtime_inmem/ops.py
@@ -11,7 +11,6 @@ import uuid
 from collections import defaultdict
 from collections.abc import AsyncIterator, Sequence
 from contextlib import asynccontextmanager
-from copy import deepcopy
 from datetime import UTC, datetime, timedelta
 from typing import Any, Literal, cast
 from uuid import UUID, uuid4
@@ -230,7 +229,7 @@ class Assistants(Authenticated):
                 if assistant["assistant_id"] == assistant_id and (
                     not filters or _check_filter_match(assistant["metadata"], filters)
                 ):
-                    yield assistant
+                    yield copy.deepcopy(assistant)
 
         return _yield_result()
 
@@ -1256,7 +1255,7 @@ class Threads(Authenticated):
             "thread_id": new_thread_id,
             "created_at": datetime.now(tz=UTC),
             "updated_at": datetime.now(tz=UTC),
-            "metadata": deepcopy(original_thread["metadata"]),
+            "metadata": copy.deepcopy(original_thread["metadata"]),
             "status": "idle",
             "config": {},
         }
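
These two ops.py changes drop the bare deepcopy import in favor of module-qualified copy.deepcopy (a module-level copy import is presumably present elsewhere in the file), and Assistants now yields defensive copies so callers cannot mutate the store's records in place. A small self-contained illustration of the behavior such copies guard against (the dict below is made up for the example, not taken from the package):

    import copy

    store_record = {"metadata": {"tags": ["a"]}}

    # Without a copy, a caller's mutation leaks back into shared state.
    leaked = store_record
    leaked["metadata"]["tags"].append("b")
    assert store_record["metadata"]["tags"] == ["a", "b"]

    # With copy.deepcopy, the stored record is untouched by later edits.
    safe = copy.deepcopy(store_record)
    safe["metadata"]["tags"].append("c")
    assert "c" not in store_record["metadata"]["tags"]
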
@@ -2057,7 +2056,10 @@ class Runs(Authenticated):
     @asynccontextmanager
     @staticmethod
     async def enter(
-        run_id: UUID, thread_id: UUID | None, loop: asyncio.AbstractEventLoop
+        run_id: UUID,
+        thread_id: UUID | None,
+        loop: asyncio.AbstractEventLoop,
+        resumable: bool,
     ) -> AsyncIterator[ValueEvent]:
         """Enter a run, listen for cancellation while running, signal when done.
         This method should be called as a context manager by a worker executing a run.
@@ -2087,7 +2089,9 @@ class Runs(Authenticated):
             topic=f"run:{run_id}:stream".encode(),
             data={"event": "control", "message": b"done"},
         )
-        await stream_manager.put(run_id, thread_id, stream_message)
+        await stream_manager.put(
+            run_id, thread_id, stream_message, resumable=resumable
+        )
 
         # Remove the control_queue (normal queue is cleaned up during run deletion)
         await stream_manager.remove_control_queue(run_id, thread_id, control_queue)
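
Workers entering a run now pass an explicit resumable flag, which enter() forwards to stream_manager.put() when it publishes the final "done" control message. A hedged sketch of the new call shape follows; the surrounding worker flow, the flag value, and how the run/thread ids are obtained are assumptions for illustration only.

    import asyncio

    from langgraph_runtime_inmem.ops import Runs

    async def execute(run_id, thread_id) -> None:
        loop = asyncio.get_running_loop()
        # `resumable` is threaded through to the stream manager so the closing
        # control message is published with resumable semantics.
        async with Runs.enter(run_id, thread_id, loop, resumable=True) as done:
            ...  # execute the run; `done` is the ValueEvent yielded by enter()
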
@@ -2417,19 +2421,21 @@ class Runs(Authenticated):
         last_chunk: bytes | None = None
         # wait for the run to complete
         # Rely on this join's auth
-        async for mode, chunk, _ in Runs.Stream.join(
-            run_id,
-            thread_id=thread_id,
-            ctx=ctx,
-            ignore_404=True,
-            stream_mode=["values", "updates", "error"],
-        ):
-            if mode == b"values":
-                last_chunk = chunk
-            elif mode == b"updates" and b"__interrupt__" in chunk:
-                last_chunk = chunk
-            elif mode == b"error":
-                last_chunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+        async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
+            async for mode, chunk, _ in Runs.Stream.join(
+                run_id,
+                thread_id=thread_id,
+                ctx=ctx,
+                ignore_404=True,
+                stream_channel=sub,
+                stream_mode=["values", "updates", "error"],
+            ):
+                if mode == b"values":
+                    last_chunk = chunk
+                elif mode == b"updates" and b"__interrupt__" in chunk:
+                    last_chunk = chunk
+                elif mode == b"error":
+                    last_chunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
         # if we received a final chunk, return it
         if last_chunk is not None:
             # ie. if the run completed while we were waiting for it
@@ -2704,10 +2710,10 @@ class Runs(Authenticated):
         async def join(
             run_id: UUID,
             *,
+            stream_channel: asyncio.Queue,
             thread_id: UUID,
             ignore_404: bool = False,
             cancel_on_disconnect: bool = False,
-            stream_channel: asyncio.Queue | None = None,
             stream_mode: list[StreamMode] | StreamMode | None = None,
             last_event_id: str | None = None,
             ctx: Auth.types.BaseAuthContext | None = None,
@@ -2716,12 +2722,7 @@
             from langgraph_api.asyncio import create_task
             from langgraph_api.serde import json_loads
 
-            queue = (
-                stream_channel
-                if stream_channel
-                else await Runs.Stream.subscribe(run_id, thread_id)
-            )
-
+            queue = stream_channel
             try:
                 async with connect() as conn:
                     filters = await Runs.handle_event(
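
Runs.Stream.join no longer subscribes on the caller's behalf: stream_channel is now a required keyword argument, so callers must obtain a subscription first and pass it in, exactly as the updated wait logic above does. A sketch of the resulting pattern for any other caller, with illustrative argument values; the consume() wrapper itself is hypothetical.

    from langgraph_runtime_inmem.ops import Runs

    async def consume(run_id, thread_id, ctx) -> None:
        # subscribe() returns the queue that join() previously created internally
        # when stream_channel was omitted.
        async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
            async for mode, chunk, _ in Runs.Stream.join(
                run_id,
                thread_id=thread_id,
                ctx=ctx,
                stream_channel=sub,  # required in 0.12.1
                stream_mode=["values", "updates", "error"],
            ):
                ...  # handle each streamed (mode, chunk) pair
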

--- a/langgraph_runtime_inmem-0.11.0.dist-info/METADATA
+++ b/langgraph_runtime_inmem-0.12.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-runtime-inmem
-Version: 0.11.0
+Version: 0.12.1
 Summary: Inmem implementation for the LangGraph API server.
 Author-email: Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0

--- a/langgraph_runtime_inmem-0.11.0.dist-info/RECORD
+++ b/langgraph_runtime_inmem-0.12.1.dist-info/RECORD
@@ -1,13 +1,13 @@
-langgraph_runtime_inmem/__init__.py,sha256=fsghjd8RgYa23k1SGZS_DHaq_7X0NDBhUIAT0ud9uy4,311
+langgraph_runtime_inmem/__init__.py,sha256=tv9_9neTzC-cY8ATa9lnqpF4WJ-YZW3fYhQ9rSL4Yu8,311
 langgraph_runtime_inmem/checkpoint.py,sha256=nc1G8DqVdIu-ibjKTqXfbPfMbAsKjPObKqegrSzo6Po,4432
 langgraph_runtime_inmem/database.py,sha256=QgaA_WQo1IY6QioYd8r-e6-0B0rnC5anS0muIEJWby0,6364
 langgraph_runtime_inmem/inmem_stream.py,sha256=utL1OlOJsy6VDkSGAA6eX9nETreZlM6K6nhfNoubmRQ,9011
-langgraph_runtime_inmem/lifespan.py,sha256=t0w2MX2dGxe8yNtSX97Z-d2pFpllSLS4s1rh2GJDw5M,3557
+langgraph_runtime_inmem/lifespan.py,sha256=tngIYHMhDwTFd2zgpq9CZOxcBLONYYnkhwv2d2T5WWQ,3614
 langgraph_runtime_inmem/metrics.py,sha256=HhO0RC2bMDTDyGBNvnd2ooLebLA8P1u5oq978Kp_nAA,392
-langgraph_runtime_inmem/ops.py,sha256=4SBZ3LVQgKB53WeZMien4jb5WzNkMfZ48C4aS8rJX8k,111227
+langgraph_runtime_inmem/ops.py,sha256=593xx2A5E7y2TY6nLpbkFSsODH6guwm1y9z-ars-seU,111327
 langgraph_runtime_inmem/queue.py,sha256=33qfFKPhQicZ1qiibllYb-bTFzUNSN2c4bffPACP5es,9952
 langgraph_runtime_inmem/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,763
 langgraph_runtime_inmem/store.py,sha256=rTfL1JJvd-j4xjTrL8qDcynaWF6gUJ9-GDVwH0NBD_I,3506
-langgraph_runtime_inmem-0.11.0.dist-info/METADATA,sha256=xtMG9LLstWO11EQLaOQIhqSvBoA-Nprs2m9upNA7TbE,566
-langgraph_runtime_inmem-0.11.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langgraph_runtime_inmem-0.11.0.dist-info/RECORD,,
+langgraph_runtime_inmem-0.12.1.dist-info/METADATA,sha256=faLaXWGpAJnAK6Z5XS0TmGBtafg7Cef16_nc_Viw8yg,566
+langgraph_runtime_inmem-0.12.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_runtime_inmem-0.12.1.dist-info/RECORD,,