langgraph-runtime-inmem 0.22.1__py3-none-any.whl → 0.23.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- langgraph_runtime_inmem/__init__.py +1 -1
- langgraph_runtime_inmem/_persistence.py +58 -0
- langgraph_runtime_inmem/checkpoint.py +19 -56
- langgraph_runtime_inmem/database.py +5 -0
- langgraph_runtime_inmem/lifespan.py +5 -0
- langgraph_runtime_inmem/ops.py +11 -12
- langgraph_runtime_inmem/store.py +4 -0
- {langgraph_runtime_inmem-0.22.1.dist-info → langgraph_runtime_inmem-0.23.0.dist-info}/METADATA +1 -1
- langgraph_runtime_inmem-0.23.0.dist-info/RECORD +15 -0
- langgraph_runtime_inmem-0.22.1.dist-info/RECORD +0 -14
- {langgraph_runtime_inmem-0.22.1.dist-info → langgraph_runtime_inmem-0.23.0.dist-info}/WHEEL +0 -0
langgraph_runtime_inmem/_persistence.py ADDED
@@ -0,0 +1,58 @@
+"""Periodic flushing for all PersistentDict stores."""
+
+from __future__ import annotations
+
+import functools
+import logging
+import threading
+import weakref
+
+from langgraph.checkpoint.memory import PersistentDict
+
+logger = logging.getLogger(__name__)
+
+_stores: dict[str, weakref.ref[PersistentDict]] = {}
+_flush_thread: tuple[threading.Event, threading.Thread] | None = None
+_flush_interval: int = 10
+
+
+def register_persistent_dict(d: PersistentDict) -> None:
+    """Register a PersistentDict for periodic flushing."""
+    global _flush_thread
+    _stores[d.filename] = weakref.ref(d)
+    if _flush_thread is None:
+        logger.info("Starting dev persistence flush loop")
+        stop_event = threading.Event()
+        _flush_thread = (
+            stop_event,
+            threading.Thread(
+                target=functools.partial(_flush_loop, stop_event), daemon=True
+            ),
+        )
+        _flush_thread[1].start()
+
+
+def stop_flush_loop() -> None:
+    """Stop the background flush thread."""
+    global _flush_thread
+    if _flush_thread is not None:
+        logger.info("Stopping dev persistence flush loop")
+        _flush_thread[0].set()
+        _flush_thread[1].join()
+        _flush_thread = None
+
+
+def _flush_loop(stop_event: threading.Event) -> None:
+    drop = set()
+    while not stop_event.wait(timeout=_flush_interval):
+        keys = list(_stores.keys())
+        for store_key in keys:
+            if store := _stores[store_key]():
+                store.sync()
+            else:
+                drop.add(store_key)
+        if drop:
+            for store_key in drop:
+                del _stores[store_key]
+            drop.clear()
+    logger.info("dev persistence flush loop exiting")
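Taken together, the new module is a small registry: any PersistentDict handed to register_persistent_dict is held by weak reference, keyed by its filename, and a single daemon thread then calls .sync() on every live entry roughly every _flush_interval (10) seconds, silently dropping entries whose dicts have been garbage collected. A minimal usage sketch, assuming the constructor pattern that store.py uses below; the filename here is illustrative, not one the package creates:

    # Sketch only: wiring an arbitrary PersistentDict into the new flush loop.
    from collections import defaultdict

    from langgraph.checkpoint.memory import PersistentDict
    from langgraph_runtime_inmem._persistence import (
        register_persistent_dict,
        stop_flush_loop,
    )

    # Disk-backed dict, built the same way store.py builds its vector store;
    # "scratch_cache.pckl" is an illustrative filename.
    cache = PersistentDict(lambda: defaultdict(dict), filename="scratch_cache.pckl")
    register_persistent_dict(cache)  # the first registration starts the daemon thread

    cache["runs"] = {"status": "done"}
    # ...the thread flushes `cache` to disk about every 10 seconds...

    stop_flush_loop()  # on shutdown; InMemorySaver.__aexit__ now does the same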
langgraph_runtime_inmem/checkpoint.py CHANGED
@@ -1,12 +1,10 @@
 from __future__ import annotations
 
-import functools
 import logging
 import os
-import threading
 import typing
 import uuid
-import
+from collections import defaultdict
 from collections.abc import AsyncIterator, Callable
 from typing import Any
 
@@ -17,6 +15,11 @@ from langgraph.checkpoint.memory import (
     PersistentDict,
 )
 
+from langgraph_runtime_inmem._persistence import (
+    register_persistent_dict,
+    stop_flush_loop,
+)
+
 if typing.TYPE_CHECKING:
     from langchain_core.runnables import RunnableConfig
     from langgraph.checkpoint.base import (
@@ -53,6 +56,7 @@ class InMemorySaver(InMemorySaverBase):
         __persistence_hook__: Callable[[PersistentDict], None] | None = None,
     ) -> None:
         self.filename = os.path.join(".langgraph_api", ".langgraph_checkpoint.")
+        self.latest_iter: AsyncIterator[CheckpointTuple] | None = None
         i = 0
 
         def factory(*args):
@@ -95,7 +99,7 @@ class InMemorySaver(InMemorySaverBase):
 
         super().__init__(
             serde=serde if serde is not None else Serializer(),
-            factory=factory if not DISABLE_FILE_PERSISTENCE else
+            factory=factory if not DISABLE_FILE_PERSISTENCE else defaultdict,
         )
 
     def put(
@@ -158,12 +162,10 @@ class InMemorySaver(InMemorySaverBase):
 
         if not api_config.LANGGRAPH_ENCRYPTION:
            return data
-        from langgraph_api.
-        from langgraph_api.encryption import
+        from langgraph_api.encryption import get_encryption
+        from langgraph_api.encryption.middleware import decrypt_json_if_needed
 
-        result = await decrypt_json_if_needed(
-            data, get_encryption_instance(), "checkpoint"
-        )
+        result = await decrypt_json_if_needed(data, get_encryption(), "checkpoint")
         if result is None:
             raise ValueError("decrypt_json_if_needed returned None for non-None input")
         return result
@@ -211,14 +213,14 @@ class InMemorySaver(InMemorySaverBase):
         )
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
-
-        if _ingestion_thread is not None:
-            logger.info("Stopping dev checkpoint ingestion loop")
-            _ingestion_thread[0].set()
-            _ingestion_thread[1].join()
-            _ingestion_thread = None
+        stop_flush_loop()
         await super().__aexit__(exc_type, exc_val, exc_tb)
 
+    async def aget_iter(self, config: RunnableConfig) -> AsyncIterator[CheckpointTuple]:
+        tup = await self.aget_tuple(config)
+        if tup is not None:
+            yield tup
+
 
 MEMORY = None
 
@@ -227,14 +229,14 @@ def Checkpointer(*args, unpack_hook=None, **kwargs):
     global MEMORY
     if MEMORY is None:
         MEMORY = InMemorySaver(
-            __persistence_hook__=
+            __persistence_hook__=register_persistent_dict,
         )
     if unpack_hook is not None:
         from langgraph_api.serde import Serializer
 
         saver = InMemorySaver(
             serde=Serializer(__unpack_ext_hook__=unpack_hook),
-            __persistence_hook__=
+            __persistence_hook__=register_persistent_dict,
             **kwargs,
         )
         saver.writes = MEMORY.writes
@@ -244,43 +246,4 @@ def Checkpointer(*args, unpack_hook=None, **kwargs):
     return MEMORY
 
 
-_stores: dict[str, weakref.ref[PersistentDict]] = {}
-_ingestion_thread: tuple[threading.Event, threading.Thread] | None = None
-_ingestion_delay: int = 10
-
-
-def _hook(d: PersistentDict):
-    global _ingestion_thread
-    _stores[d.filename] = weakref.ref(d)
-    if _ingestion_thread is None:
-        logger.info("Starting dev checkpoint ingestion loop")
-        stop_event = threading.Event()
-        _ingestion_thread = (
-            stop_event,
-            threading.Thread(
-                target=functools.partial(_ingestion_loop, stop_event), daemon=True
-            ),
-        )
-        _ingestion_thread[1].start()
-    pass
-
-
-def _ingestion_loop(stop_event: threading.Event):
-    drop = set()
-    while not stop_event.wait(timeout=_ingestion_delay):
-        keys = list(_stores.keys())
-        for store_key in keys:
-            if store := _stores[store_key]():
-                store.sync()
-                continue
-            else:
-                drop.add(store_key)
-        if drop:
-            for store_key in drop:
-                del _stores[store_key]
-            drop.clear()
-    # Note: the checkpoints are flushed one last time upon exit.
-    logger.info("dev checkpoint ingestion loop exiting")
-
-
 __all__ = ["Checkpointer"]
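Two behavioral notes on the checkpoint changes: the module-level _hook/_ingestion_loop pair has moved to _persistence.py essentially unchanged (with "ingestion" renamed to "flush"), and register_persistent_dict is now passed as the saver's __persistence_hook__; the new aget_iter helper wraps aget_tuple as an async generator that yields the latest checkpoint tuple for a config, or nothing if none exists. A minimal consumption sketch, assuming the dev server's dependencies (langgraph, langgraph-api) are installed; the thread_id is illustrative:

    # Sketch: consuming InMemorySaver.aget_iter, added in 0.23.0.
    import asyncio

    from langgraph_runtime_inmem.checkpoint import Checkpointer


    async def main() -> None:
        saver = Checkpointer()  # singleton InMemorySaver wired to the flush loop
        config = {"configurable": {"thread_id": "example-thread"}}
        # Yields at most one CheckpointTuple: the latest checkpoint for the thread.
        async for tup in saver.aget_iter(config):
            print("latest checkpoint:", tup.config)


    asyncio.run(main())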
langgraph_runtime_inmem/database.py CHANGED
@@ -13,6 +13,7 @@ from langgraph.checkpoint.memory import PersistentDict
 from typing_extensions import TypedDict
 
 from langgraph_runtime_inmem import store
+from langgraph_runtime_inmem._persistence import register_persistent_dict
 from langgraph_runtime_inmem.inmem_stream import start_stream, stop_stream
 
 if TYPE_CHECKING:
@@ -114,6 +115,10 @@ class InMemoryRetryCounter:
 GLOBAL_RETRY_COUNTER = InMemoryRetryCounter()
 GLOBAL_STORE = GlobalStore(filename=OPS_FILENAME)
 
+# Register for periodic flushing
+register_persistent_dict(GLOBAL_STORE)
+register_persistent_dict(GLOBAL_RETRY_COUNTER._counters)
+
 
 class InMemConnectionProto:
     def __init__(self):
langgraph_runtime_inmem/lifespan.py CHANGED
@@ -30,6 +30,9 @@ async def lifespan(
 ):
     import langgraph_api.config as config
     from langgraph_api import __version__, feature_flags, graph, thread_ttl
+    from langgraph_api import (
+        _checkpointer as api_checkpointer,
+    )
     from langgraph_api import store as api_store
     from langgraph_api.asyncio import SimpleTaskGroup, set_event_loop
     from langgraph_api.http import start_http_client, stop_http_client
@@ -54,6 +57,7 @@ async def lifespan(
 
     await start_http_client()
     await start_pool()
+    await api_checkpointer.start_checkpointer()
     await start_ui_bundler()
 
     async def _log_graph_load_failure(err: graph.GraphLoadError) -> None:
@@ -119,6 +123,7 @@ async def lifespan(
         pass
     finally:
         await api_store.exit_store()
+        await api_checkpointer.exit_checkpointer()
         await stop_ui_bundler()
         await graph.stop_remote_graphs()
         await stop_http_client()
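The net effect on the dev server's lifecycle is that the checkpointer now has explicit start and exit hooks: started right after the pool, torn down right after the store. A condensed sketch of the ordering, using only the calls visible in the hunks above (imports and the unchanged body are omitted, so this is not a standalone program):

    async def _lifespan_ordering_sketch() -> None:
        # Condensed from the hunks above; every call comes from lifespan.py's
        # context lines, imports are omitted, and the elided body is unchanged.
        await start_http_client()
        await start_pool()
        await api_checkpointer.start_checkpointer()  # new in 0.23.0
        await start_ui_bundler()
        try:
            ...  # serve until shutdown (unchanged code, elided here)
        finally:
            await api_store.exit_store()
            await api_checkpointer.exit_checkpointer()  # new in 0.23.0
            await stop_ui_bundler()
            await graph.stop_remote_graphs()
            await stop_http_client()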
langgraph_runtime_inmem/ops.py CHANGED
@@ -150,7 +150,7 @@ class Assistants(Authenticated):
         select: list[AssistantSelectField] | None = None,
         ctx: Auth.types.BaseAuthContext | None = None,
     ) -> tuple[AsyncIterator[Assistant], int]:
-        from langgraph_api.graph import
+        from langgraph_api.graph import assert_graph_exists
 
         metadata = metadata if metadata is not None else {}
         filters = await Assistants.handle_event(
@@ -161,8 +161,8 @@ class Assistants(Authenticated):
             ),
         )
 
-        if graph_id is not None
-
+        if graph_id is not None:
+            assert_graph_exists(graph_id)
 
         # Get all assistants and filter them
         assistants = conn.store["assistants"]
@@ -255,7 +255,7 @@ class Assistants(Authenticated):
         description: str | None = None,
     ) -> AsyncIterator[Assistant]:
         """Insert an assistant."""
-        from langgraph_api.graph import
+        from langgraph_api.graph import assert_graph_exists
 
         assistant_id = _ensure_uuid(assistant_id)
         metadata = metadata if metadata is not None else {}
@@ -278,8 +278,7 @@ class Assistants(Authenticated):
                 detail="Cannot specify both configurable and context. Prefer setting context alone. Context was introduced in LangGraph 0.6.0 and is the long term planned replacement for configurable.",
             )
 
-
-        raise HTTPException(status_code=404, detail=f"Graph {graph_id} not found")
+        assert_graph_exists(graph_id)
 
         # Keep config and context up to date with one another
         if config.get("configurable"):
@@ -370,7 +369,7 @@ class Assistants(Authenticated):
         Returns:
             return the updated assistant model.
         """
-        from langgraph_api.graph import
+        from langgraph_api.graph import assert_graph_exists
 
         assistant_id = _ensure_uuid(assistant_id)
         metadata = metadata if metadata is not None else {}
@@ -394,8 +393,8 @@ class Assistants(Authenticated):
                 detail="Cannot specify both configurable and context. Prefer setting context alone. Context was introduced in LangGraph 0.6.0 and is the long term planned replacement for configurable.",
             )
 
-        if graph_id is not None
-
+        if graph_id is not None:
+            assert_graph_exists(graph_id)
 
         # Keep config and context up to date with one another
         if config.get("configurable"):
@@ -672,7 +671,7 @@ class Assistants(Authenticated):
         ctx: Auth.types.BaseAuthContext | None = None,
     ) -> int:
         """Get count of assistants."""
-        from langgraph_api.graph import
+        from langgraph_api.graph import assert_graph_exists
 
         metadata = metadata if metadata is not None else {}
         filters = await Assistants.handle_event(
@@ -683,8 +682,8 @@ class Assistants(Authenticated):
             ),
         )
 
-        if graph_id is not None
-
+        if graph_id is not None:
+            assert_graph_exists(graph_id)
 
         count = 0
         for assistant in conn.store["assistants"]:
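These call sites now delegate their graph-existence check to assert_graph_exists from langgraph_api.graph, which is not part of this diff; the create path previously raised the 404 inline. Conceptually the guard has roughly this shape. This is a hypothetical sketch: only the 404 message comes from the removed line in ops.py, and the HTTPException import and registry are assumptions.

    # Hypothetical sketch of the guard the ops.py call sites now delegate to.
    # The real assert_graph_exists lives in langgraph_api.graph and is not shown
    # in this diff; the registry and the import below are purely illustrative.
    from starlette.exceptions import HTTPException

    _KNOWN_GRAPHS: set[str] = {"agent"}  # illustrative set of loaded graph ids


    def assert_graph_exists(graph_id: str) -> None:
        """Raise a 404 if the graph id has not been registered with the server."""
        if graph_id not in _KNOWN_GRAPHS:
            raise HTTPException(status_code=404, detail=f"Graph {graph_id} not found")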
langgraph_runtime_inmem/store.py CHANGED
@@ -10,6 +10,8 @@ from langgraph.store.base import BaseStore, Op, Result
 from langgraph.store.base.batch import AsyncBatchedBaseStore
 from langgraph.store.memory import InMemoryStore
 
+from langgraph_runtime_inmem._persistence import register_persistent_dict
+
 _STORE_CONFIG = None
 DISABLE_FILE_PERSISTENCE = (
     os.getenv("LANGGRAPH_DISABLE_FILE_PERSISTENCE", "false").lower() == "true"
@@ -24,6 +26,8 @@ class DiskBackedInMemStore(InMemoryStore):
             self._vectors = PersistentDict(
                 lambda: defaultdict(dict), filename=_VECTOR_FILE
             )
+            register_persistent_dict(self._data)
+            register_persistent_dict(self._vectors)
         else:
             self._data = InMemoryStore._data
             self._vectors = InMemoryStore._vectors
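As the second hunk shows, the flush registrations in store.py happen only on the file-backed branch; when LANGGRAPH_DISABLE_FILE_PERSISTENCE is set, the store falls back to InMemoryStore's plain dicts and nothing is registered. The switch is read once at import time, with the truthiness rule reproduced here as a standalone check:

    import os

    # Same rule as store.py: only the literal string "true" (case-insensitive)
    # disables file persistence; anything else, or an unset variable, keeps it on.
    DISABLE_FILE_PERSISTENCE = (
        os.getenv("LANGGRAPH_DISABLE_FILE_PERSISTENCE", "false").lower() == "true"
    )
    print("file persistence disabled:", DISABLE_FILE_PERSISTENCE)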
langgraph_runtime_inmem-0.23.0.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
+langgraph_runtime_inmem/__init__.py,sha256=oKfMmvvPRewTlo9oduB3tCC3l2b2MiD_9lka4PMm_5M,337
+langgraph_runtime_inmem/_persistence.py,sha256=exchMr_NQB_h7PHt0vq5QBh25cOGoW0jHAFo07b1BFI,1711
+langgraph_runtime_inmem/checkpoint.py,sha256=VD5c6CktsToo_f4qPe1WP_csdonQoOb7h5lHv4U0ZAE,8372
+langgraph_runtime_inmem/database.py,sha256=iP7W1SI4kUkqcHtkg3aMmP-YLgZfvMHANwN-P4Pb1pY,6607
+langgraph_runtime_inmem/inmem_stream.py,sha256=PFLWbsxU8RqbT5mYJgNk6v5q6TWJRIY1hkZWhJF8nkI,9094
+langgraph_runtime_inmem/lifespan.py,sha256=51w3ZKvxcosd7XKkTE2Tnxtr3tux4rJuChGEN0CuvCY,4935
+langgraph_runtime_inmem/metrics.py,sha256=_YiSkLnhQvHpMktk38SZo0abyL-5GihfVAtBo0-lFIc,403
+langgraph_runtime_inmem/ops.py,sha256=OZ1VicFoh9eOxD15LbZsmCZ1JTaWkMUjdY0nF0t9e-k,120806
+langgraph_runtime_inmem/queue.py,sha256=WM6ZJu25QPVjFXeJYW06GALLUgRsnRrA4YdypR0oG0U,9584
+langgraph_runtime_inmem/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,763
+langgraph_runtime_inmem/routes.py,sha256=VVNxgJ8FWI3kDBoIgQUWN1gY5ivo7L954Agxzv72TAY,1377
+langgraph_runtime_inmem/store.py,sha256=a3YKsLnFv4bu3zPvagIFv0xmtrIp_pmGvj1CnD3PHL0,3682
+langgraph_runtime_inmem-0.23.0.dist-info/METADATA,sha256=9x1G8RrISXklr1MikxsE4kJFZTswjmATqNgA0EsPQYY,570
+langgraph_runtime_inmem-0.23.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+langgraph_runtime_inmem-0.23.0.dist-info/RECORD,,
langgraph_runtime_inmem-0.22.1.dist-info/RECORD REMOVED
@@ -1,14 +0,0 @@
-langgraph_runtime_inmem/__init__.py,sha256=yocpl21Fk4OqK3oi1op2eCBM4EqmVbZFTETc--eQcJk,337
-langgraph_runtime_inmem/checkpoint.py,sha256=k9xHZVwbJhxunpbuM36zaVtcP2c2zdx6ABdDFYE2PrE,9471
-langgraph_runtime_inmem/database.py,sha256=g2XYa5KN-T8MbDeFH9sfUApDG62Wp4BACumVnDtxYhI,6403
-langgraph_runtime_inmem/inmem_stream.py,sha256=PFLWbsxU8RqbT5mYJgNk6v5q6TWJRIY1hkZWhJF8nkI,9094
-langgraph_runtime_inmem/lifespan.py,sha256=fCoYcN_h0cxmj6-muC-f0csPdSpyepZuGRD1yBrq4XM,4755
-langgraph_runtime_inmem/metrics.py,sha256=_YiSkLnhQvHpMktk38SZo0abyL-5GihfVAtBo0-lFIc,403
-langgraph_runtime_inmem/ops.py,sha256=lsfdJczwmmMClaGbXIuYMBtZ0JED5UP-iwUDtHpShl4,121054
-langgraph_runtime_inmem/queue.py,sha256=WM6ZJu25QPVjFXeJYW06GALLUgRsnRrA4YdypR0oG0U,9584
-langgraph_runtime_inmem/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,763
-langgraph_runtime_inmem/routes.py,sha256=VVNxgJ8FWI3kDBoIgQUWN1gY5ivo7L954Agxzv72TAY,1377
-langgraph_runtime_inmem/store.py,sha256=rTfL1JJvd-j4xjTrL8qDcynaWF6gUJ9-GDVwH0NBD_I,3506
-langgraph_runtime_inmem-0.22.1.dist-info/METADATA,sha256=CVWjgQ-ttGgcpuVhDcdQQO9Ba6iuam76jRVWEFo4_SY,570
-langgraph_runtime_inmem-0.22.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-langgraph_runtime_inmem-0.22.1.dist-info/RECORD,,
{langgraph_runtime_inmem-0.22.1.dist-info → langgraph_runtime_inmem-0.23.0.dist-info}/WHEEL
File without changes