langgraph-api 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This is a potentially problematic release.


This version of langgraph-api might be problematic; additional details are available from the registry's advisory page.

langgraph_api/cli.py CHANGED
@@ -4,12 +4,35 @@ import logging
4
4
  import os
5
5
  import pathlib
6
6
  import threading
7
- from collections.abc import Mapping
7
+ from collections.abc import Mapping, Sequence
8
8
 
9
9
  logging.basicConfig(level=logging.INFO)
10
10
  logger = logging.getLogger(__name__)
11
11
 
12
12
 
13
+ def _get_org_id() -> str | None:
14
+ from langsmith.client import Client
15
+ from langsmith.utils import tracing_is_enabled
16
+
17
+ # Yes, the organizationId is actually the workspace iD
18
+ # which is actually the tenantID which we actually get via
19
+ # the sessions endpoint
20
+
21
+ if not tracing_is_enabled():
22
+ return
23
+ client = Client()
24
+ try:
25
+ response = client.request_with_retries(
26
+ "GET", "/api/v1/sessions", params={"limit": 1}
27
+ )
28
+ result = response.json()
29
+ if result:
30
+ return result[0]["tenant_id"]
31
+ except Exception as e:
32
+ logger.debug("Failed to get organization ID: %s", str(e))
33
+ return None
34
+
35
+
13
36
  @contextlib.contextmanager
14
37
  def patch_environment(**kwargs):
15
38
  """Temporarily patch environment variables.
@@ -47,6 +70,8 @@ def run_server(
47
70
  open_browser: bool = False,
48
71
  debug_port: int | None = None,
49
72
  env: str | pathlib.Path | Mapping[str, str] | None = None,
73
+ reload_includes: Sequence[str] | None = None,
74
+ reload_excludes: Sequence[str] | None = None,
50
75
  ):
51
76
  """Run the LangGraph API server."""
52
77
  import uvicorn
@@ -90,14 +115,21 @@ def run_server(
90
115
  studio_url = f"https://smith.langchain.com/studio/?baseUrl={local_url}"
91
116
 
92
117
  def _open_browser():
118
+ nonlocal studio_url
93
119
  import time
94
120
  import urllib.request
95
121
  import webbrowser
96
122
 
123
+ org_id = _get_org_id()
124
+ if org_id:
125
+ studio_url = f"https://smith.langchain.com/studio/?baseUrl={local_url}&organizationId={org_id}"
126
+
97
127
  while True:
98
128
  try:
99
129
  with urllib.request.urlopen(f"{local_url}/ok") as response:
100
130
  if response.status == 200:
131
+ logger.info("🎨 Opening Studio in your browser...")
132
+ logger.info("URL: " + studio_url)
101
133
  webbrowser.open(studio_url)
102
134
  return
103
135
  except urllib.error.URLError:
@@ -127,7 +159,7 @@ For production use, please use LangGraph Cloud.
127
159
  REDIS_URI="fake",
128
160
  N_JOBS_PER_WORKER=str(n_jobs_per_worker if n_jobs_per_worker else 1),
129
161
  LANGSERVE_GRAPHS=json.dumps(graphs) if graphs else None,
130
- LANGSMITH_LANGGRAPH_API_VARIANT="test",
162
+ LANGSMITH_LANGGRAPH_API_VARIANT="local_dev",
131
163
  **(env_vars or {}),
132
164
  ):
133
165
  if open_browser:
@@ -140,6 +172,8 @@ For production use, please use LangGraph Cloud.
140
172
  reload=reload,
141
173
  env_file=env_file,
142
174
  access_log=False,
175
+ reload_includes=reload_includes,
176
+ reload_excludes=reload_excludes,
143
177
  log_config={
144
178
  "version": 1,
145
179
  "incremental": False,
langgraph_api/graph.py CHANGED
@@ -157,18 +157,20 @@ async def collect_graphs_from_env(register: bool = False) -> None:
157
157
 
158
158
  if paths_str:
159
159
  specs = [
160
- GraphSpec(
161
- key,
162
- module=value.split(":")[0],
163
- variable=value.split(":")[1],
164
- config=config_per_graph.get(key),
165
- )
166
- if "/" not in value
167
- else GraphSpec(
168
- key,
169
- path=value.split(":")[0],
170
- variable=value.split(":")[1],
171
- config=config_per_graph.get(key),
160
+ (
161
+ GraphSpec(
162
+ key,
163
+ module=value.split(":")[0],
164
+ variable=value.split(":")[1],
165
+ config=config_per_graph.get(key),
166
+ )
167
+ if "/" not in value
168
+ else GraphSpec(
169
+ key,
170
+ path=value.split(":")[0],
171
+ variable=value.split(":")[1],
172
+ config=config_per_graph.get(key),
173
+ )
172
174
  )
173
175
  for key, value in json.loads(paths_str).items()
174
176
  ]
@@ -268,7 +270,15 @@ def _graph_from_spec(spec: GraphSpec) -> GraphValue:
268
270
  modspec.loader.exec_module(module)
269
271
  except ImportError as e:
270
272
  e.add_note(f"Could not import python module for graph: {spec}")
271
- raise e
273
+ if os.environ.get("LANGSMITH_LANGGRAPH_API_VARIANT") == "local_dev":
274
+ e.add_note(
275
+ "This error likely means you haven't installed your project and its dependencies yet. Before running the server, install your project:\n\n"
276
+ "If you are using requirements.txt:\n"
277
+ "python -m pip install -r requirements.txt\n\n"
278
+ "If you are using pyproject.toml or setuptools:\n"
279
+ "python -m pip install -e .\n\n"
280
+ "Make sure to run this command from your project's root directory (where your setup.py or pyproject.toml is located)"
281
+ )
272
282
  else:
273
283
  raise ValueError("Graph specification must have a path or module")
274
284
 
@@ -5,7 +5,11 @@ export async function resolve(specifier, context, nextResolve) {
5
5
  const parentURL = new URL("./graph.mts", import.meta.url).toString();
6
6
 
7
7
  if (specifier.startsWith("@langchain/langgraph")) {
8
- return nextResolve(specifier, { ...context, parentURL });
8
+ try {
9
+ return nextResolve(specifier, { ...context, parentURL });
10
+ } catch (error) {
11
+ return nextResolve(specifier, context);
12
+ }
9
13
  }
10
14
 
11
15
  return nextResolve(specifier, context);
langgraph_api/queue.py CHANGED
@@ -5,7 +5,7 @@ from random import random
5
5
  from typing import cast
6
6
 
7
7
  import structlog
8
- from langgraph.pregel.debug import CheckpointPayload
8
+ from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload
9
9
 
10
10
  from langgraph_api.config import BG_JOB_NO_DELAY, STATS_INTERVAL_SECS
11
11
  from langgraph_api.errors import (
@@ -129,6 +129,13 @@ async def worker(
129
129
  nonlocal checkpoint
130
130
  checkpoint = checkpoint_arg
131
131
 
132
+ def on_task_result(task_result: TaskResultPayload):
133
+ if checkpoint is not None:
134
+ for task in checkpoint["tasks"]:
135
+ if task["id"] == task_result["id"]:
136
+ task.update(task_result)
137
+ break
138
+
132
139
  try:
133
140
  if attempt > MAX_RETRY_ATTEMPTS:
134
141
  raise RuntimeError(f"Run {run['run_id']} exceeded max attempts")
@@ -144,6 +151,7 @@ async def worker(
144
151
  attempt,
145
152
  done,
146
153
  on_checkpoint=on_checkpoint,
154
+ on_task_result=on_task_result,
147
155
  )
148
156
  await asyncio.wait_for(consume(stream, run_id), timeout)
149
157
  await logger.ainfo(
langgraph_api/schema.py CHANGED
@@ -98,6 +98,8 @@ class Thread(TypedDict):
98
98
  """The status of the thread. One of 'idle', 'busy', 'interrupted', "error"."""
99
99
  values: Fragment
100
100
  """The current state of the thread."""
101
+ interrupts: Fragment
102
+ """The current interrupts of the thread, a map of task_id to list of interrupts."""
101
103
 
102
104
 
103
105
  class ThreadTask(TypedDict):
langgraph_api/stream.py CHANGED
@@ -18,7 +18,7 @@ from langgraph.errors import (
18
18
  GraphRecursionError,
19
19
  InvalidUpdateError,
20
20
  )
21
- from langgraph.pregel.debug import CheckpointPayload
21
+ from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload
22
22
  from langgraph.types import Command, Send
23
23
  from pydantic import ValidationError
24
24
  from pydantic.v1 import ValidationError as ValidationErrorLegacy
@@ -90,10 +90,11 @@ async def astream_state(
90
90
  done: ValueEvent,
91
91
  *,
92
92
  on_checkpoint: Callable[[CheckpointPayload], None] = lambda _: None,
93
+ on_task_result: Callable[[TaskResultPayload], None] = lambda _: None,
93
94
  ) -> AnyStream:
94
95
  """Stream messages from the runnable."""
95
96
  run_id = str(run["run_id"])
96
- await stack.enter_async_context(conn.pipeline())
97
+ pipe = await stack.enter_async_context(conn.pipeline())
97
98
  # extract args from run
98
99
  kwargs = run["kwargs"].copy()
99
100
  subgraphs = kwargs.get("subgraphs", False)
@@ -102,7 +103,7 @@ async def astream_state(
102
103
  graph = get_graph(
103
104
  config["configurable"]["graph_id"],
104
105
  config,
105
- store=None if not conn else Store(conn),
106
+ store=None if not conn else Store(conn, pipe=pipe),
106
107
  checkpointer=None if temporary else Checkpointer(conn),
107
108
  )
108
109
  input = kwargs.pop("input")
@@ -174,6 +175,8 @@ async def astream_state(
174
175
  if chunk["type"] == "checkpoint":
175
176
  checkpoint = _preprocess_debug_checkpoint(chunk["payload"])
176
177
  on_checkpoint(checkpoint)
178
+ elif chunk["type"] == "task_result":
179
+ on_task_result(chunk["payload"])
177
180
  if mode == "messages":
178
181
  if "messages-tuple" in stream_mode:
179
182
  yield "messages", chunk
@@ -185,9 +188,11 @@ async def astream_state(
185
188
  messages[msg.id] = msg
186
189
  yield "messages/metadata", {msg.id: {"metadata": meta}}
187
190
  yield (
188
- "messages/partial"
189
- if isinstance(msg, BaseMessageChunk)
190
- else "messages/complete",
191
+ (
192
+ "messages/partial"
193
+ if isinstance(msg, BaseMessageChunk)
194
+ else "messages/complete"
195
+ ),
191
196
  [message_chunk_to_message(messages[msg.id])],
192
197
  )
193
198
  elif mode in stream_mode:
@@ -221,6 +226,8 @@ async def astream_state(
221
226
  if chunk["type"] == "checkpoint":
222
227
  checkpoint = _preprocess_debug_checkpoint(chunk["payload"])
223
228
  on_checkpoint(checkpoint)
229
+ elif chunk["type"] == "task_result":
230
+ on_task_result(chunk["payload"])
224
231
  if mode == "messages":
225
232
  if "messages-tuple" in stream_mode:
226
233
  yield "messages", chunk
@@ -232,9 +239,11 @@ async def astream_state(
232
239
  messages[msg.id] = msg
233
240
  yield "messages/metadata", {msg.id: {"metadata": meta}}
234
241
  yield (
235
- "messages/partial"
236
- if isinstance(msg, BaseMessageChunk)
237
- else "messages/complete",
242
+ (
243
+ "messages/partial"
244
+ if isinstance(msg, BaseMessageChunk)
245
+ else "messages/complete"
246
+ ),
238
247
  [message_chunk_to_message(messages[msg.id])],
239
248
  )
240
249
  elif mode in stream_mode:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: langgraph-api
3
- Version: 0.0.2
3
+ Version: 0.0.4
4
4
  Summary:
5
5
  License: Elastic-2.0
6
6
  Author: Nuno Campos
@@ -14,8 +14,8 @@ Requires-Dist: cryptography (>=43.0.3,<44.0.0)
14
14
  Requires-Dist: httpx (>=0.27.0)
15
15
  Requires-Dist: jsonschema-rs (>=0.25.0,<0.26.0)
16
16
  Requires-Dist: langchain-core (>=0.2.38,<0.4.0)
17
- Requires-Dist: langgraph (>=0.2.52)
18
- Requires-Dist: langgraph-checkpoint (>=2.0.5,<3.0)
17
+ Requires-Dist: langgraph (>=0.2.52,<0.3.0)
18
+ Requires-Dist: langgraph-checkpoint (>=2.0.6,<3.0)
19
19
  Requires-Dist: langsmith (>=0.1.63,<0.2.0)
20
20
  Requires-Dist: orjson (>=3.10.1)
21
21
  Requires-Dist: pyjwt (>=2.9.0,<3.0.0)
@@ -14,11 +14,11 @@ langgraph_api/auth/langsmith/backend.py,sha256=uHeb5-h13NIjrX_LDAvfWYr3zpbJvlvbd
14
14
  langgraph_api/auth/langsmith/client.py,sha256=eKchvAom7hdkUXauD8vHNceBDDUijrFgdTV8bKd7x4Q,3998
15
15
  langgraph_api/auth/middleware.py,sha256=_gJTOskEaln4RUT2rVYdQGPJVAyAiq-orsL_eQ3CynE,1369
16
16
  langgraph_api/auth/noop.py,sha256=vDJmzG2vArJxVzdHePvrJWahEa0dvGnhc2LEMMeiFz0,391
17
- langgraph_api/cli.py,sha256=LTETSHr2j5pBwKJkidKxzXfCohEScMnXjjIpfANBJpY,6458
17
+ langgraph_api/cli.py,sha256=wNGL4wnuwJn-WffuNY1akjeNSYMiZasV_uftF5XPP3E,7667
18
18
  langgraph_api/config.py,sha256=cG6eO4P_SZ2pKedb2b4n4vnBHRQr0aiECvGvOA8ZlJA,2259
19
19
  langgraph_api/cron_scheduler.py,sha256=DAzY2DsADzEpPVbG2BOSLTIufI93yeRswd71Aby_lV0,2257
20
20
  langgraph_api/errors.py,sha256=Bu_i5drgNTyJcLiyrwVE_6-XrSU50BHf9TDpttki9wQ,1690
21
- langgraph_api/graph.py,sha256=A9-l7cUvesyC3ymY_XFXxT02VG2kkoqbSYUDvjga5S0,9771
21
+ langgraph_api/graph.py,sha256=YM8XbgN8Q6FdqTO2LO_g_nV-cpbiJyOBMBtObnWXp18,10507
22
22
  langgraph_api/http.py,sha256=XrbyxpjtfSvnaWWh5ZLGpgZmY83WoDCrP_1GPguNiXI,4712
23
23
  langgraph_api/http_logger.py,sha256=Sxo_q-65tElauRvkzVLt9lJojgNdgtcHGBYD0IRyX7M,3146
24
24
  langgraph_api/js/.gitignore,sha256=qAah3Fq0HWAlfRj5ktZyC6QRQIsAolGLRGcRukA1XJI,33
@@ -29,7 +29,7 @@ langgraph_api/js/package.json,sha256=glTUiod6sTKOXoGX7C7XcUXLeB_6Xa0hIw7MmJq2EY8
29
29
  langgraph_api/js/remote.py,sha256=n34B4X1sZW70vEF1hMQ4vn28BroZJt7_7BGaJ0TNeBU,23306
30
30
  langgraph_api/js/server_sent_events.py,sha256=DLgXOHauemt7706vnfDUCG1GI3TidKycSizccdz9KgA,3702
31
31
  langgraph_api/js/src/graph.mts,sha256=Gh2-TcCepeRhqMk8Bm1tWm5sv53y2FxgSclUTamnLNw,2636
32
- langgraph_api/js/src/hooks.mjs,sha256=IfPwixktR6W5FA8yNmmBVDVAaqBA549WZKuj1uQedVY,511
32
+ langgraph_api/js/src/hooks.mjs,sha256=rdAEcBmqtIwzTPzqxJ2e7TJq6FWnhh5Edpy6os8hmlk,597
33
33
  langgraph_api/js/src/parser/parser.mts,sha256=wXre7teh8N8RYmGcyhZp4vMJd0kNnnFgoSGEyMVPzpQ,13007
34
34
  langgraph_api/js/src/parser/parser.worker.mjs,sha256=2K6D0GlUmkk7LE39I8mryB8VZVE3-N9Cblji-ArPhFo,386
35
35
  langgraph_api/js/src/schema/types.mts,sha256=SUj0vpvWVbID1mnGr2SUtumDBQkJ9zjfvJdoFP7DIzk,66536
@@ -56,31 +56,31 @@ langgraph_api/metadata.py,sha256=mih2G7ScQxiqyUlbksVXkqR3Oo-pM1b6lXtzOsgR1sw,304
56
56
  langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
57
57
  langgraph_api/models/run.py,sha256=nNhlG-l2P-DThFWYmCaar0UuPctadB9sH3CCWJfLqVc,8178
58
58
  langgraph_api/patch.py,sha256=94ddcTSZJe22JcpjxiSNjFZdYVnmeoWjk4IX4iBSoyk,1249
59
- langgraph_api/queue.py,sha256=m_LcZ_KZO1PVROQxlHXZDud7CYBroZv9M3oLJ4n_qYA,9456
59
+ langgraph_api/queue.py,sha256=7tsbDgv4GlUYieJsrvIJDMQUEok4Eu-n_PIQ93rwKjk,9810
60
60
  langgraph_api/route.py,sha256=Dzje_dSigJramglqkt4ERT9-cb2xFli7dx25ZV6B6mI,4147
61
- langgraph_api/schema.py,sha256=kEhwg9MUvlHKUDCvomRUts3ja53IlXHgz70H1_AXMvk,5138
61
+ langgraph_api/schema.py,sha256=yHHQS_4KQjiENF21JexzdJ_RF_HSxMEQX2aP6yPVTo4,5251
62
62
  langgraph_api/serde.py,sha256=VoJ7Z1IuqrQGXFzEP1qijAITtWCrmjtVqlCRuScjXJI,3533
63
63
  langgraph_api/server.py,sha256=cCD2lVv0SZdgf0o797UfxUyjFwmoazJVCjl_j-8Ae7A,1523
64
64
  langgraph_api/sse.py,sha256=2wNodCOP2eg7a9mpSu0S3FQ0CHk2BBV_vv0UtIgJIcc,4034
65
65
  langgraph_api/state.py,sha256=8jx4IoTCOjTJuwzuXJKKFwo1VseHjNnw_CCq4x1SW14,2284
66
- langgraph_api/stream.py,sha256=8VaGGUbh4BC_NJrAHeuPZgeLmRY0E8wNC0UkobLViqQ,11022
66
+ langgraph_api/stream.py,sha256=yHnhODy5FRqxmiN3t0m9pBYQADws0YSHuBLet8pXBDs,11521
67
67
  langgraph_api/utils.py,sha256=FI50tOFMVidV4-1TefouL1N-OJX41qD_fSEoWigTtf0,1575
68
68
  langgraph_api/validation.py,sha256=McizHlz-Ez8Jhdbc79mbPSde7GIuf2Jlbjx2yv_l6dA,4475
69
69
  langgraph_license/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
70
70
  langgraph_license/middleware.py,sha256=_ODIYzQkymr6W9_Fp9wtf1kAQspnpsmr53xuzyF2GA0,612
71
- langgraph_license/validation.py,sha256=th7OP0JZ8R8uf7vKRpfl0K3LOI0dyCKBPBxwdmgcDOk,202
71
+ langgraph_license/validation.py,sha256=Uu_G8UGO_WTlLsBEY0gTVWjRR4czYGfw5YAD3HLZoj0,203
72
72
  langgraph_storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
73
- langgraph_storage/checkpoint.py,sha256=9mJTKXo4LIatr0nD7AX2mU8p9q18XaRnSSi25kNQcGc,2691
73
+ langgraph_storage/checkpoint.py,sha256=55J7W0s5Z7svqqan9wphubSgCphWUItq98j-iWVwbH8,2774
74
74
  langgraph_storage/database.py,sha256=0bB4y2rWBYiT4Jsmvbbsams4Q8G0CgEwPVwclz6iQrM,4924
75
- langgraph_storage/ops.py,sha256=0btWlZnl7hl3KC7C9IIKzlsmUPilF-MQe4WwLVLymfs,51884
75
+ langgraph_storage/ops.py,sha256=mehNrHleaPAjZ2sFISNuD7iwi_ewyX_IMmSPwreHhR0,52186
76
76
  langgraph_storage/queue.py,sha256=6cTZ0ubHu3S1T43yxHMVOwsQsDaJupByiU0sTUFFls8,3261
77
77
  langgraph_storage/retry.py,sha256=uvYFuXJ-T6S1QY1ZwkZHyZQbsvS-Ab68LSbzbUUSI2E,696
78
- langgraph_storage/store.py,sha256=Z033CojJb6jMZbMu3VPtwR0bFdfyfdUS8dQSaXUASYU,731
78
+ langgraph_storage/store.py,sha256=k5DdW_dA1VaYnXo7Cm2IoXBqxm1FtPFiqNXNsmNiDUo,1452
79
79
  langgraph_storage/ttl_dict.py,sha256=FlpEY8EANeXWKo_G5nmIotPquABZGyIJyk6HD9u6vqY,1533
80
80
  logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
81
81
  openapi.json,sha256=JaieC_zSdQ9bzqJYdHUfCOnNt0ALBWcdj7uVjRLh9M8,122950
82
- langgraph_api-0.0.2.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
83
- langgraph_api-0.0.2.dist-info/METADATA,sha256=JlZkWP9lx3w6asP1m_U2NsCBkftoHp44PlEYWFmLezM,3986
84
- langgraph_api-0.0.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
85
- langgraph_api-0.0.2.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
86
- langgraph_api-0.0.2.dist-info/RECORD,,
82
+ langgraph_api-0.0.4.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
83
+ langgraph_api-0.0.4.dist-info/METADATA,sha256=yNGt2rJsJVk7b-kJJEjukG8PHtBO7WCdQMK-38KoWyc,3993
84
+ langgraph_api-0.0.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
85
+ langgraph_api-0.0.4.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
86
+ langgraph_api-0.0.4.dist-info/RECORD,,
@@ -7,5 +7,5 @@ async def get_license_status() -> bool:
7
7
 
8
8
 
9
9
  def plus_features_enabled() -> bool:
10
- """Always return true"""
10
+ """Always return false"""
11
11
  return False
@@ -10,6 +10,8 @@ from langgraph.checkpoint.base import (
10
10
  )
11
11
  from langgraph.checkpoint.memory import MemorySaver, PersistentDict
12
12
 
13
+ from langgraph_api.serde import Serializer
14
+
13
15
  _EXCLUDED_KEYS = {"checkpoint_ns", "checkpoint_id", "run_id", "thread_id"}
14
16
 
15
17
 
@@ -37,7 +39,7 @@ class InMemorySaver(MemorySaver):
37
39
  return d
38
40
 
39
41
  super().__init__(
40
- serde=serde,
42
+ serde=serde if serde is not None else Serializer(),
41
43
  factory=factory,
42
44
  )
43
45
 
langgraph_storage/ops.py CHANGED
@@ -583,6 +583,15 @@ class Threads:
583
583
  "updated_at": datetime.now(UTC),
584
584
  "values": checkpoint["values"] if checkpoint else None,
585
585
  "status": status,
586
+ "interrupts": (
587
+ {
588
+ t["id"]: t["interrupts"]
589
+ for t in checkpoint["tasks"]
590
+ if t["interrupts"]
591
+ }
592
+ if checkpoint
593
+ else {}
594
+ ),
586
595
  }
587
596
  )
588
597
 
@@ -1,28 +1,44 @@
1
1
  import os
2
+ from typing import Any
2
3
 
3
4
  from langgraph.checkpoint.memory import PersistentDict
4
5
  from langgraph.store.memory import InMemoryStore
5
6
 
6
7
 
7
8
  class DiskBackedInMemStore(InMemoryStore):
8
- def __init__(self, *args, filename=None, **kwargs):
9
+ def __init__(self, *args: Any, filename: str | None = None, **kwargs: Any) -> None:
9
10
  super().__init__(*args, **kwargs)
10
11
  self.filename = filename
11
12
  self._data = PersistentDict(dict, filename=self.filename)
13
+ self._load_data()
14
+
15
+ def _load_data(self) -> None:
16
+ if not self.filename:
17
+ return
12
18
  try:
13
19
  self._data.load()
14
20
  except FileNotFoundError:
21
+ # It's okay if the file doesn't exist yet
15
22
  pass
16
-
17
- def close(self):
23
+ except (EOFError, ValueError) as e:
24
+ raise RuntimeError(
25
+ f"Failed to load store from {self.filename}. "
26
+ "This may be due to changes in the stored data structure. "
27
+ "Consider clearing the local store by running: rm -rf .langgraph_api"
28
+ ) from e
29
+ except Exception as e:
30
+ raise RuntimeError(
31
+ f"Unexpected error loading store from {self.filename}: {str(e)}"
32
+ ) from e
33
+
34
+ def close(self) -> None:
18
35
  self._data.close()
19
36
 
20
37
 
21
38
  _STORE_FILE = os.path.join(".langgraph_api", "store.pckl")
22
- if not os.path.exists(".langgraph_api"):
23
- os.mkdir(".langgraph_api")
39
+ os.makedirs(".langgraph_api", exist_ok=True)
24
40
  STORE = DiskBackedInMemStore(filename=_STORE_FILE)
25
41
 
26
42
 
27
- def Store(*args, **kwargs):
43
+ def Store(*args: Any, **kwargs: Any) -> DiskBackedInMemStore:
28
44
  return STORE