langgraph-api 0.4.21__tar.gz → 0.4.22__tar.gz

This diff shows the content of publicly released versions of the package as published to their public registry. It is provided for informational purposes only.

This release of langgraph-api was flagged as potentially problematic by the registry.

Files changed (121)
  1. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/PKG-INFO +2 -2
  2. langgraph_api-0.4.22/langgraph_api/__init__.py +1 -0
  3. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/runs.py +181 -117
  4. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/cli.py +1 -1
  5. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/command.py +1 -1
  6. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/remote.py +1 -1
  7. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/models/run.py +1 -1
  8. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/patch.py +3 -1
  9. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/serde.py +1 -1
  10. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/stream.py +2 -2
  11. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/pyproject.toml +1 -1
  12. langgraph_api-0.4.21/langgraph_api/__init__.py +0 -1
  13. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/.gitignore +0 -0
  14. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/LICENSE +0 -0
  15. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/Makefile +0 -0
  16. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/README.md +0 -0
  17. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/.gitignore +0 -0
  18. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/Makefile +0 -0
  19. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/README.md +0 -0
  20. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/burst.js +0 -0
  21. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/clean.js +0 -0
  22. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/graphs.js +0 -0
  23. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/package.json +0 -0
  24. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/ramp.js +0 -0
  25. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/update-revision.js +0 -0
  26. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/benchmark/weather.js +0 -0
  27. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/constraints.txt +0 -0
  28. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/forbidden.txt +0 -0
  29. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/healthcheck.py +0 -0
  30. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/__init__.py +0 -0
  31. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/a2a.py +0 -0
  32. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/assistants.py +0 -0
  33. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/mcp.py +0 -0
  34. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/meta.py +0 -0
  35. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/openapi.py +0 -0
  36. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/store.py +0 -0
  37. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/threads.py +0 -0
  38. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/api/ui.py +0 -0
  39. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/asgi_transport.py +0 -0
  40. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/asyncio.py +0 -0
  41. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/__init__.py +0 -0
  42. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/custom.py +0 -0
  43. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/langsmith/__init__.py +0 -0
  44. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/langsmith/backend.py +0 -0
  45. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/langsmith/client.py +0 -0
  46. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/middleware.py +0 -0
  47. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/noop.py +0 -0
  48. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/auth/studio_user.py +0 -0
  49. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/config.py +0 -0
  50. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/cron_scheduler.py +0 -0
  51. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/errors.py +0 -0
  52. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/executor_entrypoint.py +0 -0
  53. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/feature_flags.py +0 -0
  54. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/graph.py +0 -0
  55. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/http.py +0 -0
  56. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/http_metrics.py +0 -0
  57. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/.gitignore +0 -0
  58. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/.prettierrc +0 -0
  59. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/__init__.py +0 -0
  60. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/base.py +0 -0
  61. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/build.mts +0 -0
  62. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/client.http.mts +0 -0
  63. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/client.mts +0 -0
  64. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/errors.py +0 -0
  65. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/global.d.ts +0 -0
  66. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/package.json +0 -0
  67. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/schema.py +0 -0
  68. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/graph.mts +0 -0
  69. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/load.hooks.mjs +0 -0
  70. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/preload.mjs +0 -0
  71. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/utils/files.mts +0 -0
  72. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/utils/importMap.mts +0 -0
  73. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
  74. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/src/utils/serde.mts +0 -0
  75. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/sse.py +0 -0
  76. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/traceblock.mts +0 -0
  77. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/tsconfig.json +0 -0
  78. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/ui.py +0 -0
  79. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/js/yarn.lock +0 -0
  80. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/logging.py +0 -0
  81. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/metadata.py +0 -0
  82. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/middleware/__init__.py +0 -0
  83. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/middleware/http_logger.py +0 -0
  84. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/middleware/private_network.py +0 -0
  85. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/middleware/request_id.py +0 -0
  86. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/models/__init__.py +0 -0
  87. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/queue_entrypoint.py +0 -0
  88. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/route.py +0 -0
  89. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/schema.py +0 -0
  90. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/server.py +0 -0
  91. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/sse.py +0 -0
  92. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/state.py +0 -0
  93. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/store.py +0 -0
  94. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/thread_ttl.py +0 -0
  95. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/traceblock.py +0 -0
  96. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/tunneling/cloudflare.py +0 -0
  97. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/__init__.py +0 -0
  98. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/cache.py +0 -0
  99. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/config.py +0 -0
  100. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/future.py +0 -0
  101. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/headers.py +0 -0
  102. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/retriable_client.py +0 -0
  103. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/utils/uuids.py +0 -0
  104. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/validation.py +0 -0
  105. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/webhook.py +0 -0
  106. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_api/worker.py +0 -0
  107. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_license/__init__.py +0 -0
  108. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_license/validation.py +0 -0
  109. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/__init__.py +0 -0
  110. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/checkpoint.py +0 -0
  111. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/database.py +0 -0
  112. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/lifespan.py +0 -0
  113. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/metrics.py +0 -0
  114. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/ops.py +0 -0
  115. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/queue.py +0 -0
  116. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/retry.py +0 -0
  117. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/langgraph_runtime/store.py +0 -0
  118. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/logging.json +0 -0
  119. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/openapi.json +0 -0
  120. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/scripts/create_license.py +0 -0
  121. {langgraph_api-0.4.21 → langgraph_api-0.4.22}/uv.lock +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.4.21
+Version: 0.4.22
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -11,7 +11,7 @@ Requires-Dist: httpx>=0.25.0
 Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
 Requires-Dist: langgraph-checkpoint>=2.0.23
-Requires-Dist: langgraph-runtime-inmem<0.13.0,>=0.12.1
+Requires-Dist: langgraph-runtime-inmem<0.14.0,>=0.13.0
 Requires-Dist: langgraph-sdk>=0.2.0
 Requires-Dist: langgraph>=0.4.0
 Requires-Dist: langsmith>=0.3.45

langgraph_api-0.4.22/langgraph_api/__init__.py (new)
@@ -0,0 +1 @@
+__version__ = "0.4.22"
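
The only content of the new `langgraph_api/__init__.py` is a package-level version string. A trivial check, assuming the wheel is installed, would be:

```python
# Hypothetical usage of the version attribute introduced in this release.
import langgraph_api

print(langgraph_api.__version__)  # "0.4.22"
```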

langgraph_api/api/runs.py
@@ -1,7 +1,7 @@
 import asyncio
-from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator, Awaitable, Callable
 from typing import Literal, cast
-from uuid import uuid4
+from uuid import UUID, uuid4
 
 import orjson
 import structlog
@@ -32,12 +32,116 @@ from langgraph_api.validation import (
 )
 from langgraph_license.validation import plus_features_enabled
 from langgraph_runtime.database import connect
-from langgraph_runtime.ops import Crons, Runs, Threads
+from langgraph_runtime.ops import Crons, Runs, StreamHandler, Threads
 from langgraph_runtime.retry import retry_db
 
 logger = structlog.stdlib.get_logger(__name__)
 
 
+_RunResultFallback = Callable[[], Awaitable[bytes]]
+
+
+def _thread_values_fallback(thread_id: UUID) -> _RunResultFallback:
+    async def fetch_thread_values() -> bytes:
+        async with connect() as conn:
+            thread_iter = await Threads.get(conn, thread_id)
+            try:
+                thread = await anext(thread_iter)
+                if thread["status"] == "error":
+                    return orjson.dumps({"__error__": orjson.Fragment(thread["error"])})
+                if thread["status"] == "interrupted":
+                    # Get an interrupt for the thread. There is the case where there are multiple interrupts for the same run and we may not show the same
+                    # interrupt, but we'll always show one. Long term we should show all of them.
+                    try:
+                        if isinstance(thread["interrupts"], dict):
+                            # Handle in memory format
+                            interrupt_map = thread["interrupts"]
+                        else:
+                            interrupt_map = orjson.loads(thread["interrupts"].buf)
+                        interrupt = [next(iter(interrupt_map.values()))[0]]
+                        return orjson.dumps({"__interrupt__": interrupt})
+                    except Exception:
+                        # No interrupt, but status is interrupted from a before/after block. Default back to values.
+                        pass
+                return cast(bytes, thread["values"])
+            except StopAsyncIteration:
+                await logger.awarning(
+                    f"No checkpoint found for thread {thread_id}",
+                    thread_id=thread_id,
+                )
+                return b"{}"
+
+    return fetch_thread_values
+
+
+def _run_result_body(
+    *,
+    run_id: UUID,
+    thread_id: UUID,
+    sub: StreamHandler,
+    cancel_on_disconnect: bool = False,
+    ignore_404: bool = False,
+    fallback: _RunResultFallback | None = None,
+    cancel_message: str | None = None,
+) -> Callable[[], AsyncIterator[bytes]]:
+    last_chunk = ValueEvent()
+
+    async def consume() -> None:
+        vchunk: bytes | None = None
+        try:
+            async for mode, chunk, _ in Runs.Stream.join(
+                run_id,
+                stream_channel=sub,
+                cancel_on_disconnect=cancel_on_disconnect,
+                thread_id=thread_id,
+                ignore_404=ignore_404,
+            ):
+                if (
+                    mode == b"values"
+                    or mode == b"updates"
+                    and b"__interrupt__" in chunk
+                ):
+                    vchunk = chunk
+                elif mode == b"error":
+                    vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+            if vchunk is not None:
+                last_chunk.set(vchunk)
+            elif fallback is not None:
+                last_chunk.set(await fallback())
+            else:
+                last_chunk.set(b"{}")
+        finally:
+            # Make sure to always clean up the pubsub
+            await sub.__aexit__(None, None, None)
+
+    # keep the connection open by sending whitespace every 5 seconds
+    # leading whitespace will be ignored by json parsers
+    async def body() -> AsyncIterator[bytes]:
+        try:
+            stream = asyncio.create_task(consume())
+            while True:
+                try:
+                    if stream.done():
+                        # raise stream exception if any
+                        stream.result()
+                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                    break
+                except TimeoutError:
+                    yield b"\n"
+        except asyncio.CancelledError:
+            if cancel_message is not None:
+                stream.cancel(cancel_message)
+            else:
+                stream.cancel()
+            await stream
+            raise
+        finally:
+            # Make sure to always clean up the pubsub
+            await sub.__aexit__(None, None, None)
+
+    return body
+
+
 @retry_db
 async def create_run(request: ApiRequest):
     """Create a run."""
@@ -218,56 +322,13 @@ async def wait_run(request: ApiRequest):
         await sub.__aexit__(None, None, None)
         raise
 
-    last_chunk = ValueEvent()
-
-    async def consume():
-        vchunk: bytes | None = None
-        async for mode, chunk, _ in Runs.Stream.join(
-            run["run_id"],
-            thread_id=run["thread_id"],
-            stream_channel=sub,
-            cancel_on_disconnect=on_disconnect == "cancel",
-        ):
-            if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
-                vchunk = chunk
-            elif mode == b"error":
-                vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
-        if vchunk is not None:
-            last_chunk.set(vchunk)
-        else:
-            async with connect() as conn:
-                thread_iter = await Threads.get(conn, thread_id)
-                try:
-                    thread = await anext(thread_iter)
-                    last_chunk.set(thread["values"])
-                except StopAsyncIteration:
-                    await logger.awarning(
-                        f"No checkpoint found for thread {thread_id}",
-                        thread_id=thread_id,
-                    )
-                    last_chunk.set(b"{}")
-
-    # keep the connection open by sending whitespace every 5 seconds
-    # leading whitespace will be ignored by json parsers
-    async def body() -> AsyncIterator[bytes]:
-        try:
-            stream = asyncio.create_task(consume())
-            while True:
-                try:
-                    if stream.done():
-                        # raise stream exception if any
-                        stream.result()
-                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                    break
-                except TimeoutError:
-                    yield b"\n"
-        except asyncio.CancelledError:
-            stream.cancel()
-            await stream
-            raise
-        finally:
-            # Make sure to always clean up the pubsub
-            await sub.__aexit__(None, None, None)
+    body = _run_result_body(
+        run_id=run["run_id"],
+        thread_id=run["thread_id"],
+        sub=sub,
+        cancel_on_disconnect=on_disconnect == "cancel",
+        fallback=_thread_values_fallback(thread_id),
+    )
 
     return StreamingResponse(
         body(),
@@ -305,53 +366,23 @@ async def wait_run_stateless(request: ApiRequest):
         await sub.__aexit__(None, None, None)
         raise
 
-    last_chunk = ValueEvent()
-
-    async def consume():
-        vchunk: bytes | None = None
-        async for mode, chunk, _ in Runs.Stream.join(
-            run["run_id"],
+    async def stateless_fallback() -> bytes:
+        await logger.awarning(
+            "No checkpoint emitted for stateless run",
+            run_id=run["run_id"],
             thread_id=run["thread_id"],
-            stream_channel=sub,
-            ignore_404=True,
-            cancel_on_disconnect=on_disconnect == "cancel",
-        ):
-            if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
-                vchunk = chunk
-            elif mode == b"error":
-                vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
-        if vchunk is not None:
-            last_chunk.set(vchunk)
-        else:
-            # we can't fetch the thread (it was deleted), so just return empty values
-            await logger.awarning(
-                "No checkpoint emitted for stateless run",
-                run_id=run["run_id"],
-                thread_id=run["thread_id"],
-            )
-            last_chunk.set(b"{}")
-
-    # keep the connection open by sending whitespace every 5 seconds
-    # leading whitespace will be ignored by json parsers
-    async def body() -> AsyncIterator[bytes]:
-        try:
-            stream = asyncio.create_task(consume())
-            while True:
-                try:
-                    if stream.done():
-                        # raise stream exception if any
-                        stream.result()
-                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                    break
-                except TimeoutError:
-                    yield b"\n"
-        except asyncio.CancelledError:
-            stream.cancel("Run stream cancelled")
-            await stream
-            raise
-        finally:
-            # Make sure to always clean up the pubsub
-            await sub.__aexit__(None, None, None)
+        )
+        return b"{}"
+
+    body = _run_result_body(
+        run_id=run["run_id"],
+        thread_id=run["thread_id"],
+        sub=sub,
+        cancel_on_disconnect=on_disconnect == "cancel",
+        ignore_404=True,
+        fallback=stateless_fallback,
+        cancel_message="Run stream cancelled",
+    )
 
     return StreamingResponse(
         body(),
@@ -422,11 +453,23 @@ async def join_run(request: ApiRequest):
     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
     validate_uuid(run_id, "Invalid run ID: must be a UUID")
 
-    return ApiResponse(
-        await Runs.join(
-            run_id,
-            thread_id=thread_id,
-        )
+    # A touch redundant, but to meet the existing signature of join, we need to throw any 404s before we enter the streaming body
+    await Runs.Stream.check_run_stream_auth(run_id, thread_id)
+    sub = await Runs.Stream.subscribe(run_id, thread_id)
+    body = _run_result_body(
+        run_id=run_id,
+        thread_id=thread_id,
+        sub=sub,
+        fallback=_thread_values_fallback(thread_id),
+    )
+
+    return StreamingResponse(
+        body(),
+        media_type="application/json",
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
     )
 
 
@@ -456,6 +499,10 @@ async def join_run_stream(request: ApiRequest):
 
     return EventSourceResponse(
         body(),
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/stream",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
     )
 
 
@@ -476,19 +523,36 @@ async def cancel_run(
         action_str if action_str in {"interrupt", "rollback"} else "interrupt",
     )
 
-    async with connect() as conn:
-        await Runs.cancel(
-            conn,
-            [run_id],
-            action=action,
-            thread_id=thread_id,
-        )
-        if wait:
-            await Runs.join(
-                run_id,
-                thread_id=thread_id,
-            )
-    return Response(status_code=204 if wait else 202)
+    sub = await Runs.Stream.subscribe(run_id, thread_id) if wait else None
+    try:
+        async with connect() as conn:
+            await Runs.cancel(
+                conn,
+                [run_id],
+                action=action,
+                thread_id=thread_id,
+            )
+    except Exception:
+        if sub is not None:
+            await sub.__aexit__(None, None, None)
+        raise
+    if not wait:
+        return Response(status_code=202)
+
+    body = _run_result_body(
+        run_id=run_id,
+        thread_id=thread_id,
+        sub=sub,
+    )
+
+    return StreamingResponse(
+        body(),
+        media_type="application/json",
+        headers={
+            "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+            "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+        },
+    )
 
 
 @retry_db
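
The `runs.py` changes above consolidate the wait/join plumbing into `_run_result_body`: the endpoint streams a newline roughly every 5 seconds as a keep-alive and then emits a single JSON document that is either the thread's final values, `{"__interrupt__": [...]}`, or `{"__error__": ...}`; `join_run` and `cancel_run` with `wait` now return this same streaming body plus `Location`/`Content-Location` headers. A minimal client-side sketch of consuming such a response follows; the base URL, the use of `httpx`, and the GET method are assumptions for illustration, not part of this diff.

```python
# Hypothetical client sketch (not part of langgraph-api): consume the JSON
# body produced by _run_result_body. The server sends "\n" keep-alives before
# the final payload; leading whitespace is valid JSON, so one parse suffices.
import httpx
import orjson


def join_run_result(base_url: str, thread_id: str, run_id: str) -> dict:
    # Path mirrors the Location header added in this release.
    url = f"{base_url}/threads/{thread_id}/runs/{run_id}/join"
    with httpx.Client(timeout=None) as client:
        resp = client.get(url)
        resp.raise_for_status()
    payload = orjson.loads(resp.content)
    if "__error__" in payload:
        raise RuntimeError(f"run failed: {payload['__error__']}")
    # On success this holds the thread's final values; an interrupted run
    # yields {"__interrupt__": [...]} instead.
    return payload
```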

langgraph_api/cli.py
@@ -256,7 +256,7 @@ def run_server(
         if k in to_patch:
             logger.debug(f"Skipping loaded env var {k}={v}")
             continue
-        to_patch[k] = v
+        to_patch[k] = v  # type: ignore[invalid-assignment]
     with patch_environment(
         **to_patch,
     ):

langgraph_api/command.py
@@ -21,7 +21,7 @@ def map_cmd(cmd: RunCommand) -> Command:
         update=update,
         goto=(
             [
-                it if isinstance(it, str) else Send(it["node"], it["input"])
+                it if isinstance(it, str) else Send(it["node"], it["input"])  # type: ignore[non-subscriptable]
                 for it in goto
             ]
             if goto
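
For context on the annotated line: `map_cmd` converts a wire-format command into a langgraph `Command`, where each `goto` entry is either a node name or a payload that becomes a `Send(node, input)`. A rough standalone sketch of just that mapping, with the dict field names taken from the line above, might look like:

```python
# Illustrative sketch of the goto conversion touched above; it is not the
# full map_cmd implementation from langgraph_api.command.
from langgraph.types import Command, Send


def map_goto(goto: list) -> Command:
    return Command(
        goto=[
            it if isinstance(it, str) else Send(it["node"], it["input"])
            for it in goto
        ]
    )


# Mix plain node names with Send payloads.
cmd = map_goto(["summarize", {"node": "review", "input": {"draft": "..."}}])
```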

langgraph_api/js/remote.py
@@ -239,7 +239,7 @@ class RemotePregel(BaseRemotePregel):
                 tuple(task["path"]) if task.get("path") else tuple(),
                 # TODO: figure out how to properly deserialise errors
                 task.get("error"),
-                tuple(interrupts),
+                tuple(interrupts),  # type: ignore[arg-type]
                 state,
                 task.get("result"),
             )

langgraph_api/models/run.py
@@ -147,7 +147,7 @@ def ensure_ids(
             ) from None
         else:
             results.append(None)
-    return tuple(results)
+    return tuple(results)  # type: ignore[invalid-return-type]
 
 
 def assign_defaults(

langgraph_api/patch.py
@@ -3,7 +3,7 @@ from typing import Any
 from starlette.responses import Response, StreamingResponse
 from starlette.types import Send
 
-from langgraph_api.serde import Fragment
+from langgraph_api.serde import Fragment, json_dumpb
 
 """
 Patch Response.render and StreamingResponse.stream_response
@@ -32,6 +32,8 @@ async def StreamingResponse_stream_response(self, send: Send) -> None:
             continue
         if isinstance(chunk, Fragment):
             chunk = chunk.buf
+        if isinstance(chunk, dict):
+            chunk = json_dumpb(chunk)
         if not isinstance(chunk, (bytes, bytearray, memoryview)):  # noqa: UP038
             chunk = chunk.encode(self.charset)
         await send({"type": "http.response.body", "body": chunk, "more_body": True})
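
The second `patch.py` hunk adds one more normalization step: dict chunks are serialized with `json_dumpb` before the byte-type checks. A self-contained restatement of that chunk-normalization order, using `orjson.dumps` as a stand-in for `json_dumpb` and a local stand-in for `Fragment`, might look like:

```python
# Illustrative only: the order of conversions applied to each streamed chunk
# after this patch. orjson.dumps stands in for langgraph_api.serde.json_dumpb,
# and a bytes-wrapping Fragment is simulated with a tiny local class.
from dataclasses import dataclass

import orjson


@dataclass
class Fragment:  # stand-in for langgraph_api.serde.Fragment
    buf: bytes


def normalize_chunk(chunk, charset: str = "utf-8") -> bytes:
    if isinstance(chunk, Fragment):
        chunk = chunk.buf
    if isinstance(chunk, dict):          # new in 0.4.22: dicts are serialized
        chunk = orjson.dumps(chunk)      # (the real code uses json_dumpb)
    if not isinstance(chunk, (bytes, bytearray, memoryview)):
        chunk = chunk.encode(charset)
    return bytes(chunk)


assert normalize_chunk({"ok": True}) == b'{"ok":true}'
assert normalize_chunk(Fragment(b"{}")) == b"{}"
assert normalize_chunk("text") == b"text"
```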

langgraph_api/serde.py
@@ -149,7 +149,7 @@ def json_loads(content: bytes | Fragment | dict) -> Any:
         content = content.buf
     if isinstance(content, dict):
         return content
-    return orjson.loads(cast(bytes, content))
+    return orjson.loads(content)
 
 
 # Do not use. orjson holds the GIL the entire time it's running anyway.

langgraph_api/stream.py
@@ -304,7 +304,7 @@ async def astream_state(
                 else:
                     msg = convert_to_messages([msg_])[0]
             else:
-                msg = cast(BaseMessage, msg_)
+                msg = msg_
             if msg.id in messages:
                 messages[msg.id] += msg
             else:
@@ -404,7 +404,7 @@ async def astream_state(
                 else:
                     msg = convert_to_messages([msg_])[0]
             else:
-                msg = cast(BaseMessage, msg_)
+                msg = msg_
             if msg.id in messages:
                 messages[msg.id] += msg
             else:

pyproject.toml
@@ -31,7 +31,7 @@ dependencies = [
     "cryptography>=42.0.0,<45.0",
     "langgraph-sdk>=0.2.0",
     "cloudpickle>=3.0.0",
-    "langgraph-runtime-inmem>=0.12.1,<0.13.0",
+    "langgraph-runtime-inmem>=0.13.0,<0.14.0",
     "truststore>=0.1",
 ]
 

langgraph_api-0.4.21/langgraph_api/__init__.py (removed)
@@ -1 +0,0 @@
-__version__ = "0.4.21"