langgraph-api 0.4.21__tar.gz → 0.4.23__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

Files changed (122)
  1. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/PKG-INFO +2 -2
  2. langgraph_api-0.4.23/langgraph_api/__init__.py +1 -0
  3. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/runs.py +181 -117
  4. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/cli.py +1 -1
  5. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/command.py +1 -1
  6. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/remote.py +1 -1
  7. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/models/run.py +1 -1
  8. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/patch.py +3 -1
  9. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/serde.py +1 -1
  10. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/stream.py +2 -2
  11. langgraph_api-0.4.23/langgraph_api/utils/stream_codec.py +315 -0
  12. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/pyproject.toml +1 -1
  13. langgraph_api-0.4.21/langgraph_api/__init__.py +0 -1
  14. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/.gitignore +0 -0
  15. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/LICENSE +0 -0
  16. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/Makefile +0 -0
  17. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/README.md +0 -0
  18. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/.gitignore +0 -0
  19. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/Makefile +0 -0
  20. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/README.md +0 -0
  21. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/burst.js +0 -0
  22. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/clean.js +0 -0
  23. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/graphs.js +0 -0
  24. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/package.json +0 -0
  25. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/ramp.js +0 -0
  26. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/update-revision.js +0 -0
  27. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/benchmark/weather.js +0 -0
  28. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/constraints.txt +0 -0
  29. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/forbidden.txt +0 -0
  30. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/healthcheck.py +0 -0
  31. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/__init__.py +0 -0
  32. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/a2a.py +0 -0
  33. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/assistants.py +0 -0
  34. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/mcp.py +0 -0
  35. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/meta.py +0 -0
  36. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/openapi.py +0 -0
  37. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/store.py +0 -0
  38. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/threads.py +0 -0
  39. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/api/ui.py +0 -0
  40. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/asgi_transport.py +0 -0
  41. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/asyncio.py +0 -0
  42. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/__init__.py +0 -0
  43. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/custom.py +0 -0
  44. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/langsmith/__init__.py +0 -0
  45. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/langsmith/backend.py +0 -0
  46. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/langsmith/client.py +0 -0
  47. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/middleware.py +0 -0
  48. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/noop.py +0 -0
  49. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/auth/studio_user.py +0 -0
  50. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/config.py +0 -0
  51. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/cron_scheduler.py +0 -0
  52. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/errors.py +0 -0
  53. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/executor_entrypoint.py +0 -0
  54. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/feature_flags.py +0 -0
  55. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/graph.py +0 -0
  56. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/http.py +0 -0
  57. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/http_metrics.py +0 -0
  58. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/.gitignore +0 -0
  59. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/.prettierrc +0 -0
  60. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/__init__.py +0 -0
  61. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/base.py +0 -0
  62. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/build.mts +0 -0
  63. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/client.http.mts +0 -0
  64. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/client.mts +0 -0
  65. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/errors.py +0 -0
  66. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/global.d.ts +0 -0
  67. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/package.json +0 -0
  68. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/schema.py +0 -0
  69. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/graph.mts +0 -0
  70. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/load.hooks.mjs +0 -0
  71. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/preload.mjs +0 -0
  72. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/utils/files.mts +0 -0
  73. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/utils/importMap.mts +0 -0
  74. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
  75. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/src/utils/serde.mts +0 -0
  76. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/sse.py +0 -0
  77. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/traceblock.mts +0 -0
  78. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/tsconfig.json +0 -0
  79. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/ui.py +0 -0
  80. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/js/yarn.lock +0 -0
  81. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/logging.py +0 -0
  82. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/metadata.py +0 -0
  83. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/middleware/__init__.py +0 -0
  84. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/middleware/http_logger.py +0 -0
  85. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/middleware/private_network.py +0 -0
  86. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/middleware/request_id.py +0 -0
  87. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/models/__init__.py +0 -0
  88. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/queue_entrypoint.py +0 -0
  89. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/route.py +0 -0
  90. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/schema.py +0 -0
  91. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/server.py +0 -0
  92. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/sse.py +0 -0
  93. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/state.py +0 -0
  94. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/store.py +0 -0
  95. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/thread_ttl.py +0 -0
  96. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/traceblock.py +0 -0
  97. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/tunneling/cloudflare.py +0 -0
  98. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/__init__.py +0 -0
  99. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/cache.py +0 -0
  100. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/config.py +0 -0
  101. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/future.py +0 -0
  102. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/headers.py +0 -0
  103. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/retriable_client.py +0 -0
  104. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/utils/uuids.py +0 -0
  105. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/validation.py +0 -0
  106. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/webhook.py +0 -0
  107. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_api/worker.py +0 -0
  108. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_license/__init__.py +0 -0
  109. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_license/validation.py +0 -0
  110. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/__init__.py +0 -0
  111. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/checkpoint.py +0 -0
  112. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/database.py +0 -0
  113. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/lifespan.py +0 -0
  114. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/metrics.py +0 -0
  115. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/ops.py +0 -0
  116. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/queue.py +0 -0
  117. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/retry.py +0 -0
  118. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/langgraph_runtime/store.py +0 -0
  119. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/logging.json +0 -0
  120. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/openapi.json +0 -0
  121. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/scripts/create_license.py +0 -0
  122. {langgraph_api-0.4.21 → langgraph_api-0.4.23}/uv.lock +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langgraph-api
- Version: 0.4.21
+ Version: 0.4.23
  Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
  License: Elastic-2.0
  License-File: LICENSE
@@ -11,7 +11,7 @@ Requires-Dist: httpx>=0.25.0
  Requires-Dist: jsonschema-rs<0.30,>=0.20.0
  Requires-Dist: langchain-core>=0.3.64
  Requires-Dist: langgraph-checkpoint>=2.0.23
- Requires-Dist: langgraph-runtime-inmem<0.13.0,>=0.12.1
+ Requires-Dist: langgraph-runtime-inmem<0.15.0,>=0.14.0
  Requires-Dist: langgraph-sdk>=0.2.0
  Requires-Dist: langgraph>=0.4.0
  Requires-Dist: langsmith>=0.3.45
langgraph_api-0.4.23/langgraph_api/__init__.py (new file)
@@ -0,0 +1 @@
+ __version__ = "0.4.23"
langgraph_api/api/runs.py
@@ -1,7 +1,7 @@
  import asyncio
- from collections.abc import AsyncIterator
+ from collections.abc import AsyncIterator, Awaitable, Callable
  from typing import Literal, cast
- from uuid import uuid4
+ from uuid import UUID, uuid4

  import orjson
  import structlog
@@ -32,12 +32,116 @@ from langgraph_api.validation import (
  )
  from langgraph_license.validation import plus_features_enabled
  from langgraph_runtime.database import connect
- from langgraph_runtime.ops import Crons, Runs, Threads
+ from langgraph_runtime.ops import Crons, Runs, StreamHandler, Threads
  from langgraph_runtime.retry import retry_db

  logger = structlog.stdlib.get_logger(__name__)


+ _RunResultFallback = Callable[[], Awaitable[bytes]]
+
+
+ def _thread_values_fallback(thread_id: UUID) -> _RunResultFallback:
+     async def fetch_thread_values() -> bytes:
+         async with connect() as conn:
+             thread_iter = await Threads.get(conn, thread_id)
+             try:
+                 thread = await anext(thread_iter)
+                 if thread["status"] == "error":
+                     return orjson.dumps({"__error__": orjson.Fragment(thread["error"])})
+                 if thread["status"] == "interrupted":
+                     # Get an interrupt for the thread. There is the case where there are multiple interrupts for the same run and we may not show the same
+                     # interrupt, but we'll always show one. Long term we should show all of them.
+                     try:
+                         if isinstance(thread["interrupts"], dict):
+                             # Handle in memory format
+                             interrupt_map = thread["interrupts"]
+                         else:
+                             interrupt_map = orjson.loads(thread["interrupts"].buf)
+                         interrupt = [next(iter(interrupt_map.values()))[0]]
+                         return orjson.dumps({"__interrupt__": interrupt})
+                     except Exception:
+                         # No interrupt, but status is interrupted from a before/after block. Default back to values.
+                         pass
+                 return cast(bytes, thread["values"])
+             except StopAsyncIteration:
+                 await logger.awarning(
+                     f"No checkpoint found for thread {thread_id}",
+                     thread_id=thread_id,
+                 )
+                 return b"{}"
+
+     return fetch_thread_values
+
+
+ def _run_result_body(
+     *,
+     run_id: UUID,
+     thread_id: UUID,
+     sub: StreamHandler,
+     cancel_on_disconnect: bool = False,
+     ignore_404: bool = False,
+     fallback: _RunResultFallback | None = None,
+     cancel_message: str | None = None,
+ ) -> Callable[[], AsyncIterator[bytes]]:
+     last_chunk = ValueEvent()
+
+     async def consume() -> None:
+         vchunk: bytes | None = None
+         try:
+             async for mode, chunk, _ in Runs.Stream.join(
+                 run_id,
+                 stream_channel=sub,
+                 cancel_on_disconnect=cancel_on_disconnect,
+                 thread_id=thread_id,
+                 ignore_404=ignore_404,
+             ):
+                 if (
+                     mode == b"values"
+                     or mode == b"updates"
+                     and b"__interrupt__" in chunk
+                 ):
+                     vchunk = chunk
+                 elif mode == b"error":
+                     vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+             if vchunk is not None:
+                 last_chunk.set(vchunk)
+             elif fallback is not None:
+                 last_chunk.set(await fallback())
+             else:
+                 last_chunk.set(b"{}")
+         finally:
+             # Make sure to always clean up the pubsub
+             await sub.__aexit__(None, None, None)
+
+     # keep the connection open by sending whitespace every 5 seconds
+     # leading whitespace will be ignored by json parsers
+     async def body() -> AsyncIterator[bytes]:
+         try:
+             stream = asyncio.create_task(consume())
+             while True:
+                 try:
+                     if stream.done():
+                         # raise stream exception if any
+                         stream.result()
+                     yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                     break
+                 except TimeoutError:
+                     yield b"\n"
+         except asyncio.CancelledError:
+             if cancel_message is not None:
+                 stream.cancel(cancel_message)
+             else:
+                 stream.cancel()
+             await stream
+             raise
+         finally:
+             # Make sure to always clean up the pubsub
+             await sub.__aexit__(None, None, None)
+
+     return body
+
+
  @retry_db
  async def create_run(request: ApiRequest):
      """Create a run."""
@@ -218,56 +322,13 @@ async def wait_run(request: ApiRequest):
          await sub.__aexit__(None, None, None)
          raise

-     last_chunk = ValueEvent()
-
-     async def consume():
-         vchunk: bytes | None = None
-         async for mode, chunk, _ in Runs.Stream.join(
-             run["run_id"],
-             thread_id=run["thread_id"],
-             stream_channel=sub,
-             cancel_on_disconnect=on_disconnect == "cancel",
-         ):
-             if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
-                 vchunk = chunk
-             elif mode == b"error":
-                 vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
-         if vchunk is not None:
-             last_chunk.set(vchunk)
-         else:
-             async with connect() as conn:
-                 thread_iter = await Threads.get(conn, thread_id)
-                 try:
-                     thread = await anext(thread_iter)
-                     last_chunk.set(thread["values"])
-                 except StopAsyncIteration:
-                     await logger.awarning(
-                         f"No checkpoint found for thread {thread_id}",
-                         thread_id=thread_id,
-                     )
-                     last_chunk.set(b"{}")
-
-     # keep the connection open by sending whitespace every 5 seconds
-     # leading whitespace will be ignored by json parsers
-     async def body() -> AsyncIterator[bytes]:
-         try:
-             stream = asyncio.create_task(consume())
-             while True:
-                 try:
-                     if stream.done():
-                         # raise stream exception if any
-                         stream.result()
-                     yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                     break
-                 except TimeoutError:
-                     yield b"\n"
-         except asyncio.CancelledError:
-             stream.cancel()
-             await stream
-             raise
-         finally:
-             # Make sure to always clean up the pubsub
-             await sub.__aexit__(None, None, None)
+     body = _run_result_body(
+         run_id=run["run_id"],
+         thread_id=run["thread_id"],
+         sub=sub,
+         cancel_on_disconnect=on_disconnect == "cancel",
+         fallback=_thread_values_fallback(thread_id),
+     )

      return StreamingResponse(
          body(),
@@ -305,53 +366,23 @@ async def wait_run_stateless(request: ApiRequest):
          await sub.__aexit__(None, None, None)
          raise

-     last_chunk = ValueEvent()
-
-     async def consume():
-         vchunk: bytes | None = None
-         async for mode, chunk, _ in Runs.Stream.join(
-             run["run_id"],
+     async def stateless_fallback() -> bytes:
+         await logger.awarning(
+             "No checkpoint emitted for stateless run",
+             run_id=run["run_id"],
              thread_id=run["thread_id"],
-             stream_channel=sub,
-             ignore_404=True,
-             cancel_on_disconnect=on_disconnect == "cancel",
-         ):
-             if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
-                 vchunk = chunk
-             elif mode == b"error":
-                 vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
-         if vchunk is not None:
-             last_chunk.set(vchunk)
-         else:
-             # we can't fetch the thread (it was deleted), so just return empty values
-             await logger.awarning(
-                 "No checkpoint emitted for stateless run",
-                 run_id=run["run_id"],
-                 thread_id=run["thread_id"],
-             )
-             last_chunk.set(b"{}")
-
-     # keep the connection open by sending whitespace every 5 seconds
-     # leading whitespace will be ignored by json parsers
-     async def body() -> AsyncIterator[bytes]:
-         try:
-             stream = asyncio.create_task(consume())
-             while True:
-                 try:
-                     if stream.done():
-                         # raise stream exception if any
-                         stream.result()
-                     yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
-                     break
-                 except TimeoutError:
-                     yield b"\n"
-         except asyncio.CancelledError:
-             stream.cancel("Run stream cancelled")
-             await stream
-             raise
-         finally:
-             # Make sure to always clean up the pubsub
-             await sub.__aexit__(None, None, None)
+         )
+         return b"{}"
+
+     body = _run_result_body(
+         run_id=run["run_id"],
+         thread_id=run["thread_id"],
+         sub=sub,
+         cancel_on_disconnect=on_disconnect == "cancel",
+         ignore_404=True,
+         fallback=stateless_fallback,
+         cancel_message="Run stream cancelled",
+     )

      return StreamingResponse(
          body(),
@@ -422,11 +453,23 @@ async def join_run(request: ApiRequest):
      validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
      validate_uuid(run_id, "Invalid run ID: must be a UUID")

-     return ApiResponse(
-         await Runs.join(
-             run_id,
-             thread_id=thread_id,
-         )
+     # A touch redundant, but to meet the existing signature of join, we need to throw any 404s before we enter the streaming body
+     await Runs.Stream.check_run_stream_auth(run_id, thread_id)
+     sub = await Runs.Stream.subscribe(run_id, thread_id)
+     body = _run_result_body(
+         run_id=run_id,
+         thread_id=thread_id,
+         sub=sub,
+         fallback=_thread_values_fallback(thread_id),
+     )
+
+     return StreamingResponse(
+         body(),
+         media_type="application/json",
+         headers={
+             "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+             "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+         },
      )


@@ -456,6 +499,10 @@ async def join_run_stream(request: ApiRequest):

      return EventSourceResponse(
          body(),
+         headers={
+             "Location": f"/threads/{thread_id}/runs/{run_id}/stream",
+             "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+         },
      )


@@ -476,19 +523,36 @@ async def cancel_run(
          action_str if action_str in {"interrupt", "rollback"} else "interrupt",
      )

-     async with connect() as conn:
-         await Runs.cancel(
-             conn,
-             [run_id],
-             action=action,
-             thread_id=thread_id,
-         )
-     if wait:
-         await Runs.join(
-             run_id,
-             thread_id=thread_id,
-         )
-     return Response(status_code=204 if wait else 202)
+     sub = await Runs.Stream.subscribe(run_id, thread_id) if wait else None
+     try:
+         async with connect() as conn:
+             await Runs.cancel(
+                 conn,
+                 [run_id],
+                 action=action,
+                 thread_id=thread_id,
+             )
+     except Exception:
+         if sub is not None:
+             await sub.__aexit__(None, None, None)
+         raise
+     if not wait:
+         return Response(status_code=202)
+
+     body = _run_result_body(
+         run_id=run_id,
+         thread_id=thread_id,
+         sub=sub,
+     )
+
+     return StreamingResponse(
+         body(),
+         media_type="application/json",
+         headers={
+             "Location": f"/threads/{thread_id}/runs/{run_id}/join",
+             "Content-Location": f"/threads/{thread_id}/runs/{run_id}",
+         },
+     )


  @retry_db
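With this change, wait_run, wait_run_stateless, join_run, and cancel_run (when wait is requested) all build their responses through the shared _run_result_body helper above: the body is padded with a newline keep-alive every 5 seconds and ends with a single JSON document. A minimal client sketch for consuming such a response follows; the base URL, the HTTP method, and the IDs are illustrative assumptions, not part of this diff:

# Hypothetical client for the blocking join endpoint shown above.
# Assumes a locally running server; thread_id/run_id are placeholders.
import httpx

BASE_URL = "http://localhost:2024"
thread_id = "00000000-0000-0000-0000-000000000000"  # placeholder
run_id = "00000000-0000-0000-0000-000000000001"     # placeholder

with httpx.Client(base_url=BASE_URL, timeout=None) as client:
    resp = client.get(f"/threads/{thread_id}/runs/{run_id}/join")
    resp.raise_for_status()
    # The body is "\n" keep-alives followed by one JSON document;
    # leading whitespace is ignored by the JSON parser.
    final_state = resp.json()
    print(final_state)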
langgraph_api/cli.py
@@ -256,7 +256,7 @@ def run_server(
          if k in to_patch:
              logger.debug(f"Skipping loaded env var {k}={v}")
              continue
-         to_patch[k] = v
+         to_patch[k] = v  # type: ignore[invalid-assignment]
      with patch_environment(
          **to_patch,
      ):
langgraph_api/command.py
@@ -21,7 +21,7 @@ def map_cmd(cmd: RunCommand) -> Command:
          update=update,
          goto=(
              [
-                 it if isinstance(it, str) else Send(it["node"], it["input"])
+                 it if isinstance(it, str) else Send(it["node"], it["input"])  # type: ignore[non-subscriptable]
                  for it in goto
              ]
              if goto
langgraph_api/js/remote.py
@@ -239,7 +239,7 @@ class RemotePregel(BaseRemotePregel):
                  tuple(task["path"]) if task.get("path") else tuple(),
                  # TODO: figure out how to properly deserialise errors
                  task.get("error"),
-                 tuple(interrupts),
+                 tuple(interrupts),  # type: ignore[arg-type]
                  state,
                  task.get("result"),
              )
langgraph_api/models/run.py
@@ -147,7 +147,7 @@ def ensure_ids(
                  ) from None
          else:
              results.append(None)
-     return tuple(results)
+     return tuple(results)  # type: ignore[invalid-return-type]


  def assign_defaults(
langgraph_api/patch.py
@@ -3,7 +3,7 @@ from typing import Any
  from starlette.responses import Response, StreamingResponse
  from starlette.types import Send

- from langgraph_api.serde import Fragment
+ from langgraph_api.serde import Fragment, json_dumpb


  """
@@ -32,6 +32,8 @@ async def StreamingResponse_stream_response(self, send: Send) -> None:
              continue
          if isinstance(chunk, Fragment):
              chunk = chunk.buf
+         if isinstance(chunk, dict):
+             chunk = json_dumpb(chunk)
          if not isinstance(chunk, (bytes, bytearray, memoryview)):  # noqa: UP038
              chunk = chunk.encode(self.charset)
          await send({"type": "http.response.body", "body": chunk, "more_body": True})
langgraph_api/serde.py
@@ -149,7 +149,7 @@ def json_loads(content: bytes | Fragment | dict) -> Any:
          content = content.buf
      if isinstance(content, dict):
          return content
-     return orjson.loads(cast(bytes, content))
+     return orjson.loads(content)


  # Do not use. orjson holds the GIL the entire time it's running anyway.
langgraph_api/stream.py
@@ -304,7 +304,7 @@ async def astream_state(
                  else:
                      msg = convert_to_messages([msg_])[0]
              else:
-                 msg = cast(BaseMessage, msg_)
+                 msg = msg_
              if msg.id in messages:
                  messages[msg.id] += msg
              else:
@@ -404,7 +404,7 @@ async def astream_state(
                  else:
                      msg = convert_to_messages([msg_])[0]
              else:
-                 msg = cast(BaseMessage, msg_)
+                 msg = msg_
              if msg.id in messages:
                  messages[msg.id] += msg
              else:
langgraph_api-0.4.23/langgraph_api/utils/stream_codec.py (new file)
@@ -0,0 +1,315 @@
+ from __future__ import annotations
+
+ import base64
+ from dataclasses import dataclass
+
+ import orjson
+ import structlog
+
+ PROTOCOL_VERSION = 1
+ """
+ ---
+ Version 1:
+ Byte Offsets
+ 0        1                  3                5                  5+N                5+N+M
+ +--------+------------------+----------------+------------------+------------------+--------------------+
+ | version| stream_id_len    | event_len      | stream_id        | event            | message            |
+ +--------+------------------+----------------+------------------+------------------+--------------------+
+   1 B        2 B                2 B               N B                M B               variable
+
+ ---- Old (to be dropped soon / multiple formats)
+ Version 0 (old):
+ 1) b"$:" + <stream_id> + b"$:" + <event> + b"$:" + <raw_json>
+ 2) b"$:" + <stream_id> + b"$:" + <raw_json>
+ """
+
+ BYTE_MASK = 0xFF
+ HEADER_LEN = 5
+ logger = structlog.stdlib.get_logger(__name__)
+
+
+ class StreamFormatError(ValueError):
+     """Raised when a stream frame fails validation."""
+
+
+ @dataclass(slots=True)
+ class StreamPacket:
+     version: int
+     event: memoryview | bytes
+     message: memoryview | bytes
+     stream_id: memoryview | bytes | None
+
+     @property
+     def event_bytes(self) -> bytes:
+         return (
+             self.event.tobytes() if isinstance(self.event, memoryview) else self.event
+         )
+
+     @property
+     def message_bytes(self) -> bytes:
+         return (
+             self.message.tobytes()
+             if isinstance(self.message, memoryview)
+             else self.message
+         )
+
+     @property
+     def resumable(self) -> bool:
+         return self.stream_id is not None
+
+     @property
+     def stream_id_bytes(self) -> bytes | None:
+         if self.stream_id is None:
+             return None
+         if isinstance(self.stream_id, bytes):
+             return self.stream_id
+         return self.stream_id.tobytes()
+
+
+ class StreamCodec:
+     """Codec for encoding and decoding stream packets."""
+
+     __slots__ = ("_version",)
+
+     def __init__(self, *, protocol_version: int = PROTOCOL_VERSION) -> None:
+         self._version = protocol_version & BYTE_MASK
+
+     def encode(
+         self,
+         event: str,
+         message: bytes,
+         *,
+         stream_id: str | None = None,
+     ) -> bytes:
+         if not event:
+             raise StreamFormatError("event cannot be empty")
+         event_bytes = event.encode("utf-8")
+         if len(event_bytes) > 0xFFFF:
+             raise StreamFormatError("event exceeds 65535 bytes; cannot encode")
+         if not event_bytes:
+             raise StreamFormatError("event cannot be empty")
+
+         if stream_id:
+             # It's a resumable stream
+             stream_id_bytes = stream_id.encode("utf-8")
+             if len(stream_id_bytes) > 0xFFFF:
+                 raise StreamFormatError("stream_id exceeds 65535 bytes; cannot encode")
+         else:
+             stream_id_bytes = None
+         stream_id_len = len(stream_id_bytes) if stream_id_bytes else 0
+         event_len = len(event_bytes)
+         frame = bytearray(HEADER_LEN + stream_id_len + event_len + len(message))
+         frame[0] = self._version
+         frame[1:3] = stream_id_len.to_bytes(2, "big")
+         frame[3:5] = event_len.to_bytes(2, "big")
+
+         cursor = HEADER_LEN
+         if stream_id_bytes is not None:
+             frame[cursor : cursor + stream_id_len] = stream_id_bytes
+             cursor += stream_id_len
+
+         frame[cursor : cursor + event_len] = event_bytes
+         cursor += event_len
+         frame[cursor:] = message
+         return bytes(frame)
+
+     def decode(self, data: bytes | bytearray | memoryview) -> StreamPacket:
+         view = data if isinstance(data, memoryview) else memoryview(data)
+         if len(view) < HEADER_LEN:
+             raise StreamFormatError("frame too short")
+
+         version = view[0]
+         if version != self._version:
+             raise StreamFormatError(f"unsupported protocol version: {version}")
+
+         stream_id_len = int.from_bytes(view[1:3], "big")
+         event_len = int.from_bytes(view[3:5], "big")
+         if event_len == 0:
+             raise StreamFormatError("event cannot be empty")
+         offset = HEADER_LEN
+         if stream_id_len > 0:
+             stream_id_view = view[offset : offset + stream_id_len]
+             offset += stream_id_len
+         else:
+             # Not resumable
+             stream_id_view = None
+         if len(view) < offset + event_len:
+             raise StreamFormatError("truncated event payload")
+         event_view = view[offset : offset + event_len]
+         offset += event_len
+         message_view = view[offset:]
+         return StreamPacket(
+             version=version,
+             event=event_view,
+             message=message_view,
+             stream_id=stream_id_view,
+         )
+
+     def decode_safe(self, data: bytes | bytearray | memoryview) -> StreamPacket | None:
+         try:
+             return self.decode(data)
+         except StreamFormatError as e:
+             logger.warning(f"Failed to decode as version {self._version}", error=e)
+             return None
+
+
+ STREAM_CODEC = StreamCodec()
+
+
+ def decode_stream_message(
+     data: bytes | bytearray | memoryview,
+     *,
+     channel: bytes | str | None = None,
+ ) -> StreamPacket:
+     if isinstance(data, memoryview):
+         view = data
+     elif isinstance(data, (bytes, bytearray)):
+         view = memoryview(data)
+     else:
+         logger.warning("Unknown type for stream message", type=type(data))
+         view = memoryview(bytes(data))
+
+     # Current protocol version
+     if packet := STREAM_CODEC.decode_safe(view):
+         return packet
+     logger.debug("Attempting to decode a v0 formatted stream message")
+     # Legacy codecs. Yuck. Won't be hit unless you have stale pods running (or for a brief period during upgrade).
+     # Schedule for removal in next major release.
+     if packet := _decode_v0_resumable_format(view, channel):
+         return packet
+
+     # Non-resumable format.
+     if packet := _decode_v0_live_format(view, channel):
+         return packet
+     raise StreamFormatError("failed to decode stream message")
+
+
+ _STREAMING_DELIMITER = b"$:"
+ _STREAMING_DELIMITER_LEN = len(_STREAMING_DELIMITER)
+
+
+ def _decode_v0_resumable_format(
+     view: memoryview,
+     channel: bytes | str | None = None,
+ ) -> StreamPacket | None:
+     """
+     Legacy v0 resumable format:
+     1) b"$:" + <stream_id> + b"$:" + <event> + b"$:" + <raw_json>
+     2) b"$:" + <stream_id> + b"$:" + <raw_json>
+     """
+
+     # must start with "$:"
+     if (
+         len(view) < _STREAMING_DELIMITER_LEN
+         or view[:_STREAMING_DELIMITER_LEN] != _STREAMING_DELIMITER
+     ):
+         return None
+
+     # "$:<stream_id>$:"
+     first = _find_delim(view, _STREAMING_DELIMITER_LEN, _STREAMING_DELIMITER)
+     if first == -1:
+         return None
+     stream_view = view[_STREAMING_DELIMITER_LEN:first]
+
+     # try "$:<event>$:"
+     second = _find_delim(view, first + _STREAMING_DELIMITER_LEN, _STREAMING_DELIMITER)
+     if second != -1:
+         event_view = view[first + _STREAMING_DELIMITER_LEN : second]
+         msg_view = view[second + _STREAMING_DELIMITER_LEN :]
+         return StreamPacket(
+             version=0,
+             event=event_view,
+             message=msg_view,
+             stream_id=stream_view,
+         )
+
+     chan_bytes = channel.encode("utf-8") if isinstance(channel, str) else channel
+
+     if chan_bytes:
+         marker = b":stream:"
+         idx = chan_bytes.rfind(marker)
+         event_bytes = chan_bytes[idx + len(marker) :] if idx != -1 else chan_bytes
+     else:
+         event_bytes = b""
+
+     msg_view = view[first + _STREAMING_DELIMITER_LEN :]
+     return StreamPacket(
+         version=0,
+         event=memoryview(event_bytes),
+         message=msg_view,
+         stream_id=stream_view,
+     )
+
+
+ def _decode_v0_live_format(
+     view: memoryview, channel: bytes | str | None = None
+ ) -> StreamPacket | None:
+     try:
+         package = orjson.loads(view)
+     except orjson.JSONDecodeError:
+         return _decode_v0_flat_format(view, channel)
+     if (
+         not isinstance(package, dict)
+         or "event" not in package
+         or "message" not in package
+     ):
+         return _decode_v0_flat_format(view, channel)
+     event_obj = package.get("event")
+     message_obj = package.get("message")
+     if event_obj is None:
+         event_bytes = b""
+     elif isinstance(event_obj, str):
+         event_bytes = event_obj.encode()
+     elif isinstance(event_obj, (bytes, bytearray, memoryview)):
+         event_bytes = bytes(event_obj)
+     else:
+         event_bytes = orjson.dumps(event_obj)
+
+     if isinstance(message_obj, (bytes, bytearray, memoryview)):
+         message_view = memoryview(bytes(message_obj))
+     elif isinstance(message_obj, str):
+         try:
+             message_view = memoryview(base64.b64decode(message_obj))
+         except Exception:
+             message_view = memoryview(message_obj.encode())
+     elif message_obj is None:
+         message_view = memoryview(b"")
+     else:
+         message_view = memoryview(orjson.dumps(message_obj))
+
+     return StreamPacket(
+         event=event_bytes,
+         message=message_view,
+         stream_id=None,
+         version=0,
+     )
+
+
+ def _decode_v0_flat_format(
+     view: memoryview, channel: bytes | str | None = None
+ ) -> StreamPacket | None:
+     packet = bytes(view)
+     stream_id = None
+     if channel is None:
+         return
+     if packet.startswith(b"$:"):
+         _, stream_id, packet = packet.split(b":", 2)
+     channel = channel.encode("utf-8") if isinstance(channel, str) else channel
+     channel = channel.split(b":")[-1]
+     return StreamPacket(
+         version=0,
+         event=memoryview(channel),
+         message=memoryview(packet),
+         stream_id=stream_id,
+     )
+
+
+ def _find_delim(view: memoryview, start: int, delimiter: bytes) -> int:
+     delim_len = len(delimiter)
+     end = len(view) - delim_len
+     i = start
+     while i <= end:
+         if view[i : i + delim_len] == delimiter:
+             return i
+         i += 1
+     return -1
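For reference, a small round-trip through the version-1 framing defined above, using the module's own STREAM_CODEC instance and decode_stream_message; the event name, payload, and stream id are made-up values, and the snippet is a sketch rather than part of the release:

# Hypothetical round-trip through the v1 frame format (not part of the diff).
from langgraph_api.utils.stream_codec import STREAM_CODEC, decode_stream_message

# Header: 1 B version | 2 B stream_id_len | 2 B event_len, then stream_id, event, message.
frame = STREAM_CODEC.encode("values", b'{"foo": 1}', stream_id="run-123")

packet = decode_stream_message(frame)
assert packet.version == 1
assert packet.resumable                      # stream_id was present
assert packet.event_bytes == b"values"
assert packet.stream_id_bytes == b"run-123"
assert packet.message_bytes == b'{"foo": 1}'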
pyproject.toml
@@ -31,7 +31,7 @@ dependencies = [
      "cryptography>=42.0.0,<45.0",
      "langgraph-sdk>=0.2.0",
      "cloudpickle>=3.0.0",
-     "langgraph-runtime-inmem>=0.12.1,<0.13.0",
+     "langgraph-runtime-inmem>=0.14.0,<0.15.0",
      "truststore>=0.1",
  ]

langgraph_api-0.4.21/langgraph_api/__init__.py (removed)
@@ -1 +0,0 @@
- __version__ = "0.4.21"