langgraph-api 0.4.1__py3-none-any.whl → 0.7.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. langgraph_api/__init__.py +1 -1
  2. langgraph_api/api/__init__.py +111 -51
  3. langgraph_api/api/a2a.py +1610 -0
  4. langgraph_api/api/assistants.py +212 -89
  5. langgraph_api/api/mcp.py +3 -3
  6. langgraph_api/api/meta.py +52 -28
  7. langgraph_api/api/openapi.py +27 -17
  8. langgraph_api/api/profile.py +108 -0
  9. langgraph_api/api/runs.py +342 -195
  10. langgraph_api/api/store.py +19 -2
  11. langgraph_api/api/threads.py +209 -27
  12. langgraph_api/asgi_transport.py +14 -9
  13. langgraph_api/asyncio.py +14 -4
  14. langgraph_api/auth/custom.py +52 -37
  15. langgraph_api/auth/langsmith/backend.py +4 -3
  16. langgraph_api/auth/langsmith/client.py +13 -8
  17. langgraph_api/cli.py +230 -133
  18. langgraph_api/command.py +5 -3
  19. langgraph_api/config/__init__.py +532 -0
  20. langgraph_api/config/_parse.py +58 -0
  21. langgraph_api/config/schemas.py +431 -0
  22. langgraph_api/cron_scheduler.py +17 -1
  23. langgraph_api/encryption/__init__.py +15 -0
  24. langgraph_api/encryption/aes_json.py +158 -0
  25. langgraph_api/encryption/context.py +35 -0
  26. langgraph_api/encryption/custom.py +280 -0
  27. langgraph_api/encryption/middleware.py +632 -0
  28. langgraph_api/encryption/shared.py +63 -0
  29. langgraph_api/errors.py +12 -1
  30. langgraph_api/executor_entrypoint.py +11 -6
  31. langgraph_api/feature_flags.py +29 -0
  32. langgraph_api/graph.py +176 -76
  33. langgraph_api/grpc/client.py +313 -0
  34. langgraph_api/grpc/config_conversion.py +231 -0
  35. langgraph_api/grpc/generated/__init__.py +29 -0
  36. langgraph_api/grpc/generated/checkpointer_pb2.py +63 -0
  37. langgraph_api/grpc/generated/checkpointer_pb2.pyi +99 -0
  38. langgraph_api/grpc/generated/checkpointer_pb2_grpc.py +329 -0
  39. langgraph_api/grpc/generated/core_api_pb2.py +216 -0
  40. langgraph_api/grpc/generated/core_api_pb2.pyi +905 -0
  41. langgraph_api/grpc/generated/core_api_pb2_grpc.py +1621 -0
  42. langgraph_api/grpc/generated/engine_common_pb2.py +219 -0
  43. langgraph_api/grpc/generated/engine_common_pb2.pyi +722 -0
  44. langgraph_api/grpc/generated/engine_common_pb2_grpc.py +24 -0
  45. langgraph_api/grpc/generated/enum_cancel_run_action_pb2.py +37 -0
  46. langgraph_api/grpc/generated/enum_cancel_run_action_pb2.pyi +12 -0
  47. langgraph_api/grpc/generated/enum_cancel_run_action_pb2_grpc.py +24 -0
  48. langgraph_api/grpc/generated/enum_control_signal_pb2.py +37 -0
  49. langgraph_api/grpc/generated/enum_control_signal_pb2.pyi +16 -0
  50. langgraph_api/grpc/generated/enum_control_signal_pb2_grpc.py +24 -0
  51. langgraph_api/grpc/generated/enum_durability_pb2.py +37 -0
  52. langgraph_api/grpc/generated/enum_durability_pb2.pyi +16 -0
  53. langgraph_api/grpc/generated/enum_durability_pb2_grpc.py +24 -0
  54. langgraph_api/grpc/generated/enum_multitask_strategy_pb2.py +37 -0
  55. langgraph_api/grpc/generated/enum_multitask_strategy_pb2.pyi +16 -0
  56. langgraph_api/grpc/generated/enum_multitask_strategy_pb2_grpc.py +24 -0
  57. langgraph_api/grpc/generated/enum_run_status_pb2.py +37 -0
  58. langgraph_api/grpc/generated/enum_run_status_pb2.pyi +22 -0
  59. langgraph_api/grpc/generated/enum_run_status_pb2_grpc.py +24 -0
  60. langgraph_api/grpc/generated/enum_stream_mode_pb2.py +37 -0
  61. langgraph_api/grpc/generated/enum_stream_mode_pb2.pyi +28 -0
  62. langgraph_api/grpc/generated/enum_stream_mode_pb2_grpc.py +24 -0
  63. langgraph_api/grpc/generated/enum_thread_status_pb2.py +37 -0
  64. langgraph_api/grpc/generated/enum_thread_status_pb2.pyi +16 -0
  65. langgraph_api/grpc/generated/enum_thread_status_pb2_grpc.py +24 -0
  66. langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.py +37 -0
  67. langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.pyi +16 -0
  68. langgraph_api/grpc/generated/enum_thread_stream_mode_pb2_grpc.py +24 -0
  69. langgraph_api/grpc/generated/errors_pb2.py +39 -0
  70. langgraph_api/grpc/generated/errors_pb2.pyi +21 -0
  71. langgraph_api/grpc/generated/errors_pb2_grpc.py +24 -0
  72. langgraph_api/grpc/ops/__init__.py +370 -0
  73. langgraph_api/grpc/ops/assistants.py +424 -0
  74. langgraph_api/grpc/ops/runs.py +792 -0
  75. langgraph_api/grpc/ops/threads.py +1013 -0
  76. langgraph_api/http.py +16 -5
  77. langgraph_api/http_metrics.py +15 -35
  78. langgraph_api/http_metrics_utils.py +38 -0
  79. langgraph_api/js/build.mts +1 -1
  80. langgraph_api/js/client.http.mts +13 -7
  81. langgraph_api/js/client.mts +2 -5
  82. langgraph_api/js/package.json +29 -28
  83. langgraph_api/js/remote.py +56 -30
  84. langgraph_api/js/src/graph.mts +20 -0
  85. langgraph_api/js/sse.py +2 -2
  86. langgraph_api/js/ui.py +1 -1
  87. langgraph_api/js/yarn.lock +1204 -1006
  88. langgraph_api/logging.py +29 -2
  89. langgraph_api/metadata.py +99 -28
  90. langgraph_api/middleware/http_logger.py +7 -2
  91. langgraph_api/middleware/private_network.py +7 -7
  92. langgraph_api/models/run.py +54 -93
  93. langgraph_api/otel_context.py +205 -0
  94. langgraph_api/patch.py +5 -3
  95. langgraph_api/queue_entrypoint.py +154 -65
  96. langgraph_api/route.py +47 -5
  97. langgraph_api/schema.py +88 -10
  98. langgraph_api/self_hosted_logs.py +124 -0
  99. langgraph_api/self_hosted_metrics.py +450 -0
  100. langgraph_api/serde.py +79 -37
  101. langgraph_api/server.py +138 -60
  102. langgraph_api/state.py +4 -3
  103. langgraph_api/store.py +25 -16
  104. langgraph_api/stream.py +80 -29
  105. langgraph_api/thread_ttl.py +31 -13
  106. langgraph_api/timing/__init__.py +25 -0
  107. langgraph_api/timing/profiler.py +200 -0
  108. langgraph_api/timing/timer.py +318 -0
  109. langgraph_api/utils/__init__.py +53 -8
  110. langgraph_api/utils/cache.py +47 -10
  111. langgraph_api/utils/config.py +2 -1
  112. langgraph_api/utils/errors.py +77 -0
  113. langgraph_api/utils/future.py +10 -6
  114. langgraph_api/utils/headers.py +76 -2
  115. langgraph_api/utils/retriable_client.py +74 -0
  116. langgraph_api/utils/stream_codec.py +315 -0
  117. langgraph_api/utils/uuids.py +29 -62
  118. langgraph_api/validation.py +9 -0
  119. langgraph_api/webhook.py +120 -6
  120. langgraph_api/worker.py +55 -24
  121. {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/METADATA +16 -8
  122. langgraph_api-0.7.3.dist-info/RECORD +168 -0
  123. {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/WHEEL +1 -1
  124. langgraph_runtime/__init__.py +1 -0
  125. langgraph_runtime/routes.py +11 -0
  126. logging.json +1 -3
  127. openapi.json +839 -478
  128. langgraph_api/config.py +0 -387
  129. langgraph_api/js/isolate-0x130008000-46649-46649-v8.log +0 -4430
  130. langgraph_api/js/isolate-0x138008000-44681-44681-v8.log +0 -4430
  131. langgraph_api/js/package-lock.json +0 -3308
  132. langgraph_api-0.4.1.dist-info/RECORD +0 -107
  133. /langgraph_api/{utils.py → grpc/__init__.py} +0 -0
  134. {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/entry_points.txt +0 -0
  135. {langgraph_api-0.4.1.dist-info → langgraph_api-0.7.3.dist-info}/licenses/LICENSE +0 -0
langgraph_api/logging.py CHANGED
@@ -69,9 +69,12 @@ class AddApiVersion:
     def __call__(
         self, logger: logging.Logger, method_name: str, event_dict: EventDict
     ) -> EventDict:
-        from langgraph_api import __version__
+        try:
+            from langgraph_api import __version__
 
-        event_dict["langgraph_api_version"] = __version__
+            event_dict["langgraph_api_version"] = __version__
+        except ImportError:
+            pass
         return event_dict
 
 
@@ -117,6 +120,30 @@ class JSONRenderer:
         return json_dumpb(event_dict).decode()
 
 
+# same as Formatter, but always uses JSONRenderer. Used by OTLP log handler for self hosted logging
+class OTLPFormatter(structlog.stdlib.ProcessorFormatter):
+    def __init__(self, *args, **kwargs) -> None:
+        if len(args) == 3:
+            fmt, datefmt, style = args
+            kwargs["fmt"] = fmt
+            kwargs["datefmt"] = datefmt
+            kwargs["style"] = style
+        else:
+            raise RuntimeError(
+                f"OTLPFormatter expected 3 positional arguments (fmt, datefmt, style), "
+                f"but got {len(args)} arguments."
+            )
+        super().__init__(
+            processors=[
+                structlog.stdlib.ProcessorFormatter.remove_processors_meta,
+                AddLoggingContext(),
+                JSONRenderer(),
+            ],
+            foreign_pre_chain=shared_processors,
+            **kwargs,
+        )
+
+
 LEVELS = logging.getLevelNamesMapping()
 
 
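Side note, not part of the diff: a structlog ProcessorFormatter like the OTLPFormatter added above is normally attached to a stdlib logging handler. A minimal sketch, assuming structlog >= 21.3 and using built-in stand-ins for the package's AddLoggingContext and JSONRenderer processors:

    import logging
    import structlog

    # processors applied to records coming from plain stdlib loggers
    shared_processors = [
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
    ]

    formatter = structlog.stdlib.ProcessorFormatter(
        processors=[
            structlog.stdlib.ProcessorFormatter.remove_processors_meta,
            structlog.processors.JSONRenderer(),  # stand-in for the package's JSONRenderer
        ],
        foreign_pre_chain=shared_processors,
    )

    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logging.getLogger().addHandler(handler)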
langgraph_api/metadata.py CHANGED
@@ -2,22 +2,28 @@ import asyncio
 import os
 import uuid
 from datetime import UTC, datetime
+from typing import Any
 
 import langgraph.version
 import orjson
 import structlog
 
+import langgraph_api.config as config
+from langgraph_api.auth.custom import get_auth_instance
 from langgraph_api.config import (
     LANGGRAPH_CLOUD_LICENSE_KEY,
-    LANGSMITH_API_KEY,
     LANGSMITH_AUTH_ENDPOINT,
+    LANGSMITH_CONTROL_PLANE_API_KEY,
+    LANGSMITH_LICENSE_REQUIRED_CLAIMS,
     USES_CUSTOM_APP,
     USES_CUSTOM_AUTH,
     USES_INDEXING,
     USES_STORE_TTL,
     USES_THREAD_TTL,
 )
+from langgraph_api.graph import GRAPHS, is_js_graph
 from langgraph_api.http import http_request
+from langgraph_api.js.base import is_js_path
 from langgraph_license.validation import plus_features_enabled
 
 logger = structlog.stdlib.get_logger(__name__)
@@ -56,7 +62,7 @@ NODE_COUNTER = 0
 FROM_TIMESTAMP = datetime.now(UTC).isoformat()
 
 # Beacon endpoint for license key submissions
-BEACON_ENDPOINT = "https://api.smith.langchain.com/v1/metadata/submit"
+BEACON_ENDPOINT = "https://beacon.langchain.com/v1/beacon/metadata/submit"
 
 # LangChain auth endpoint for API key submissions
 LANGCHAIN_METADATA_ENDPOINT = None
@@ -72,6 +78,36 @@ if LANGSMITH_AUTH_ENDPOINT:
     )
 
 
+def _lang_usage_metadata() -> tuple[dict[str, str], dict[str, int]]:
+    js_graph_count = sum(1 for graph_id in GRAPHS if is_js_graph(graph_id))
+    py_graph_count = len(GRAPHS) - js_graph_count
+
+    auth_instance = get_auth_instance()
+    custom_auth_enabled = auth_instance is not None
+    custom_js_auth_enabled = auth_instance == "js"
+
+    js_proxy_middleware_enabled = False
+    if (
+        config.HTTP_CONFIG
+        and (app := config.HTTP_CONFIG.get("app"))
+        and isinstance(app, str)
+    ):
+        app_path = app.split(":", 1)[0]  # type: ignore[possibly-unresolved-reference]
+        js_proxy_middleware_enabled = is_js_path(app_path)
+
+    tags = {
+        "langgraph.platform.uses_custom_auth": str(custom_auth_enabled),
+        "langgraph.platform.uses_js_custom_auth": str(custom_js_auth_enabled),
+        "langgraph.platform.uses_js_proxy_middleware": str(js_proxy_middleware_enabled),
+    }
+    measures = {
+        "langgraph.platform.py_graphs": py_graph_count,
+        "langgraph.platform.js_graphs": js_graph_count,
+    }
+
+    return tags, measures
+
+
 def incr_runs(*, incr: int = 1) -> None:
     global RUN_COUNTER
     RUN_COUNTER += incr
@@ -87,20 +123,57 @@ async def metadata_loop() -> None:
         from langgraph_api import __version__
     except ImportError:
         __version__ = None
-    if not LANGGRAPH_CLOUD_LICENSE_KEY and not LANGSMITH_API_KEY:
+    if not LANGGRAPH_CLOUD_LICENSE_KEY and not LANGSMITH_CONTROL_PLANE_API_KEY:
+        logger.info(
+            "No license key or control plane API key set, skipping metadata loop"
+        )
         return
+    lg_version = langgraph.version.__version__
 
     if (
         LANGGRAPH_CLOUD_LICENSE_KEY
         and not LANGGRAPH_CLOUD_LICENSE_KEY.startswith("lcl_")
-        and not LANGSMITH_API_KEY
+        and not LANGSMITH_CONTROL_PLANE_API_KEY
     ):
        logger.info("Running in air-gapped mode, skipping metadata loop")
        return
 
-    logger.info("Starting metadata loop")
+    # TODO: This is a temporary "hack". A user could inadvertently include
+    # 'agent_builder_enabled' in LANGSMITH_LICENSE_REQUIRED_CLAIMS for a
+    # non-Agent Builder self-hosted deployment. If the 'agent_builder_enabled'
+    # entitlement is enabled, then this would bypass the metadata loop.
+    #
+    # If the 'agent_builder_enabled' entitlement is disabled, then this is ok
+    # because the license key validation would fail and the app would not start.
+    if (
+        LANGGRAPH_CLOUD_LICENSE_KEY
+        and "agent_builder_enabled" in LANGSMITH_LICENSE_REQUIRED_CLAIMS
+    ):
+        logger.info("Skipping metadata loop for self-hosted Agent Builder")
+        return
+
+    logger.info("Starting metadata loop", endpoint=LANGCHAIN_METADATA_ENDPOINT)
 
     global RUN_COUNTER, NODE_COUNTER, FROM_TIMESTAMP
+    base_tags = _ensure_strings(
+        # Tag values must be strings.
+        {
+            "langgraph.python.version": lg_version,
+            "langgraph_api.version": __version__ or "",
+            "langgraph.platform.revision": REVISION or "",
+            "langgraph.platform.variant": VARIANT or "",
+            "langgraph.platform.host": HOST,
+            "langgraph.platform.tenant_id": TENANT_ID or "",
+            "langgraph.platform.project_id": PROJECT_ID or "",
+            "langgraph.platform.plan": PLAN,
+            # user app features
+            "user_app.uses_indexing": USES_INDEXING or "",
+            "user_app.uses_custom_app": USES_CUSTOM_APP or "",
+            "user_app.uses_custom_auth": USES_CUSTOM_AUTH or "",
+            "user_app.uses_thread_ttl": USES_THREAD_TTL or "",
+            "user_app.uses_store_ttl": USES_STORE_TTL or "",
+        }
+    )
     while True:
         # because we always read and write from coroutines in main thread
         # we don't need a lock as long as there's no awaits in this block
@@ -111,30 +184,16 @@ async def metadata_loop() -> None:
         RUN_COUNTER = 0
         NODE_COUNTER = 0
         FROM_TIMESTAMP = to_timestamp
+        usage_tags, usage_measures = _lang_usage_metadata()
 
         base_payload = {
             "from_timestamp": from_timestamp,
             "to_timestamp": to_timestamp,
-            "tags": {
-                # Tag values must be strings.
-                "langgraph.python.version": langgraph.version.__version__,
-                "langgraph_api.version": __version__ or "",
-                "langgraph.platform.revision": REVISION or "",
-                "langgraph.platform.variant": VARIANT or "",
-                "langgraph.platform.host": HOST,
-                "langgraph.platform.tenant_id": TENANT_ID or "",
-                "langgraph.platform.project_id": PROJECT_ID or "",
-                "langgraph.platform.plan": PLAN,
-                # user app features
-                "user_app.uses_indexing": str(USES_INDEXING or ""),
-                "user_app.uses_custom_app": str(USES_CUSTOM_APP or ""),
-                "user_app.uses_custom_auth": str(USES_CUSTOM_AUTH),
-                "user_app.uses_thread_ttl": str(USES_THREAD_TTL),
-                "user_app.uses_store_ttl": str(USES_STORE_TTL),
-            },
+            "tags": base_tags | _ensure_strings(usage_tags),
             "measures": {
-                "langgraph.platform.runs": runs,
-                "langgraph.platform.nodes": nodes,
+                "langgraph.platform.runs": int(runs),
+                "langgraph.platform.nodes": int(nodes),
+                **usage_measures,
             },
             "logs": [],
         }
@@ -159,7 +218,11 @@ async def metadata_loop() -> None:
                     body=orjson.dumps(beacon_payload),
                     headers={"Content-Type": "application/json"},
                 )
-                await logger.ainfo("Successfully submitted metadata to beacon endpoint")
+                await logger.ainfo(
+                    "Successfully submitted metadata to beacon endpoint",
+                    n_runs=runs,
+                    n_nodes=nodes,
+                )
             except Exception as e:
                 submissions_failed.append("beacon")
                 await logger.awarning(
@@ -167,10 +230,10 @@ async def metadata_loop() -> None:
                 )
 
         # 2. Send to langchain auth endpoint if API key is set
-        if LANGSMITH_API_KEY and LANGCHAIN_METADATA_ENDPOINT:
+        if LANGSMITH_CONTROL_PLANE_API_KEY and LANGCHAIN_METADATA_ENDPOINT:
             langchain_payload = {
                 **base_payload,
-                "api_key": LANGSMITH_API_KEY,
+                "api_key": LANGSMITH_CONTROL_PLANE_API_KEY,
             }
             submissions_attempted.append("langchain")
             try:
@@ -180,7 +243,11 @@ async def metadata_loop() -> None:
                     body=orjson.dumps(langchain_payload),
                     headers={"Content-Type": "application/json"},
                 )
-                logger.info("Successfully submitted metadata to LangSmith instance")
+                logger.info(
+                    "Successfully submitted metadata to LangSmith instance",
+                    n_runs=runs,
+                    n_nodes=nodes,
+                )
             except Exception as e:
                 submissions_failed.append("langchain")
                 await logger.awarning(
@@ -201,3 +268,7 @@ async def metadata_loop() -> None:
             )
 
         await asyncio.sleep(INTERVAL)
+
+
+def _ensure_strings(payload: dict[str, Any]) -> dict[str, Any]:
+    return {k: "" if v is None else str(v) for k, v in payload.items()}
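Illustration, not part of the diff: the new _ensure_strings helper coerces every tag value to a string (None becomes ""), which is what makes the base_tags | _ensure_strings(usage_tags) merge safe. The values below are invented:

    def _ensure_strings(payload):
        return {k: "" if v is None else str(v) for k, v in payload.items()}

    base_tags = _ensure_strings(
        {"langgraph_api.version": "0.7.3", "langgraph.platform.plan": None}
    )
    usage_tags = {"langgraph.platform.uses_custom_auth": True}

    merged = base_tags | _ensure_strings(usage_tags)
    # {'langgraph_api.version': '0.7.3',
    #  'langgraph.platform.plan': '',
    #  'langgraph.platform.uses_custom_auth': 'True'}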
langgraph_api/middleware/http_logger.py CHANGED
@@ -5,11 +5,13 @@ import structlog
 from starlette.requests import ClientDisconnect
 from starlette.types import Message, Receive, Scope, Send
 
+from langgraph_api.config import MOUNT_PREFIX
 from langgraph_api.http_metrics import HTTP_METRICS_COLLECTOR
 from langgraph_api.utils.headers import should_include_header_in_logs
 
 asgi = structlog.stdlib.get_logger("asgi")
 
+
 PATHS_IGNORE = {"/ok", "/metrics"}
 
 
@@ -37,7 +39,10 @@ class AccessLoggerMiddleware:
         self.debug_enabled = False
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
-        if scope["type"] != "http" or scope.get("path") in PATHS_IGNORE:
+        if (
+            scope["type"] != "http"
+            or scope.get("path", "").replace(MOUNT_PREFIX or "", "") in PATHS_IGNORE
+        ):
             return await self.app(scope, receive, send)  # pragma: no cover
 
         loop = asyncio.get_event_loop()
@@ -92,7 +97,7 @@ class AccessLoggerMiddleware:
                 path=path,
                 status=status,
                 latency_ms=latency,
-                route=route,
+                route=str(route),
                 path_params=scope.get("path_params"),
                 query_string=qs.decode() if qs else "",
                 proto=scope.get("http_version"),
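Illustration, not part of the diff: the reworked PATHS_IGNORE check strips the mount prefix before matching, so /ok and /metrics stay out of the access log even when the API is mounted under a prefix. MOUNT_PREFIX = "/api" is an assumed example value:

    MOUNT_PREFIX = "/api"  # assumed example; the real value comes from langgraph_api.config
    PATHS_IGNORE = {"/ok", "/metrics"}

    def should_skip_access_log(path: str) -> bool:
        # mirror of the middleware's condition for HTTP scopes
        return path.replace(MOUNT_PREFIX or "", "") in PATHS_IGNORE

    assert should_skip_access_log("/api/ok")
    assert should_skip_access_log("/metrics")
    assert not should_skip_access_log("/api/threads")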
langgraph_api/middleware/private_network.py CHANGED
@@ -25,19 +25,19 @@ class PrivateNetworkMiddleware(BaseHTTPMiddleware):
     A web browser determines whether a network is private based on IP address ranges
     and local networking conditions. Typically, it checks:
 
-    IP Address Range If the website is hosted on an IP within private address
+    IP Address Range - If the website is hosted on an IP within private address
     ranges (RFC 1918):
 
-    10.0.0.0 10.255.255.255
-    172.16.0.0 172.31.255.255
-    192.168.0.0 192.168.255.255
+    10.0.0.0 - 10.255.255.255
+    172.16.0.0 - 172.31.255.255
+    192.168.0.0 - 192.168.255.255
     127.0.0.1 (loopback)
-    Localhost and Hostname Domains like localhost or .local are assumed to be private.
+    Localhost and Hostname - Domains like localhost or .local are assumed to be private.
 
-    Network Context The browser may check if the device is connected
+    Network Context - The browser may check if the device is connected
     to a local network (e.g., corporate or home Wi-Fi) rather than the public internet.
 
-    CORS and Private Network Access (PNA) Modern browsers implement restrictions
+    CORS and Private Network Access (PNA) - Modern browsers implement restrictions
     where resources on private networks require explicit permission (via CORS headers)
     when accessed from a public site.
     """
langgraph_api/models/run.py CHANGED
@@ -1,19 +1,20 @@
 import asyncio
 import contextlib
 import time
-import urllib.parse
 import uuid
 from collections.abc import Mapping, Sequence
-from typing import Any, NamedTuple, cast
+from typing import TYPE_CHECKING, Any, NamedTuple, cast
 from uuid import UUID
 
-import orjson
 import structlog
-from starlette.authentication import BaseUser
 from starlette.exceptions import HTTPException
 from typing_extensions import TypedDict
 
+from langgraph_api.encryption.middleware import encrypt_request
+from langgraph_api.feature_flags import FF_USE_CORE_API
 from langgraph_api.graph import GRAPHS, get_assistant_id
+from langgraph_api.grpc.ops import Runs as GrpcRuns
+from langgraph_api.otel_context import inject_current_trace_context
 from langgraph_api.schema import (
     All,
     Config,
@@ -26,11 +27,17 @@ from langgraph_api.schema import (
     RunCommand,
     StreamMode,
 )
-from langgraph_api.utils import AsyncConnectionProto, get_auth_ctx
-from langgraph_api.utils.headers import should_include_header
+from langgraph_api.utils import AsyncConnectionProto, get_auth_ctx, get_user_id
+from langgraph_api.utils.headers import get_configurable_headers
 from langgraph_api.utils.uuids import uuid7
+from langgraph_api.webhook import validate_webhook_url_or_raise
 from langgraph_runtime.ops import Runs
 
+CrudRuns = GrpcRuns if FF_USE_CORE_API else Runs
+
+if TYPE_CHECKING:
+    from starlette.authentication import BaseUser
+
 logger = structlog.stdlib.get_logger(__name__)
 
 
@@ -84,13 +91,13 @@ class RunCreateDict(TypedDict):
     stream_mode: list[StreamMode] | StreamMode
     """One or more of "values", "messages", "updates" or "events".
     - "values": Stream the thread state any time it changes.
-    - "messages": Stream chat messages from thread state and calls to chat models, 
+    - "messages": Stream chat messages from thread state and calls to chat models,
       token-by-token where possible.
     - "updates": Stream the state updates returned by each node.
     - "events": Stream all events produced by sub-runs (eg. nodes, LLMs, etc.).
     - "custom": Stream custom events produced by your nodes.
-    
-    Note: __interrupt__ events are always included in the updates stream, even when "updates" 
+
+    Note: __interrupt__ events are always included in the updates stream, even when "updates"
     is not explicitly requested, to ensure interrupt events are always visible.
     """
     stream_subgraphs: bool | None
@@ -106,6 +113,8 @@ class RunCreateDict(TypedDict):
     """Create the thread if it doesn't exist. If False, reply with 404."""
     langsmith_tracer: LangSmithTracer | None
     """Configuration for additional tracing with LangSmith."""
+    durability: str | None
+    """Durability level for the run. Must be one of 'sync', 'async', or 'exit'."""
 
 
 def ensure_ids(
@@ -147,7 +156,7 @@ def ensure_ids(
            ) from None
        else:
            results.append(None)
-    return tuple(results)
+    return tuple(results)  # type: ignore[invalid-return-type]
 
 
 def assign_defaults(
@@ -166,81 +175,6 @@ def assign_defaults(
     return stream_mode, multitask_strategy, prevent_insert_if_inflight
 
 
-def get_user_id(user: BaseUser | None) -> str | None:
-    if user is None:
-        return None
-    try:
-        return user.identity
-    except NotImplementedError:
-        try:
-            return user.display_name
-        except NotImplementedError:
-            pass
-
-
-LANGSMITH_METADATA = "langsmith-metadata"
-LANGSMITH_TAGS = "langsmith-tags"
-LANGSMITH_PROJECT = "langsmith-project"
-
-
-# Default headers to exclude from run configuration for security
-DEFAULT_RUN_HEADERS_EXCLUDE = {"x-api-key", "x-tenant-id", "x-service-key"}
-
-
-def get_configurable_headers(headers: Mapping[str, str]) -> dict[str, str]:
-    """Extract headers that should be added to run configuration.
-
-    This function handles special cases like langsmith-trace and baggage headers,
-    while respecting the configurable header patterns.
-    """
-    configurable = {}
-
-    for key, value in headers.items():
-        # First handle tracing stuff - always included regardless of patterns
-        if key == "langsmith-trace":
-            configurable[key] = value
-            if baggage := headers.get("baggage"):
-                for item in baggage.split(","):
-                    baggage_key, baggage_value = item.split("=")
-                    if (
-                        baggage_key == LANGSMITH_METADATA
-                        and baggage_key not in configurable
-                    ):
-                        configurable[baggage_key] = orjson.loads(
-                            urllib.parse.unquote(baggage_value)
-                        )
-                    elif baggage_key == LANGSMITH_TAGS:
-                        configurable[baggage_key] = urllib.parse.unquote(
-                            baggage_value
-                        ).split(",")
-                    elif baggage_key == LANGSMITH_PROJECT:
-                        configurable[baggage_key] = urllib.parse.unquote(baggage_value)
-            continue
-
-        # Check if header should be included based on patterns
-        # For run configuration, we have specific default behavior for x-* headers
-        if key.startswith("x-"):
-            # Check against default excludes for x-* headers
-            if key in DEFAULT_RUN_HEADERS_EXCLUDE:
-                # Check if explicitly included via patterns
-                if should_include_header(key):
-                    configurable[key] = value
-                continue
-            # Other x-* headers are included by default unless patterns exclude them
-            if should_include_header(key):
-                configurable[key] = value
-        elif key == "user-agent":
-            # user-agent is included by default unless excluded by patterns
-            if should_include_header(key):
-                configurable[key] = value
-        else:
-            # All other headers only included if patterns allow
-            if should_include_header(key):
-                configurable[key] = value
-
-    return configurable
-
-
 async def create_valid_run(
     conn: AsyncConnectionProto,
     thread_id: str | None,
@@ -301,10 +235,12 @@ async def create_valid_run(
     if checkpoint := payload.get("checkpoint"):
         configurable.update(checkpoint)
     configurable.update(get_configurable_headers(headers))
+    inject_current_trace_context(configurable)
     ctx = get_auth_ctx()
     if ctx:
-        user = cast(BaseUser | None, ctx.user)
+        user = cast("BaseUser | None", ctx.user)
         user_id = get_user_id(user)
+        # Store user as-is; encryption middleware will serialize if needed
         configurable["langgraph_auth_user"] = user
        configurable["langgraph_auth_user_id"] = user_id
        configurable["langgraph_auth_permissions"] = ctx.permissions
@@ -317,19 +253,43 @@ async def create_valid_run(
         configurable["__langsmith_example_id__"] = ls_tracing.get("example_id")
     if request_start_time:
         configurable["__request_start_time_ms__"] = request_start_time
-    after_seconds = payload.get("after_seconds", 0)
+    after_seconds = cast("int", payload.get("after_seconds", 0))
     configurable["__after_seconds__"] = after_seconds
+    # Note: encryption context is injected by encrypt_request → encrypt_json_if_needed
+    # as the __encryption_context__ marker. Worker reads it before decryption.
     put_time_start = time.time()
     if_not_exists = payload.get("if_not_exists", "reject")
 
-    run_coro = Runs.put(
-        conn,
-        assistant_id,
+    durability = payload.get("durability")
+    if durability is None:
+        checkpoint_during = payload.get("checkpoint_during")
+        durability = "async" if checkpoint_during in (None, True) else "exit"
+
+    if webhook := payload.get("webhook"):
+        await validate_webhook_url_or_raise(str(webhook))
+
+    # We can't pass payload directly because config and context have
+    # been modified above (with auth context, checkpoint info, etc.)
+    encrypted = await encrypt_request(
         {
+            "metadata": payload.get("metadata"),
             "input": payload.get("input"),
-            "command": payload.get("command"),
             "config": config,
             "context": context,
+            "command": payload.get("command"),
+        },
+        "run",
+        ["metadata", "input", "config", "context", "command"],
+    )
+
+    run_coro = CrudRuns.put(
+        conn,
+        assistant_id,
+        {
+            "input": encrypted.get("input"),
+            "command": encrypted.get("command"),
+            "config": encrypted.get("config"),
+            "context": encrypted.get("context"),
             "stream_mode": stream_mode,
             "interrupt_before": payload.get("interrupt_before"),
             "interrupt_after": payload.get("interrupt_after"),
@@ -339,8 +299,9 @@ async def create_valid_run(
             "subgraphs": payload.get("stream_subgraphs", False),
             "resumable": stream_resumable,
             "checkpoint_during": payload.get("checkpoint_during", True),
+            "durability": durability,
         },
-        metadata=payload.get("metadata"),
+        metadata=encrypted.get("metadata"),
         status="pending",
         user_id=user_id,
         thread_id=thread_id_,
@@ -389,7 +350,7 @@ async def create_valid_run(
     if multitask_strategy in ("interrupt", "rollback") and inflight_runs:
         with contextlib.suppress(HTTPException):
             # if we can't find the inflight runs again, we can proceeed
-            await Runs.cancel(
+            await CrudRuns.cancel(
                 conn,
                 [run["run_id"] for run in inflight_runs],
                 thread_id=thread_id_,
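Illustration, not part of the package: the durability defaulting added in create_valid_run keeps an explicit "durability" value, and otherwise maps checkpoint_during None/True to "async" and False to "exit". A standalone sketch of that mapping:

    def resolve_durability(payload: dict) -> str:
        # explicit durability wins; otherwise derive it from checkpoint_during
        durability = payload.get("durability")
        if durability is None:
            checkpoint_during = payload.get("checkpoint_during")
            durability = "async" if checkpoint_during in (None, True) else "exit"
        return durability

    assert resolve_durability({}) == "async"
    assert resolve_durability({"checkpoint_during": False}) == "exit"
    assert resolve_durability({"durability": "sync"}) == "sync"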