langgraph-api 0.4.19__py3-none-any.whl → 0.4.21__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic. Consult the package registry's advisory page for more details.

langgraph_api/api/runs.py CHANGED
@@ -104,7 +104,9 @@ async def stream_run(
104
104
  payload = await request.json(RunCreateStateful)
105
105
  on_disconnect = payload.get("on_disconnect", "continue")
106
106
  run_id = uuid7()
107
- async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
107
+
108
+ sub = await Runs.Stream.subscribe(run_id, thread_id)
109
+ try:
108
110
  async with connect() as conn:
109
111
  run = await create_valid_run(
110
112
  conn,
@@ -114,20 +116,32 @@ async def stream_run(
114
116
  run_id=run_id,
115
117
  request_start_time=request.scope.get("request_start_time_ms"),
116
118
  )
117
-
118
- return EventSourceResponse(
119
- Runs.Stream.join(
119
+ except Exception:
120
+ # Clean up the pubsub on errors
121
+ await sub.__aexit__(None, None, None)
122
+ raise
123
+
124
+ async def body():
125
+ try:
126
+ async for event, message, stream_id in Runs.Stream.join(
120
127
  run["run_id"],
121
128
  thread_id=thread_id,
122
129
  cancel_on_disconnect=on_disconnect == "cancel",
123
130
  stream_channel=sub,
124
131
  last_event_id=None,
125
- ),
126
- headers={
127
- "Location": f"/threads/{thread_id}/runs/{run['run_id']}/stream",
128
- "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
129
- },
130
- )
132
+ ):
133
+ yield event, message, stream_id
134
+ finally:
135
+ # Make sure to always clean up the pubsub
136
+ await sub.__aexit__(None, None, None)
137
+
138
+ return EventSourceResponse(
139
+ body(),
140
+ headers={
141
+ "Location": f"/threads/{thread_id}/runs/{run['run_id']}/stream",
142
+ "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
143
+ },
144
+ )
131
145
 
132
146
 
133
147
  async def stream_run_stateless(
@@ -139,7 +153,9 @@ async def stream_run_stateless(
139
153
  on_disconnect = payload.get("on_disconnect", "continue")
140
154
  run_id = uuid7()
141
155
  thread_id = uuid4()
142
- async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
156
+
157
+ sub = await Runs.Stream.subscribe(run_id, thread_id)
158
+ try:
143
159
  async with connect() as conn:
144
160
  run = await create_valid_run(
145
161
  conn,
@@ -150,21 +166,33 @@ async def stream_run_stateless(
150
166
  request_start_time=request.scope.get("request_start_time_ms"),
151
167
  temporary=True,
152
168
  )
153
-
154
- return EventSourceResponse(
155
- Runs.Stream.join(
169
+ except Exception:
170
+ # Clean up the pubsub on errors
171
+ await sub.__aexit__(None, None, None)
172
+ raise
173
+
174
+ async def body():
175
+ try:
176
+ async for event, message, stream_id in Runs.Stream.join(
156
177
  run["run_id"],
157
178
  thread_id=run["thread_id"],
158
179
  ignore_404=True,
159
180
  cancel_on_disconnect=on_disconnect == "cancel",
160
181
  stream_channel=sub,
161
182
  last_event_id=None,
162
- ),
163
- headers={
164
- "Location": f"/runs/{run['run_id']}/stream",
165
- "Content-Location": f"/runs/{run['run_id']}",
166
- },
167
- )
183
+ ):
184
+ yield event, message, stream_id
185
+ finally:
186
+ # Make sure to always clean up the pubsub
187
+ await sub.__aexit__(None, None, None)
188
+
189
+ return EventSourceResponse(
190
+ body(),
191
+ headers={
192
+ "Location": f"/runs/{run['run_id']}/stream",
193
+ "Content-Location": f"/runs/{run['run_id']}",
194
+ },
195
+ )
168
196
 
169
197
 
170
198
  @retry_db
@@ -174,7 +202,8 @@ async def wait_run(request: ApiRequest):
174
202
  payload = await request.json(RunCreateStateful)
175
203
  on_disconnect = payload.get("on_disconnect", "continue")
176
204
  run_id = uuid7()
177
- async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
205
+ sub = await Runs.Stream.subscribe(run_id, thread_id)
206
+ try:
178
207
  async with connect() as conn:
179
208
  run = await create_valid_run(
180
209
  conn,
@@ -184,43 +213,44 @@ async def wait_run(request: ApiRequest):
184
213
  run_id=run_id,
185
214
  request_start_time=request.scope.get("request_start_time_ms"),
186
215
  )
187
-
188
- last_chunk = ValueEvent()
189
-
190
- async def consume():
191
- vchunk: bytes | None = None
192
- async for mode, chunk, _ in Runs.Stream.join(
193
- run["run_id"],
194
- thread_id=run["thread_id"],
195
- stream_channel=sub,
196
- cancel_on_disconnect=on_disconnect == "cancel",
197
- ):
198
- if (
199
- mode == b"values"
200
- or mode == b"updates"
201
- and b"__interrupt__" in chunk
202
- ):
203
- vchunk = chunk
204
- elif mode == b"error":
205
- vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
206
- if vchunk is not None:
207
- last_chunk.set(vchunk)
208
- else:
209
- async with connect() as conn:
210
- thread_iter = await Threads.get(conn, thread_id)
211
- try:
212
- thread = await anext(thread_iter)
213
- last_chunk.set(thread["values"])
214
- except StopAsyncIteration:
215
- await logger.awarning(
216
- f"No checkpoint found for thread {thread_id}",
217
- thread_id=thread_id,
218
- )
219
- last_chunk.set(b"{}")
220
-
221
- # keep the connection open by sending whitespace every 5 seconds
222
- # leading whitespace will be ignored by json parsers
223
- async def body() -> AsyncIterator[bytes]:
216
+ except Exception:
217
+ # Clean up the pubsub on errors
218
+ await sub.__aexit__(None, None, None)
219
+ raise
220
+
221
+ last_chunk = ValueEvent()
222
+
223
+ async def consume():
224
+ vchunk: bytes | None = None
225
+ async for mode, chunk, _ in Runs.Stream.join(
226
+ run["run_id"],
227
+ thread_id=run["thread_id"],
228
+ stream_channel=sub,
229
+ cancel_on_disconnect=on_disconnect == "cancel",
230
+ ):
231
+ if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
232
+ vchunk = chunk
233
+ elif mode == b"error":
234
+ vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
235
+ if vchunk is not None:
236
+ last_chunk.set(vchunk)
237
+ else:
238
+ async with connect() as conn:
239
+ thread_iter = await Threads.get(conn, thread_id)
240
+ try:
241
+ thread = await anext(thread_iter)
242
+ last_chunk.set(thread["values"])
243
+ except StopAsyncIteration:
244
+ await logger.awarning(
245
+ f"No checkpoint found for thread {thread_id}",
246
+ thread_id=thread_id,
247
+ )
248
+ last_chunk.set(b"{}")
249
+
250
+ # keep the connection open by sending whitespace every 5 seconds
251
+ # leading whitespace will be ignored by json parsers
252
+ async def body() -> AsyncIterator[bytes]:
253
+ try:
224
254
  stream = asyncio.create_task(consume())
225
255
  while True:
226
256
  try:
@@ -235,15 +265,18 @@ async def wait_run(request: ApiRequest):
235
265
  stream.cancel()
236
266
  await stream
237
267
  raise
238
-
239
- return StreamingResponse(
240
- body(),
241
- media_type="application/json",
242
- headers={
243
- "Location": f"/threads/{thread_id}/runs/{run['run_id']}/join",
244
- "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
245
- },
246
- )
268
+ finally:
269
+ # Make sure to always clean up the pubsub
270
+ await sub.__aexit__(None, None, None)
271
+
272
+ return StreamingResponse(
273
+ body(),
274
+ media_type="application/json",
275
+ headers={
276
+ "Location": f"/threads/{thread_id}/runs/{run['run_id']}/join",
277
+ "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
278
+ },
279
+ )
247
280
 
248
281
 
249
282
  @retry_db
@@ -254,7 +287,9 @@ async def wait_run_stateless(request: ApiRequest):
254
287
  on_disconnect = payload.get("on_disconnect", "continue")
255
288
  run_id = uuid7()
256
289
  thread_id = uuid4()
257
- async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
290
+
291
+ sub = await Runs.Stream.subscribe(run_id, thread_id)
292
+ try:
258
293
  async with connect() as conn:
259
294
  run = await create_valid_run(
260
295
  conn,
@@ -265,40 +300,41 @@ async def wait_run_stateless(request: ApiRequest):
265
300
  request_start_time=request.scope.get("request_start_time_ms"),
266
301
  temporary=True,
267
302
  )
268
-
269
- last_chunk = ValueEvent()
270
-
271
- async def consume():
272
- vchunk: bytes | None = None
273
- async for mode, chunk, _ in Runs.Stream.join(
274
- run["run_id"],
303
+ except Exception:
304
+ # Clean up the pubsub on errors
305
+ await sub.__aexit__(None, None, None)
306
+ raise
307
+
308
+ last_chunk = ValueEvent()
309
+
310
+ async def consume():
311
+ vchunk: bytes | None = None
312
+ async for mode, chunk, _ in Runs.Stream.join(
313
+ run["run_id"],
314
+ thread_id=run["thread_id"],
315
+ stream_channel=sub,
316
+ ignore_404=True,
317
+ cancel_on_disconnect=on_disconnect == "cancel",
318
+ ):
319
+ if mode == b"values" or mode == b"updates" and b"__interrupt__" in chunk:
320
+ vchunk = chunk
321
+ elif mode == b"error":
322
+ vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
323
+ if vchunk is not None:
324
+ last_chunk.set(vchunk)
325
+ else:
326
+ # we can't fetch the thread (it was deleted), so just return empty values
327
+ await logger.awarning(
328
+ "No checkpoint emitted for stateless run",
329
+ run_id=run["run_id"],
275
330
  thread_id=run["thread_id"],
276
- stream_channel=sub,
277
- ignore_404=True,
278
- cancel_on_disconnect=on_disconnect == "cancel",
279
- ):
280
- if (
281
- mode == b"values"
282
- or mode == b"updates"
283
- and b"__interrupt__" in chunk
284
- ):
285
- vchunk = chunk
286
- elif mode == b"error":
287
- vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
288
- if vchunk is not None:
289
- last_chunk.set(vchunk)
290
- else:
291
- # we can't fetch the thread (it was deleted), so just return empty values
292
- await logger.awarning(
293
- "No checkpoint emitted for stateless run",
294
- run_id=run["run_id"],
295
- thread_id=run["thread_id"],
296
- )
297
- last_chunk.set(b"{}")
298
-
299
- # keep the connection open by sending whitespace every 5 seconds
300
- # leading whitespace will be ignored by json parsers
301
- async def body() -> AsyncIterator[bytes]:
331
+ )
332
+ last_chunk.set(b"{}")
333
+
334
+ # keep the connection open by sending whitespace every 5 seconds
335
+ # leading whitespace will be ignored by json parsers
336
+ async def body() -> AsyncIterator[bytes]:
337
+ try:
302
338
  stream = asyncio.create_task(consume())
303
339
  while True:
304
340
  try:
@@ -313,15 +349,18 @@ async def wait_run_stateless(request: ApiRequest):
313
349
  stream.cancel("Run stream cancelled")
314
350
  await stream
315
351
  raise
316
-
317
- return StreamingResponse(
318
- body(),
319
- media_type="application/json",
320
- headers={
321
- "Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}/join",
322
- "Content-Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}",
323
- },
324
- )
352
+ finally:
353
+ # Make sure to always clean up the pubsub
354
+ await sub.__aexit__(None, None, None)
355
+
356
+ return StreamingResponse(
357
+ body(),
358
+ media_type="application/json",
359
+ headers={
360
+ "Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}/join",
361
+ "Content-Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}",
362
+ },
363
+ )
325
364
 
326
365
 
327
366
  @retry_db
@@ -402,14 +441,21 @@ async def join_run_stream(request: ApiRequest):
402
441
  validate_uuid(run_id, "Invalid run ID: must be a UUID")
403
442
  stream_mode = request.query_params.get("stream_mode") or []
404
443
  last_event_id = request.headers.get("last-event-id") or None
444
+
445
+ async def body():
446
+ async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
447
+ async for event, message, stream_id in Runs.Stream.join(
448
+ run_id,
449
+ thread_id=thread_id,
450
+ cancel_on_disconnect=cancel_on_disconnect,
451
+ stream_channel=sub,
452
+ stream_mode=stream_mode,
453
+ last_event_id=last_event_id,
454
+ ):
455
+ yield event, message, stream_id
456
+
405
457
  return EventSourceResponse(
406
- Runs.Stream.join(
407
- run_id,
408
- thread_id=thread_id,
409
- cancel_on_disconnect=cancel_on_disconnect,
410
- stream_mode=stream_mode,
411
- last_event_id=last_event_id,
412
- ),
458
+ body(),
413
459
  )
414
460
 
415
461
 
langgraph_api/cli.py CHANGED
@@ -11,12 +11,17 @@ from typing import Literal
11
11
  from typing_extensions import TypedDict
12
12
 
13
13
  if typing.TYPE_CHECKING:
14
+ from packaging.version import Version
15
+
14
16
  from langgraph_api.config import HttpConfig, StoreConfig
15
17
 
16
18
  logging.basicConfig(level=logging.INFO)
17
19
  logger = logging.getLogger(__name__)
18
20
 
19
21
 
22
+ SUPPORT_STATUS = Literal["active", "critical", "eol"]
23
+
24
+
20
25
  def _get_ls_origin() -> str | None:
21
26
  from langsmith.client import Client
22
27
  from langsmith.utils import tracing_is_enabled
@@ -121,46 +126,6 @@ class AuthConfig(TypedDict, total=False):
121
126
  cache: CacheConfig | None
122
127
 
123
128
 
124
- def _check_newer_version(pkg: str, timeout: float = 0.2) -> None:
125
- """Log a notice if PyPI reports a newer version."""
126
- import importlib.metadata as md
127
- import json
128
- import urllib.request
129
-
130
- from packaging.version import Version
131
-
132
- thread_logger = logging.getLogger("check_version")
133
- if not thread_logger.handlers:
134
- handler = logging.StreamHandler()
135
- handler.setFormatter(logging.Formatter("%(message)s"))
136
- thread_logger.addHandler(handler)
137
-
138
- try:
139
- current = Version(md.version(pkg))
140
- with urllib.request.urlopen(
141
- f"https://pypi.org/pypi/{pkg}/json", timeout=timeout
142
- ) as resp:
143
- latest_str = json.load(resp)["info"]["version"]
144
- latest = Version(latest_str)
145
- if latest > current:
146
- thread_logger.info(
147
- "🔔 A newer version of %s is available: %s → %s (pip install -U %s)",
148
- pkg,
149
- current,
150
- latest,
151
- pkg,
152
- )
153
-
154
- except Exception:
155
- pass
156
-
157
- except RuntimeError:
158
- thread_logger.info(
159
- f"Failed to check for newer version of {pkg}."
160
- " To disable version checks, set LANGGRAPH_NO_VERSION_CHECK=true"
161
- )
162
-
163
-
164
129
  def run_server(
165
130
  host: str = "127.0.0.1",
166
131
  port: int = 2024,
@@ -362,8 +327,12 @@ For production use, please use LangGraph Platform.
362
327
  threading.Thread(target=_open_browser, daemon=True).start()
363
328
  nvc = os.getenv("LANGGRAPH_NO_VERSION_CHECK")
364
329
  if nvc is None or nvc.lower() not in ("true", "1"):
330
+ from langgraph_api import __version__
331
+
365
332
  threading.Thread(
366
- target=_check_newer_version, args=("langgraph-api",), daemon=True
333
+ target=_check_newer_version,
334
+ args=("langgraph-api", __version__),
335
+ daemon=True,
367
336
  ).start()
368
337
  supported_kwargs = {
369
338
  k: v
@@ -471,5 +440,133 @@ def main():
471
440
  )
472
441
 
473
442
 
443
+ def _check_newer_version(pkg: str, current_version: str, timeout: float = 0.5) -> None:
444
+ """Check PyPI for newer versions and log support status.
445
+
446
+ Critical = one minor behind on same major, OR latest minor of previous major while latest is X.0.*
447
+ EOL = two+ minors behind on same major, OR any previous major after X.1.*
448
+ """
449
+ import json
450
+ import urllib.request
451
+
452
+ from packaging.version import InvalidVersion, Version
453
+
454
+ log = logging.getLogger("version_check")
455
+ if not log.handlers:
456
+ h = logging.StreamHandler()
457
+ h.setFormatter(logging.Formatter("%(message)s"))
458
+ log.addHandler(h)
459
+
460
+ if os.getenv("LANGGRAPH_NO_VERSION_CHECK", "").lower() in ("true", "1"):
461
+ return
462
+
463
+ def _parse(v: str) -> Version | None:
464
+ try:
465
+ return Version(v)
466
+ except InvalidVersion:
467
+ return None
468
+
469
+ try:
470
+ current = Version(current_version)
471
+ except InvalidVersion:
472
+ log.info(
473
+ f"[version] Could not parse installed version {current_version!r}. Skipping support check."
474
+ )
475
+ return
476
+
477
+ try:
478
+ with urllib.request.urlopen(
479
+ f"https://pypi.org/pypi/{pkg}/json", timeout=timeout
480
+ ) as resp:
481
+ payload = json.load(resp)
482
+ latest_str = payload["info"]["version"]
483
+ latest = Version(latest_str)
484
+ releases: dict[str, list[dict]] = payload.get("releases", {})
485
+ except Exception:
486
+ log.debug("Failed to retrieve latest version info for %s", pkg)
487
+ return
488
+ prev_major_latest_minor: Version | None = None
489
+ if latest.major > 0:
490
+ pm = latest.major - 1
491
+ prev_major_versions = [
492
+ v
493
+ for s in releases
494
+ if (v := _parse(s)) is not None and not v.is_prerelease and v.major == pm
495
+ ]
496
+ if prev_major_versions:
497
+ prev_major_latest_minor = max(
498
+ prev_major_versions, key=lambda v: (v.major, v.minor, v.micro)
499
+ )
500
+
501
+ if latest > current and not current.is_prerelease:
502
+ log.info(
503
+ "[version] A newer version of %s is available: %s → %s (pip install -U %s)",
504
+ pkg,
505
+ current,
506
+ latest,
507
+ pkg,
508
+ )
509
+
510
+ level = _support_level(current, latest, prev_major_latest_minor)
511
+ changelog = (
512
+ "https://docs.langchain.com/langgraph-platform/langgraph-server-changelog"
513
+ )
514
+
515
+ if level == "critical":
516
+ # Distinguish same-major vs cross-major grace in the wording
517
+ if current.major == latest.major and current.minor == latest.minor - 1:
518
+ tail = "You are one minor version behind the latest (%d.%d.x).\n"
519
+ else:
520
+ tail = "You are on the latest minor of the previous major while a new major (%d.%d.x) just released.\n"
521
+ log.info(
522
+ "⚠️ [support] %s %s is in Critical support.\n"
523
+ "Only critical security and installation fixes are provided.\n"
524
+ + tail
525
+ + "Please plan an upgrade soon. See changelog: %s",
526
+ pkg,
527
+ current,
528
+ latest.major,
529
+ latest.minor,
530
+ changelog,
531
+ )
532
+ elif level == "eol":
533
+ log.info(
534
+ "⚠️ [support] %s %s is End of Life.\n"
535
+ "No bug fixes or security updates will be provided.\n"
536
+ "You are two or more minor versions behind the latest (%d.%d.x).\n"
537
+ "You should upgrade immediately. See changelog: %s",
538
+ pkg,
539
+ current,
540
+ latest.major,
541
+ latest.minor,
542
+ changelog,
543
+ )
544
+
545
+
546
+ def _support_level(
547
+ cur: "Version", lat: "Version", prev_major_latest_minor: "Version | None"
548
+ ) -> SUPPORT_STATUS:
549
+ if cur.major > lat.major:
550
+ return "active"
551
+ if cur.major == lat.major:
552
+ if cur.minor == lat.minor:
553
+ return "active"
554
+ if cur.minor == lat.minor - 1:
555
+ return "critical"
556
+ if cur.minor <= lat.minor - 2:
557
+ return "eol"
558
+ return "active"
559
+
560
+ if cur.major == lat.major - 1 and lat.minor == 0:
561
+ if (
562
+ prev_major_latest_minor is not None
563
+ and cur.minor == prev_major_latest_minor.minor
564
+ ):
565
+ return "critical"
566
+ return "eol"
567
+
568
+ return "eol"
569
+
570
+
474
571
  if __name__ == "__main__":
475
572
  main()
@@ -18,7 +18,7 @@
18
18
  "@typescript/vfs": "^1.6.0",
19
19
  "dedent": "^1.5.3",
20
20
  "exit-hook": "^4.0.0",
21
- "hono": "^4.5.4",
21
+ "hono": "^4.9.7",
22
22
  "p-queue": "^8.0.1",
23
23
  "p-retry": "^6.2.0",
24
24
  "tsx": "^4.19.3",
@@ -974,10 +974,10 @@ has-flag@^4.0.0:
974
974
  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
975
975
  integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
976
976
 
977
- hono@^4.5.4:
978
- version "4.6.14"
979
- resolved "https://registry.yarnpkg.com/hono/-/hono-4.6.14.tgz#f83f51e81b8ae5611dab459570990bf4c977d20c"
980
- integrity sha512-j4VkyUp2xazGJ8eCCLN1Vm/bxdvm/j5ZuU9AIjLu9vapn2M44p9L3Ktr9Vnb2RN2QtcR/wVjZVMlT5k7GJQgPw==
977
+ hono@^4.5.4, hono@^4.9.7:
978
+ version "4.9.7"
979
+ resolved "https://registry.yarnpkg.com/hono/-/hono-4.9.7.tgz#8ac244477397de71d7d3d393fad129209b5b631e"
980
+ integrity sha512-t4Te6ERzIaC48W3x4hJmBwgNlLhmiEdEE5ViYb02ffw4ignHNHa5IBtPjmbKstmtKa8X6C35iWwK4HaqvrzG9w==
981
981
 
982
982
  icss-utils@^5.0.0, icss-utils@^5.1.0:
983
983
  version "5.1.0"
langgraph_api/metadata.py CHANGED
@@ -7,6 +7,8 @@ import langgraph.version
7
7
  import orjson
8
8
  import structlog
9
9
 
10
+ import langgraph_api.config as config
11
+ from langgraph_api.auth.custom import get_auth_instance
10
12
  from langgraph_api.config import (
11
13
  LANGGRAPH_CLOUD_LICENSE_KEY,
12
14
  LANGSMITH_API_KEY,
@@ -17,7 +19,9 @@ from langgraph_api.config import (
17
19
  USES_STORE_TTL,
18
20
  USES_THREAD_TTL,
19
21
  )
22
+ from langgraph_api.graph import GRAPHS, is_js_graph
20
23
  from langgraph_api.http import http_request
24
+ from langgraph_api.js.base import is_js_path
21
25
  from langgraph_license.validation import plus_features_enabled
22
26
 
23
27
  logger = structlog.stdlib.get_logger(__name__)
@@ -72,6 +76,36 @@ if LANGSMITH_AUTH_ENDPOINT:
72
76
  )
73
77
 
74
78
 
79
+ def _lang_usage_metadata() -> tuple[dict[str, str], dict[str, int]]:
80
+ js_graph_count = sum(1 for graph_id in GRAPHS if is_js_graph(graph_id))
81
+ py_graph_count = len(GRAPHS) - js_graph_count
82
+
83
+ auth_instance = get_auth_instance()
84
+ custom_auth_enabled = auth_instance is not None
85
+ custom_js_auth_enabled = auth_instance == "js"
86
+
87
+ js_proxy_middleware_enabled = False
88
+ if (
89
+ config.HTTP_CONFIG
90
+ and (app := config.HTTP_CONFIG.get("app"))
91
+ and isinstance(app, str)
92
+ ):
93
+ app_path = app.split(":", 1)[0] # type: ignore[possibly-unresolved-reference]
94
+ js_proxy_middleware_enabled = is_js_path(app_path)
95
+
96
+ tags = {
97
+ "langgraph.platform.uses_custom_auth": str(custom_auth_enabled),
98
+ "langgraph.platform.uses_js_custom_auth": str(custom_js_auth_enabled),
99
+ "langgraph.platform.uses_js_proxy_middleware": str(js_proxy_middleware_enabled),
100
+ }
101
+ measures = {
102
+ "langgraph.platform.py_graphs": py_graph_count,
103
+ "langgraph.platform.js_graphs": js_graph_count,
104
+ }
105
+
106
+ return tags, measures
107
+
108
+
75
109
  def incr_runs(*, incr: int = 1) -> None:
76
110
  global RUN_COUNTER
77
111
  RUN_COUNTER += incr
@@ -111,6 +145,7 @@ async def metadata_loop() -> None:
111
145
  RUN_COUNTER = 0
112
146
  NODE_COUNTER = 0
113
147
  FROM_TIMESTAMP = to_timestamp
148
+ usage_tags, usage_measures = _lang_usage_metadata()
114
149
 
115
150
  base_payload = {
116
151
  "from_timestamp": from_timestamp,
@@ -131,10 +166,12 @@ async def metadata_loop() -> None:
131
166
  "user_app.uses_custom_auth": str(USES_CUSTOM_AUTH),
132
167
  "user_app.uses_thread_ttl": str(USES_THREAD_TTL),
133
168
  "user_app.uses_store_ttl": str(USES_STORE_TTL),
169
+ **usage_tags,
134
170
  },
135
171
  "measures": {
136
172
  "langgraph.platform.runs": runs,
137
173
  "langgraph.platform.nodes": nodes,
174
+ **usage_measures,
138
175
  },
139
176
  "logs": [],
140
177
  }