langgraph-api 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of langgraph-api might be problematic.

langgraph_api/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.1.5"
+ __version__ = "0.1.7"
@@ -20,6 +20,7 @@ from langgraph_api.api.ui import ui_routes
  from langgraph_api.auth.middleware import auth_middleware
  from langgraph_api.config import HTTP_CONFIG, MIGRATIONS_PATH
  from langgraph_api.graph import js_bg_tasks
+ from langgraph_api.js.base import is_js_path
  from langgraph_api.validation import DOCS_HTML
  from langgraph_runtime.database import connect, healthcheck

@@ -89,6 +90,12 @@ def load_custom_app(app_import: str) -> Starlette | None:
      # Expect a string in either "path/to/file.py:my_variable" or "some.module.in:my_variable"
      logger.info(f"Loading custom app from {app_import}")
      path, name = app_import.rsplit(":", 1)
+
+     # skip loading custom app if it's a js path
+     # we are handling this in `langgraph_api.js.remote.JSCustomHTTPProxyMiddleware`
+     if is_js_path(path):
+         return None
+
      try:
          os.environ["__LANGGRAPH_DEFER_LOOPBACK_TRANSPORT"] = "true"
          if os.path.isfile(path) or path.endswith(".py"):
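
The new guard short-circuits Python-side app loading whenever the configured app path points at a JS/TS file, leaving those requests to JSCustomHTTPProxyMiddleware (added further down). is_js_path itself is not included in this diff; a minimal sketch of what such a check plausibly looks like, with an assumed extension list:

    # Hypothetical sketch only: langgraph_api.js.base.is_js_path is not shown in this
    # diff, so the extension list below is an assumption for illustration.
    JS_EXTENSIONS = (".js", ".mjs", ".cjs", ".ts", ".mts", ".cts")

    def is_js_path(path: str) -> bool:
        # e.g. "src/app.mts" -> True; "my_app/app.py" or "my_app.module" -> False
        return path.endswith(JS_EXTENSIONS)

With a guard like that, load_custom_app("src/app.mts:app") returns None and routing for the JS app is deferred to the proxy middleware instead.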
langgraph_api/api/meta.py CHANGED
@@ -2,7 +2,7 @@ import os

  from starlette.responses import JSONResponse, PlainTextResponse

- from langgraph_api import config
+ from langgraph_api import config, metadata
  from langgraph_api.route import ApiRequest
  from langgraph_license.validation import plus_features_enabled
  from langgraph_runtime.database import connect, pool_stats
@@ -20,7 +20,13 @@ async def meta_info(request: ApiRequest):
                  "assistants": True,
                  "crons": plus and config.FF_CRONS_ENABLED,
                  "langsmith": bool(config.LANGSMITH_API_KEY) and bool(config.TRACING),
-             }
+             },
+             "host": {
+                 "kind": metadata.HOST,
+                 "project_id": metadata.PROJECT_ID,
+                 "revision_id": metadata.REVISION,
+                 "tenant_id": metadata.TENANT_ID,
+             },
          }
      )

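meta_info now reports deployment/host information next to the existing feature flags, pulled from the new langgraph_api.metadata import. A rough sketch of the added block as it would appear in the endpoint's JSON body; the values (and any surrounding keys not visible in this hunk) are placeholders, not taken from the release:

    # Illustrative shape of the new "host" block in the meta response (placeholder values).
    host_block = {
        "host": {
            "kind": None,         # metadata.HOST
            "project_id": None,   # metadata.PROJECT_ID
            "revision_id": None,  # metadata.REVISION
            "tenant_id": None,    # metadata.TENANT_ID
        },
    }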
langgraph_api/cli.py CHANGED
@@ -136,6 +136,7 @@ def run_server(
      disable_persistence: bool = False,
      allow_blocking: bool = False,
      runtime_edition: Literal["inmem", "community", "postgres"] = "inmem",
+     server_level: str = "WARNING",
      **kwargs: typing.Any,
  ):
      """Run the LangGraph API server."""
@@ -149,8 +150,10 @@

      env_vars = env if isinstance(env, Mapping) else None
      mount_prefix = None
-     if http:
+     if http is not None and http.get("mount_prefix") is not None:
          mount_prefix = http.get("mount_prefix")
+     if os.environ.get("LANGGRAPH_MOUNT_PREFIX"):
+         mount_prefix = os.environ.get("LANGGRAPH_MOUNT_PREFIX")
      if isinstance(env, str | pathlib.Path):
          try:
              from dotenv.main import DotEnv
@@ -289,7 +292,7 @@ For production use, please use LangGraph Cloud.
          for k, v in kwargs.items()
          if k in inspect.signature(uvicorn.run).parameters
      }
-
+     server_level = server_level.upper()
      uvicorn.run(
          "langgraph_api.server:app",
          host=host,
@@ -315,6 +318,11 @@ For production use, please use LangGraph Cloud.
                      "stream": "ext://sys.stdout",
                  }
              },
+             "loggers": {
+                 "uvicorn": {"level": server_level},
+                 "uvicorn.error": {"level": server_level},
+                 "langgraph_api.server": {"level": server_level},
+             },
              "root": {"handlers": ["console"]},
          },
          **supported_kwargs,
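
Two behavior changes in cli.py: the mount prefix can now also be supplied through the LANGGRAPH_MOUNT_PREFIX environment variable (overriding the http config value), and the new server_level argument is uppercased and applied to the uvicorn and server loggers in the log config passed to uvicorn.run. A sketch of the resulting logger fragment, assuming server_level="warning":

    # Sketch of the logging fragment produced above for server_level="warning".
    server_level = "warning".upper()  # -> "WARNING"
    loggers_fragment = {
        "loggers": {
            "uvicorn": {"level": server_level},
            "uvicorn.error": {"level": server_level},
            "langgraph_api.server": {"level": server_level},
        },
        "root": {"handlers": ["console"]},
    }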
langgraph_api/graph.py CHANGED
@@ -289,6 +289,7 @@ async def collect_graphs_from_env(register: bool = False) -> None:

        from langgraph_api.js.remote import (
            RemotePregel,
+           run_js_http_process,
            run_js_process,
            run_remote_checkpointer,
            wait_until_js_ready,
@@ -306,6 +307,22 @@ async def collect_graphs_from_env(register: bool = False) -> None:
                  name="remote-graphs",
              )
          )
+
+         if (
+             config.HTTP_CONFIG
+             and config.HTTP_CONFIG.get("app")
+             and is_js_path(config.HTTP_CONFIG.get("app").split(":")[0])
+         ):
+             js_bg_tasks.add(
+                 asyncio.create_task(
+                     run_js_http_process(
+                         paths_str,
+                         config.HTTP_CONFIG.get("app"),
+                         watch="--reload" in sys.argv[1:],
+                     ),
+                 )
+             )
+
          for task in js_bg_tasks:
              task.add_done_callback(_handle_exception)

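collect_graphs_from_env now also spawns a JS HTTP sidecar, but only when HTTP_CONFIG["app"] points at a JS/TS file; Python app paths keep going through load_custom_app. A small illustration of the branch, using hypothetical app paths (requires langgraph-api installed for the is_js_path import):

    # Hypothetical app paths, to illustrate which branch the guard above takes.
    from langgraph_api.js.base import is_js_path

    for app in ("./src/app.mts:app", "./src/app.py:app"):
        path = app.split(":")[0]
        if is_js_path(path):
            print(app, "-> served by the JS HTTP sidecar (run_js_http_process)")
        else:
            print(app, "-> loaded in-process via load_custom_app")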
@@ -0,0 +1,3 @@
+ {
+   "semi": true
+ }
@@ -0,0 +1,142 @@
+ /// <reference types="./global.d.ts" />
+
+ import type { Hono } from "hono";
+ import { serve } from "@hono/node-server";
+ import * as path from "node:path";
+ import * as url from "node:url";
+ import { createLogger, format, transports } from "winston";
+ import { gracefulExit } from "exit-hook";
+ import { z } from "zod";
+
+ const logger = createLogger({
+   level: "debug",
+   format: format.combine(
+     format.errors({ stack: true }),
+     format.timestamp(),
+     format.json(),
+     format.printf((info) => {
+       const { timestamp, level, message, ...rest } = info;
+
+       let event;
+       if (typeof message === "string") {
+         event = message;
+       } else {
+         event = JSON.stringify(message);
+       }
+
+       if (rest.stack) {
+         rest.message = event;
+         event = rest.stack;
+       }
+
+       return JSON.stringify({ timestamp, level, event, ...rest });
+     }),
+   ),
+   transports: [
+     new transports.Console({
+       handleExceptions: true,
+       handleRejections: true,
+     }),
+   ],
+ });
+
+ const HTTP_PORT = 5557;
+
+ const wrapHonoApp = (app: Hono) => {
+   // We do this to avoid importing Hono from server dependencies
+   // b/c the user's Hono version might be different than ours.
+   // See warning here: https://hono.dev/docs/guides/middleware#built-in-middleware
+   const newApp = new (Object.getPrototypeOf(app).constructor)() as Hono<{
+     Variables: { body: string | ArrayBuffer | ReadableStream | null };
+   }>;
+
+   // This endpoint is used to check if we can yield the routing to the Python server early.
+   // Note: will always yield if user added a custom middleware, as in that case
+   // the router will always find a suitable handler.
+   newApp.options("/__langgraph_check", (c) => {
+     const method = c.req.header("x-langgraph-method");
+     const path = c.req.header("x-langgraph-path");
+     if (!method || !path) return c.body(null, 400);
+
+     const [handlers] = app.router.match(method, path);
+     if (handlers.length === 0) return c.body(null, 404);
+     return c.body(null, 200);
+   });
+
+   newApp.route("/", app);
+
+   // `notFound` handler is overriden here to yield back to the Python server
+   // alongside any accumulated headers from middlewares.
+   // TODO: figure out how to compose the user-land `notFound` handler.
+   newApp.notFound(async (c) => {
+     // send the request body back to the Python server
+     // Use the cached body in-case the user mutated the body
+     let payload: any = null;
+     try {
+       payload = JSON.stringify(await c.req.json()) ?? null;
+     } catch {
+       // pass
+     }
+
+     return c.body(payload, {
+       status: 404,
+       // This header is set to denote user-land 404s vs internal 404s.
+       headers: {
+         "x-langgraph-status": "not-found",
+         "x-langgraph-body": payload != null ? "true" : "false",
+       },
+     });
+   });
+
+   return newApp;
+ };
+
+ async function registerHttp(appPath: string, options: { cwd: string }) {
+   const [userFile, exportSymbol] = appPath.split(":", 2);
+   const sourceFile = path.resolve(options.cwd, userFile);
+
+   const user = (await import(url.pathToFileURL(sourceFile).toString()).then(
+     (module) => module[exportSymbol || "default"],
+   )) as Hono | undefined;
+
+   if (!user) throw new Error(`Failed to load HTTP app: ${appPath}`);
+   return wrapHonoApp(user);
+ }
+
+ async function main() {
+   const http = z
+     .object({
+       app: z.string().optional(),
+       disable_assistants: z.boolean().default(false),
+       disable_threads: z.boolean().default(false),
+       disable_runs: z.boolean().default(false),
+       disable_store: z.boolean().default(false),
+       disable_meta: z.boolean().default(false),
+       cors: z
+         .object({
+           allow_origins: z.array(z.string()).optional(),
+           allow_methods: z.array(z.string()).optional(),
+           allow_headers: z.array(z.string()).optional(),
+           allow_credentials: z.boolean().optional(),
+           allow_origin_regex: z.string().optional(),
+           expose_headers: z.array(z.string()).optional(),
+           max_age: z.number().optional(),
+         })
+         .optional(),
+     })
+     .parse(JSON.parse(process.env.LANGGRAPH_HTTP ?? "{}"));
+
+   if (!http.app) throw new Error("No HTTP app path provided");
+   const app = await registerHttp(http.app, { cwd: process.cwd() });
+
+   serve({ fetch: app.fetch, hostname: "localhost", port: HTTP_PORT }, (c) =>
+     logger.info(`Listening to ${c.address}:${c.port}`),
+   );
+ }
+
+ process.on("uncaughtExceptionMonitor", (error) => {
+   logger.error(error);
+   gracefulExit();
+ });
+
+ main();
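
This 142-line addition appears to be the new langgraph_api/js/client.http.mts entrypoint that run_js_http_process (below) launches with tsx: it imports the user's Hono app, wraps it, and serves it on port 5557. The OPTIONS /__langgraph_check route is the preflight the Python proxy uses to decide whether a request should be forwarded at all. A sketch of that preflight from the Python side (it mirrors JSCustomHTTPProxyMiddleware further down; the helper name is made up):

    # Sketch of the routing preflight against the JS sidecar (port and headers from this diff).
    import httpx

    async def js_can_handle(method: str, path: str) -> bool:
        async with httpx.AsyncClient(base_url="http://localhost:5557") as client:
            res = await client.options(
                "/__langgraph_check",
                headers={"x-langgraph-method": method, "x-langgraph-path": path},
            )
            # 200 -> the wrapped Hono router has a matching handler; 404 -> fall back to Python routes
            return res.is_success

The hunks that follow modify langgraph_api/js/remote.py, the module from which graph.py imports run_js_http_process and which hosts JSCustomHTTPProxyMiddleware.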
@@ -3,7 +3,8 @@ import logging
  import os
  import shutil
  import ssl
- from collections.abc import AsyncIterator
+ from collections import deque
+ from collections.abc import AsyncIterator, Callable
  from contextlib import AbstractContextManager
  from typing import Any, Literal, Self, cast

@@ -26,12 +27,14 @@ from langgraph.store.base import GetOp, Item, ListNamespacesOp, PutOp, SearchOp
  from langgraph.types import Command, Interrupt, Send
  from langgraph_sdk import Auth
  from pydantic import BaseModel
+ from starlette import types
  from starlette.applications import Starlette
  from starlette.authentication import (
      AuthCredentials,
      AuthenticationBackend,
      BaseUser,
  )
+ from starlette.datastructures import MutableHeaders
  from starlette.exceptions import HTTPException
  from starlette.requests import HTTPConnection, Request
  from starlette.routing import Route
@@ -48,8 +51,9 @@ from langgraph_api.utils import AsyncConnectionProto

  logger = structlog.stdlib.get_logger(__name__)

- GRAPH_PORT = 5556
  REMOTE_PORT = 5555
+ GRAPH_PORT = 5556
+ GRAPH_HTTP_PORT = 5557
  SSL = ssl.create_default_context(cafile=certifi.where())

  if port := int(os.getenv("PORT", "8080")):
@@ -336,7 +340,9 @@ async def run_js_process(paths_str: str, watch: bool = False):
      # check if tsx is available
      tsx_path = shutil.which("tsx")
      if tsx_path is None:
-         raise FileNotFoundError("tsx not found in PATH")
+         raise FileNotFoundError(
+             "tsx not found in PATH. Please upgrade to latest LangGraph CLI to support running JS graphs."
+         )
      attempt = 0
      while not asyncio.current_task().cancelled():
          client_file = os.path.join(os.path.dirname(__file__), "client.mts")
@@ -372,6 +378,54 @@ async def run_js_process(paths_str: str, watch: bool = False):
                  attempt += 1


+ async def run_js_http_process(paths_str: str, http_config: dict, watch: bool = False):
+     # check if tsx is available
+     tsx_path = shutil.which("tsx")
+     if tsx_path is None:
+         raise FileNotFoundError(
+             "tsx not found in PATH. Please upgrade to latest LangGraph CLI to support running JS graphs."
+         )
+
+     attempt = 0
+     while not asyncio.current_task().cancelled():
+         client_file = os.path.join(os.path.dirname(__file__), "client.http.mts")
+         args = ("tsx", "watch", client_file) if watch else ("tsx", client_file)
+         pid = None
+         try:
+             process = await asyncio.create_subprocess_exec(
+                 *args,
+                 env={
+                     "LANGGRAPH_HTTP": orjson.dumps(http_config),
+                     "LANGSERVE_GRAPHS": paths_str,
+                     "LANGCHAIN_CALLBACKS_BACKGROUND": "true",
+                     "NODE_ENV": "development" if watch else "production",
+                     "CHOKIDAR_USEPOLLING": "true",
+                     **os.environ,
+                 },
+             )
+
+             pid = process.pid
+             logger.info("Started JS HTTP process [%d]", pid)
+
+             code = await process.wait()
+             raise Exception(f"JS HTTP process exited with code {code}")
+
+         except asyncio.CancelledError:
+             logger.info("Shutting down JS HTTP process [%d]", pid or -1)
+             try:
+                 process.terminate()
+                 await process.wait()
+             except (UnboundLocalError, ProcessLookupError):
+                 pass
+             raise
+         except Exception:
+             if attempt >= 3:
+                 raise
+             else:
+                 logger.warning(f"Retrying JS HTTP process {3 - attempt} more times...")
+                 attempt += 1
+
+
  def _get_passthrough_checkpointer(conn: AsyncConnectionProto):
      from langgraph_runtime.checkpoint import Checkpointer

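run_js_http_process hands the HTTP configuration and graph specs to the Node sidecar through environment variables, which client.http.mts then validates with zod. A sketch of that contract with made-up values (the keys come from both sides of this diff):

    # Illustrative env contract between run_js_http_process and client.http.mts (example values).
    import json
    import os

    os.environ["LANGGRAPH_HTTP"] = json.dumps({"app": "./src/app.mts:app"})
    os.environ["LANGSERVE_GRAPHS"] = json.dumps({"agent": "./src/agent.mts:graph"})
    # The sidecar parses LANGGRAPH_HTTP, imports the exported Hono app and serves it
    # on localhost:5557 (GRAPH_HTTP_PORT above).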
@@ -668,8 +722,8 @@ class DisableHttpxLoggingContextManager(AbstractContextManager):

      filter: logging.Filter

-     def filter(self, record: logging.LogRecord) -> bool:
-         return "200 OK" not in record.getMessage()
+     def __init__(self, filter: Callable[[logging.LogRecord], bool] | None = None):
+         self.filter = filter or (lambda record: "200 OK" not in record.getMessage())

      def __enter__(self):
          logging.getLogger("httpx").addFilter(self.filter)
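
DisableHttpxLoggingContextManager now accepts an optional filter callable, defaulting to the previous behavior of dropping "200 OK" lines. The proxy middleware below uses it to silence only the internal preflight requests:

    # Usage sketch of the new constructor argument (import path assumed to be
    # langgraph_api.js.remote, the module these hunks modify).
    from langgraph_api.js.remote import DisableHttpxLoggingContextManager

    with DisableHttpxLoggingContextManager(
        filter=lambda record: "__langgraph_check" not in record.getMessage()
    ):
        ...  # httpx log records mentioning __langgraph_check are suppressed here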
@@ -820,3 +874,139 @@ async def handle_js_auth_event(
              value["metadata"].update(metadata)

      return filters
+
+
+ class JSCustomHTTPProxyMiddleware:
+     def __init__(self, app: types.ASGIApp) -> None:
+         self.app = app
+         self.proxy_client = httpx.AsyncClient(
+             base_url=f"http://localhost:{GRAPH_HTTP_PORT}",
+             timeout=httpx.Timeout(None),
+             limits=httpx.Limits(),
+             transport=httpx.AsyncHTTPTransport(verify=SSL),
+         )
+
+     async def __call__(
+         self, scope: types.Scope, receive: types.Receive, send: types.Send
+     ) -> None:
+         if scope["type"] != "http" or "__langgraph_check" in scope["path"]:
+             # TODO: add support for proxying `websockets``
+             await self.app(scope, receive, send)
+             return
+
+         # First, check if the request can be handled by the JS server
+         with DisableHttpxLoggingContextManager(
+             filter=lambda record: "__langgraph_check" not in record.getMessage()
+         ):
+             res = await self.proxy_client.options(
+                 "/__langgraph_check",
+                 headers={
+                     "x-langgraph-method": scope["method"],
+                     "x-langgraph-path": scope["path"],
+                 },
+             )
+
+         input_buffer: deque[types.Message] = deque()
+
+         async def yield_to_python(node_request: httpx.Response):
+             nonlocal input_buffer
+
+             async def replay_request():
+                 if input_buffer:
+                     if node_request.headers.get("x-langgraph-body") == "true":
+                         input_buffer.clear()
+                         return {
+                             "type": "http.request",
+                             "body": await node_request.aread(),
+                             "more_body": False,
+                         }
+
+                     return input_buffer.popleft()
+                 else:
+                     return await receive()
+
+             async def send_with_extra_headers(message: types.Message):
+                 if message["type"] == "http.response.start":
+                     headers = MutableHeaders(scope=message)
+                     for k, v in node_request.headers.items():
+                         if k in (
+                             "content-length",
+                             "content-encoding",
+                             "content-type",
+                             "transfer-encoding",
+                             "connection",
+                             "keep-alive",
+                             "x-langgraph-body",
+                             "x-langgraph-status",
+                         ):
+                             continue
+
+                         # Respect existing headers set by the Python server
+                         headers.append(k, headers.get(k, None) or v)
+
+                 await send(message)
+
+             return await self.app(scope, replay_request, send_with_extra_headers)
+
+         # If the JS server does not handle the request, yield the control back to the
+         # Python server.
+         if not res.is_success:
+             return await yield_to_python(res)
+
+         # Stream request body
+         async def upload_request_body() -> AsyncIterator[bytes]:
+             nonlocal input_buffer
+
+             more_body = True
+             while more_body:
+                 message = await receive()
+                 input_buffer.append(message)
+
+                 more_body = message.get("more_body", False)
+                 yield message.get("body", b"")
+
+         # Make the proxied request
+         async with self.proxy_client.stream(
+             scope["method"],
+             scope["path"],
+             params=scope["query_string"],
+             headers={
+                 k.decode("latin-1"): v.decode("latin-1")
+                 for k, v in scope["headers"]
+                 if k.lower() not in (b"host", b"content-length")
+             },
+             content=upload_request_body(),
+         ) as response:
+             if (
+                 response.status_code == 404
+                 and response.headers.get("x-langgraph-status") == "not-found"
+             ):
+                 return await yield_to_python(response)
+
+             # Send the response headers
+             await send(
+                 {
+                     "type": "http.response.start",
+                     "status": response.status_code,
+                     "headers": [
+                         (k.encode("latin-1"), v.encode("latin-1"))
+                         for k, v in response.headers.items()
+                         if k.lower() not in (b"transfer-encoding",)
+                     ],
+                 }
+             )
+
+             # Stream the response body
+             async for chunk in response.aiter_raw():
+                 await send(
+                     {"type": "http.response.body", "body": chunk, "more_body": True}
+                 )
+
+             # Send the final empty chunk to indicate the end of the response
+             await send({"type": "http.response.body", "body": b"", "more_body": False})
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         await self.proxy_client.aclose()
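
The middleware probes the JS sidecar with OPTIONS /__langgraph_check, streams matching requests through, and yields back to the Python routes on internal 404s (distinguished by the x-langgraph-status header). The diff does not show where the middleware is mounted, so the wiring below is only a guess based on standard Starlette middleware registration:

    # Hedged sketch: how an ASGI middleware like this is typically attached in Starlette.
    from starlette.applications import Starlette
    from starlette.middleware import Middleware

    from langgraph_api.js.remote import JSCustomHTTPProxyMiddleware  # module path assumed

    app = Starlette(
        routes=[],  # the API's real routes would go here
        middleware=[Middleware(JSCustomHTTPProxyMiddleware)],
    )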