langgraph-api 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

Files changed (86)
  1. LICENSE +93 -0
  2. langgraph_api/__init__.py +0 -0
  3. langgraph_api/api/__init__.py +63 -0
  4. langgraph_api/api/assistants.py +326 -0
  5. langgraph_api/api/meta.py +71 -0
  6. langgraph_api/api/openapi.py +32 -0
  7. langgraph_api/api/runs.py +463 -0
  8. langgraph_api/api/store.py +116 -0
  9. langgraph_api/api/threads.py +263 -0
  10. langgraph_api/asyncio.py +201 -0
  11. langgraph_api/auth/__init__.py +0 -0
  12. langgraph_api/auth/langsmith/__init__.py +0 -0
  13. langgraph_api/auth/langsmith/backend.py +67 -0
  14. langgraph_api/auth/langsmith/client.py +145 -0
  15. langgraph_api/auth/middleware.py +41 -0
  16. langgraph_api/auth/noop.py +14 -0
  17. langgraph_api/cli.py +209 -0
  18. langgraph_api/config.py +70 -0
  19. langgraph_api/cron_scheduler.py +60 -0
  20. langgraph_api/errors.py +52 -0
  21. langgraph_api/graph.py +314 -0
  22. langgraph_api/http.py +168 -0
  23. langgraph_api/http_logger.py +89 -0
  24. langgraph_api/js/.gitignore +2 -0
  25. langgraph_api/js/build.mts +49 -0
  26. langgraph_api/js/client.mts +849 -0
  27. langgraph_api/js/global.d.ts +6 -0
  28. langgraph_api/js/package.json +33 -0
  29. langgraph_api/js/remote.py +673 -0
  30. langgraph_api/js/server_sent_events.py +126 -0
  31. langgraph_api/js/src/graph.mts +88 -0
  32. langgraph_api/js/src/hooks.mjs +12 -0
  33. langgraph_api/js/src/parser/parser.mts +443 -0
  34. langgraph_api/js/src/parser/parser.worker.mjs +12 -0
  35. langgraph_api/js/src/schema/types.mts +2136 -0
  36. langgraph_api/js/src/schema/types.template.mts +74 -0
  37. langgraph_api/js/src/utils/importMap.mts +85 -0
  38. langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
  39. langgraph_api/js/src/utils/serde.mts +21 -0
  40. langgraph_api/js/tests/api.test.mts +1566 -0
  41. langgraph_api/js/tests/compose-postgres.yml +56 -0
  42. langgraph_api/js/tests/graphs/.gitignore +1 -0
  43. langgraph_api/js/tests/graphs/agent.mts +127 -0
  44. langgraph_api/js/tests/graphs/error.mts +17 -0
  45. langgraph_api/js/tests/graphs/langgraph.json +8 -0
  46. langgraph_api/js/tests/graphs/nested.mts +44 -0
  47. langgraph_api/js/tests/graphs/package.json +7 -0
  48. langgraph_api/js/tests/graphs/weather.mts +57 -0
  49. langgraph_api/js/tests/graphs/yarn.lock +159 -0
  50. langgraph_api/js/tests/parser.test.mts +870 -0
  51. langgraph_api/js/tests/utils.mts +17 -0
  52. langgraph_api/js/yarn.lock +1340 -0
  53. langgraph_api/lifespan.py +41 -0
  54. langgraph_api/logging.py +121 -0
  55. langgraph_api/metadata.py +101 -0
  56. langgraph_api/models/__init__.py +0 -0
  57. langgraph_api/models/run.py +229 -0
  58. langgraph_api/patch.py +42 -0
  59. langgraph_api/queue.py +245 -0
  60. langgraph_api/route.py +118 -0
  61. langgraph_api/schema.py +190 -0
  62. langgraph_api/serde.py +124 -0
  63. langgraph_api/server.py +48 -0
  64. langgraph_api/sse.py +118 -0
  65. langgraph_api/state.py +67 -0
  66. langgraph_api/stream.py +289 -0
  67. langgraph_api/utils.py +60 -0
  68. langgraph_api/validation.py +141 -0
  69. langgraph_api-0.0.1.dist-info/LICENSE +93 -0
  70. langgraph_api-0.0.1.dist-info/METADATA +26 -0
  71. langgraph_api-0.0.1.dist-info/RECORD +86 -0
  72. langgraph_api-0.0.1.dist-info/WHEEL +4 -0
  73. langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
  74. langgraph_license/__init__.py +0 -0
  75. langgraph_license/middleware.py +21 -0
  76. langgraph_license/validation.py +11 -0
  77. langgraph_storage/__init__.py +0 -0
  78. langgraph_storage/checkpoint.py +94 -0
  79. langgraph_storage/database.py +190 -0
  80. langgraph_storage/ops.py +1523 -0
  81. langgraph_storage/queue.py +108 -0
  82. langgraph_storage/retry.py +27 -0
  83. langgraph_storage/store.py +28 -0
  84. langgraph_storage/ttl_dict.py +54 -0
  85. logging.json +22 -0
  86. openapi.json +4304 -0
langgraph_api/auth/middleware.py ADDED
@@ -0,0 +1,41 @@
+ from starlette.middleware import Middleware
+ from starlette.middleware.authentication import (
+     AuthenticationError,
+     AuthenticationMiddleware,
+ )
+ from starlette.requests import HTTPConnection
+ from starlette.responses import JSONResponse
+ from starlette.types import Receive, Scope, Send
+
+ from langgraph_api.config import LANGGRAPH_AUTH_TYPE
+
+
+ def get_auth_backend():
+     if LANGGRAPH_AUTH_TYPE == "langsmith":
+         from langgraph_api.auth.langsmith.backend import LangsmithAuthBackend
+
+         return LangsmithAuthBackend()
+     else:
+         from langgraph_api.auth.noop import NoopAuthBackend
+
+         return NoopAuthBackend()
+
+
+ def on_error(conn: HTTPConnection, exc: AuthenticationError):
+     return JSONResponse({"detail": str(exc)}, status_code=403)
+
+
+ class ConditionalAuthenticationMiddleware(AuthenticationMiddleware):
+     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
+         if scope["root_path"] == "/noauth":
+             # disable auth for requests originating from SDK ASGI transport
+             # root_path cannot be set from a request, so safe to use as auth bypass
+             await self.app(scope, receive, send)
+             return
+
+         return await super().__call__(scope, receive, send)
+
+
+ auth_middleware = Middleware(
+     ConditionalAuthenticationMiddleware, backend=get_auth_backend(), on_error=on_error
+ )
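auth_middleware above is a standard Starlette Middleware wrapper around the selected backend, so it can be mounted on any Starlette application. A minimal sketch of that wiring, with a hypothetical app and route that are not part of this package:

    from starlette.applications import Starlette
    from starlette.responses import JSONResponse
    from starlette.routing import Route

    from langgraph_api.auth.middleware import auth_middleware


    async def whoami(request):
        # request.user is populated by the configured backend; with the noop
        # backend this is Starlette's UnauthenticatedUser
        return JSONResponse({"authenticated": request.user.is_authenticated})


    # hypothetical application; the real server builds its own Starlette app
    app = Starlette(routes=[Route("/whoami", whoami)], middleware=[auth_middleware])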
langgraph_api/auth/noop.py ADDED
@@ -0,0 +1,14 @@
+ from starlette.authentication import (
+     AuthCredentials,
+     AuthenticationBackend,
+     BaseUser,
+     UnauthenticatedUser,
+ )
+ from starlette.requests import HTTPConnection
+
+
+ class NoopAuthBackend(AuthenticationBackend):
+     async def authenticate(
+         self, conn: HTTPConnection
+     ) -> tuple[AuthCredentials, BaseUser] | None:
+         return AuthCredentials(), UnauthenticatedUser()
langgraph_api/cli.py ADDED
@@ -0,0 +1,209 @@
+ import contextlib
+ import json
+ import logging
+ import os
+ import pathlib
+ import threading
+ from collections.abc import Mapping
+
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+
+ @contextlib.contextmanager
+ def patch_environment(**kwargs):
+     """Temporarily patch environment variables.
+
+     Args:
+         **kwargs: Key-value pairs of environment variables to set.
+
+     Yields:
+         None
+     """
+     original = {}
+     try:
+         for key, value in kwargs.items():
+             if value is None:
+                 original[key] = os.environ.pop(key, None)
+                 continue
+             original[key] = os.environ.get(key)
+             os.environ[key] = value
+         yield
+     finally:
+         for key, value in original.items():
+             if value is None:
+                 os.environ.pop(key, None)
+             else:
+                 os.environ[key] = value
+
+
+ def run_server(
+     host: str = "127.0.0.1",
+     port: int = 2024,
+     reload: bool = False,
+     graphs: dict | None = None,
+     n_jobs_per_worker: int | None = None,
+     env_file: str | None = None,
+     open_browser: bool = False,
+     debug_port: int | None = None,
+     env: str | pathlib.Path | Mapping[str, str] | None = None,
+ ):
+     """Run the LangGraph API server."""
+     import uvicorn
+
+     env_vars = env if isinstance(env, Mapping) else None
+     if isinstance(env, str | pathlib.Path):
+         try:
+             from dotenv.main import DotEnv
+
+             env_vars = DotEnv(dotenv_path=env).dict() or {}
+             logger.debug(f"Loaded environment variables from {env}: {sorted(env_vars)}")
+
+         except ImportError:
+             logger.warning(
+                 "python_dotenv is not installed. Environment variables will not be available."
+             )
+
+     if debug_port is not None:
+         try:
+             import debugpy
+         except ImportError:
+             logger.warning("debugpy is not installed. Debugging will not be available.")
+             logger.info("To enable debugging, install debugpy: pip install debugpy")
+             return
+         debugpy.listen((host, debug_port))
+         logger.info(
+             f"🐛 Debugger listening on port {debug_port}. Waiting for client to attach..."
+         )
+         logger.info("To attach the debugger:")
+         logger.info("1. Open your python debugger client (e.g., Visual Studio Code).")
+         logger.info(
+             "2. Use the 'Remote Attach' configuration with the following settings:"
+         )
+         logger.info(" - Host: 0.0.0.0")
+         logger.info(f" - Port: {debug_port}")
+         logger.info("3. Start the debugger to connect to the server.")
+         debugpy.wait_for_client()
+         logger.info("Debugger attached. Starting server...")
+
+     local_url = f"http://{host}:{port}"
+     studio_url = f"https://smith.langchain.com/studio/?baseUrl={local_url}"
+
+     def _open_browser():
+         import time
+         import urllib.request
+         import webbrowser
+
+         while True:
+             try:
+                 with urllib.request.urlopen(f"{local_url}/ok") as response:
+                     if response.status == 200:
+                         webbrowser.open(studio_url)
+                         return
+             except urllib.error.URLError:
+                 pass
+             time.sleep(0.1)
+
+     welcome = f"""
+
+ Welcome to
+
+ ╦ ┌─┐┌┐┌┌─┐╔═╗┬─┐┌─┐┌─┐┬ ┬
+ ║ ├─┤││││ ┬║ ╦├┬┘├─┤├─┘├─┤
+ ╩═╝┴ ┴┘└┘└─┘╚═╝┴└─┴ ┴┴ ┴ ┴
+
+ - 🚀 API: \033[36m{local_url}\033[0m
+ - 🎨 Studio UI: \033[36m{studio_url}\033[0m
+ - 📚 API Docs: \033[36m{local_url}/docs\033[0m
+
+ This in-memory server is designed for development and testing.
+ For production use, please use LangGraph Cloud.
+
+ """
+     logger.info(welcome)
+     with patch_environment(
+         MIGRATIONS_PATH="__inmem",
+         DATABASE_URI=":memory:",
+         REDIS_URI="fake",
+         N_JOBS_PER_WORKER=str(n_jobs_per_worker if n_jobs_per_worker else 1),
+         LANGSERVE_GRAPHS=json.dumps(graphs) if graphs else None,
+         LANGSMITH_LANGGRAPH_API_VARIANT="test",
+         **(env_vars or {}),
+     ):
+         if open_browser:
+             threading.Thread(target=_open_browser, daemon=True).start()
+
+         uvicorn.run(
+             "langgraph_api.server:app",
+             host=host,
+             port=port,
+             reload=reload,
+             env_file=env_file,
+             access_log=False,
+             log_config={
+                 "version": 1,
+                 "incremental": False,
+                 "disable_existing_loggers": False,
+                 "formatters": {
+                     "simple": {"class": "langgraph_api.logging.Formatter"}
+                 },
+                 "handlers": {
+                     "console": {
+                         "class": "logging.StreamHandler",
+                         "formatter": "simple",
+                         "stream": "ext://sys.stdout",
+                     }
+                 },
+                 "root": {"handlers": ["console"]},
+             },
+         )
+
+
+ def main():
+     import argparse
+
+     parser = argparse.ArgumentParser(
+         description="CLI entrypoint for running the LangGraph API server."
+     )
+     parser.add_argument(
+         "--host", default="127.0.0.1", help="Host to bind the server to"
+     )
+     parser.add_argument(
+         "--port", type=int, default=2024, help="Port to bind the server to"
+     )
+     parser.add_argument("--no-reload", action="store_true", help="Disable auto-reload")
+     parser.add_argument(
+         "--config", default="langgraph.json", help="Path to configuration file"
+     )
+     parser.add_argument(
+         "--n-jobs-per-worker",
+         type=int,
+         help="Number of jobs per worker. Default is None (meaning 10)",
+     )
+     parser.add_argument(
+         "--no-browser", action="store_true", help="Disable automatic browser opening"
+     )
+     parser.add_argument(
+         "--debug-port", type=int, help="Port for debugger to listen on (default: none)"
+     )
+
+     args = parser.parse_args()
+
+     with open(args.config, encoding="utf-8") as f:
+         config_data = json.load(f)
+
+     graphs = config_data.get("graphs", {})
+     run_server(
+         args.host,
+         args.port,
+         not args.no_reload,
+         graphs,
+         n_jobs_per_worker=args.n_jobs_per_worker,
+         open_browser=not args.no_browser,
+         debug_port=args.debug_port,
+         env=config_data.get("env", None),
+     )
+
+
+ if __name__ == "__main__":
+     main()
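For reference, run_server can also be called directly instead of going through the CLI entrypoint. A minimal sketch, assuming a graph exported as graph from a local ./my_agent.py module; the "path:variable" format shown for the graphs mapping mirrors the langgraph.json convention and is an assumption, not something documented by this file:

    from langgraph_api.cli import run_server

    # Hypothetical graph registration; main() builds the same mapping from the
    # "graphs" key of langgraph.json before calling run_server.
    run_server(
        host="127.0.0.1",
        port=2024,
        graphs={"agent": "./my_agent.py:graph"},
        open_browser=False,
    )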
langgraph_api/config.py ADDED
@@ -0,0 +1,70 @@
+ from os import environ, getenv
+
+ from starlette.config import Config, undefined
+ from starlette.datastructures import CommaSeparatedStrings
+
+ env = Config()
+
+ STATS_INTERVAL_SECS = env("STATS_INTERVAL_SECS", cast=int, default=60)
+ HTTP_CONCURRENCY = env("HTTP_CONCURRENCY", cast=int, default=10)
+
+ # storage
+
+ DATABASE_URI = env("DATABASE_URI", cast=str, default=getenv("POSTGRES_URI", undefined))
+ MIGRATIONS_PATH = env("MIGRATIONS_PATH", cast=str, default="/storage/migrations")
+
+ # redis
+ REDIS_URI = env("REDIS_URI", cast=str)
+ REDIS_MAX_CONNECTIONS = env("REDIS_MAX_CONNECTIONS", cast=int, default=500)
+
+ # server
+
+ CORS_ALLOW_ORIGINS = env("CORS_ALLOW_ORIGINS", cast=CommaSeparatedStrings, default="*")
+
+ # queue
+
+ BG_JOB_NO_DELAY = env("BG_JOB_NO_DELAY", cast=bool, default=False)
+ N_JOBS_PER_WORKER = env("N_JOBS_PER_WORKER", cast=int, default=10)
+ BG_JOB_TIMEOUT_SECS = env("BG_JOB_TIMEOUT_SECS", cast=float, default=3600)
+ FF_CRONS_ENABLED = env("FF_CRONS_ENABLED", cast=bool, default=True)
+
+ # auth
+
+ LANGGRAPH_AUTH_TYPE = env("LANGGRAPH_AUTH_TYPE", cast=str, default="noop")
+
+ if LANGGRAPH_AUTH_TYPE == "langsmith":
+     LANGSMITH_AUTH_ENDPOINT = env("LANGSMITH_AUTH_ENDPOINT", cast=str)
+     LANGSMITH_TENANT_ID = env("LANGSMITH_TENANT_ID", cast=str)
+     LANGSMITH_AUTH_VERIFY_TENANT_ID = env(
+         "LANGSMITH_AUTH_VERIFY_TENANT_ID", cast=bool, default=True
+     )
+ else:
+     LANGSMITH_AUTH_ENDPOINT = env(
+         "LANGSMITH_AUTH_ENDPOINT",
+         cast=str,
+         default=getenv(
+             "LANGCHAIN_ENDPOINT",
+             getenv("LANGSMITH_ENDPOINT", "https://api.smith.langchain.com"),
+         ),
+     )
+
+ # license
+
+ LANGGRAPH_CLOUD_LICENSE_KEY = env("LANGGRAPH_CLOUD_LICENSE_KEY", cast=str, default="")
+ LANGSMITH_API_KEY = env(
+     "LANGSMITH_API_KEY", cast=str, default=getenv("LANGCHAIN_API_KEY", "")
+ )
+
+ # if langsmith api key is set, enable tracing unless explicitly disabled
+
+ if (
+     LANGSMITH_API_KEY
+     and not getenv("LANGCHAIN_TRACING_V2")
+     and not getenv("LANGCHAIN_TRACING")
+ ):
+     environ["LANGCHAIN_TRACING_V2"] = "true"
+
+ # if variant is "licensed", update to "local" if using LANGSMITH_API_KEY instead
+
+ if getenv("LANGSMITH_LANGGRAPH_API_VARIANT") == "licensed" and LANGSMITH_API_KEY:
+     environ["LANGSMITH_LANGGRAPH_API_VARIANT"] = "local"
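All of these settings are read through Starlette's Config at import time, so they can be overridden with ordinary environment variables as long as they are set before langgraph_api.config is first imported. A minimal sketch with illustrative values (note that REDIS_URI has no default and must be provided):

    import os

    # Illustrative values only; set before the first import of langgraph_api.config.
    os.environ["DATABASE_URI"] = "postgres://user:pass@localhost:5432/langgraph"
    os.environ["REDIS_URI"] = "redis://localhost:6379"
    os.environ["N_JOBS_PER_WORKER"] = "20"

    from langgraph_api import config

    print(config.N_JOBS_PER_WORKER)  # 20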
langgraph_api/cron_scheduler.py ADDED
@@ -0,0 +1,60 @@
+ import asyncio
+ from random import random
+
+ import structlog
+ from langchain_core.runnables.config import run_in_executor
+
+ from langgraph_api.models.run import create_valid_run
+ from langgraph_api.utils import next_cron_date
+ from langgraph_storage.database import connect
+ from langgraph_storage.ops import Crons
+ from langgraph_storage.retry import retry_db
+
+ logger = structlog.stdlib.get_logger(__name__)
+
+ SLEEP_TIME = 5
+
+
+ @retry_db
+ async def cron_scheduler():
+     logger.info("Starting cron scheduler")
+     while True:
+         try:
+             async with connect() as conn:
+                 async for cron in Crons.next(conn):
+                     logger.debug(f"Scheduling cron run {cron}")
+                     try:
+                         run_payload = cron["payload"]
+                         run = await create_valid_run(
+                             conn,
+                             thread_id=str(cron.get("thread_id"))
+                             if cron.get("thread_id")
+                             else None,
+                             payload=run_payload,
+                             user_id=cron.get("user_id"),
+                             headers={},
+                         )
+                         if not run:
+                             logger.error(
+                                 "Run not created for cron_id={} payload".format(
+                                     cron["cron_id"],
+                                 )
+                             )
+                     except Exception as e:
+                         logger.error(
+                             "Error scheduling cron run cron_id={}".format(
+                                 cron["cron_id"]
+                             ),
+                             exc_info=e,
+                         )
+                     next_run_date = await run_in_executor(
+                         None, next_cron_date, cron["schedule"], cron["now"]
+                     )
+                     await Crons.set_next_run_date(conn, cron["cron_id"], next_run_date)
+
+             await asyncio.sleep(SLEEP_TIME)
+         except asyncio.CancelledError:
+             raise
+         except Exception as e:
+             logger.error("Error in cron_scheduler", exc_info=e)
+             await asyncio.sleep(SLEEP_TIME + random())
langgraph_api/errors.py ADDED
@@ -0,0 +1,52 @@
+ import jsonschema_rs
+ import structlog
+ from starlette.exceptions import HTTPException
+ from starlette.requests import Request
+ from starlette.responses import JSONResponse, Response
+
+ logger = structlog.stdlib.get_logger(__name__)
+
+
+ def is_body_allowed_for_status_code(status_code: int | None) -> bool:
+     if status_code is None:
+         return True
+     return not (status_code < 200 or status_code in {204, 205, 304})
+
+
+ async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
+     headers = getattr(exc, "headers", None)
+     if not is_body_allowed_for_status_code(exc.status_code):
+         return Response(status_code=exc.status_code, headers=headers)
+     return JSONResponse(
+         {"detail": exc.detail}, status_code=exc.status_code, headers=headers
+     )
+
+
+ async def validation_error_handler(request, exc: jsonschema_rs.ValidationError):
+     return await http_exception_handler(
+         request, HTTPException(status_code=422, detail=str(exc))
+     )
+
+
+ async def value_error_handler(request, exc: ValueError):
+     logger.exception("Bad Request Error", exc_info=exc)
+     return await http_exception_handler(
+         request, HTTPException(status_code=400, detail=str(exc))
+     )
+
+
+ async def overloaded_error_handler(request, exc: ValueError):
+     logger.exception("Overloaded Error", exc_info=exc)
+     return await http_exception_handler(
+         request, HTTPException(status_code=503, detail=str(exc))
+     )
+
+
+ class UserInterrupt(Exception):
+     def __init__(self, message="User interrupted the run"):
+         super().__init__(message)
+
+
+ class UserRollback(UserInterrupt):
+     def __init__(self):
+         super().__init__("User requested rollback of the run")
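These coroutines follow Starlette's exception-handler signature (request, exc) -> Response, so they can be attached through an application's exception_handlers mapping. A minimal sketch; the application object is hypothetical and not this package's actual server setup:

    import jsonschema_rs
    from starlette.applications import Starlette
    from starlette.exceptions import HTTPException

    from langgraph_api.errors import (
        http_exception_handler,
        validation_error_handler,
        value_error_handler,
    )

    # map exception types to the handlers defined above
    app = Starlette(
        exception_handlers={
            HTTPException: http_exception_handler,
            jsonschema_rs.ValidationError: validation_error_handler,
            ValueError: value_error_handler,
        }
    )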