langgraph-api 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic. Click here for more details.

Files changed (86)
  1. LICENSE +93 -0
  2. langgraph_api/__init__.py +0 -0
  3. langgraph_api/api/__init__.py +63 -0
  4. langgraph_api/api/assistants.py +326 -0
  5. langgraph_api/api/meta.py +71 -0
  6. langgraph_api/api/openapi.py +32 -0
  7. langgraph_api/api/runs.py +463 -0
  8. langgraph_api/api/store.py +116 -0
  9. langgraph_api/api/threads.py +263 -0
  10. langgraph_api/asyncio.py +201 -0
  11. langgraph_api/auth/__init__.py +0 -0
  12. langgraph_api/auth/langsmith/__init__.py +0 -0
  13. langgraph_api/auth/langsmith/backend.py +67 -0
  14. langgraph_api/auth/langsmith/client.py +145 -0
  15. langgraph_api/auth/middleware.py +41 -0
  16. langgraph_api/auth/noop.py +14 -0
  17. langgraph_api/cli.py +209 -0
  18. langgraph_api/config.py +70 -0
  19. langgraph_api/cron_scheduler.py +60 -0
  20. langgraph_api/errors.py +52 -0
  21. langgraph_api/graph.py +314 -0
  22. langgraph_api/http.py +168 -0
  23. langgraph_api/http_logger.py +89 -0
  24. langgraph_api/js/.gitignore +2 -0
  25. langgraph_api/js/build.mts +49 -0
  26. langgraph_api/js/client.mts +849 -0
  27. langgraph_api/js/global.d.ts +6 -0
  28. langgraph_api/js/package.json +33 -0
  29. langgraph_api/js/remote.py +673 -0
  30. langgraph_api/js/server_sent_events.py +126 -0
  31. langgraph_api/js/src/graph.mts +88 -0
  32. langgraph_api/js/src/hooks.mjs +12 -0
  33. langgraph_api/js/src/parser/parser.mts +443 -0
  34. langgraph_api/js/src/parser/parser.worker.mjs +12 -0
  35. langgraph_api/js/src/schema/types.mts +2136 -0
  36. langgraph_api/js/src/schema/types.template.mts +74 -0
  37. langgraph_api/js/src/utils/importMap.mts +85 -0
  38. langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
  39. langgraph_api/js/src/utils/serde.mts +21 -0
  40. langgraph_api/js/tests/api.test.mts +1566 -0
  41. langgraph_api/js/tests/compose-postgres.yml +56 -0
  42. langgraph_api/js/tests/graphs/.gitignore +1 -0
  43. langgraph_api/js/tests/graphs/agent.mts +127 -0
  44. langgraph_api/js/tests/graphs/error.mts +17 -0
  45. langgraph_api/js/tests/graphs/langgraph.json +8 -0
  46. langgraph_api/js/tests/graphs/nested.mts +44 -0
  47. langgraph_api/js/tests/graphs/package.json +7 -0
  48. langgraph_api/js/tests/graphs/weather.mts +57 -0
  49. langgraph_api/js/tests/graphs/yarn.lock +159 -0
  50. langgraph_api/js/tests/parser.test.mts +870 -0
  51. langgraph_api/js/tests/utils.mts +17 -0
  52. langgraph_api/js/yarn.lock +1340 -0
  53. langgraph_api/lifespan.py +41 -0
  54. langgraph_api/logging.py +121 -0
  55. langgraph_api/metadata.py +101 -0
  56. langgraph_api/models/__init__.py +0 -0
  57. langgraph_api/models/run.py +229 -0
  58. langgraph_api/patch.py +42 -0
  59. langgraph_api/queue.py +245 -0
  60. langgraph_api/route.py +118 -0
  61. langgraph_api/schema.py +190 -0
  62. langgraph_api/serde.py +124 -0
  63. langgraph_api/server.py +48 -0
  64. langgraph_api/sse.py +118 -0
  65. langgraph_api/state.py +67 -0
  66. langgraph_api/stream.py +289 -0
  67. langgraph_api/utils.py +60 -0
  68. langgraph_api/validation.py +141 -0
  69. langgraph_api-0.0.1.dist-info/LICENSE +93 -0
  70. langgraph_api-0.0.1.dist-info/METADATA +26 -0
  71. langgraph_api-0.0.1.dist-info/RECORD +86 -0
  72. langgraph_api-0.0.1.dist-info/WHEEL +4 -0
  73. langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
  74. langgraph_license/__init__.py +0 -0
  75. langgraph_license/middleware.py +21 -0
  76. langgraph_license/validation.py +11 -0
  77. langgraph_storage/__init__.py +0 -0
  78. langgraph_storage/checkpoint.py +94 -0
  79. langgraph_storage/database.py +190 -0
  80. langgraph_storage/ops.py +1523 -0
  81. langgraph_storage/queue.py +108 -0
  82. langgraph_storage/retry.py +27 -0
  83. langgraph_storage/store.py +28 -0
  84. langgraph_storage/ttl_dict.py +54 -0
  85. logging.json +22 -0
  86. openapi.json +4304 -0
langgraph_api/graph.py ADDED
@@ -0,0 +1,314 @@
1
+ import asyncio
2
+ import glob
3
+ import importlib.util
4
+ import inspect
5
+ import json
6
+ import os
7
+ import sys
8
+ from collections.abc import Callable
9
+ from itertools import filterfalse
10
+ from random import choice
11
+ from typing import NamedTuple
12
+ from uuid import UUID, uuid5
13
+
14
+ import structlog
15
+ from langchain_core.runnables.config import run_in_executor
16
+ from langgraph.checkpoint.base import BaseCheckpointSaver
17
+ from langgraph.graph import Graph
18
+ from langgraph.pregel import Pregel
19
+ from langgraph.store.base import BaseStore
20
+ from starlette.exceptions import HTTPException
21
+
22
+ from langgraph_api.js.remote import RemotePregel
23
+ from langgraph_api.schema import Config
24
+
25
+ logger = structlog.stdlib.get_logger(__name__)
26
+
27
# Accepted shapes for a registered graph: either an already-built Pregel, a
# zero-arg factory, or a factory taking a Config.
GraphFactoryFromConfig = Callable[[Config], Pregel | Graph]
GraphFactory = Callable[[], Pregel | Graph]
GraphValue = Pregel | GraphFactory


# Registry of graph_id -> graph (or factory), populated at startup by
# register_graph().
GRAPHS: dict[str, Pregel | GraphFactoryFromConfig | GraphFactory] = {}
# Fixed namespace so uuid5(NAMESPACE_GRAPH, graph_id) deterministically yields
# the same system-assistant UUID across processes and restarts.
NAMESPACE_GRAPH = UUID("6ba7b821-9dad-11d1-80b4-00c04fd430c8")
# graph_id -> whether its registered factory accepts a config argument
# (only present for callable registrations).
FACTORY_ACCEPTS_CONFIG: dict[str, bool] = {}
35
+
36
+
37
async def register_graph(graph_id: str, graph: GraphValue, config: dict | None) -> None:
    """Register a graph and upsert its system assistant.

    Stores the graph (or factory) in the module registry, records whether a
    factory accepts a config argument, and creates a matching assistant row
    keyed by a deterministic UUID (no-op if it already exists).
    """
    from langgraph_storage.database import connect
    from langgraph_storage.ops import Assistants

    await logger.ainfo(f"Registering graph with id '{graph_id}'", graph_id=graph_id)
    GRAPHS[graph_id] = graph
    if callable(graph):
        # A factory with one or more parameters is assumed to take a config.
        FACTORY_ACCEPTS_CONFIG[graph_id] = bool(inspect.signature(graph).parameters)
    async with connect() as conn:
        assistant_id = str(uuid5(NAMESPACE_GRAPH, graph_id))
        await Assistants.put(
            conn,
            assistant_id,
            graph_id=graph_id,
            metadata={"created_by": "system"},
            config=config or {},
            if_exists="do_nothing",
            name=graph_id,
        )
56
+
57
+
58
def get_graph(
    graph_id: str,
    config: Config,
    *,
    checkpointer: BaseCheckpointSaver | None = None,
    store: BaseStore | None = None,
) -> Pregel:
    """Resolve ``graph_id`` to a runnable Pregel, attaching persistence.

    Raises:
        HTTPException: 404 if the graph is unknown; 424 if the registered
            value does not resolve to a Pregel/RemotePregel.
    """
    assert_graph_exists(graph_id)
    value = GRAPHS[graph_id]

    # Callable registrations were recorded in FACTORY_ACCEPTS_CONFIG.
    if graph_id in FACTORY_ACCEPTS_CONFIG:
        takes_config = FACTORY_ACCEPTS_CONFIG[graph_id]
        value = value(config) if takes_config else value()

    if isinstance(value, Graph):
        value = value.compile()

    if not isinstance(value, (Pregel, RemotePregel)):
        raise HTTPException(
            status_code=424,
            detail=f"Graph '{graph_id}' is not valid. Review graph registration.",
        )

    if isinstance(value, RemotePregel):
        # Remote graphs are mutated in place rather than copied.
        value.checkpointer = checkpointer
        value.name = graph_id
        return value

    update = {"checkpointer": checkpointer, "store": store}
    if value.name == "LangGraph":
        # Replace the library's default name with the registered graph id.
        update["name"] = graph_id
    return value.copy(update=update)
89
+
90
+
91
def graph_exists(graph_id: str) -> bool:
    """Return whether a graph with ``graph_id`` has been registered."""
    return graph_id in GRAPHS
94
+
95
+
96
def assert_graph_exists(graph_id: str) -> None:
    """Raise a 404 HTTPException unless ``graph_id`` is registered."""
    if graph_exists(graph_id):
        return
    raise HTTPException(status_code=404, detail=f"Graph '{graph_id}' not found")
100
+
101
+
102
def get_assistant_id(assistant_id: str) -> str:
    """Translate a graph id to its system assistant id; pass other ids through.

    The API accepts assistant_id and graph_id interchangeably; a known graph
    id maps to the deterministic UUID of its auto-created assistant.
    """
    if assistant_id not in GRAPHS:
        return assistant_id
    return str(uuid5(NAMESPACE_GRAPH, assistant_id))
113
+
114
+
115
class GraphSpec(NamedTuple):
    """A graph specification parsed from the environment.

    Exactly one of ``path`` (file-based graphs, incl. JS/TS) or ``module``
    (importable Python module) is expected to be set.
    """

    # Graph id, used as the registry key.
    id: str
    # Filesystem path to the graph source file, if file-based.
    path: str | None = None
    # Importable module name, if module-based.
    module: str | None = None
    # Name of the module attribute holding the graph or factory.
    variable: str | None = None
    # Per-graph config from LANGGRAPH_CONFIG, if any.
    config: dict | None = None
123
+
124
+
125
# Background tasks keeping the JS sidecar alive (graphs, checkpointer, store);
# cancelled by stop_remote_graphs().
js_bg_tasks: set[asyncio.Task] = set()
126
+
127
+
128
def is_js_spec(spec: GraphSpec) -> bool:
    """Return True if the spec points at a JavaScript/TypeScript source file."""
    if spec.path is None:
        return False
    _, extension = os.path.splitext(spec.path)
    return extension in {".ts", ".mts", ".cts", ".js", ".mjs", ".cjs"}
137
+
138
+
139
+ def _load_graph_config_from_env() -> dict | None:
140
+ """Return graph config from env."""
141
+ config_str = os.getenv("LANGGRAPH_CONFIG")
142
+ if not config_str:
143
+ return None
144
+
145
+ config_per_id = json.loads(config_str)
146
+ if not isinstance(config_per_id, dict):
147
+ raise ValueError("LANGGRAPH_CONFIG must be a JSON object")
148
+
149
+ return config_per_id
150
+
151
+
152
async def collect_graphs_from_env(register: bool = False) -> None:
    """Discover graphs from the environment and optionally register them.

    Reads LANGSERVE_GRAPHS (JSON mapping graph id -> "module:var" or
    "path/to/file:var"); when unset, falls back to globbing ``/graphs/*.py``.
    JS/TS specs are served by a background node process started here; Python
    specs are imported in an executor thread.

    Args:
        register: when True, each discovered graph is registered (creating
            its system assistant) via ``register_graph``.
    """
    paths_str = os.getenv("LANGSERVE_GRAPHS")
    config_per_graph = _load_graph_config_from_env() or {}

    if paths_str:
        specs = []
        for key, value in json.loads(paths_str).items():
            # rsplit tolerates extra colons in the target (e.g. Windows
            # drive letters in file paths); the variable is always the last
            # colon-separated segment.
            target, variable = value.rsplit(":", 1)
            if "/" in value:
                spec = GraphSpec(
                    key,
                    path=target,
                    variable=variable,
                    config=config_per_graph.get(key),
                )
            else:
                spec = GraphSpec(
                    key,
                    module=target,
                    variable=variable,
                    config=config_per_graph.get(key),
                )
            specs.append(spec)
    else:
        specs = []
        for graph_path in glob.glob("/graphs/*.py"):
            graph_id = graph_path.split("/")[-1].replace(".py", "")
            specs.append(
                GraphSpec(
                    id=graph_id,
                    path=graph_path,
                    config=config_per_graph.get(graph_id),
                )
            )

    js_specs = list(filter(is_js_spec, specs))
    py_specs = list(filterfalse(is_js_spec, specs))

    if js_specs:
        # NOTE: `sys` is already imported at module level; the original
        # shadowing re-import has been removed.
        from langgraph_api.js.remote import (
            RemotePregel,
            run_js_process,
            run_remote_checkpointer,
            run_remote_store,
            wait_until_js_ready,
        )

        js_bg_tasks.add(
            asyncio.create_task(
                run_remote_checkpointer(),
                name="remote-checkpointer",
            )
        )
        js_bg_tasks.add(
            asyncio.create_task(
                run_remote_store(),
                name="remote-store",
            )
        )
        js_bg_tasks.add(
            asyncio.create_task(
                run_js_process(paths_str, watch="--reload" in sys.argv[1:]),
                name="remote-graphs",
            )
        )
        for task in js_bg_tasks:
            # Exit the whole process if any sidecar task dies (see
            # _handle_exception).
            task.add_done_callback(_handle_exception)

        await wait_until_js_ready()

        for spec in js_specs:
            graph = await RemotePregel.load(graph_id=spec.id)
            if register:
                await register_graph(spec.id, graph, spec.config)

    for spec in py_specs:
        # Module import may block (file I/O, heavy imports); run off-loop.
        graph = await run_in_executor(None, _graph_from_spec, spec)
        if register:
            await register_graph(spec.id, graph, spec.config)
233
+
234
+
235
def _handle_exception(task: asyncio.Task) -> None:
    """Done-callback for JS sidecar tasks: terminate the process when any ends.

    The background tasks are expected to run forever, so any completion —
    clean return, cancellation, or error — means the server can no longer
    function and should exit.
    """
    try:
        # Re-raises the task's exception, if any.
        task.result()
    except asyncio.CancelledError:
        pass
    finally:
        # if the task died either with exception or not, we should exit
        sys.exit(1)
243
+
244
+
245
async def stop_remote_graphs() -> None:
    """Cancel every background task driving the JS sidecar."""
    logger.info("Cancelling remote graphs")
    for bg_task in js_bg_tasks:
        bg_task.cancel()
249
+
250
+
251
def verify_graphs() -> None:
    """Synchronously resolve all graphs from the environment.

    Runs collection with register left at its default (False), so this only
    validates that every configured graph loads — raising on the first
    failure — without touching the database.
    """
    asyncio.run(collect_graphs_from_env())
253
+
254
+
255
def _graph_from_spec(spec: GraphSpec) -> GraphValue:
    """Return a graph from a spec.

    Imports the target (by module name or file path), then either validates
    the named variable or scans module members for a graph.

    Raises:
        ValueError: spec has neither path nor module, the file cannot be
            located, a factory has an invalid signature or return type, or
            no graph is found in the module.
        ImportError: module import failed (annotated with the spec).
    """
    # import the graph module
    if spec.module:
        module = importlib.import_module(spec.module)
    elif spec.path:
        try:
            # Random module name avoids sys.modules collisions between
            # separately-loaded graph files.
            modname = "".join(choice("abcdefghijklmnopqrstuvwxyz") for _ in range(24))
            modspec = importlib.util.spec_from_file_location(modname, spec.path)
            if modspec is None:
                raise ValueError(f"Could not find python file for graph: {spec}")
            module = importlib.util.module_from_spec(modspec)
            # Register before exec so intra-module references resolve.
            sys.modules[modname] = module
            modspec.loader.exec_module(module)
        except ImportError as e:
            e.add_note(f"Could not import python module for graph: {spec}")
            raise e
    else:
        raise ValueError("Graph specification must have a path or module")

    if spec.variable:
        graph: GraphValue = getattr(module, spec.variable)
        if callable(graph):
            sig = inspect.signature(graph)
            if not sig.parameters:
                # Zero-arg factory: invoke once here to validate its return
                # type; the result is discarded (the factory is called again
                # at serve time).
                if not isinstance(graph(), (Graph, Pregel)):  # noqa: UP038
                    raise ValueError(
                        f"Graph factory function '{spec.variable}' in module '{spec.path}' must return a Graph or Pregel"
                    )
            elif len(sig.parameters) != 1:
                raise ValueError(
                    f"Graph factory function '{spec.variable}' in module '{spec.path}' must take exactly one argument, a RunnableConfig"
                )
        elif isinstance(graph, Graph):
            graph = graph.compile()
        elif isinstance(graph, Pregel):
            pass
        else:
            raise ValueError(
                f"Variable '{spec.variable}' in module '{spec.path}' is not a Graph or Graph factory function"
            )
    else:
        # find the graph in the module
        # - first look for a compiled graph (Pregel)
        # - if not found, look for a Graph and compile it
        for _, member in inspect.getmembers(module):
            if isinstance(member, Pregel):
                graph = member
                break
        else:
            for _, member in inspect.getmembers(module):
                if isinstance(member, Graph):
                    graph = member.compile()
                    break
            else:
                raise ValueError(
                    f"Could not find a Graph in module at path: {spec.path}"
                )

    return graph
langgraph_api/http.py ADDED
@@ -0,0 +1,168 @@
1
+ import asyncio
2
+
3
+ import httpx
4
+ from tenacity import (
5
+ retry,
6
+ retry_if_exception,
7
+ stop_after_attempt,
8
+ wait_exponential_jitter,
9
+ )
10
+
11
+ from langgraph_api.serde import json_dumpb
12
+
13
+
14
class JsonHttpClient:
    """HTTPX client for JSON requests whose response bodies are discarded."""

    def __init__(self, client: httpx.AsyncClient) -> None:
        """Wrap an httpx.AsyncClient."""
        self.client = client

    async def post(
        self,
        path: str,
        /,
        *,
        params: dict[str, str] | None = None,
        headers: dict[str, str] | None = None,
        json: dict | None = None,
        content: bytes | None = None,
        connect_timeout: float | None = None,
        request_timeout: float | None = None,
        total_timeout: float | None = None,
        raise_error: bool = True,
    ) -> None:
        """POST JSON (or raw bytes) to ``path``; the response body is closed
        unread.

        Args:
            path: request URL or path.
            params: query parameters.
            headers: extra headers merged over the JSON Content-Type.
            json: payload serialized with json_dumpb (exclusive with content).
            content: pre-serialized payload (exclusive with json).
            connect_timeout: httpx connect timeout, seconds.
            request_timeout: httpx read timeout, seconds.
            total_timeout: overall asyncio deadline, seconds.
            raise_error: raise for non-2xx responses.

        Raises:
            ValueError: both ``json`` and ``content`` were given.
            asyncio.TimeoutError: ``total_timeout`` exceeded.
            httpx.HTTPStatusError: error response and ``raise_error``.
        """
        request_headers = {"Content-Type": "application/json"}
        if headers:
            request_headers.update(headers)

        # Explicit None checks: the original truthiness tests silently
        # dropped an empty dict payload (json={}) and missed the conflict
        # check when either argument was falsy.
        if json is not None and content is not None:
            raise ValueError("Cannot specify both 'json' and 'content'")

        timeout = None
        if connect_timeout or request_timeout:
            timeout = httpx.Timeout(
                request_timeout or connect_timeout,
                connect=connect_timeout,
                read=request_timeout,
            )

        try:
            res = await asyncio.wait_for(
                self.client.post(
                    path,
                    content=json_dumpb(json) if json is not None else content,
                    headers=request_headers,
                    timeout=timeout,
                    params=params,
                ),
                # httpx timeout controls are additive for each operation
                # (connect, read, write), so we need an asyncio timeout instead
                total_timeout,
            )
            # Raise for retriable errors
            if raise_error:
                res.raise_for_status()
        finally:
            # We don't need the response body, so we close the response
            try:
                await res.aclose()
            except UnboundLocalError:
                # The request failed before `res` was ever assigned.
                pass
69
+ pass
70
+
71
+
72
# Process-wide singleton, created by start_http_client() during app lifespan
# and torn down by stop_http_client(); unset outside that window.
_http_client: JsonHttpClient
73
+
74
+
75
async def start_http_client() -> None:
    """Create the module-level JsonHttpClient singleton."""
    global _http_client
    transport = httpx.AsyncHTTPTransport(
        retries=2,  # this applies only to ConnectError, ConnectTimeout
        limits=httpx.Limits(max_keepalive_connections=10, keepalive_expiry=60.0),
    )
    _http_client = JsonHttpClient(client=httpx.AsyncClient(transport=transport))
87
+
88
+
89
async def stop_http_client() -> None:
    """Close the singleton's underlying httpx client and discard it."""
    global _http_client
    inner = _http_client.client
    await inner.aclose()
    del _http_client
93
+
94
+
95
def get_http_client() -> JsonHttpClient:
    """Return the process-wide client (start_http_client must have run)."""
    return _http_client
97
+
98
+
99
def is_retriable_error(exception: Exception) -> bool:
    """True for httpx errors carrying a 5xx response; False for anything else."""
    if not isinstance(exception, httpx.HTTPError):
        return False
    response = getattr(exception, "response", None)
    return response is not None and response.status_code >= 500


# Retry decorator: up to 3 attempts with jittered exponential backoff,
# re-raising the last error, triggered only by 5xx responses.
retry_http = retry(
    reraise=True,
    retry=retry_if_exception(is_retriable_error),
    wait=wait_exponential_jitter(),
    stop=stop_after_attempt(3),
)
114
+
115
+
116
@retry_http
async def http_request(
    method: str,
    path: str,
    /,
    *,
    params: dict[str, str] | None = None,
    headers: dict[str, str] | None = None,
    body: bytes | str | None = None,
    json: dict | None = None,
    connect_timeout: float | None = 5,
    request_timeout: float | None = 30,
    raise_error: bool = True,
) -> None:
    """Make an HTTP request with retries on 5xx responses.

    Note: despite the original annotation claiming an httpx.Response,
    JsonHttpClient.post closes the response and returns None, so this
    function returns None; errors surface as exceptions when raise_error
    is set.

    Args:
        method: HTTP method (only POST is supported).
        path: URL path (must start with "/", "http://" or "https://").
        params: query parameters.
        headers: request headers.
        body: raw request body (bytes or str; exclusive with json).
        json: JSON body (exclusive with body).
        connect_timeout: connection timeout in seconds.
        request_timeout: read timeout in seconds.
        raise_error: whether to raise for HTTP error statuses.

    Raises:
        ValueError: bad path, both body and json given, or an unsupported
            method.
    """
    if not path.startswith(("http://", "https://", "/")):
        raise ValueError("path must start with / or http")
    # The original silently preferred `body` when both were given.
    if body is not None and json is not None:
        raise ValueError("Cannot specify both 'body' and 'json'")

    client = get_http_client()

    content: bytes | str | None = body
    if content is None and json is not None:
        content = json_dumpb(json)

    if method.upper() != "POST":
        raise ValueError(f"Unsupported HTTP method: {method}")

    return await client.post(
        path,
        params=params,
        headers=headers,
        content=content,
        connect_timeout=connect_timeout,
        request_timeout=request_timeout,
        raise_error=raise_error,
    )
@@ -0,0 +1,89 @@
1
+ import asyncio
2
+ import logging
3
+
4
+ import structlog
5
+ from starlette.requests import ClientDisconnect
6
+ from starlette.types import Message, Receive, Scope, Send
7
+
8
+ from langgraph_api.asyncio import create_task
9
+ from langgraph_api.logging import LOG_JSON
10
+
11
+ asgi = structlog.stdlib.get_logger("asgi")
12
+
13
+ PATHS_IGNORE = {"/ok", "/metrics"}
14
+
15
+
16
class AccessLoggerMiddleware:
    """ASGI middleware emitting one structured access-log line per request.

    Non-http scopes pass through untouched, as do health/metrics paths when
    JSON logging is enabled. At DEBUG level every ASGI receive/send message
    is additionally logged.
    """

    def __init__(
        self,
        app,
        logger: structlog.stdlib.BoundLogger,
    ) -> None:
        self.app = app
        self.logger = logger

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] != "http" or (LOG_JSON and scope.get("path") in PATHS_IGNORE):
            return await self.app(scope, receive, send)  # pragma: no cover

        loop = asyncio.get_event_loop()
        # info["response"] is replaced by the http.response.start message,
        # or overridden with a synthetic status on failure.
        info = {"response": {}}

        if self.logger.isEnabledFor(logging.DEBUG):

            async def inner_receive() -> Message:
                message = await receive()
                asgi.debug(f"ASGI receive {message['type']}", **message)
                return message

            async def inner_send(message: Message) -> None:
                if message["type"] == "http.response.start":
                    info["response"] = message
                await send(message)
                asgi.debug(f"ASGI send {message['type']}", **message)
        else:
            inner_receive = receive

            async def inner_send(message) -> None:
                if message["type"] == "http.response.start":
                    info["response"] = message
                await send(message)

        try:
            info["start_time"] = loop.time()
            await self.app(scope, inner_receive, inner_send)
        except ClientDisconnect:
            # Nginx convention: 499 = client closed the connection.
            info["response"]["status"] = 499
            raise
        except Exception:
            info["response"]["status"] = 500
            raise
        finally:
            info["end_time"] = loop.time()
            latency = int((info["end_time"] - info["start_time"]) * 1_000)
            # Fix: "query_string" may be missing from the scope; the original
            # unconditional .decode() raised AttributeError on None.
            query_string = (scope.get("query_string") or b"").decode()
            # Log asynchronously so the response isn't delayed by logging.
            create_task(
                self.logger.ainfo(
                    f"{scope.get('method')} {scope.get('path')} {info['response'].get('status')} {latency}ms",
                    method=scope.get("method"),
                    path=scope.get("path"),
                    status=info["response"].get("status"),
                    latency_ms=latency,
                    route=scope.get("route"),
                    path_params=scope.get("path_params"),
                    query_string=query_string,
                    proto=scope.get("http_version"),
                    req_header=_headers_to_dict(scope.get("headers")),
                    res_header=_headers_to_dict(info["response"].get("headers")),
                )
            )
79
+
80
+
81
+ HEADERS_IGNORE = {"authorization", "cookie", "set-cookie", "x-api-key"}
82
+
83
+
84
+ def _headers_to_dict(headers: list[tuple[bytes, bytes]] | None) -> dict[str, str]:
85
+ if headers is None:
86
+ return {}
87
+ return {
88
+ k.decode(): v.decode() for k, v in headers if k.lower() not in HEADERS_IGNORE
89
+ }
@@ -0,0 +1,2 @@
1
+ node_modules
2
+ client.schemas.json
@@ -0,0 +1,49 @@
1
import { z } from "zod";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import {
  GraphSchema,
  resolveGraph,
  runGraphSchemaWorker,
} from "./src/graph.mts";

// Directory of this module (ESM provides no __dirname builtin).
const __dirname = new URL(".", import.meta.url).pathname;

/**
 * Build-time entry point: parse the LANGSERVE_GRAPHS env var (JSON object
 * of graphId -> spec string), verify each graph's source file exists,
 * extract each graph's schemas in a worker, and write the combined result
 * to client.schemas.json next to this file.
 *
 * Resolution failures abort the process with exit code 1; per-graph schema
 * extraction failures are logged and that graph's schemas are skipped.
 */
async function main() {
  const specs = z
    .record(z.string())
    .parse(JSON.parse(process.env.LANGSERVE_GRAPHS));

  const GRAPH_SCHEMAS: Record<string, Record<string, GraphSchema>> = {};

  try {
    await Promise.all(
      Object.entries(specs).map(async ([graphId, rawSpec]) => {
        console.info(`[${graphId}]: Checking for source file existence`);
        // Only check that the file exists here; full evaluation happens in
        // the schema worker below.
        const { resolved, ...spec } = await resolveGraph(rawSpec, {
          onlyFilePresence: true,
        });

        try {
          console.info(`[${graphId}]: Extracting schema`);
          GRAPH_SCHEMAS[graphId] = await runGraphSchemaWorker(spec);
        } catch (error) {
          // Best-effort: a graph without extracted schemas is still usable.
          console.error(`[${graphId}]: Error extracting schema: ${error}`);
        }
      })
    );

    await fs.writeFile(
      path.resolve(__dirname, "client.schemas.json"),
      JSON.stringify(GRAPH_SCHEMAS),
      { encoding: "utf-8" }
    );
  } catch (error) {
    console.error(`Error resolving graphs: ${error}`);
    process.exit(1);
  }

  console.info("All graphs resolved");
}

main();