langgraph-api 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langgraph-api might be problematic. Click here for more details.

Files changed (86)
  1. LICENSE +93 -0
  2. langgraph_api/__init__.py +0 -0
  3. langgraph_api/api/__init__.py +63 -0
  4. langgraph_api/api/assistants.py +326 -0
  5. langgraph_api/api/meta.py +71 -0
  6. langgraph_api/api/openapi.py +32 -0
  7. langgraph_api/api/runs.py +463 -0
  8. langgraph_api/api/store.py +116 -0
  9. langgraph_api/api/threads.py +263 -0
  10. langgraph_api/asyncio.py +201 -0
  11. langgraph_api/auth/__init__.py +0 -0
  12. langgraph_api/auth/langsmith/__init__.py +0 -0
  13. langgraph_api/auth/langsmith/backend.py +67 -0
  14. langgraph_api/auth/langsmith/client.py +145 -0
  15. langgraph_api/auth/middleware.py +41 -0
  16. langgraph_api/auth/noop.py +14 -0
  17. langgraph_api/cli.py +209 -0
  18. langgraph_api/config.py +70 -0
  19. langgraph_api/cron_scheduler.py +60 -0
  20. langgraph_api/errors.py +52 -0
  21. langgraph_api/graph.py +314 -0
  22. langgraph_api/http.py +168 -0
  23. langgraph_api/http_logger.py +89 -0
  24. langgraph_api/js/.gitignore +2 -0
  25. langgraph_api/js/build.mts +49 -0
  26. langgraph_api/js/client.mts +849 -0
  27. langgraph_api/js/global.d.ts +6 -0
  28. langgraph_api/js/package.json +33 -0
  29. langgraph_api/js/remote.py +673 -0
  30. langgraph_api/js/server_sent_events.py +126 -0
  31. langgraph_api/js/src/graph.mts +88 -0
  32. langgraph_api/js/src/hooks.mjs +12 -0
  33. langgraph_api/js/src/parser/parser.mts +443 -0
  34. langgraph_api/js/src/parser/parser.worker.mjs +12 -0
  35. langgraph_api/js/src/schema/types.mts +2136 -0
  36. langgraph_api/js/src/schema/types.template.mts +74 -0
  37. langgraph_api/js/src/utils/importMap.mts +85 -0
  38. langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
  39. langgraph_api/js/src/utils/serde.mts +21 -0
  40. langgraph_api/js/tests/api.test.mts +1566 -0
  41. langgraph_api/js/tests/compose-postgres.yml +56 -0
  42. langgraph_api/js/tests/graphs/.gitignore +1 -0
  43. langgraph_api/js/tests/graphs/agent.mts +127 -0
  44. langgraph_api/js/tests/graphs/error.mts +17 -0
  45. langgraph_api/js/tests/graphs/langgraph.json +8 -0
  46. langgraph_api/js/tests/graphs/nested.mts +44 -0
  47. langgraph_api/js/tests/graphs/package.json +7 -0
  48. langgraph_api/js/tests/graphs/weather.mts +57 -0
  49. langgraph_api/js/tests/graphs/yarn.lock +159 -0
  50. langgraph_api/js/tests/parser.test.mts +870 -0
  51. langgraph_api/js/tests/utils.mts +17 -0
  52. langgraph_api/js/yarn.lock +1340 -0
  53. langgraph_api/lifespan.py +41 -0
  54. langgraph_api/logging.py +121 -0
  55. langgraph_api/metadata.py +101 -0
  56. langgraph_api/models/__init__.py +0 -0
  57. langgraph_api/models/run.py +229 -0
  58. langgraph_api/patch.py +42 -0
  59. langgraph_api/queue.py +245 -0
  60. langgraph_api/route.py +118 -0
  61. langgraph_api/schema.py +190 -0
  62. langgraph_api/serde.py +124 -0
  63. langgraph_api/server.py +48 -0
  64. langgraph_api/sse.py +118 -0
  65. langgraph_api/state.py +67 -0
  66. langgraph_api/stream.py +289 -0
  67. langgraph_api/utils.py +60 -0
  68. langgraph_api/validation.py +141 -0
  69. langgraph_api-0.0.1.dist-info/LICENSE +93 -0
  70. langgraph_api-0.0.1.dist-info/METADATA +26 -0
  71. langgraph_api-0.0.1.dist-info/RECORD +86 -0
  72. langgraph_api-0.0.1.dist-info/WHEEL +4 -0
  73. langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
  74. langgraph_license/__init__.py +0 -0
  75. langgraph_license/middleware.py +21 -0
  76. langgraph_license/validation.py +11 -0
  77. langgraph_storage/__init__.py +0 -0
  78. langgraph_storage/checkpoint.py +94 -0
  79. langgraph_storage/database.py +190 -0
  80. langgraph_storage/ops.py +1523 -0
  81. langgraph_storage/queue.py +108 -0
  82. langgraph_storage/retry.py +27 -0
  83. langgraph_storage/store.py +28 -0
  84. langgraph_storage/ttl_dict.py +54 -0
  85. logging.json +22 -0
  86. openapi.json +4304 -0
@@ -0,0 +1,673 @@
1
+ import asyncio
2
+ import os
3
+ import shutil
4
+ from collections.abc import AsyncIterator
5
+ from typing import Any, Literal
6
+
7
+ import httpx
8
+ import orjson
9
+ import structlog
10
+ import uvicorn
11
+ from langchain_core.runnables import Runnable
12
+ from langchain_core.runnables.config import RunnableConfig
13
+ from langchain_core.runnables.graph import Edge, Node
14
+ from langchain_core.runnables.graph import Graph as DrawableGraph
15
+ from langchain_core.runnables.schema import (
16
+ CustomStreamEvent,
17
+ StandardStreamEvent,
18
+ StreamEvent,
19
+ )
20
+ from langgraph.checkpoint.serde.base import SerializerProtocol
21
+ from langgraph.pregel.types import PregelTask, StateSnapshot
22
+ from langgraph.store.base import GetOp, Item, ListNamespacesOp, PutOp, SearchOp
23
+ from langgraph.types import Interrupt
24
+ from pydantic import BaseModel
25
+ from starlette.applications import Starlette
26
+ from starlette.requests import Request
27
+ from starlette.routing import Route
28
+
29
+ from langgraph_api.js.server_sent_events import aconnect_sse
30
+ from langgraph_api.route import ApiResponse
31
+ from langgraph_api.serde import json_dumpb
32
+ from langgraph_api.utils import AsyncConnectionProto
33
+
34
+ logger = structlog.stdlib.get_logger(__name__)
35
+
36
# Unix domain socket paths shared with the JS sidecar process (client.mts):
# GRAPH_SOCKET is served by the JS side; the checkpointer and store sockets
# are served by this module for the JS side to call back into Python.
# All paths are relative to the process working directory.
GRAPH_SOCKET = "./graph.sock"
CHECKPOINTER_SOCKET = "./checkpointer.sock"
STORE_SOCKET = "./store.sock"
39
+
40
+
41
class NoopModel(BaseModel):
    """Empty placeholder model used as node data when reconstructing a
    DrawableGraph from the JS process response (see RemotePregel.fetch_graph)."""

    pass
43
+
44
+
45
class RemoteException(Exception):
    """Error forwarded from the JS process over the SSE stream.

    Carries the remote error name in ``error`` while the human-readable
    message lives in the standard Exception args.
    """

    # Name of the error raised on the JS side (e.g. "ValueError").
    error: str

    def __init__(self, error: str, *args: object) -> None:
        # Keep the remote error name, hand the message args to Exception.
        self.error = error
        super().__init__(*args)

    # Used to nudge the serde to encode like BaseException
    # @see /api/langgraph_api/shared/serde.py:default
    def dict(self):
        payload = {"error": self.error, "message": str(self)}
        return payload
56
+
57
+
58
# Shim for the Pregel API. Will connect to GRAPH_SOCKET
# UNIX socket to communicate with the JS process.
class RemotePregel(Runnable):
    """Runnable facade over a graph hosted in the JS sidecar process.

    Every method serialises its arguments to JSON and POSTs them to the JS
    server listening on GRAPH_SOCKET; streaming endpoints are consumed as
    server-sent events. Only the async surface is implemented — the sync
    schema/graph accessors raise.
    """

    # TODO: implement name overriding
    name: str = "LangGraph"

    # TODO: implement graph_id overriding
    # Graph identifier; used as the URL path prefix for every request,
    # e.g. /{graph_id}/streamEvents.
    graph_id: str

    # HTTP client bound to the GRAPH_SOCKET unix-socket transport.
    _async_client: httpx.AsyncClient

    @staticmethod
    async def load(graph_id: str):
        """Build a RemotePregel for *graph_id* with its own HTTP client."""
        model = RemotePregel()

        model.graph_id = graph_id
        model._async_client = httpx.AsyncClient(
            base_url="http://graph",
            # No timeout: graph runs may stream for arbitrarily long.
            timeout=httpx.Timeout(None),
            limits=httpx.Limits(),
            transport=httpx.AsyncHTTPTransport(uds=GRAPH_SOCKET),
        )

        return model

    async def astream_events(
        self,
        input: Any,
        config: RunnableConfig | None = None,
        *,
        version: Literal["v1", "v2"],
        **kwargs: Any,
    ) -> AsyncIterator[StreamEvent]:
        """Stream v2 events from the JS graph as an async iterator.

        Raises:
            ValueError: if *version* is not "v2".
            RemoteException: when the JS side emits an "error" SSE event.
        """
        if version != "v2":
            raise ValueError("Only v2 of astream_events is supported")

        async with aconnect_sse(
            self._async_client,
            "POST",
            f"/{self.graph_id}/streamEvents",
            headers={"Content-Type": "application/json"},
            data=orjson.dumps({"input": input, "config": config, **kwargs}),
        ) as event_source:
            async for sse in event_source.aiter_sse():
                event = orjson.loads(sse["data"])
                if sse["event"] == "error":
                    raise RemoteException(event["error"], event["message"])
                elif event["event"] == "on_custom_event":
                    yield CustomStreamEvent(**event)
                else:
                    yield StandardStreamEvent(**event)

    async def fetch_state_schema(self):
        """Fetch the graph's JSON schema payload from the JS process."""
        schema = await self._async_client.post(f"/{self.graph_id}/getSchema")
        return orjson.loads(schema.content)

    async def fetch_graph(
        self,
        config: RunnableConfig | None = None,
        *,
        xray: int | bool = False,
    ) -> DrawableGraph:
        """Fetch the graph topology from JS and rebuild a DrawableGraph.

        Node data is unknown on this side, so each node carries a NoopModel
        placeholder instead of a real schema.
        """
        response = (
            await self._async_client.post(
                f"/{self.graph_id}/getGraph",
                headers={"Content-Type": "application/json"},
                data=orjson.dumps({"config": config, "xray": xray}),
            )
        ).json()

        nodes: list[Any] = response.pop("nodes")
        edges: list[Any] = response.pop("edges")

        return DrawableGraph(
            {
                data["id"]: Node(
                    data["id"], data["id"], NoopModel(), data.get("metadata")
                )
                for data in nodes
            },
            {
                Edge(
                    data["source"],
                    data["target"],
                    data.get("data"),
                    data.get("conditional", False),
                )
                for data in edges
            },
        )

    async def fetch_subgraphs(
        self, *, namespace: str | None = None, recurse: bool = False
    ) -> dict[str, dict]:
        """Fetch subgraph schemas from JS, optionally filtered by namespace."""
        return (
            await self._async_client.post(
                f"/{self.graph_id}/getSubgraphs",
                headers={"Content-Type": "application/json"},
                data=orjson.dumps({"namespace": namespace, "recurse": recurse}),
            )
        ).json()

    def _convert_state_snapshot(self, item: dict) -> StateSnapshot:
        """Convert a JSON state payload from JS into a StateSnapshot,
        recursively converting nested task states."""

        def _convert_tasks(tasks: list[dict]) -> tuple[PregelTask, ...]:
            result: list[PregelTask] = []
            for task in tasks:
                state = task.get("state")

                # A dict carrying a "config" key is a nested snapshot payload.
                if state and isinstance(state, dict) and "config" in state:
                    state = self._convert_state_snapshot(state)

                result.append(
                    PregelTask(
                        task["id"],
                        task["name"],
                        tuple(task["path"]) if task.get("path") else tuple(),
                        # TODO: figure out how to properly deserialise errors
                        task.get("error"),
                        tuple(
                            Interrupt(
                                value=interrupt["value"],
                                when=interrupt["when"],
                            )
                            for interrupt in task.get("interrupts")
                        )
                        if task.get("interrupts")
                        else [],
                        state,
                    )
                )
            return tuple(result)

        return StateSnapshot(
            item.get("values"),
            item.get("next"),
            item.get("config"),
            item.get("metadata"),
            item.get("createdAt"),
            item.get("parentConfig"),
            _convert_tasks(item.get("tasks", [])),
        )

    async def aget_state(
        self, config: RunnableConfig, *, subgraphs: bool = False
    ) -> StateSnapshot:
        """Fetch the current state snapshot for *config* from JS."""
        response = await self._async_client.post(
            f"/{self.graph_id}/getState",
            headers={"Content-Type": "application/json"},
            data=orjson.dumps({"config": config, "subgraphs": subgraphs}),
        )
        return self._convert_state_snapshot(response.json())

    async def aupdate_state(
        self,
        config: RunnableConfig,
        values: dict[str, Any] | Any,
        as_node: str | None = None,
    ) -> RunnableConfig:
        """Apply a state update (optionally attributed to *as_node*) and
        return the resulting checkpoint config."""
        response = await self._async_client.post(
            f"/{self.graph_id}/updateState",
            headers={"Content-Type": "application/json"},
            data=orjson.dumps({"config": config, "values": values, "as_node": as_node}),
        )
        return RunnableConfig(**response.json())

    async def aget_state_history(
        self,
        config: RunnableConfig,
        *,
        filter: dict[str, Any] | None = None,
        before: RunnableConfig | None = None,
        limit: int | None = None,
    ) -> AsyncIterator[StateSnapshot]:
        """Stream historical state snapshots for *config* over SSE."""
        async with aconnect_sse(
            self._async_client,
            "POST",
            f"/{self.graph_id}/getStateHistory",
            headers={"Content-Type": "application/json"},
            data=orjson.dumps(
                {"config": config, "limit": limit, "filter": filter, "before": before}
            ),
        ) as event_source:
            async for sse in event_source.aiter_sse():
                yield self._convert_state_snapshot(orjson.loads(sse["data"]))

    # Sync Runnable API is intentionally unsupported: the graph lives in
    # another process and is only reachable through the async client.
    def get_graph(
        self,
        config: RunnableConfig | None = None,
        *,
        xray: int | bool = False,
    ) -> dict[str, Any]:
        raise Exception("Not implemented")

    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
        raise Exception("Not implemented")

    def get_output_schema(
        self, config: RunnableConfig | None = None
    ) -> type[BaseModel]:
        raise Exception("Not implemented")

    def config_schema(self) -> type[BaseModel]:
        raise Exception("Not implemented")

    async def invoke(self, input: Any, config: RunnableConfig | None = None):
        raise Exception("Not implemented")
264
+
265
+
266
async def run_js_process(paths_str: str, watch: bool = False):
    """Spawn and supervise the JS graphs subprocess via ``tsx``.

    Args:
        paths_str: serialized graph spec passed to the JS process through
            the LANGSERVE_GRAPHS environment variable.
        watch: when True, run ``tsx watch`` so the process restarts on
            file changes (development mode).

    The subprocess is restarted up to 3 times if it exits; on task
    cancellation the subprocess is terminated and the cancellation
    re-raised.

    Raises:
        FileNotFoundError: if ``tsx`` is not on PATH.
    """
    # check if tsx is available
    tsx_path = shutil.which("tsx")
    if tsx_path is None:
        raise FileNotFoundError("tsx not found in PATH")
    attempt = 0
    while True:
        client_file = os.path.join(os.path.dirname(__file__), "client.mts")
        args = ("tsx", client_file)
        if watch:
            args = ("tsx", "watch", client_file, "--skip-schema-cache")
        try:
            process = await asyncio.create_subprocess_exec(
                *args,
                env={
                    "LANGSERVE_GRAPHS": paths_str,
                    "LANGCHAIN_CALLBACKS_BACKGROUND": "true",
                    "CHOKIDAR_USEPOLLING": "true",
                    # os.environ last so real environment wins on conflicts.
                    **os.environ,
                },
            )
            # A normal exit is still a failure: the JS server should run
            # forever, so any exit code triggers the retry logic below.
            code = await process.wait()
            raise Exception(f"JS process exited with code {code}")
        except asyncio.CancelledError:
            logger.info("Terminating JS graphs process")
            try:
                process.terminate()
                await process.wait()
            # UnboundLocalError: spawn itself failed, `process` never bound.
            # ProcessLookupError: process already gone.
            except (UnboundLocalError, ProcessLookupError):
                pass
            raise
        except Exception:
            if attempt >= 3:
                raise
            else:
                logger.warning(f"Retrying JS process {3 - attempt} more times...")
                attempt += 1
303
+
304
+
305
def _get_passthrough_checkpointer(conn: AsyncConnectionProto):
    """Build a Checkpointer over *conn* whose serde passes JSON through
    verbatim instead of reviving LangChain objects.

    Used by the checkpointer HTTP shim: the JS side exchanges plain JSON,
    so no Python-side object revival should happen.
    """
    from langgraph_storage.checkpoint import Checkpointer

    class PassthroughSerialiser(SerializerProtocol):
        """Serde that round-trips plain JSON bytes with no object revival."""

        def dumps(self, obj: Any) -> bytes:
            return json_dumpb(obj)

        def dumps_typed(self, obj: Any) -> tuple[str, bytes]:
            # Everything is tagged as plain "json".
            return "json", json_dumpb(obj)

        def loads(self, data: bytes) -> Any:
            return orjson.loads(data)

        def loads_typed(self, data: tuple[str, bytes]) -> Any:
            # Renamed from `type` to avoid shadowing the builtin.
            kind, payload = data
            if kind != "json":
                raise ValueError(f"Unsupported type {kind}")
            return orjson.loads(payload)

    checkpointer = Checkpointer(conn)

    # This checkpointer does not attempt to revive LC-objects.
    # Instead, it will pass through the JSON values as-is.
    checkpointer.serde = PassthroughSerialiser()

    return checkpointer
331
+
332
+
333
# Setup a HTTP server on top of CHECKPOINTER_SOCKET unix socket
# used by `client.mts` to communicate with the Python checkpointer
async def run_remote_checkpointer():
    """Serve the Python checkpointer over CHECKPOINTER_SOCKET so the JS
    process can list/get/put checkpoints and writes via HTTP.

    Each handler opens a fresh DB connection and uses the passthrough
    (JSON-as-is) checkpointer, since the JS side speaks plain JSON.
    """
    from langgraph_storage.database import connect

    # Search checkpoints
    # (renamed from `list` so the builtin is not shadowed)
    async def list_checkpoints(request: Request):
        """POST /list — return checkpoints matching config/filter/before/limit."""
        payload = orjson.loads(await request.body())
        result = []
        async with connect() as conn:
            checkpointer = _get_passthrough_checkpointer(conn)
            async for item in checkpointer.alist(
                config=payload.get("config"),
                limit=payload.get("limit"),
                before=payload.get("before"),
                filter=payload.get("filter"),
            ):
                result.append(item)

        return ApiResponse(result)

    # Put the new checkpoint metadata
    async def put(request: Request):
        """POST /put — store a checkpoint plus its metadata."""
        payload = orjson.loads(await request.body())
        async with connect() as conn:
            checkpointer = _get_passthrough_checkpointer(conn)
            result = await checkpointer.aput(
                payload["config"],
                payload["checkpoint"],
                payload["metadata"],
                payload.get("new_versions", {}),
            )
        return ApiResponse(result)

    # Get actual checkpoint values (reads)
    async def get_tuple(request: Request):
        """POST /get_tuple — fetch the checkpoint tuple for a config."""
        payload = orjson.loads(await request.body())

        async with connect() as conn:
            checkpointer = _get_passthrough_checkpointer(conn)
            result = await checkpointer.aget_tuple(config=payload["config"])
        return ApiResponse(result)

    # Put actual checkpoint values (writes)
    async def put_writes(request: Request):
        """POST /put_writes — store pending writes for a task."""
        payload = orjson.loads(await request.body())

        async with connect() as conn:
            checkpointer = _get_passthrough_checkpointer(conn)
            result = await checkpointer.aput_writes(
                payload["config"],
                payload["writes"],
                payload["taskId"],
            )

        return ApiResponse(result)

    remote = Starlette(
        routes=[
            Route("/get_tuple", get_tuple, methods=["POST"]),
            Route("/list", list_checkpoints, methods=["POST"]),
            Route("/put", put, methods=["POST"]),
            Route("/put_writes", put_writes, methods=["POST"]),
            Route("/ok", lambda _: ApiResponse({"ok": True}), methods=["GET"]),
        ]
    )

    server = uvicorn.Server(
        uvicorn.Config(
            remote,
            uds=CHECKPOINTER_SOCKET,
            # We need to _explicitly_ set these values in order
            # to avoid reinitialising the logger, which removes
            # the structlog logger setup before.
            # See: https://github.com/encode/uvicorn/blob/8f4c8a7f34914c16650ebd026127b96560425fde/uvicorn/config.py#L357-L393
            log_config=None,
            log_level=None,
            access_log=True,
        )
    )
    await server.serve()
414
+
415
+
416
def _get_passthrough_store(conn: AsyncConnectionProto):
    """Build a Store bound to *conn* for the HTTP store shim."""
    from langgraph_storage.store import Store

    return Store(conn)
422
+
423
+
424
async def run_remote_store():
    """Serve the Python store over STORE_SOCKET so the JS process can
    read/write/search items and namespaces via HTTP.

    Each handler opens a fresh DB connection and uses the passthrough
    store; payloads are plain JSON in both directions.
    """
    from langgraph_storage.database import connect

    async def abatch(request: Request):
        """POST /items/batch — run a heterogeneous batch of store ops.

        The operation type is inferred from the keys present on each raw
        op dict (value → Put, namespace_prefix → Search, namespace+key →
        Get, match_conditions → ListNamespaces).
        """
        payload = orjson.loads(await request.body())
        operations = payload.get("operations", [])

        if not operations:
            return ApiResponse({"error": "No operations provided"}, status_code=400)

        # Convert raw operations to proper objects
        processed_operations = []
        for op in operations:
            if "value" in op:
                processed_operations.append(
                    PutOp(
                        namespace=tuple(op["namespace"]),
                        key=op["key"],
                        value=op["value"],
                    )
                )
            elif "namespace_prefix" in op:
                processed_operations.append(
                    SearchOp(
                        namespace_prefix=tuple(op["namespace_prefix"]),
                        filter=op.get("filter"),
                        limit=op.get("limit", 10),
                        offset=op.get("offset", 0),
                    )
                )

            elif "namespace" in op and "key" in op:
                processed_operations.append(
                    GetOp(namespace=tuple(op["namespace"]), key=op["key"])
                )
            elif "match_conditions" in op:
                processed_operations.append(
                    ListNamespacesOp(
                        match_conditions=tuple(op["match_conditions"]),
                        max_depth=op.get("max_depth"),
                        limit=op.get("limit", 100),
                        offset=op.get("offset", 0),
                    )
                )
            else:
                return ApiResponse(
                    {"error": f"Unknown operation type: {op}"}, status_code=400
                )

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            results = await store.abatch(processed_operations)

        # Handle potentially undefined or non-dict results
        processed_results = []
        # Result is of type: Union[Item, list[Item], list[tuple[str, ...]], None]
        for result in results:
            if isinstance(result, Item):
                processed_results.append(result.dict())
            elif isinstance(result, dict):
                processed_results.append(result)
            elif isinstance(result, list):
                coerced = []
                for res in result:
                    if isinstance(res, Item):
                        coerced.append(res.dict())
                    elif isinstance(res, tuple):
                        # Namespace tuples become JSON arrays.
                        coerced.append(list(res))
                    elif res is None:
                        coerced.append(res)
                    else:
                        # Last-resort stringification keeps the response serialisable.
                        coerced.append(str(res))
                processed_results.append(coerced)
            elif result is None:
                processed_results.append(None)
            else:
                processed_results.append(str(result))
        return ApiResponse(processed_results)

    # List all stores
    async def aget(request: Request):
        """GET /items — fetch one item by dotted namespace string and key."""
        namespaces_str = request.query_params.get("namespaces")
        key = request.query_params.get("key")

        if not namespaces_str or not key:
            return ApiResponse(
                {"error": "Both namespaces and key are required"}, status_code=400
            )

        # Namespace components are dot-separated in the query string.
        namespaces = namespaces_str.split(".")

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            result = await store.aget(namespaces, key)

        return ApiResponse(result)

    # Put the new store data
    async def aput(request: Request):
        """PUT /items — store a value under (namespace, key)."""
        payload = orjson.loads(await request.body())
        namespace = tuple(payload["namespace"].split("."))
        key = payload["key"]
        value = payload["value"]

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            await store.aput(namespace, key, value)

        return ApiResponse({"success": True})

    # Search stores
    async def asearch(request: Request):
        """POST /items/search — search items under a namespace prefix."""
        payload = orjson.loads(await request.body())
        namespace_prefix = tuple(payload["namespace_prefix"])
        filter = payload.get("filter")
        limit = payload.get("limit", 10)
        offset = payload.get("offset", 0)

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            result = await store.asearch(
                namespace_prefix, filter=filter, limit=limit, offset=offset
            )

        return ApiResponse([item.dict() for item in result])

    # Delete store data
    async def adelete(request: Request):
        """DELETE /items — delete the item at (namespace, key)."""
        payload = orjson.loads(await request.body())
        namespace = tuple(payload["namespace"])
        key = payload["key"]

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            await store.adelete(namespace, key)

        return ApiResponse({"success": True})

    # List all namespaces
    async def alist_namespaces(request: Request):
        """POST /list/namespaces — list namespaces matching prefix/suffix."""
        payload = orjson.loads(await request.body())
        # Empty lists collapse to None so the store treats them as "no filter".
        prefix = tuple(payload.get("prefix", [])) or None
        suffix = tuple(payload.get("suffix", [])) or None
        max_depth = payload.get("max_depth")
        limit = payload.get("limit", 100)
        offset = payload.get("offset", 0)

        async with connect() as conn:
            store = _get_passthrough_store(conn)
            result = await store.alist_namespaces(
                prefix=prefix,
                suffix=suffix,
                max_depth=max_depth,
                limit=limit,
                offset=offset,
            )

        return ApiResponse([list(ns) for ns in result])

    remote = Starlette(
        routes=[
            Route("/items", aget, methods=["GET"]),
            Route("/items", aput, methods=["PUT"]),
            Route("/items", adelete, methods=["DELETE"]),
            Route("/items/search", asearch, methods=["POST"]),
            Route("/list/namespaces", alist_namespaces, methods=["POST"]),
            Route("/items/batch", abatch, methods=["POST"]),
            Route("/ok", lambda _: ApiResponse({"ok": True}), methods=["GET"]),
        ]
    )
    server = uvicorn.Server(
        uvicorn.Config(
            remote,
            uds=STORE_SOCKET,
            # We need to _explicitly_ set these values in order
            # to avoid reinitialising the logger, which removes
            # the structlog logger setup before.
            # See: https://github.com/encode/uvicorn/blob/8f4c8a7f34914c16650ebd026127b96560425fde/uvicorn/config.py#L357-L393
            log_config=None,
            log_level=None,
            access_log=True,
        )
    )
    await server.serve()
608
+
609
+
610
async def wait_until_js_ready():
    """Block until the JS graph server and the checkpointer/store shims
    all answer GET /ok, polling roughly every half second.

    Re-raises the last httpx.HTTPError once the retry budget (about two
    minutes) is exhausted.
    """
    async with (
        httpx.AsyncClient(
            base_url="http://graph",
            transport=httpx.AsyncHTTPTransport(uds=GRAPH_SOCKET),
            limits=httpx.Limits(),
        ) as graph_client,
        httpx.AsyncClient(
            base_url="http://checkpointer",
            transport=httpx.AsyncHTTPTransport(uds=CHECKPOINTER_SOCKET),
            limits=httpx.Limits(),
        ) as checkpointer_client,
        httpx.AsyncClient(
            base_url="http://store",
            transport=httpx.AsyncHTTPTransport(uds=STORE_SOCKET),
            limits=httpx.Limits(),
        ) as store_client,
    ):
        clients = (graph_client, checkpointer_client, store_client)
        failures = 0
        while True:
            try:
                # Every endpoint must respond with a success status.
                for client in clients:
                    response = await client.get("/ok")
                    response.raise_for_status()
                return
            except httpx.HTTPError:
                failures += 1
                # Same budget as the original attempt counter: give up
                # on the 242nd consecutive failure.
                if failures > 241:
                    raise
                await asyncio.sleep(0.5)
644
+
645
+
646
async def js_healthcheck():
    """Return True when the graph, checkpointer and store sockets all
    answer GET /ok successfully; False on any HTTP error."""
    async with (
        httpx.AsyncClient(
            base_url="http://graph",
            transport=httpx.AsyncHTTPTransport(uds=GRAPH_SOCKET),
            limits=httpx.Limits(),
        ) as graph_client,
        httpx.AsyncClient(
            base_url="http://checkpointer",
            transport=httpx.AsyncHTTPTransport(uds=CHECKPOINTER_SOCKET),
            limits=httpx.Limits(),
        ) as checkpointer_client,
        httpx.AsyncClient(
            base_url="http://store",
            transport=httpx.AsyncHTTPTransport(uds=STORE_SOCKET),
            limits=httpx.Limits(),
        ) as store_client,
    ):
        try:
            # Probe each shim in turn; any failure means "unhealthy".
            for client in (graph_client, checkpointer_client, store_client):
                response = await client.get("/ok")
                response.raise_for_status()
        except httpx.HTTPError:
            return False
        return True