langgraph_api-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

Files changed (86)
  1. LICENSE +93 -0
  2. langgraph_api/__init__.py +0 -0
  3. langgraph_api/api/__init__.py +63 -0
  4. langgraph_api/api/assistants.py +326 -0
  5. langgraph_api/api/meta.py +71 -0
  6. langgraph_api/api/openapi.py +32 -0
  7. langgraph_api/api/runs.py +463 -0
  8. langgraph_api/api/store.py +116 -0
  9. langgraph_api/api/threads.py +263 -0
  10. langgraph_api/asyncio.py +201 -0
  11. langgraph_api/auth/__init__.py +0 -0
  12. langgraph_api/auth/langsmith/__init__.py +0 -0
  13. langgraph_api/auth/langsmith/backend.py +67 -0
  14. langgraph_api/auth/langsmith/client.py +145 -0
  15. langgraph_api/auth/middleware.py +41 -0
  16. langgraph_api/auth/noop.py +14 -0
  17. langgraph_api/cli.py +209 -0
  18. langgraph_api/config.py +70 -0
  19. langgraph_api/cron_scheduler.py +60 -0
  20. langgraph_api/errors.py +52 -0
  21. langgraph_api/graph.py +314 -0
  22. langgraph_api/http.py +168 -0
  23. langgraph_api/http_logger.py +89 -0
  24. langgraph_api/js/.gitignore +2 -0
  25. langgraph_api/js/build.mts +49 -0
  26. langgraph_api/js/client.mts +849 -0
  27. langgraph_api/js/global.d.ts +6 -0
  28. langgraph_api/js/package.json +33 -0
  29. langgraph_api/js/remote.py +673 -0
  30. langgraph_api/js/server_sent_events.py +126 -0
  31. langgraph_api/js/src/graph.mts +88 -0
  32. langgraph_api/js/src/hooks.mjs +12 -0
  33. langgraph_api/js/src/parser/parser.mts +443 -0
  34. langgraph_api/js/src/parser/parser.worker.mjs +12 -0
  35. langgraph_api/js/src/schema/types.mts +2136 -0
  36. langgraph_api/js/src/schema/types.template.mts +74 -0
  37. langgraph_api/js/src/utils/importMap.mts +85 -0
  38. langgraph_api/js/src/utils/pythonSchemas.mts +28 -0
  39. langgraph_api/js/src/utils/serde.mts +21 -0
  40. langgraph_api/js/tests/api.test.mts +1566 -0
  41. langgraph_api/js/tests/compose-postgres.yml +56 -0
  42. langgraph_api/js/tests/graphs/.gitignore +1 -0
  43. langgraph_api/js/tests/graphs/agent.mts +127 -0
  44. langgraph_api/js/tests/graphs/error.mts +17 -0
  45. langgraph_api/js/tests/graphs/langgraph.json +8 -0
  46. langgraph_api/js/tests/graphs/nested.mts +44 -0
  47. langgraph_api/js/tests/graphs/package.json +7 -0
  48. langgraph_api/js/tests/graphs/weather.mts +57 -0
  49. langgraph_api/js/tests/graphs/yarn.lock +159 -0
  50. langgraph_api/js/tests/parser.test.mts +870 -0
  51. langgraph_api/js/tests/utils.mts +17 -0
  52. langgraph_api/js/yarn.lock +1340 -0
  53. langgraph_api/lifespan.py +41 -0
  54. langgraph_api/logging.py +121 -0
  55. langgraph_api/metadata.py +101 -0
  56. langgraph_api/models/__init__.py +0 -0
  57. langgraph_api/models/run.py +229 -0
  58. langgraph_api/patch.py +42 -0
  59. langgraph_api/queue.py +245 -0
  60. langgraph_api/route.py +118 -0
  61. langgraph_api/schema.py +190 -0
  62. langgraph_api/serde.py +124 -0
  63. langgraph_api/server.py +48 -0
  64. langgraph_api/sse.py +118 -0
  65. langgraph_api/state.py +67 -0
  66. langgraph_api/stream.py +289 -0
  67. langgraph_api/utils.py +60 -0
  68. langgraph_api/validation.py +141 -0
  69. langgraph_api-0.0.1.dist-info/LICENSE +93 -0
  70. langgraph_api-0.0.1.dist-info/METADATA +26 -0
  71. langgraph_api-0.0.1.dist-info/RECORD +86 -0
  72. langgraph_api-0.0.1.dist-info/WHEEL +4 -0
  73. langgraph_api-0.0.1.dist-info/entry_points.txt +3 -0
  74. langgraph_license/__init__.py +0 -0
  75. langgraph_license/middleware.py +21 -0
  76. langgraph_license/validation.py +11 -0
  77. langgraph_storage/__init__.py +0 -0
  78. langgraph_storage/checkpoint.py +94 -0
  79. langgraph_storage/database.py +190 -0
  80. langgraph_storage/ops.py +1523 -0
  81. langgraph_storage/queue.py +108 -0
  82. langgraph_storage/retry.py +27 -0
  83. langgraph_storage/store.py +28 -0
  84. langgraph_storage/ttl_dict.py +54 -0
  85. logging.json +22 -0
  86. openapi.json +4304 -0

langgraph_api/api/runs.py
@@ -0,0 +1,463 @@
+ import asyncio
+ from collections.abc import AsyncIterator
+
+ import orjson
+ from langgraph.checkpoint.base.id import uuid6
+ from starlette.responses import Response, StreamingResponse
+
+ from langgraph_api import config
+ from langgraph_api.asyncio import ValueEvent, aclosing
+ from langgraph_api.models.run import create_valid_run
+ from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
+ from langgraph_api.sse import EventSourceResponse
+ from langgraph_api.utils import fetchone, validate_uuid
+ from langgraph_api.validation import (
+     CronCreate,
+     CronSearch,
+     RunBatchCreate,
+     RunCreateStateful,
+     RunCreateStateless,
+ )
+ from langgraph_license.validation import plus_features_enabled
+ from langgraph_storage.database import connect
+ from langgraph_storage.ops import Crons, Runs, Threads
+ from langgraph_storage.retry import retry_db
+
+
+ def get_user_id(request: ApiRequest) -> str | None:
+     try:
+         return request.user.display_name
+     except AssertionError:
+         return None
+
+
+ @retry_db
+ async def create_run(request: ApiRequest):
+     """Create a run."""
+     thread_id = request.path_params["thread_id"]
+     payload = await request.json(RunCreateStateful)
+     async with connect() as conn:
+         run = await create_valid_run(
+             conn,
+             thread_id,
+             payload,
+             get_user_id(request),
+             request.headers,
+         )
+     return ApiResponse(run)
+
+
+ @retry_db
+ async def create_stateless_run(request: ApiRequest):
+     """Create a stateless run."""
+     payload = await request.json(RunCreateStateless)
+     async with connect() as conn:
+         run = await create_valid_run(
+             conn, None, payload, get_user_id(request), request.headers
+         )
+     return ApiResponse(run)
+
+
+ async def create_stateless_run_batch(request: ApiRequest):
+     """Create a batch of stateless background runs."""
+     batch_payload = await request.json(RunBatchCreate)
+     async with connect() as conn, conn.pipeline():
+         # barrier so all queries are sent before fetching any results
+         barrier = asyncio.Barrier(len(batch_payload))
+         coros = [
+             create_valid_run(
+                 conn, None, payload, get_user_id(request), request.headers, barrier
+             )
+             for payload in batch_payload
+         ]
+         runs = await asyncio.gather(*coros)
+
+     return ApiResponse(runs)
+
+
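+ # editor's note: the Barrier above pairs with conn.pipeline() so that every
+ # coroutine finishes sending its insert before any of them starts fetching
+ # results, keeping sends and receives from interleaving on the shared
+ # pipelined connection.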
+ async def stream_run(
+     request: ApiRequest,
+ ):
+     """Create a run and stream its output."""
+     thread_id = request.path_params["thread_id"]
+     payload = await request.json(RunCreateStateful)
+     on_disconnect = payload.get("on_disconnect", "continue")
+     run_id = uuid6()
+     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
+
+     try:
+         async with connect() as conn:
+             run = await create_valid_run(
+                 conn,
+                 thread_id,
+                 payload,
+                 get_user_id(request),
+                 request.headers,
+                 run_id=run_id,
+             )
+     except Exception:
+         if not sub.cancelled():
+             handle = await sub
+             await handle.__aexit__(None, None, None)
+         raise
+
+     return EventSourceResponse(
+         Runs.Stream.join(
+             run["run_id"],
+             thread_id=thread_id,
+             cancel_on_disconnect=on_disconnect == "cancel",
+             stream_mode=await sub,
+         ),
+         headers={"Location": f"/threads/{thread_id}/runs/{run['run_id']}/stream"},
+     )
+
+
+ async def stream_run_stateless(
+     request: ApiRequest,
+ ):
+     """Create a stateless run and stream its output."""
+     payload = await request.json(RunCreateStateless)
+     on_disconnect = payload.get("on_disconnect", "continue")
+     run_id = uuid6()
+     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
+
+     try:
+         async with connect() as conn:
+             run = await create_valid_run(
+                 conn,
+                 None,
+                 payload,
+                 get_user_id(request),
+                 request.headers,
+                 run_id=run_id,
+             )
+     except Exception:
+         if not sub.cancelled():
+             handle = await sub
+             await handle.__aexit__(None, None, None)
+         raise
+
+     return EventSourceResponse(
+         Runs.Stream.join(
+             run["run_id"],
+             thread_id=run["thread_id"],
+             ignore_404=True,
+             cancel_on_disconnect=on_disconnect == "cancel",
+             stream_mode=await sub,
+         ),
+         headers={
+             "Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}/stream"
+         },
+     )
+
+
+ @retry_db
+ async def wait_run(request: ApiRequest):
+     """Create a run, wait for the output."""
+     thread_id = request.path_params["thread_id"]
+     payload = await request.json(RunCreateStateful)
+     run_id = uuid6()
+     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
+
+     try:
+         async with connect() as conn:
+             run = await create_valid_run(
+                 conn,
+                 thread_id,
+                 payload,
+                 get_user_id(request),
+                 request.headers,
+                 run_id=run_id,
+             )
+     except Exception:
+         if not sub.cancelled():
+             handle = await sub
+             await handle.__aexit__(None, None, None)
+         raise
+
+     last_chunk = ValueEvent()
+
+     async def consume():
+         vchunk: bytes | None = None
+         async with aclosing(
+             Runs.Stream.join(
+                 run["run_id"], thread_id=run["thread_id"], stream_mode=await sub
+             )
+         ) as stream:
+             async for mode, chunk in stream:
+                 if mode == b"values":
+                     vchunk = chunk
+                 elif mode == b"error":
+                     vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+         last_chunk.set(vchunk)
+
+     # keep the connection open by sending whitespace every 5 seconds
+     # leading whitespace will be ignored by json parsers
+     async def body() -> AsyncIterator[bytes]:
+         stream = asyncio.create_task(consume())
+         while True:
+             try:
+                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                 break
+             except TimeoutError:
+                 yield b"\n"
+             except asyncio.CancelledError:
+                 stream.cancel()
+                 await stream
+                 raise
+
+     return StreamingResponse(
+         body(),
+         media_type="application/json",
+         headers={"Location": f"/threads/{thread_id}/runs/{run['run_id']}/join"},
+     )
+
+
+ @retry_db
+ async def wait_run_stateless(request: ApiRequest):
+     """Create a stateless run, wait for the output."""
+     payload = await request.json(RunCreateStateless)
+     run_id = uuid6()
+     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
+
+     try:
+         async with connect() as conn:
+             run = await create_valid_run(
+                 conn,
+                 None,
+                 payload,
+                 get_user_id(request),
+                 request.headers,
+                 run_id=run_id,
+             )
+     except Exception:
+         if not sub.cancelled():
+             handle = await sub
+             await handle.__aexit__(None, None, None)
+         raise
+
+     last_chunk = ValueEvent()
+
+     async def consume():
+         vchunk: bytes | None = None
+         async with aclosing(
+             Runs.Stream.join(
+                 run["run_id"], thread_id=run["thread_id"], stream_mode=await sub
+             )
+         ) as stream:
+             async for mode, chunk in stream:
+                 if mode == b"values":
+                     vchunk = chunk
+                 elif mode == b"error":
+                     vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
+         last_chunk.set(vchunk)
+
+     # keep the connection open by sending whitespace every 5 seconds
+     # leading whitespace will be ignored by json parsers
+     async def body() -> AsyncIterator[bytes]:
+         stream = asyncio.create_task(consume())
+         while True:
+             try:
+                 yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                 break
+             except TimeoutError:
+                 yield b"\n"
+             except asyncio.CancelledError:
+                 stream.cancel()
+                 await stream
+                 raise
+
+     return StreamingResponse(
+         body(),
+         media_type="application/json",
+         headers={"Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}/join"},
+     )
+
+
+ @retry_db
+ async def list_runs_http(
+     request: ApiRequest,
+ ):
+     """List all runs for a thread."""
+     thread_id = request.path_params["thread_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     limit = int(request.query_params.get("limit", 10))
+     offset = int(request.query_params.get("offset", 0))
+     async with connect() as conn, conn.pipeline():
+         thread, runs = await asyncio.gather(
+             Threads.get(conn, thread_id),
+             Runs.search(conn, thread_id, limit=limit, offset=offset, metadata=None),
+         )
+         await fetchone(thread)
+         return ApiResponse([run async for run in runs])
+
+
+ @retry_db
+ async def get_run_http(request: ApiRequest):
+     """Get a run by ID."""
+     thread_id = request.path_params["thread_id"]
+     run_id = request.path_params["run_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     validate_uuid(run_id, "Invalid run ID: must be a UUID")
+     async with connect() as conn, conn.pipeline():
+         thread, run = await asyncio.gather(
+             Threads.get(conn, thread_id),
+             Runs.get(conn, run_id, thread_id=thread_id),
+         )
+         await fetchone(thread)
+         return ApiResponse(await fetchone(run))
+
+
+ @retry_db
+ async def join_run(request: ApiRequest):
+     """Wait for a run to finish."""
+     thread_id = request.path_params["thread_id"]
+     run_id = request.path_params["run_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     validate_uuid(run_id, "Invalid run ID: must be a UUID")
+     return ApiResponse(await Runs.join(run_id, thread_id=thread_id))
+
+
+ @retry_db
+ async def join_run_stream_endpoint(request: ApiRequest):
+     """Wait for a run to finish, streaming its output."""
+     thread_id = request.path_params["thread_id"]
+     run_id = request.path_params["run_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     validate_uuid(run_id, "Invalid run ID: must be a UUID")
+     return EventSourceResponse(Runs.Stream.join(run_id, thread_id=thread_id))
+
+
+ @retry_db
+ async def cancel_run(
+     request: ApiRequest,
+ ):
+     """Cancel a run."""
+     thread_id = request.path_params["thread_id"]
+     run_id = request.path_params["run_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     validate_uuid(run_id, "Invalid run ID: must be a UUID")
+     wait_str = request.query_params.get("wait", "false")
+     wait = wait_str.lower() in {"true", "yes", "1"}
+     action_str = request.query_params.get("action", "interrupt")
+     action = action_str if action_str in {"interrupt", "rollback"} else "interrupt"
+     async with connect() as conn:
+         await Runs.cancel(conn, [run_id], action=action, thread_id=thread_id)
+     if wait:
+         await Runs.join(run_id, thread_id=thread_id)
+     return Response(status_code=204 if wait else 202)
+
+
+ @retry_db
+ async def delete_run(request: ApiRequest):
+     """Delete a run by ID."""
+     thread_id = request.path_params["thread_id"]
+     run_id = request.path_params["run_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     validate_uuid(run_id, "Invalid run ID: must be a UUID")
+     async with connect() as conn:
+         rid = await Runs.delete(conn, run_id, thread_id=thread_id)
+         await fetchone(rid)
+     return Response(status_code=204)
+
+
+ @retry_db
+ async def create_cron(request: ApiRequest):
+     """Create a cron with a new thread."""
+     payload = await request.json(CronCreate)
+
+     async with connect() as conn:
+         cron = await Crons.put(
+             conn,
+             user_id=get_user_id(request),
+             thread_id=None,
+             end_time=payload.get("end_time"),
+             schedule=payload.get("schedule"),
+             payload=payload,
+         )
+         return ApiResponse(await fetchone(cron))
+
+
+ @retry_db
+ async def create_thread_cron(request: ApiRequest):
+     """Create a thread-specific cron."""
+     thread_id = request.path_params["thread_id"]
+     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     payload = await request.json(CronCreate)
+
+     async with connect() as conn:
+         cron = await Crons.put(
+             conn,
+             user_id=get_user_id(request),
+             thread_id=thread_id,
+             end_time=payload.get("end_time"),
+             schedule=payload.get("schedule"),
+             payload=payload,
+         )
+         return ApiResponse(await fetchone(cron))
+
+
+ @retry_db
+ async def delete_cron(request: ApiRequest):
+     """Delete a cron by ID."""
+     cron_id = request.path_params["cron_id"]
+     validate_uuid(cron_id, "Invalid cron ID: must be a UUID")
+
+     async with connect() as conn:
+         cid = await Crons.delete(conn, cron_id)
+         await fetchone(cid)
+     return Response(status_code=204)
+
+
+ @retry_db
+ async def search_crons(request: ApiRequest):
+     """Search cron jobs, optionally filtering by assistant or thread."""
+     payload = await request.json(CronSearch)
+     if assistant_id := payload.get("assistant_id"):
+         validate_uuid(assistant_id, "Invalid assistant ID: must be a UUID")
+     if thread_id := payload.get("thread_id"):
+         validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+     async with connect() as conn:
+         crons_iter = await Crons.search(
+             conn,
+             assistant_id=assistant_id,
+             thread_id=thread_id,
+             limit=int(payload.get("limit", 10)),
+             offset=int(payload.get("offset", 0)),
+         )
+         return ApiResponse([cron async for cron in crons_iter])
+
+
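+ # editor's note: cron endpoints are gated on both the FF_CRONS_ENABLED config
+ # flag and a plus-features license check; routes that fail the gate evaluate
+ # to None and are stripped by the filter after the list below.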
+ runs_routes = [
+     ApiRoute("/runs/stream", stream_run_stateless, methods=["POST"]),
+     ApiRoute("/runs/wait", wait_run_stateless, methods=["POST"]),
+     ApiRoute("/runs", create_stateless_run, methods=["POST"]),
+     ApiRoute("/runs/batch", create_stateless_run_batch, methods=["POST"]),
+     ApiRoute("/runs/crons", create_cron, methods=["POST"])
+     if config.FF_CRONS_ENABLED and plus_features_enabled()
+     else None,
+     ApiRoute("/runs/crons/search", search_crons, methods=["POST"])
+     if config.FF_CRONS_ENABLED and plus_features_enabled()
+     else None,
+     ApiRoute("/threads/{thread_id}/runs/{run_id}/join", join_run, methods=["GET"]),
+     ApiRoute(
+         "/threads/{thread_id}/runs/{run_id}/stream",
+         join_run_stream_endpoint,
+         methods=["GET"],
+     ),
+     ApiRoute("/threads/{thread_id}/runs/{run_id}/cancel", cancel_run, methods=["POST"]),
+     ApiRoute("/threads/{thread_id}/runs/{run_id}", get_run_http, methods=["GET"]),
+     ApiRoute("/threads/{thread_id}/runs/{run_id}", delete_run, methods=["DELETE"]),
+     ApiRoute("/threads/{thread_id}/runs/stream", stream_run, methods=["POST"]),
+     ApiRoute("/threads/{thread_id}/runs/wait", wait_run, methods=["POST"]),
+     ApiRoute("/threads/{thread_id}/runs", create_run, methods=["POST"]),
+     ApiRoute("/threads/{thread_id}/runs/crons", create_thread_cron, methods=["POST"])
+     if config.FF_CRONS_ENABLED and plus_features_enabled()
+     else None,
+     ApiRoute("/threads/{thread_id}/runs", list_runs_http, methods=["GET"]),
+     ApiRoute("/runs/crons/{cron_id}", delete_cron, methods=["DELETE"])
+     if config.FF_CRONS_ENABLED and plus_features_enabled()
+     else None,
+ ]
+
+ runs_routes = [route for route in runs_routes if route is not None]
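
A note on the wait handlers above: the response body pads the stream with a bare newline every 5 seconds until the final values (or error) chunk arrives, so the connection stays open behind proxies while the result still parses as plain JSON. A minimal sketch of what a client sees; the response bytes here are illustrative, not captured from a real deployment:

    import json

    # illustrative body: keep-alive newlines followed by the final values chunk
    body = b"\n\n\n" + b'{"messages": [{"role": "ai", "content": "done"}]}'

    # json.loads skips leading whitespace, so the padding is invisible to clients
    result = json.loads(body)
    assert result["messages"][0]["content"] == "done"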

langgraph_api/api/store.py
@@ -0,0 +1,116 @@
+ from starlette.responses import Response
+ from starlette.routing import BaseRoute
+
+ from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
+ from langgraph_api.validation import (
+     StoreDeleteRequest,
+     StoreListNamespacesRequest,
+     StorePutRequest,
+     StoreSearchRequest,
+ )
+ from langgraph_storage.database import connect
+ from langgraph_storage.retry import retry_db
+ from langgraph_storage.store import Store
+
+
+ def _validate_namespace(namespace: tuple[str, ...]) -> Response | None:
+     for label in namespace:
+         if not label or "." in label:
+             return Response(
+                 status_code=422,
+                 content=f"Namespace labels cannot be empty or contain periods. Received: {namespace}",
+             )
+
+
+ @retry_db
+ async def put_item(request: ApiRequest):
+     """Store or update an item."""
+     payload = await request.json(StorePutRequest)
+     namespace = tuple(payload["namespace"]) if payload.get("namespace") else ()
+     if err := _validate_namespace(namespace):
+         return err
+     key = payload["key"]
+     value = payload["value"]
+     async with connect() as conn:
+         await Store(conn).aput(namespace, key, value)
+     return Response(status_code=204)
+
+
+ @retry_db
+ async def get_item(request: ApiRequest):
+     """Retrieve a single item."""
+     namespace = tuple(request.query_params.get("namespace", "").split("."))
+     if err := _validate_namespace(namespace):
+         return err
+     key = request.query_params.get("key")
+     if not key:
+         return ApiResponse({"error": "Key is required"}, status_code=400)
+     async with connect() as conn:
+         result = await Store(conn).aget(namespace, key)
+     return ApiResponse(result.dict() if result is not None else None)
+
+
+ @retry_db
+ async def delete_item(request: ApiRequest):
+     """Delete an item."""
+     payload = await request.json(StoreDeleteRequest)
+     namespace = tuple(payload["namespace"]) if payload.get("namespace") else ()
+     if err := _validate_namespace(namespace):
+         return err
+     key = payload["key"]
+     async with connect() as conn:
+         await Store(conn).adelete(namespace, key)
+     return Response(status_code=204)
+
+
+ @retry_db
+ async def search_items(request: ApiRequest):
+     """Search for items within a namespace prefix."""
+     payload = await request.json(StoreSearchRequest)
+     namespace_prefix = tuple(payload["namespace_prefix"])
+     if err := _validate_namespace(namespace_prefix):
+         return err
+     filter = payload.get("filter")
+     limit = payload.get("limit") or 10
+     offset = payload.get("offset") or 0
+     async with connect() as conn:
+         items = await Store(conn).asearch(
+             namespace_prefix,
+             filter=filter,
+             limit=limit,
+             offset=offset,
+         )
+     return ApiResponse({"items": [item.dict() for item in items]})
+
+
+ @retry_db
+ async def list_namespaces(request: ApiRequest):
+     """List namespaces with optional match conditions."""
+     payload = await request.json(StoreListNamespacesRequest)
+     prefix = tuple(payload["prefix"]) if payload.get("prefix") else None
+     suffix = tuple(payload["suffix"]) if payload.get("suffix") else None
+     if prefix and (err := _validate_namespace(prefix)):
+         return err
+     if suffix and (err := _validate_namespace(suffix)):
+         return err
+     max_depth = payload.get("max_depth")
+     limit = payload.get("limit", 100)
+     offset = payload.get("offset", 0)
+     async with connect() as conn:
+         result = await Store(conn).alist_namespaces(
+             prefix=prefix,
+             suffix=suffix,
+             max_depth=max_depth,
+             limit=limit,
+             offset=offset,
+         )
+     return ApiResponse({"namespaces": result})
+
+
+ store_routes: list[BaseRoute] = [
+     ApiRoute("/store/items", endpoint=put_item, methods=["PUT"]),
+     ApiRoute("/store/items", endpoint=get_item, methods=["GET"]),
+     ApiRoute("/store/items", endpoint=delete_item, methods=["DELETE"]),
+     ApiRoute("/store/items/search", endpoint=search_items, methods=["POST"]),
+     ApiRoute("/store/namespaces", endpoint=list_namespaces, methods=["POST"]),
+ ]
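
The store handlers above all funnel namespaces through _validate_namespace, which rejects empty labels and labels containing periods; get_item depends on this because it reconstructs the namespace tuple by splitting the namespace query parameter on ".". A client-side sketch of the same rule; namespace_is_valid is our illustrative name, not part of the package:

    def namespace_is_valid(namespace: tuple[str, ...]) -> bool:
        # every label must be non-empty and period-free, since "." is the
        # separator when a namespace travels in the `namespace` query parameter
        return all(label and "." not in label for label in namespace)

    assert namespace_is_valid(("users", "profiles"))
    assert not namespace_is_valid(("users", ""))      # empty label
    assert not namespace_is_valid(("users", "a.b"))   # period inside a label
    assert ".".join(("users", "profiles")) == "users.profiles"  # GET wire format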