langgraph-api 0.5.4__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/__init__.py +93 -27
- langgraph_api/api/a2a.py +36 -32
- langgraph_api/api/assistants.py +114 -26
- langgraph_api/api/mcp.py +3 -3
- langgraph_api/api/meta.py +15 -2
- langgraph_api/api/openapi.py +27 -17
- langgraph_api/api/profile.py +108 -0
- langgraph_api/api/runs.py +114 -57
- langgraph_api/api/store.py +19 -2
- langgraph_api/api/threads.py +133 -10
- langgraph_api/asgi_transport.py +14 -9
- langgraph_api/auth/custom.py +23 -13
- langgraph_api/cli.py +86 -41
- langgraph_api/command.py +2 -2
- langgraph_api/config/__init__.py +532 -0
- langgraph_api/config/_parse.py +58 -0
- langgraph_api/config/schemas.py +431 -0
- langgraph_api/cron_scheduler.py +17 -1
- langgraph_api/encryption/__init__.py +15 -0
- langgraph_api/encryption/aes_json.py +158 -0
- langgraph_api/encryption/context.py +35 -0
- langgraph_api/encryption/custom.py +280 -0
- langgraph_api/encryption/middleware.py +632 -0
- langgraph_api/encryption/shared.py +63 -0
- langgraph_api/errors.py +12 -1
- langgraph_api/executor_entrypoint.py +11 -6
- langgraph_api/feature_flags.py +19 -0
- langgraph_api/graph.py +163 -64
- langgraph_api/{grpc_ops → grpc}/client.py +142 -12
- langgraph_api/{grpc_ops → grpc}/config_conversion.py +16 -10
- langgraph_api/grpc/generated/__init__.py +29 -0
- langgraph_api/grpc/generated/checkpointer_pb2.py +63 -0
- langgraph_api/grpc/generated/checkpointer_pb2.pyi +99 -0
- langgraph_api/grpc/generated/checkpointer_pb2_grpc.py +329 -0
- langgraph_api/grpc/generated/core_api_pb2.py +216 -0
- langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2.pyi +292 -372
- langgraph_api/{grpc_ops → grpc}/generated/core_api_pb2_grpc.py +252 -31
- langgraph_api/grpc/generated/engine_common_pb2.py +219 -0
- langgraph_api/{grpc_ops → grpc}/generated/engine_common_pb2.pyi +178 -104
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2.pyi +12 -0
- langgraph_api/grpc/generated/enum_cancel_run_action_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_control_signal_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_durability_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_durability_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_durability_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_multitask_strategy_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_run_status_pb2.pyi +22 -0
- langgraph_api/grpc/generated/enum_run_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2.pyi +28 -0
- langgraph_api/grpc/generated/enum_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_status_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.py +37 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2.pyi +16 -0
- langgraph_api/grpc/generated/enum_thread_stream_mode_pb2_grpc.py +24 -0
- langgraph_api/grpc/generated/errors_pb2.py +39 -0
- langgraph_api/grpc/generated/errors_pb2.pyi +21 -0
- langgraph_api/grpc/generated/errors_pb2_grpc.py +24 -0
- langgraph_api/grpc/ops/__init__.py +370 -0
- langgraph_api/grpc/ops/assistants.py +424 -0
- langgraph_api/grpc/ops/runs.py +792 -0
- langgraph_api/grpc/ops/threads.py +1013 -0
- langgraph_api/http.py +16 -5
- langgraph_api/js/client.mts +1 -4
- langgraph_api/js/package.json +28 -27
- langgraph_api/js/remote.py +39 -17
- langgraph_api/js/sse.py +2 -2
- langgraph_api/js/ui.py +1 -1
- langgraph_api/js/yarn.lock +1139 -869
- langgraph_api/metadata.py +29 -3
- langgraph_api/middleware/http_logger.py +1 -1
- langgraph_api/middleware/private_network.py +7 -7
- langgraph_api/models/run.py +44 -26
- langgraph_api/otel_context.py +205 -0
- langgraph_api/patch.py +2 -2
- langgraph_api/queue_entrypoint.py +34 -35
- langgraph_api/route.py +33 -1
- langgraph_api/schema.py +84 -9
- langgraph_api/self_hosted_logs.py +2 -2
- langgraph_api/self_hosted_metrics.py +73 -3
- langgraph_api/serde.py +16 -4
- langgraph_api/server.py +33 -31
- langgraph_api/state.py +3 -2
- langgraph_api/store.py +25 -16
- langgraph_api/stream.py +20 -16
- langgraph_api/thread_ttl.py +28 -13
- langgraph_api/timing/__init__.py +25 -0
- langgraph_api/timing/profiler.py +200 -0
- langgraph_api/timing/timer.py +318 -0
- langgraph_api/utils/__init__.py +53 -8
- langgraph_api/utils/config.py +2 -1
- langgraph_api/utils/future.py +10 -6
- langgraph_api/utils/uuids.py +29 -62
- langgraph_api/validation.py +6 -0
- langgraph_api/webhook.py +120 -6
- langgraph_api/worker.py +54 -24
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/METADATA +8 -6
- langgraph_api-0.7.3.dist-info/RECORD +168 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/WHEEL +1 -1
- langgraph_runtime/__init__.py +1 -0
- langgraph_runtime/routes.py +11 -0
- logging.json +1 -3
- openapi.json +635 -537
- langgraph_api/config.py +0 -523
- langgraph_api/grpc_ops/generated/__init__.py +0 -5
- langgraph_api/grpc_ops/generated/core_api_pb2.py +0 -275
- langgraph_api/grpc_ops/generated/engine_common_pb2.py +0 -194
- langgraph_api/grpc_ops/ops.py +0 -1045
- langgraph_api-0.5.4.dist-info/RECORD +0 -121
- /langgraph_api/{grpc_ops → grpc}/__init__.py +0 -0
- /langgraph_api/{grpc_ops → grpc}/generated/engine_common_pb2_grpc.py +0 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.5.4.dist-info → langgraph_api-0.7.3.dist-info}/licenses/LICENSE +0 -0
langgraph_api/api/store.py
CHANGED
@@ -5,7 +5,13 @@ from starlette.responses import Response
 from starlette.routing import BaseRoute
 
 from langgraph_api.auth.custom import handle_event as _handle_event
+from langgraph_api.encryption.middleware import (
+    decrypt_response,
+    decrypt_responses,
+    encrypt_request,
+)
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
+from langgraph_api.schema import STORE_ENCRYPTION_FIELDS
 from langgraph_api.store import get_store
 from langgraph_api.utils import get_auth_ctx
 from langgraph_api.validation import (
@@ -48,6 +54,7 @@ async def handle_event(
 async def put_item(request: ApiRequest):
     """Store or update an item."""
     payload = await request.json(StorePutRequest)
+    payload = await encrypt_request(payload, "store", STORE_ENCRYPTION_FIELDS)
     namespace = tuple(payload["namespace"]) if payload.get("namespace") else ()
     if err := _validate_namespace(namespace):
         return err
@@ -78,7 +85,11 @@ async def get_item(request: ApiRequest):
     }
     await handle_event("get", handler_payload)
     result = await (await get_store()).aget(namespace, key)
-
+    if result is None:
+        return ApiResponse(None)
+    return ApiResponse(
+        await decrypt_response(result.dict(), "store", STORE_ENCRYPTION_FIELDS)
+    )
 
 
 @retry_db
@@ -125,7 +136,13 @@ async def search_items(request: ApiRequest):
         offset=handler_payload["offset"],
         query=handler_payload["query"],
     )
-    return ApiResponse(
+    return ApiResponse(
+        {
+            "items": await decrypt_responses(
+                [item.dict() for item in items], "store", STORE_ENCRYPTION_FIELDS
+            )
+        }
+    )
 
 
 @retry_db

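The store endpoints above now pass payloads through the new encryption middleware on write and read. A minimal sketch of that field-level pattern follows; it mirrors only the call shape seen in this diff (encrypt_request(payload, resource, fields) / decrypt_response(row, resource, fields) / decrypt_responses(rows, ...)), uses cryptography.fernet as a stand-in key scheme, and is not the package's actual AES-JSON implementation (langgraph_api/encryption/aes_json.py).

# Illustrative sketch only: field-level helpers with the same call shape as the
# encryption middleware used above. The Fernet key is a stand-in for the real
# key management; the genuine implementation differs in detail.
import json

from cryptography.fernet import Fernet

_fernet = Fernet(Fernet.generate_key())  # assumption: one in-process demo key


async def encrypt_request(payload: dict, resource: str, fields: list[str]) -> dict:
    # Encrypt only the listed fields; everything else passes through untouched.
    out = dict(payload)
    for field in fields:
        if out.get(field) is not None:
            out[field] = _fernet.encrypt(json.dumps(out[field]).encode()).decode()
    return out


async def decrypt_response(row: dict, resource: str, fields: list[str]) -> dict:
    # Reverse the transformation for fields written by encrypt_request.
    out = dict(row)
    for field in fields:
        if isinstance(out.get(field), str):
            out[field] = json.loads(_fernet.decrypt(out[field].encode()))
    return out


async def decrypt_responses(rows: list[dict], resource: str, fields: list[str]) -> list[dict]:
    return [await decrypt_response(row, resource, fields) for row in rows]
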
langgraph_api/api/threads.py
CHANGED
@@ -5,10 +5,19 @@ from starlette.exceptions import HTTPException
 from starlette.responses import Response
 from starlette.routing import BaseRoute
 
+from langgraph_api.encryption.middleware import (
+    decrypt_response,
+    decrypt_responses,
+    encrypt_request,
+)
 from langgraph_api.feature_flags import FF_USE_CORE_API
-from langgraph_api.
+from langgraph_api.grpc.ops import Threads as GrpcThreads
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
-from langgraph_api.schema import
+from langgraph_api.schema import (
+    THREAD_ENCRYPTION_FIELDS,
+    THREAD_FIELDS,
+    ThreadStreamMode,
+)
 from langgraph_api.sse import EventSourceResponse
 from langgraph_api.state import state_snapshot_to_thread_state
 from langgraph_api.utils import (
@@ -23,6 +32,7 @@ from langgraph_api.validation import (
     ThreadCountRequest,
     ThreadCreate,
     ThreadPatch,
+    ThreadPruneRequest,
     ThreadSearchRequest,
     ThreadStateCheckpointRequest,
     ThreadStateSearch,
@@ -43,12 +53,28 @@ async def create_thread(
     payload = await request.json(ThreadCreate)
     if thread_id := payload.get("thread_id"):
         validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+
+    # Validate keep_latest TTL requires core API
+    ttl = payload.get("ttl")
+    if ttl and ttl.get("strategy") == "keep_latest" and not FF_USE_CORE_API:
+        raise HTTPException(
+            status_code=422,
+            detail="keep_latest TTL strategy requires FF_USE_CORE_API=true",
+        )
+
+    # Encrypt metadata before storing
+    encrypted_payload = await encrypt_request(
+        payload,
+        "thread",
+        ["metadata"],
+    )
+
     async with connect() as conn:
         thread_id = thread_id or str(uuid4())
         iter = await CrudThreads.put(
             conn,
             thread_id,
-            metadata=
+            metadata=encrypted_payload.get("metadata") or {},
             if_exists=payload.get("if_exists") or "raise",
             ttl=payload.get("ttl"),
         )
@@ -69,7 +95,14 @@ async def create_thread(
             detail = f"Thread {thread_id} was created, but there were problems updating the state: {e.detail}"
             raise HTTPException(status_code=201, detail=detail) from e
 
-
+    # Decrypt thread fields in response
+    thread = await fetchone(iter, not_found_code=409)
+    thread = await decrypt_response(
+        thread,
+        "thread",
+        THREAD_ENCRYPTION_FIELDS,
+    )
+    return ApiResponse(thread)
 
 
 @retry_db
@@ -81,6 +114,7 @@ async def search_threads(
     select = validate_select_columns(payload.get("select") or None, THREAD_FIELDS)
     limit = int(payload.get("limit") or 10)
     offset = int(payload.get("offset") or 0)
+
     async with connect() as conn:
         threads_iter, next_offset = await CrudThreads.search(
             conn,
@@ -97,7 +131,15 @@ async def search_threads(
         threads, response_headers = await get_pagination_headers(
             threads_iter, next_offset, offset
         )
-
+
+    # Decrypt metadata, values, interrupts, and error in all returned threads
+    decrypted_threads = await decrypt_responses(
+        threads,
+        "thread",
+        THREAD_ENCRYPTION_FIELDS,
+    )
+
+    return ApiResponse(decrypted_threads, headers=response_headers)
 
 
 @retry_db
@@ -280,9 +322,23 @@ async def get_thread(
     """Get a thread by ID."""
    thread_id = request.path_params["thread_id"]
     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
+
+    # Parse include parameter for optional fields (e.g., ttl)
+    include_param = request.query_params.get("include", "")
+    include_fields = [f.strip() for f in include_param.split(",") if f.strip()]
+    include_ttl = "ttl" in include_fields
+
     async with connect() as conn:
-        thread = await CrudThreads.get(conn, thread_id)
-
+        thread = await CrudThreads.get(conn, thread_id, include_ttl=include_ttl)
+
+    # Decrypt metadata, values, interrupts, and error in response
+    thread_data = await fetchone(thread)
+    thread_data = await decrypt_response(
+        thread_data,
+        "thread",
+        THREAD_ENCRYPTION_FIELDS,
+    )
+    return ApiResponse(thread_data)
 
 
 @retry_db
@@ -293,14 +349,37 @@ async def patch_thread(
     thread_id = request.path_params["thread_id"]
     validate_uuid(thread_id, "Invalid thread ID: must be a UUID")
     payload = await request.json(ThreadPatch)
+
+    # Validate keep_latest TTL requires core API
+    ttl = payload.get("ttl")
+    if ttl and ttl.get("strategy") == "keep_latest" and not FF_USE_CORE_API:
+        raise HTTPException(
+            status_code=422,
+            detail="keep_latest TTL strategy requires FF_USE_CORE_API=true",
+        )
+
+    # Encrypt metadata before storing
+    encrypted_payload = await encrypt_request(
+        payload,
+        "thread",
+        ["metadata"],
+    )
+
     async with connect() as conn:
         thread = await CrudThreads.patch(
             conn,
             thread_id,
-            metadata=
+            metadata=encrypted_payload.get("metadata") or {},
             ttl=payload.get("ttl"),
         )
-
+    thread_data = await fetchone(thread)
+    # Decrypt metadata, values, interrupts, and error in response
+    thread_data = await decrypt_response(
+        thread_data,
+        "thread",
+        THREAD_ENCRYPTION_FIELDS,
+    )
+    return ApiResponse(thread_data)
 
 
 @retry_db
@@ -314,12 +393,55 @@ async def delete_thread(request: ApiRequest):
     return Response(status_code=204)
 
 
+@retry_db
+async def prune_threads(request: ApiRequest):
+    """Prune threads by ID."""
+    payload = await request.json(ThreadPruneRequest)
+    thread_ids = payload.get("thread_ids", [])
+    strategy = payload.get("strategy", "delete")
+
+    # Validate each thread_id is a valid UUID
+    for tid in thread_ids:
+        validate_uuid(tid, "Invalid thread ID: must be a UUID")
+
+    # Validate strategy
+    if strategy not in ("delete", "keep_latest"):
+        raise HTTPException(
+            status_code=422,
+            detail=f"Invalid strategy: {strategy}. Expected 'delete' or 'keep_latest'.",
+        )
+
+    # Empty list is a no-op, return early
+    if not thread_ids:
+        return ApiResponse({"pruned_count": 0})
+
+    if not FF_USE_CORE_API:
+        raise HTTPException(
+            status_code=422,
+            detail="Thread prune requires FF_USE_CORE_API=true",
+        )
+
+    pruned_count = await CrudThreads.prune(
+        thread_ids=thread_ids,
+        strategy=strategy,
+    )
+
+    return ApiResponse({"pruned_count": pruned_count})
+
+
 @retry_db
 async def copy_thread(request: ApiRequest):
     thread_id = request.path_params["thread_id"]
     async with connect() as conn:
         iter = await CrudThreads.copy(conn, thread_id)
-
+    thread_data = await fetchone(iter, not_found_code=409)
+    # Decrypt metadata, values, interrupts, and error in response
+    thread_data = await decrypt_response(
+        thread_data,
+        "thread",
+        THREAD_ENCRYPTION_FIELDS,
+    )
+    return ApiResponse(thread_data)
 
 
 @retry_db
@@ -364,6 +486,7 @@ threads_routes: list[BaseRoute] = [
     ApiRoute("/threads", endpoint=create_thread, methods=["POST"]),
     ApiRoute("/threads/search", endpoint=search_threads, methods=["POST"]),
     ApiRoute("/threads/count", endpoint=count_threads, methods=["POST"]),
+    ApiRoute("/threads/prune", endpoint=prune_threads, methods=["POST"]),
     ApiRoute("/threads/{thread_id}", endpoint=get_thread, methods=["GET"]),
     ApiRoute("/threads/{thread_id}", endpoint=patch_thread, methods=["PATCH"]),
     ApiRoute("/threads/{thread_id}", endpoint=delete_thread, methods=["DELETE"]),

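The threads API also gains the POST /threads/prune route registered above. A hypothetical client call is sketched below; the base URL, port, and thread IDs are placeholders, and per the handler the route answers 422 unless FF_USE_CORE_API=true.

# Hypothetical call to the new POST /threads/prune endpoint.
import httpx

resp = httpx.post(
    "http://localhost:2024/threads/prune",  # placeholder base URL for a local server
    json={
        "thread_ids": [
            "6f1f3a52-9c1e-4e31-9d7a-4f2b8a0c2d10",  # placeholder UUIDs
            "0b4d2f6e-7a83-4c55-8e19-2c9d1f3a5b77",
        ],
        "strategy": "delete",  # or "keep_latest"
    },
)
resp.raise_for_status()
print(resp.json())  # e.g. {"pruned_count": 2}
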
langgraph_api/asgi_transport.py
CHANGED
@@ -25,7 +25,7 @@ def is_running_trio() -> bool:
     # sniffio is a dependency of trio.
 
     # See https://github.com/python-trio/trio/issues/2802
-    import sniffio
+    import sniffio  # type: ignore[unresolved-import]
 
     if sniffio.current_async_library() == "trio":
         return True
@@ -84,7 +84,8 @@ class ASGITransport(ASGITransportBase):
     ) -> Response:
         from langgraph_api.asyncio import call_soon_in_main_loop
 
-
+        if not isinstance(request.stream, AsyncByteStream):
+            raise ValueError("Request stream must be an AsyncByteStream")
 
         # ASGI scope.
         scope = {
@@ -133,14 +134,15 @@ class ASGITransport(ASGITransportBase):
             nonlocal status_code, response_headers, response_started
 
             if message["type"] == "http.response.start":
-
-
+                if response_started:
+                    raise RuntimeError("Response already started")
                 status_code = message["status"]
                 response_headers = message.get("headers", [])
                 response_started = True
 
             elif message["type"] == "http.response.body":
-
+                if response_complete.is_set():
+                    raise RuntimeError("Response already complete")
                 body = message.get("body", b"")
                 more_body = message.get("more_body", False)
 
@@ -152,7 +154,7 @@ class ASGITransport(ASGITransportBase):
 
         try:
             await call_soon_in_main_loop(self.app(scope, receive, send))
-        except Exception:
+        except Exception:
            if self.raise_app_exceptions:
                 raise
 
@@ -162,9 +164,12 @@ class ASGITransport(ASGITransportBase):
            if response_headers is None:
                 response_headers = {}
 
-
-
-
+        if not response_complete.is_set():
+            raise RuntimeError("Response not complete")
+        if status_code is None:
+            raise RuntimeError("Status code not set")
+        if response_headers is None:
+            raise RuntimeError("Response headers not set")
 
         stream = ASGIResponseStream(body_parts)
 

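The removed lines here are collapsed in this view, but the shape of the change suggests they were assert-style invariants; the new code raises explicit ValueError/RuntimeError instead, so the checks survive optimized runs. A tiny illustrative contrast, with names not taken from the diff:

# An assert disappears under `python -O`; an explicit check always runs.
def on_response_start(response_started: bool) -> None:
    # Old style (hypothetical): assert not response_started  -> stripped by -O
    if response_started:
        raise RuntimeError("Response already started")
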
langgraph_api/auth/custom.py
CHANGED
@@ -23,10 +23,12 @@ from starlette.exceptions import HTTPException
 from starlette.requests import HTTPConnection, Request
 from starlette.responses import Response
 
+from langgraph_api import timing
 from langgraph_api.auth.langsmith.backend import LangsmithAuthBackend
 from langgraph_api.auth.studio_user import StudioUser
 from langgraph_api.config import LANGGRAPH_AUTH, LANGGRAPH_AUTH_TYPE
 from langgraph_api.js.base import is_js_path
+from langgraph_api.timing import profiled_import
 
 logger = structlog.stdlib.get_logger(__name__)
 
@@ -233,7 +235,7 @@ def _get_custom_auth_middleware(
         auth_instance._authenticate_handler,
         disable_studio_auth,
     )
-    logger.info(f"Loaded custom auth middleware: {
+    logger.info(f"Loaded custom auth middleware: {result!s}")
     return result
 
 
@@ -585,6 +587,13 @@ def normalize_user(user: Any) -> BaseUser:
     )
 
 
+@timing.timer(
+    message="Loading custom auth {auth_path}",
+    metadata_fn=lambda auth_path: {"auth_path": auth_path},
+    warn_threshold_secs=5,
+    warn_message="Loading custom auth '{auth_path}' took longer than expected",
+    error_threshold_secs=10,
+)
 def _load_auth_obj(path: str) -> Auth | Literal["js"]:
     """Load an object from a path string."""
     if ":" not in path:
@@ -600,18 +609,19 @@ def _load_auth_obj(path: str) -> Auth | Literal["js"]:
         return "js"
 
     try:
-
-
-
-
-
-
-
-
-
-
-
-
+        with profiled_import(path):
+            if "/" in module_name or ".py" in module_name:
+                # Load from file path
+                modname = f"dynamic_module_{hash(module_name)}"
+                modspec = importlib.util.spec_from_file_location(modname, module_name)
+                if modspec is None or modspec.loader is None:
+                    raise ValueError(f"Could not load file: {module_name}")
+                module = importlib.util.module_from_spec(modspec)
+                sys.modules[modname] = module
+                modspec.loader.exec_module(module)  # type: ignore[possibly-unbound-attribute]
+            else:
+                # Load from Python module
+                module = importlib.import_module(module_name)
 
         loaded_auth = getattr(module, callable_name, None)
         if loaded_auth is None:

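_load_auth_obj is now wrapped in the new timing.timer decorator, and the import work runs under profiled_import. The decorator itself ships in langgraph_api/timing/timer.py, which this diff only lists; the sketch below is a sync-only, illustrative reimplementation of the parameter shape used here (message template, metadata_fn, warn/error thresholds), not the package's code.

# Illustrative, sync-only sketch of a threshold-based timer decorator.
import functools
import logging
import time

logger = logging.getLogger(__name__)


def timer(
    *,
    message: str,
    metadata_fn=None,
    warn_threshold_secs: float | None = None,
    warn_message: str | None = None,
    error_threshold_secs: float | None = None,
):
    def decorate(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            start = time.monotonic()
            try:
                return fn(*args, **kwargs)
            finally:
                elapsed = time.monotonic() - start
                meta = metadata_fn(*args, **kwargs) if metadata_fn else {}
                if error_threshold_secs and elapsed >= error_threshold_secs:
                    logger.error("%s took %.2fs", message.format(**meta), elapsed)
                elif warn_threshold_secs and elapsed >= warn_threshold_secs:
                    logger.warning(
                        "%s took %.2fs", (warn_message or message).format(**meta), elapsed
                    )
                else:
                    logger.info("%s took %.2fs", message.format(**meta), elapsed)

        return wrapper

    return decorate
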
langgraph_api/cli.py
CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import contextlib
 import json
 import logging
@@ -97,12 +98,17 @@ def run_server(
     auth: typing.Optional["AuthConfig"] = None,
     http: typing.Optional["HttpConfig"] = None,
     ui: dict | None = None,
+    webhooks: dict | None = None,
     ui_config: dict | None = None,
     studio_url: str | None = None,
     disable_persistence: bool = False,
     allow_blocking: bool = False,
     runtime_edition: Literal["inmem", "community", "postgres"] = "inmem",
     server_level: str = "WARNING",
+    __redis_uri__: str | None = "fake",
+    __database_uri__: str | None = ":memory:",
+    __migrations_path__: str | None = "__inmem",
+    __entrypoint__: Literal["server", "python-executor"] = "server",
     **kwargs: typing.Any,
 ):
     """Run the LangGraph API server."""
@@ -185,16 +191,19 @@ def run_server(
     else:
         local_url = upstream_url
     to_patch = dict(
-        MIGRATIONS_PATH=
-        DATABASE_URI=
-        REDIS_URI=
-        N_JOBS_PER_WORKER=str(
+        MIGRATIONS_PATH=__migrations_path__,
+        DATABASE_URI=__database_uri__,
+        REDIS_URI=__redis_uri__,
+        N_JOBS_PER_WORKER=str(
+            n_jobs_per_worker if n_jobs_per_worker is not None else 1
+        ),
         LANGGRAPH_STORE=json.dumps(store) if store else None,
         LANGSERVE_GRAPHS=json.dumps(graphs) if graphs else None,
         LANGSMITH_LANGGRAPH_API_VARIANT="local_dev",
         LANGGRAPH_AUTH=json.dumps(auth) if auth else None,
         LANGGRAPH_HTTP=json.dumps(http) if http else None,
         LANGGRAPH_UI=json.dumps(ui) if ui else None,
+        LANGGRAPH_WEBHOOKS=json.dumps(webhooks) if webhooks else None,
         LANGGRAPH_UI_CONFIG=json.dumps(ui_config) if ui_config else None,
         LANGGRAPH_UI_BUNDLER="true",
         LANGGRAPH_API_URL=local_url,
@@ -245,7 +254,7 @@ def run_server(
                 full_studio_url = f"{studio_origin}/studio/?baseUrl={local_url}&organizationId={org_id}"
             except TimeoutError as e:
                 thread_logger.debug(
-                    f"Failed to get organization ID: {
+                    f"Failed to get organization ID: {e!s}"
                 )
                 pass
             thread_logger.info(
@@ -295,40 +304,53 @@ For production use, please use LangSmith Deployment.
         if k in inspect.signature(uvicorn.run).parameters
     }
     server_level = server_level.upper()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    "
-    "
-
+    if __entrypoint__ == "server":
+        uvicorn.run(
+            "langgraph_api.server:app",
+            host=host,
+            port=port,
+            reload=reload,
+            env_file=env_file,
+            access_log=False,
+            reload_includes=list(reload_includes) if reload_includes else None,
+            reload_excludes=list(reload_excludes) if reload_excludes else None,
+            log_config={
+                "version": 1,
+                "incremental": False,
+                "disable_existing_loggers": False,
+                "formatters": {
+                    "simple": {
+                        "class": "langgraph_api.logging.Formatter",
+                    }
+                },
+                "handlers": {
+                    "console": {
+                        "class": "logging.StreamHandler",
+                        "formatter": "simple",
+                        "stream": "ext://sys.stdout",
+                    }
+                },
+                "loggers": {
+                    "uvicorn": {"level": server_level},
+                    "uvicorn.error": {"level": server_level},
+                    "langgraph_api.server": {"level": server_level},
                },
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-        **supported_kwargs,
-    )
+            **supported_kwargs,
+        )
+    elif __entrypoint__ == "python-executor":
+        from langgraph_api.executor_entrypoint import (
+            main as executor_entrypoint_main,
+        )
+
+        asyncio.run(
+            executor_entrypoint_main(
+                grpc_port=8188,
+            )
+        )
+    else:
+        raise ValueError(f"Unknown entrypoint: {__entrypoint__}")
 
 
 def main():
@@ -353,7 +375,7 @@ def main():
         help="Number of jobs per worker. Default is None (meaning 10)",
     )
     parser.add_argument(
-        "--
+        "--open-browser", action="store_true", help="Open browser automatically"
     )
     parser.add_argument(
         "--debug-port", type=int, help="Port for debugger to listen on (default: none)"
@@ -368,7 +390,19 @@ def main():
         action="store_true",
         help="Expose the server via Cloudflare Tunnel",
     )
-
+    parser.add_argument(
+        "--runtime-edition",
+        type=str,
+        default="inmem",
+        help="Runtime edition to use",
+    )
+    parser.add_argument(
+        "--entrypoint",
+        type=str,
+        default="server",
+        choices=["server", "python-executor"],
+        help="Entry point to use",
+    )
     args = parser.parse_args()
 
     with open(args.config, encoding="utf-8") as f:
@@ -377,21 +411,32 @@ def main():
     graphs = config_data.get("graphs", {})
     auth = config_data.get("auth")
     ui = config_data.get("ui")
+    webhooks = config_data.get("webhooks")
     ui_config = config_data.get("ui_config")
+    kwargs = {}
+    if args.runtime_edition == "postgres":
+        kwargs["__redis_uri__"] = os.getenv("REDIS_URI")
+        kwargs["__database_uri__"] = os.getenv("DATABASE_URI")
+        kwargs["__migrations_path__"] = os.getenv("MIGRATIONS_PATH")
+    if args.entrypoint == "python-executor":
+        kwargs["__entrypoint__"] = "python-executor"
     run_server(
         args.host,
         args.port,
         not args.no_reload,
         graphs,
         n_jobs_per_worker=args.n_jobs_per_worker,
-        open_browser=
+        open_browser=args.open_browser,
         tunnel=args.tunnel,
         debug_port=args.debug_port,
         wait_for_client=args.wait_for_client,
        env=config_data.get("env", None),
         auth=auth,
         ui=ui,
+        webhooks=webhooks,
         ui_config=ui_config,
+        runtime_edition=args.runtime_edition,
+        **kwargs,
     )
 

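run_server gains a webhooks parameter plus dunder-prefixed plumbing for the Postgres runtime edition and the new python-executor entrypoint, and main() exposes --runtime-edition and --entrypoint flags. A hypothetical programmatic launch using the new parameters is sketched below; only the parameter names come from this diff, while the port, graph path, and webhook payload are illustrative placeholders.

# Hypothetical programmatic launch exercising the new run_server parameters.
import os

from langgraph_api.cli import run_server

run_server(
    "127.0.0.1",
    2024,   # placeholder port
    True,   # reload
    {"agent": "./graphs/agent.py:graph"},  # graphs mapping (illustrative)
    webhooks={"url": "https://example.com/hooks"},  # serialized into LANGGRAPH_WEBHOOKS
    runtime_edition="postgres",
    # With --runtime-edition postgres, main() forwards these from the environment:
    __redis_uri__=os.getenv("REDIS_URI"),
    __database_uri__=os.getenv("DATABASE_URI"),
    __migrations_path__=os.getenv("MIGRATIONS_PATH"),
)
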
langgraph_api/command.py
CHANGED
@@ -13,9 +13,9 @@ def map_cmd(cmd: RunCommand) -> Command:
     update = cmd.get("update")
     if isinstance(update, tuple | list) and all(
         isinstance(t, tuple | list) and len(t) == 2 and isinstance(t[0], str)
-        for t in cast(list, update)
+        for t in cast("list", update)
     ):
-        update = [tuple(t) for t in cast(list, update)]
+        update = [tuple(t) for t in cast("list", update)]
 
     return Command(
         update=update,

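The only change here is quoting the type passed to typing.cast. cast returns its second argument unchanged and never inspects the first, so the quoted form behaves identically at runtime; it merely avoids referencing the type object, likely to satisfy a typing-related lint rule (e.g. Ruff's flake8-type-checking checks). A quick demonstration:

# typing.cast is a no-op at runtime, so quoted and unquoted types give the same result.
from typing import cast

update = [("messages", {"text": "hi"})]
assert cast("list", update) is update
assert cast(list, update) is update
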