langgraph-api 0.2.100__py3-none-any.whl → 0.2.108__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langgraph_api/__init__.py +1 -1
- langgraph_api/api/assistants.py +24 -27
- langgraph_api/api/runs.py +28 -0
- langgraph_api/cli.py +7 -0
- langgraph_api/feature_flags.py +2 -1
- langgraph_api/graph.py +13 -7
- langgraph_api/js/base.py +30 -0
- langgraph_api/js/client.mts +34 -2
- langgraph_api/js/remote.py +43 -18
- langgraph_api/metadata.py +9 -3
- langgraph_api/middleware/http_logger.py +11 -2
- langgraph_api/models/run.py +8 -1
- langgraph_api/schema.py +34 -14
- langgraph_api/state.py +47 -18
- langgraph_api/stream.py +4 -0
- {langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/METADATA +3 -3
- {langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/RECORD +21 -21
- openapi.json +80 -6
- {langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/WHEEL +0 -0
- {langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/entry_points.txt +0 -0
- {langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/licenses/LICENSE +0 -0
langgraph_api/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.100"
+__version__ = "0.2.108"
langgraph_api/api/assistants.py
CHANGED
@@ -12,6 +12,7 @@ from starlette.responses import Response
 from starlette.routing import BaseRoute
 
 from langgraph_api import store as api_store
+from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
 from langgraph_api.graph import get_assistant_id, get_graph
 from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
@@ -55,6 +56,9 @@ def _get_configurable_jsonschema(graph: Pregel) -> dict:
 
     Returns:
         The JSON schema for the configurable part of the graph.
+
+    Whenever we no longer support langgraph < 0.6, we can remove this method
+    in favor of graph.get_context_jsonschema().
     """
     # Otherwise, use the config_schema method.
     config_schema = graph.config_schema()
@@ -112,6 +116,7 @@ def _graph_schemas(graph: Pregel) -> dict:
             f"Failed to get state schema for graph {graph.name} with error: `{str(e)}`"
         )
         state_schema = None
+
     try:
         config_schema = _get_configurable_jsonschema(graph)
     except Exception as e:
@@ -119,18 +124,31 @@ def _graph_schemas(graph: Pregel) -> dict:
             f"Failed to get config schema for graph {graph.name} with error: `{str(e)}`"
         )
         config_schema = None
+
+    if USE_RUNTIME_CONTEXT_API:
+        try:
+            context_schema = graph.get_context_jsonschema()
+        except Exception as e:
+            logger.warning(
+                f"Failed to get context schema for graph {graph.name} with error: `{str(e)}`"
+            )
+            context_schema = graph.config_schema()
+    else:
+        context_schema = None
+
     return {
         "input_schema": input_schema,
         "output_schema": output_schema,
         "state_schema": state_schema,
         "config_schema": config_schema,
+        "context_schema": context_schema,
     }
 
 
 @retry_db
 async def create_assistant(request: ApiRequest) -> ApiResponse:
-    payload = await request.json(AssistantCreate)
     """Create an assistant."""
+    payload = await request.json(AssistantCreate)
     if assistant_id := payload.get("assistant_id"):
         validate_uuid(assistant_id, "Invalid assistant ID: must be a UUID")
     async with connect() as conn:
@@ -138,6 +156,7 @@ async def create_assistant(request: ApiRequest) -> ApiResponse:
             conn,
             assistant_id or str(uuid4()),
             config=payload.get("config") or {},
+            context=payload.get("context") or {},
             graph_id=payload["graph_id"],
             metadata=payload.get("metadata") or {},
             if_exists=payload.get("if_exists") or "raise",
@@ -309,39 +328,16 @@ async def get_assistant_schemas(
                 "output_schema": schemas.get("output"),
                 "state_schema": schemas.get("state"),
                 "config_schema": schemas.get("config"),
+                "context_schema": schemas.get("context"),
             }
         )
 
-    try:
-        input_schema = graph.get_input_jsonschema()
-    except Exception as e:
-        logger.warning(
-            f"Failed to get input schema for graph {graph.name} with error: `{str(e)}`"
-        )
-        input_schema = None
-    try:
-        output_schema = graph.get_output_jsonschema()
-    except Exception as e:
-        logger.warning(
-            f"Failed to get output schema for graph {graph.name} with error: `{str(e)}`"
-        )
-        output_schema = None
+    schemas = _graph_schemas(graph)
 
-    state_schema = _state_jsonschema(graph)
-    try:
-        config_schema = _get_configurable_jsonschema(graph)
-    except Exception as e:
-        config_schema = None
-        logger.warning(
-            f"Failed to get config schema for graph {graph.name} with error: `{str(e)}`"
-        )
     return ApiResponse(
         {
             "graph_id": assistant["graph_id"],
-            "input_schema": input_schema,
-            "output_schema": output_schema,
-            "state_schema": state_schema,
-            "config_schema": config_schema,
+            **schemas,
         }
     )
 
@@ -359,6 +355,7 @@ async def patch_assistant(
             conn,
             assistant_id,
             config=payload.get("config"),
+            context=payload.get("context"),
             graph_id=payload.get("graph_id"),
             metadata=payload.get("metadata"),
             name=payload.get("name"),
langgraph_api/api/runs.py
CHANGED
@@ -172,6 +172,17 @@ async def wait_run(request: ApiRequest):
     """Create a run, wait for the output."""
     thread_id = request.path_params["thread_id"]
     payload = await request.json(RunCreateStateful)
+
+    # Ensure stream_mode always includes "values" and "updates" while respecting other modes
+    user_stream_mode = payload.get("stream_mode", ["values"])
+    if isinstance(user_stream_mode, str):
+        user_stream_mode = [user_stream_mode]
+
+    # Always include "values" and "updates" if not already present
+    required_modes = {"values", "updates"}
+    final_stream_mode = list(set(user_stream_mode) | required_modes)
+    payload["stream_mode"] = final_stream_mode
+
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid6()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
@@ -207,6 +218,9 @@ async def wait_run(request: ApiRequest):
         async for mode, chunk, _ in stream:
             if mode == b"values":
                 vchunk = chunk
+            elif mode == b"updates" and b"__interrupt__" in chunk:
+                # Include the interrupt message in the values
+                vchunk = chunk
             elif mode == b"error":
                 vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
             if vchunk is not None:
@@ -252,6 +266,17 @@ async def wait_run(request: ApiRequest):
 async def wait_run_stateless(request: ApiRequest):
     """Create a stateless run, wait for the output."""
     payload = await request.json(RunCreateStateless)
+
+    # Ensure stream_mode always includes "values" and "updates" while respecting other modes
+    user_stream_mode = payload.get("stream_mode", ["values"])
+    if isinstance(user_stream_mode, str):
+        user_stream_mode = [user_stream_mode]
+
+    # Always include "values" and "updates" if not already present
+    required_modes = {"values", "updates"}
+    final_stream_mode = list(set(user_stream_mode) | required_modes)
+    payload["stream_mode"] = final_stream_mode
+
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid6()
     sub = asyncio.create_task(Runs.Stream.subscribe(run_id))
@@ -288,6 +313,9 @@ async def wait_run_stateless(request: ApiRequest):
         async for mode, chunk, _ in stream:
             if mode == b"values":
                 vchunk = chunk
+            elif mode == b"updates" and b"__interrupt__" in chunk:
+                # Include the interrupt message in the values
+                vchunk = chunk
             elif mode == b"error":
                 vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
             if vchunk is not None:
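A standalone restatement of the stream_mode normalization added to both wait endpoints above: the caller's modes are kept, but "values" and "updates" are always included so interrupts can be surfaced in the waited result. (The packaged code uses list(set(...)), so ordering is unspecified; sorted() is used here only to make the asserts deterministic.)

    def normalize_stream_mode(stream_mode: str | list[str] | None) -> list[str]:
        user_modes = stream_mode if stream_mode is not None else ["values"]
        if isinstance(user_modes, str):
            user_modes = [user_modes]
        # Union with the modes the wait endpoints always need.
        return sorted(set(user_modes) | {"values", "updates"})

    assert normalize_stream_mode(None) == ["updates", "values"]
    assert normalize_stream_mode("messages") == ["messages", "updates", "values"]
    assert normalize_stream_mode(["values"]) == ["updates", "values"]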
langgraph_api/cli.py
CHANGED
@@ -83,6 +83,12 @@ class SecurityConfig(TypedDict, total=False):
     paths: dict[str, dict[str, list]]
 
 
+class CacheConfig(TypedDict, total=False):
+    cache_keys: list[str]
+    ttl_seconds: int
+    max_size: int
+
+
 class AuthConfig(TypedDict, total=False):
     path: str
     """Path to the authentication function in a Python file."""
@@ -112,6 +118,7 @@ class AuthConfig(TypedDict, total=False):
         ]
     }
     """
+    cache: CacheConfig | None
 
 
 def _check_newer_version(pkg: str, timeout: float = 0.2) -> None:
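The new CacheConfig describes an optional auth-result cache attached to the auth configuration. A sketch of what such a configuration could look like, expressed with the TypedDicts added above (the class is redeclared here so the snippet runs on its own; the header names are hypothetical, while the 60-second TTL and 1000-entry size mirror the defaults used in js/remote.py below):

    from typing import TypedDict

    class CacheConfig(TypedDict, total=False):
        cache_keys: list[str]   # header names whose values form the cache key
        ttl_seconds: int        # how long a cached auth result stays valid
        max_size: int           # maximum number of cached entries

    auth_cache: CacheConfig = {
        "cache_keys": ["authorization", "x-api-key"],  # hypothetical header names
        "ttl_seconds": 60,
        "max_size": 1000,
    }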
langgraph_api/feature_flags.py
CHANGED
@@ -4,4 +4,5 @@ from langgraph.version import __version__
 LANGGRAPH_PY_MINOR = tuple(map(int, __version__.split(".")[:2]))
 
 OMIT_PENDING_SENDS = LANGGRAPH_PY_MINOR >= (0, 5)
-
+USE_RUNTIME_CONTEXT_API = LANGGRAPH_PY_MINOR >= (0, 6)
+USE_NEW_INTERRUPTS = LANGGRAPH_PY_MINOR >= (0, 6)
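These flags gate behavior on the installed langgraph minor version. A small illustration of the same comparison, using hard-coded version strings instead of langgraph.version.__version__:

    def minor(version: str) -> tuple[int, ...]:
        # Same parsing as LANGGRAPH_PY_MINOR above: keep only (major, minor).
        return tuple(map(int, version.split(".")[:2]))

    assert minor("0.6.2") >= (0, 6)          # runtime context API + new interrupts
    assert not (minor("0.5.11") >= (0, 6))   # falls back to the legacy code paths
    assert minor("0.5.11") >= (0, 5)         # OMIT_PENDING_SENDS still applies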
langgraph_api/graph.py
CHANGED
@@ -23,7 +23,7 @@ from starlette.exceptions import HTTPException
 
 from langgraph_api import asyncio as lg_asyncio
 from langgraph_api import config
-from langgraph_api.feature_flags import
+from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
 from langgraph_api.js.base import BaseRemotePregel, is_js_path
 from langgraph_api.schema import Config
 from langgraph_api.utils.config import run_in_executor, var_child_runnable_config
@@ -78,6 +78,7 @@ async def register_graph(
         graph_id=graph_id,
         metadata={"created_by": "system"},
         config=config or {},
+        context={},
         if_exists="do_nothing",
         name=assistant_name,
         description=description,
@@ -131,16 +132,19 @@ async def get_graph(
     config = lg_config.ensure_config(config)
 
     if store is not None:
-        if
+        if USE_RUNTIME_CONTEXT_API:
             from langgraph._internal._constants import CONFIG_KEY_RUNTIME
             from langgraph.runtime import Runtime
 
-
-
-
-
+            runtime = config["configurable"].get(CONFIG_KEY_RUNTIME)
+            if runtime is None:
+                patched_runtime = Runtime(store=store)
+            elif runtime.store is None:
                 patched_runtime = cast(Runtime, runtime).override(store=store)
-
+            else:
+                patched_runtime = runtime
+
+            config["configurable"][CONFIG_KEY_RUNTIME] = patched_runtime
         else:
             from langgraph.constants import CONFIG_KEY_STORE
 
@@ -412,6 +416,8 @@ def _handle_exception(task: asyncio.Task) -> None:
         task.result()
     except asyncio.CancelledError:
         pass
+    except Exception as e:
+        logger.exception("Task failed", exc_info=e)
     finally:
        # if the task died either with exception or not, we should exit
        sys.exit(1)
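The store-injection branch above only ever fills a missing store on the Runtime carried in the config; an existing store is left untouched. A minimal sketch of that decision, using only calls that appear in the diff (Runtime(store=...) and runtime.override(store=...)) and assuming langgraph >= 0.6 is installed:

    from langgraph.runtime import Runtime
    from langgraph.store.memory import InMemoryStore

    def with_store(runtime: Runtime | None, store) -> Runtime:
        """Return a Runtime that is guaranteed to carry the server's store."""
        if runtime is None:
            return Runtime(store=store)
        if runtime.store is None:
            return runtime.override(store=store)
        return runtime  # a store was already provided; leave it untouched

    store = InMemoryStore()
    assert with_store(None, store).store is store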
langgraph_api/js/base.py
CHANGED
@@ -1,9 +1,14 @@
 import os
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import Any, Literal
 
 from langchain_core.runnables import Runnable
 
 from langgraph_api.schema import Config
 
+_DC_KWARGS = {"kw_only": True, "slots": True, "frozen": True}
+
 JS_EXTENSIONS = (
     ".ts",
     ".mts",
@@ -20,6 +25,31 @@ def is_js_path(path: str | None) -> bool:
     return os.path.splitext(path)[1] in JS_EXTENSIONS
 
 
+@dataclass(**_DC_KWARGS)
+class RemoteInterrupt:
+    raw: dict
+
+    @property
+    def id(self) -> str:
+        return self.raw["id"]
+
+    @property
+    def value(self) -> Any:
+        return self.raw["value"]
+
+    @property
+    def ns(self) -> Sequence[str] | None:
+        return self.raw.get("ns")
+
+    @property
+    def resumable(self) -> bool:
+        return self.raw.get("resumable", True)
+
+    @property
+    def when(self) -> Literal["during"]:
+        return self.raw.get("when", "during")
+
+
 class BaseRemotePregel(Runnable):
     name: str = "LangGraph"
 
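RemoteInterrupt wraps the raw interrupt payload sent by the JS worker and exposes the fields the Python side reads, with defaults when the worker omits them. A self-contained rehearsal of that accessor pattern (a trimmed copy, so the snippet runs without langgraph_api installed; the payload values are made up):

    from dataclasses import dataclass
    from typing import Any

    @dataclass(frozen=True, kw_only=True, slots=True)
    class RemoteInterrupt:
        raw: dict

        @property
        def value(self) -> Any:
            return self.raw["value"]

        @property
        def resumable(self) -> bool:
            # Missing field defaults to True, matching the class above.
            return self.raw.get("resumable", True)

    intr = RemoteInterrupt(raw={"id": "abc123", "value": {"question": "approve?"}})
    assert intr.resumable and intr.value == {"question": "approve?"}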
langgraph_api/js/client.mts
CHANGED
@@ -9,6 +9,7 @@ import { HTTPException } from "hono/http-exception";
 import { fetch } from "undici";
 import pRetry from "p-retry";
 import {
+  getConfig,
   BaseStore,
   Item,
   Operation,
@@ -25,7 +26,6 @@ import {
   type ChannelVersions,
   type ChannelProtocol,
 } from "@langchain/langgraph-checkpoint";
-
 import { createHash } from "node:crypto";
 import * as fs from "node:fs/promises";
 import * as path from "node:path";
 import { serialiseAsDict, serializeError } from "./src/utils/serde.mjs";
@@ -57,10 +57,22 @@ import {
 } from "@langchain/langgraph-api/schema";
 import { filterValidExportPath } from "./src/utils/files.mts";
 import { patchFetch } from "./traceblock.mts";
+import { writeHeapSnapshot } from "node:v8";
+
+const injectConfigFormatter = format((info) => {
+  const config = getConfig();
+  if (config == null) return info;
+
+  const node = config.metadata?.["langgraph_node"];
+  if (node != null) info.langgraph_node = node;
+
+  return info;
+});
 
 const logger = createLogger({
   level: "debug",
   format: format.combine(
+    injectConfigFormatter(),
     format.errors({ stack: true }),
     format.timestamp(),
     format.json(),
@@ -90,6 +102,12 @@ const logger = createLogger({
   ],
 });
 
+const GLOBAL_LOGGER = Symbol.for("langgraph.api.sdk-logger");
+type GLOBAL_LOGGER = typeof GLOBAL_LOGGER;
+
+const maybeGlobal = globalThis as unknown as { [GLOBAL_LOGGER]: typeof logger };
+maybeGlobal[GLOBAL_LOGGER] = logger;
+
 let GRAPH_SCHEMA: Record<string, Record<string, GraphSchema> | false> = {};
 let GRAPH_OPTIONS: {
   checkpointer?: BaseCheckpointSaver<string | number>;
@@ -1041,7 +1059,6 @@ async function main() {
       const headers = new Headers(rawHeaders);
       headers.delete("x-langgraph-auth-url");
       headers.delete("x-langgraph-auth-method");
-
       const context = await authenticate(
        new Request(authUrl, { headers, method }),
      );
@@ -1077,6 +1094,21 @@ async function main() {
  }

  app.get("/ok", (c) => c.json({ ok: true }));
+
+  app.get("/debug/heapdump", async (c) => {
+    try {
+      const target =
+        `/tmp/heapdump-${Date.now()}.heapsnapshot`;
+      await fs.mkdir(path.dirname(target), { recursive: true });
+      const written = writeHeapSnapshot(target);
+      return c.json({ ok: true, written }); // 200
+    } catch (error) {
+      if (error instanceof HTTPException) {
+        return c.json(serializeError(error), error.status);
+      }
+      return c.json(serializeError(error), 500);
+    }
+  });

  app.onError((err, c) => {
    logger.error(err);
langgraph_api/js/remote.py
CHANGED
@@ -25,7 +25,7 @@ from langchain_core.runnables.schema import (
 )
 from langgraph.checkpoint.serde.base import SerializerProtocol
 from langgraph.store.base import GetOp, Item, ListNamespacesOp, PutOp, SearchOp
-from langgraph.types import Command,
+from langgraph.types import Command, PregelTask, Send, StateSnapshot
 from langgraph_sdk import Auth
 from pydantic import BaseModel
 from starlette import types
@@ -42,8 +42,8 @@ from starlette.routing import Route
 
 from langgraph_api import store as api_store
 from langgraph_api.auth.custom import DotDict, ProxyUser
-from langgraph_api.config import LANGGRAPH_AUTH_TYPE
-from langgraph_api.js.base import BaseRemotePregel
+from langgraph_api.config import LANGGRAPH_AUTH, LANGGRAPH_AUTH_TYPE
+from langgraph_api.js.base import BaseRemotePregel, RemoteInterrupt
 from langgraph_api.js.errors import RemoteException
 from langgraph_api.js.sse import SSEDecoder, aiter_lines_raw
 from langgraph_api.route import ApiResponse
@@ -226,6 +226,10 @@ class RemotePregel(BaseRemotePregel):
             if state and isinstance(state, dict) and "config" in state:
                 state = self._convert_state_snapshot(state)
 
+            interrupts: list[RemoteInterrupt] = []
+            if task_interrupts := task.get("interrupts"):
+                interrupts = [RemoteInterrupt(raw=i) for i in task_interrupts]
+
             result.append(
                 PregelTask(
                     task["id"],
@@ -233,19 +237,7 @@ class RemotePregel(BaseRemotePregel):
                     tuple(task["path"]) if task.get("path") else tuple(),
                     # TODO: figure out how to properly deserialise errors
                     task.get("error"),
-                    (
-                        tuple(
-                            Interrupt(
-                                value=interrupt["value"],
-                                when=interrupt["when"],
-                                resumable=interrupt.get("resumable", True),
-                                ns=interrupt.get("ns"),
-                            )
-                            for interrupt in task.get("interrupts")
-                        )
-                        if task.get("interrupts")
-                        else []
-                    ),
+                    tuple(interrupts),
                     state,
                 )
             )
@@ -339,6 +331,9 @@ class RemotePregel(BaseRemotePregel):
     def config_schema(self) -> type[BaseModel]:
         raise NotImplementedError()
 
+    def get_context_jsonschema(self) -> dict:
+        raise NotImplementedError()
+
     async def invoke(self, input: Any, config: RunnableConfig | None = None):
         raise NotImplementedError()
 
@@ -374,7 +369,7 @@ async def run_js_process(paths_str: str, watch: bool = False):
             client_file,
             "--skip-schema-cache",
         )
-        if
+        if False
         else ("tsx", "--import", client_preload_file, client_file)
     )
     try:
@@ -388,6 +383,7 @@ async def run_js_process(paths_str: str, watch: bool = False):
                 **os.environ,
             },
         )
+        logger.info("Started JS graphs process [%d]", process.pid)
        code = await process.wait()
        raise Exception(f"JS process exited with code {code}")
    except asyncio.CancelledError:
@@ -448,6 +444,7 @@ async def run_js_http_process(paths_str: str, http_config: dict, watch: bool = F
            raise
        except Exception:
            if attempt >= 3:
+                logger.exception("JS HTTP process failed")
                raise
            else:
                logger.warning(f"Retrying JS HTTP process {3 - attempt} more times...")
@@ -843,11 +840,26 @@ class CustomJsAuthBackend(AuthenticationBackend):
     ls_auth: AuthenticationBackend | None
 
     def __init__(self, disable_studio_auth: bool = False):
+        from langgraph_api.utils.cache import LRUCache
+
         self.ls_auth = None
         if not disable_studio_auth and LANGGRAPH_AUTH_TYPE == "langsmith":
             from langgraph_api.auth.langsmith.backend import LangsmithAuthBackend
 
             self.ls_auth = LangsmithAuthBackend()
+        self.ttl_cache: LRUCache | None = None
+        self.cache_keys: list[str] | None = None
+        if cache := LANGGRAPH_AUTH.get("cache"):
+            keys = cache.get("cache_keys", [])
+            if not isinstance(keys, list):
+                raise ValueError(
+                    f"LANGGRAPH_AUTH.cache.cache_keys must be a list. Got: {keys}"
+                )
+            self.cache_keys = keys
+            self.ttl_cache = LRUCache(
+                max_size=cache.get("max_size", 1000),
+                ttl=cache.get("ttl_seconds", 60),
+            )
 
     async def authenticate(
         self, conn: HTTPConnection
@@ -863,6 +875,16 @@ class CustomJsAuthBackend(AuthenticationBackend):
         headers.pop("content-length", None)
         headers["x-langgraph-auth-url"] = str(conn.url)
         headers["x-langgraph-auth-method"] = conn.scope.get("method")
+        cache_key = None
+        if self.cache_keys:
+            cache_key = tuple(
+                (k, headers.get(k)) for k in self.cache_keys if headers.get(k)
+            )
+        if cache_key:
+            if self.ttl_cache is not None:
+                cached = self.ttl_cache.get(cache_key)
+                if cached:
+                    return cached
 
         res = await _client.post("/auth/authenticate", headers=headers)
         data = res.json()
@@ -873,8 +895,11 @@ class CustomJsAuthBackend(AuthenticationBackend):
             message = data.get("message") or "Unauthorized"
 
             raise HTTPException(status_code=status, detail=message, headers=headers)
+        result = AuthCredentials(data["scopes"]), ProxyUser(DotDict(data["user"]))
+        if cache_key:
+            self.ttl_cache.set(cache_key, result)
 
-        return
+        return result
 
 
 async def handle_js_auth_event(
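The JS auth backend above now memoizes /auth/authenticate results, keyed on the configured request headers, for a bounded time. A minimal, self-contained sketch of that idea (this is not the packaged langgraph_api.utils.cache.LRUCache, just an illustration with the same get/set shape and the same 1000-entry / 60-second defaults):

    import time
    from collections import OrderedDict

    class TTLCache:
        def __init__(self, max_size: int = 1000, ttl: float = 60.0):
            self.max_size, self.ttl = max_size, ttl
            self._data: OrderedDict[tuple, tuple[float, object]] = OrderedDict()

        def get(self, key):
            entry = self._data.get(key)
            if entry is None:
                return None
            expires_at, value = entry
            if expires_at < time.monotonic():
                del self._data[key]          # expired
                return None
            self._data.move_to_end(key)      # LRU bump
            return value

        def set(self, key, value):
            self._data[key] = (time.monotonic() + self.ttl, value)
            self._data.move_to_end(key)
            while len(self._data) > self.max_size:
                self._data.popitem(last=False)

    cache = TTLCache(max_size=1000, ttl=60)
    cache_key = (("authorization", "Bearer abc"),)   # built from the configured cache_keys
    cache.set(cache_key, ("credentials", "user"))
    assert cache.get(cache_key) == ("credentials", "user")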
langgraph_api/metadata.py
CHANGED
@@ -61,9 +61,15 @@ BEACON_ENDPOINT = "https://api.smith.langchain.com/v1/metadata/submit"
 # LangChain auth endpoint for API key submissions
 LANGCHAIN_METADATA_ENDPOINT = None
 if LANGSMITH_AUTH_ENDPOINT:
-
-
-
+    if "/api/v1" in LANGSMITH_AUTH_ENDPOINT:
+        # If the endpoint already has /api/v1 (for self-hosted control plane deployments), we assume it's the correct format
+        LANGCHAIN_METADATA_ENDPOINT = (
+            LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/metadata/submit"
+        )
+    else:
+        LANGCHAIN_METADATA_ENDPOINT = (
+            LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/v1/metadata/submit"
+        )
 
 
 def incr_runs(*, incr: int = 1) -> None:
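A doctest-style restatement of the endpoint selection above: auth endpoints that already contain /api/v1 (self-hosted control plane) get /metadata/submit appended directly, everything else gets /v1/metadata/submit. The hostnames are placeholders:

    def metadata_endpoint(auth_endpoint: str) -> str:
        base = auth_endpoint.rstrip("/")
        if "/api/v1" in auth_endpoint:
            return base + "/metadata/submit"
        return base + "/v1/metadata/submit"

    assert (
        metadata_endpoint("https://control.example.com/api/v1/")
        == "https://control.example.com/api/v1/metadata/submit"
    )
    assert (
        metadata_endpoint("https://auth.example.com")
        == "https://auth.example.com/v1/metadata/submit"
    )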
langgraph_api/middleware/http_logger.py
CHANGED
@@ -12,6 +12,14 @@ asgi = structlog.stdlib.get_logger("asgi")
 PATHS_IGNORE = {"/ok", "/metrics"}
 
 
+def _get_level(status: int | None) -> int:
+    if status is None or status < 400:
+        return logging.INFO
+    if status < 500:
+        return logging.WARNING
+    return logging.ERROR
+
+
 class AccessLoggerMiddleware:
     def __init__(
         self,
@@ -46,6 +54,7 @@ class AccessLoggerMiddleware:
                 info["response"] = message
                 await send(message)
                 asgi.debug(f"ASGI send {message['type']}", **message)
+
         else:
             inner_receive = receive
 
@@ -74,8 +83,8 @@ class AccessLoggerMiddleware:
 
         if method and route and status:
             HTTP_METRICS_COLLECTOR.record_request(method, route, status, latency)
-
-
+        self.logger.log(
+            _get_level(status),
             f"{method} {path} {status} {latency}ms",
             method=method,
            path=path,
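The access logger now picks its level from the response status rather than logging every request at the same level. The mapping is small enough to restate and check directly:

    import logging

    def get_level(status: int | None) -> int:
        if status is None or status < 400:
            return logging.INFO
        if status < 500:
            return logging.WARNING
        return logging.ERROR

    assert get_level(200) == logging.INFO
    assert get_level(404) == logging.WARNING
    assert get_level(503) == logging.ERROR
    assert get_level(None) == logging.INFO   # e.g. no response was ever sent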
langgraph_api/models/run.py
CHANGED
@@ -18,6 +18,7 @@ from langgraph_api.graph import GRAPHS, get_assistant_id
 from langgraph_api.schema import (
     All,
     Config,
+    Context,
     IfNotExists,
     MetadataInput,
     MultitaskStrategy,
@@ -52,6 +53,8 @@ class RunCreateDict(TypedDict):
     """Metadata for the run."""
     config: Config | None
     """Additional configuration for the run."""
+    context: Context | None
+    """Static context for the run."""
     webhook: str | None
     """Webhook to call when the run is complete."""
 
@@ -283,11 +286,13 @@ async def create_valid_run(
             detail="You must provide a thread_id when resuming.",
         )
     temporary = thread_id is None and payload.get("on_completion", "delete") == "delete"
+    stream_resumable = payload.get("stream_resumable", False)
     stream_mode, multitask_strategy, prevent_insert_if_inflight = assign_defaults(
         payload
     )
     # assign custom headers and checkpoint to config
     config = payload.get("config") or {}
+    context = payload.get("context") or {}
     configurable = config.setdefault("configurable", {})
     if checkpoint_id:
         configurable["checkpoint_id"] = str(checkpoint_id)
@@ -321,6 +326,7 @@ async def create_valid_run(
             "input": payload.get("input"),
             "command": payload.get("command"),
             "config": config,
+            "context": context,
             "stream_mode": stream_mode,
             "interrupt_before": payload.get("interrupt_before"),
             "interrupt_after": payload.get("interrupt_after"),
@@ -328,7 +334,7 @@ async def create_valid_run(
             "feedback_keys": payload.get("feedback_keys"),
             "temporary": temporary,
             "subgraphs": payload.get("stream_subgraphs", False),
-            "resumable":
+            "resumable": stream_resumable,
             "checkpoint_during": payload.get("checkpoint_during", True),
         },
         metadata=payload.get("metadata"),
@@ -367,6 +373,7 @@ async def create_valid_run(
             temporary=temporary,
             after_seconds=after_seconds,
             if_not_exists=if_not_exists,
+            stream_resumable=stream_resumable,
             run_create_ms=(
                 int(time.time() * 1_000) - request_start_time
                 if request_start_time
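Run creation now carries a static "context" alongside "config", and the "stream_resumable" flag is read once and forwarded into the stream kwargs and the queued run. An illustrative payload in the shape RunCreateDict accepts; every value here is made up, only the field names come from the code above:

    run_payload = {
        "assistant_id": "agent",                       # hypothetical assistant / graph id
        "input": {"messages": [{"role": "user", "content": "hi"}]},
        "config": {"configurable": {"temperature": 0}},
        "context": {"user_id": "u-123"},               # static context, new in this release
        "stream_mode": ["values"],
        "stream_resumable": True,
        "on_disconnect": "continue",
    }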
langgraph_api/schema.py
CHANGED
@@ -1,6 +1,6 @@
 from collections.abc import Sequence
 from datetime import datetime
-from typing import Any, Literal, Optional
+from typing import Any, Literal, Optional, TypeAlias
 from uuid import UUID
 
 from typing_extensions import TypedDict
@@ -26,6 +26,8 @@ IfNotExists = Literal["create", "reject"]
 
 All = Literal["*"]
 
+Context: TypeAlias = dict[str, Any]
+
 
 class Config(TypedDict, total=False):
     tags: list[str]
@@ -55,17 +57,6 @@ class Checkpoint(TypedDict):
     checkpoint_map: dict[str, Any] | None
 
 
-class GraphSchema(TypedDict):
-    """Graph model."""
-
-    graph_id: str
-    """The ID of the graph."""
-    state_schema: dict
-    """The schema for the graph state."""
-    config_schema: dict
-    """The schema for the graph config."""
-
-
 class Assistant(TypedDict):
     """Assistant model."""
 
@@ -79,6 +70,8 @@ class Assistant(TypedDict):
     """The description of the assistant."""
     config: Config
     """The assistant config."""
+    context: Fragment
+    """The static context of the assistant."""
     created_at: datetime
     """The time the assistant was created."""
     updated_at: datetime
@@ -89,6 +82,31 @@ class Assistant(TypedDict):
     """The assistant version."""
 
 
+class Interrupt(TypedDict):
+    id: str | None
+    """The ID of the interrupt."""
+    value: Any
+    """The value of the interrupt."""
+
+
+class DeprecatedInterrupt(TypedDict, total=False):
+    """We document this old interrupt format internally, but not in API spec.
+
+    Should be dropped with lg-api v1.0.0.
+    """
+
+    id: str | None
+    """The ID of the interrupt."""
+    value: Any
+    """The value of the interrupt."""
+    resumable: bool
+    """Whether the interrupt is resumable."""
+    ns: Sequence[str] | None
+    """The optional namespace of the interrupt."""
+    when: Literal["during"]
+    """When the interrupt occurred, always "during"."""
+
+
 class Thread(TypedDict):
     thread_id: UUID
     """The ID of the thread."""
@@ -104,7 +122,7 @@ class Thread(TypedDict):
     """The status of the thread. One of 'idle', 'busy', 'interrupted', "error"."""
     values: Fragment
     """The current state of the thread."""
-    interrupts:
+    interrupts: dict[str, list[Interrupt]]
     """The current interrupts of the thread, a map of task_id to list of interrupts."""
 
 
@@ -112,7 +130,7 @@ class ThreadTask(TypedDict):
     id: str
     name: str
     error: str | None
-    interrupts: list[
+    interrupts: list[Interrupt]
     checkpoint: Checkpoint | None
     state: Optional["ThreadState"]
 
@@ -133,6 +151,8 @@ class ThreadState(TypedDict):
     """The parent checkpoint. If missing, this is the root checkpoint."""
     tasks: Sequence[ThreadTask]
     """Tasks to execute in this step. If already attempted, may contain an error."""
+    interrupts: list[Interrupt]
+    """The interrupts for this state."""
 
 
 class Run(TypedDict):
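The schema module now distinguishes the slim v0.6 interrupt shape from the legacy one it still emits for older langgraph versions. The two shapes as plain dicts matching the TypedDicts above, plus the per-task map a Thread exposes; all ids and values are illustrative:

    # langgraph >= 0.6 wire format: only id and value.
    new_interrupt = {"id": "int-1", "value": {"question": "approve?"}}

    # Legacy format, kept for compatibility and slated for removal in lg-api v1.0.0.
    deprecated_interrupt = {
        "id": "int-1",
        "value": {"question": "approve?"},
        "resumable": True,
        "ns": ["subgraph:node"],
        "when": "during",
    }

    # On a Thread, interrupts are keyed by task id.
    thread_interrupts = {"d2f0a1b2-task-id": [new_interrupt]}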
langgraph_api/state.py
CHANGED
@@ -4,7 +4,10 @@ import typing
 
 from langgraph.types import Interrupt, StateSnapshot
 
-from langgraph_api.
+from langgraph_api.feature_flags import USE_NEW_INTERRUPTS
+from langgraph_api.js.base import RemoteInterrupt
+from langgraph_api.schema import Checkpoint, DeprecatedInterrupt, ThreadState
+from langgraph_api.schema import Interrupt as InterruptSchema
 
 if typing.TYPE_CHECKING:
     from langchain_core.runnables.config import RunnableConfig
@@ -38,21 +41,48 @@ def runnable_config_to_checkpoint(
     return checkpoint
 
 
-def
-
-
-
-    interrupt = Interrupt(**interrupt)
+def patch_interrupt(
+    interrupt: Interrupt | RemoteInterrupt | dict,
+) -> InterruptSchema | DeprecatedInterrupt:
+    """Convert a langgraph interrupt (v0 or v1) to standard interrupt schema.
 
-
-
-
-
-
-
-
-
+    In v0.4 and v0.5, interrupt_id is a property on the langgraph.types.Interrupt object,
+    so we reconstruct the type in order to access the id, with compatibility for the new
+    v0.6 interrupt format as well.
+    """
+
+    # This is coming from JS, which already contains the interrupt ID.
+    # Stay on the safe side and pass-through the interrupt ID if it exists.
+    if isinstance(interrupt, RemoteInterrupt):
+        id = interrupt.raw.pop("interrupt_id", None) or interrupt.raw.pop("id", None)
+        if id is None:
+            return interrupt.raw
+        return {"id": id, **interrupt.raw}
+
+    if USE_NEW_INTERRUPTS:
+        interrupt = Interrupt(**interrupt) if isinstance(interrupt, dict) else interrupt
+
+        return {
+            "id": interrupt.id,
+            "value": interrupt.value,
+        }
+    else:
+        if isinstance(interrupt, dict):
+            # interrupt_id is a deprecated property on Interrupt and should not be used for initialization
+            # id is the new field we use for identification, also not supported on init for old versions
+            interrupt.pop("interrupt_id", None)
+            interrupt.pop("id", None)
+            interrupt = Interrupt(**interrupt)
+
+        return {
+            "id": interrupt.interrupt_id
+            if hasattr(interrupt, "interrupt_id")
+            else None,
+            "value": interrupt.value,
+            "resumable": interrupt.resumable,
+            "ns": interrupt.ns,
+            "when": interrupt.when,
+        }
 
 
 def state_snapshot_to_thread_state(state: StateSnapshot) -> ThreadState:
@@ -65,9 +95,7 @@ def state_snapshot_to_thread_state(state: StateSnapshot) -> ThreadState:
             "name": t.name,
             "path": t.path,
             "error": t.error,
-            "interrupts": [
-                state_interrupt_to_thread_interrupt(i) for i in t.interrupts
-            ],
+            "interrupts": [patch_interrupt(i) for i in t.interrupts],
             "checkpoint": t.state["configurable"]
             if t.state is not None and not isinstance(t.state, StateSnapshot)
             else None,
@@ -82,6 +110,7 @@ def state_snapshot_to_thread_state(state: StateSnapshot) -> ThreadState:
         "created_at": state.created_at,
         "checkpoint": runnable_config_to_checkpoint(state.config),
         "parent_checkpoint": runnable_config_to_checkpoint(state.parent_config),
+        "interrupts": [patch_interrupt(i) for i in getattr(state, "interrupts", [])],
         # below are deprecated
         "checkpoint_id": state.config["configurable"].get("checkpoint_id")
         if state.config
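For interrupts coming from the JS worker, patch_interrupt simply prefers an explicit interrupt id when the payload carries one and passes the rest through. That branch, restated on a plain dict (not the packaged code; this copy avoids mutating the input, whereas the code above pops from the raw payload in place):

    def patch_js_interrupt(raw: dict) -> dict:
        raw = dict(raw)  # work on a copy instead of mutating the payload
        interrupt_id = raw.pop("interrupt_id", None) or raw.pop("id", None)
        return raw if interrupt_id is None else {"id": interrupt_id, **raw}

    assert patch_js_interrupt({"interrupt_id": "i-1", "value": 42}) == {"id": "i-1", "value": 42}
    assert patch_js_interrupt({"value": 42}) == {"value": 42}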
langgraph_api/stream.py
CHANGED
@@ -28,6 +28,7 @@ from langgraph_api import __version__
 from langgraph_api import store as api_store
 from langgraph_api.asyncio import ValueEvent, wait_if_not_done
 from langgraph_api.command import map_cmd
+from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
 from langgraph_api.graph import get_graph
 from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.metadata import HOST, PLAN, USER_API_URL, incr_nodes
@@ -95,6 +96,7 @@ async def astream_state(
     kwargs.pop("resumable", False)
     subgraphs = kwargs.get("subgraphs", False)
     temporary = kwargs.pop("temporary", False)
+    context = kwargs.pop("context", None)
     config = kwargs.pop("config")
     stack = AsyncExitStack()
     graph = await stack.enter_async_context(
@@ -234,6 +236,8 @@ async def astream_state(
             yield "events", event
     else:
         output_keys = kwargs.pop("output_keys", graph.output_channels)
+        if USE_RUNTIME_CONTEXT_API:
+            kwargs["context"] = context
        async with (
            stack,
            aclosing(
{langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.100
+Version: 0.2.108
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -12,8 +12,8 @@ Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
 Requires-Dist: langgraph-checkpoint>=2.0.23
 Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.0
-Requires-Dist: langgraph-sdk>=0.
-Requires-Dist: langgraph>=0.
+Requires-Dist: langgraph-sdk>=0.2.0
+Requires-Dist: langgraph>=0.4.0
 Requires-Dist: langsmith>=0.3.45
 Requires-Dist: orjson>=3.9.7
 Requires-Dist: pyjwt>=2.9.0
{langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/RECORD
CHANGED
@@ -1,27 +1,27 @@
-langgraph_api/__init__.py,sha256=
+langgraph_api/__init__.py,sha256=wDb60-m4UuCYILmUqUWjUPEIIRh_kcoP0eCTtbgSAkQ,24
 langgraph_api/asgi_transport.py,sha256=eqifhHxNnxvI7jJqrY1_8RjL4Fp9NdN4prEub2FWBt8,5091
 langgraph_api/asyncio.py,sha256=Wv4Rwm-a-Cf6JpfgJmVuVlXQ7SlwrjbTn0eq1ux8I2Q,9652
-langgraph_api/cli.py,sha256
+langgraph_api/cli.py,sha256=xQojITwmmKSJw48Lr2regcnRPRq2FJqWlPpeyr5TgbU,16158
 langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
 langgraph_api/config.py,sha256=Nxhx6fOsxk_u-Aae54JAGn46JQ1wKXPjeu_KX_3d4wQ,11918
 langgraph_api/cron_scheduler.py,sha256=CiwZ-U4gDOdG9zl9dlr7mH50USUgNB2Fvb8YTKVRBN4,2625
 langgraph_api/errors.py,sha256=zlnl3xXIwVG0oGNKKpXf1an9Rn_SBDHSyhe53hU6aLw,1858
-langgraph_api/feature_flags.py,sha256=
-langgraph_api/graph.py,sha256=
+langgraph_api/feature_flags.py,sha256=GjwmNjfg0Jhs3OzR2VbK2WgrRy3o5l8ibIYiUtQkDPA,363
+langgraph_api/graph.py,sha256=Yq7Y1x_SFmzmWWY73YidrANPOIDqjZv0Gj4O3lR7H6I,24457
 langgraph_api/http.py,sha256=L0leP5fH4NIiFgJd1YPMnTRWqrUUYq_4m5j558UwM5E,5612
 langgraph_api/http_metrics.py,sha256=VgM45yU1FkXuI9CIOE_astxAAu2G-OJ42BRbkcos_CQ,5555
 langgraph_api/logging.py,sha256=4K1Fnq8rrGC9CqJubZtP34Y9P2zh7VXf_41q7bH3OXU,4849
-langgraph_api/metadata.py,sha256=
+langgraph_api/metadata.py,sha256=fVsbwxVitAj4LGVYpCcadYeIFANEaNtcx6LBxQLcTqg,6949
 langgraph_api/patch.py,sha256=Dgs0PXHytekX4SUL6KsjjN0hHcOtGLvv1GRGbh6PswU,1408
 langgraph_api/queue_entrypoint.py,sha256=hC8j-A4cUxibusiiPJBlK0mkmChNZxNcXn5GVwL0yic,4889
 langgraph_api/route.py,sha256=4VBkJMeusfiZtLzyUaKm1HwLHTq0g15y2CRiRhM6xyA,4773
-langgraph_api/schema.py,sha256=
+langgraph_api/schema.py,sha256=WoA7uu1VA_ZRVSR4gpAnm1n7sKDfWUEojx7CFDiOH7Q,6341
 langgraph_api/serde.py,sha256=0ALETUn582vNF-m0l_WOZGF_scL1VPA39fDkwMJQPrg,5187
 langgraph_api/server.py,sha256=Z_VL-kIphybTRDWBIqHMfRhgCmAFyTRqAGlgnHQF0Zg,6973
 langgraph_api/sse.py,sha256=SLdtZmTdh5D8fbWrQjuY9HYLd2dg8Rmi6ZMmFMVc2iE,4204
-langgraph_api/state.py,sha256=
+langgraph_api/state.py,sha256=P2mCo-0bqPu2v9FSFGJtUCjPPNvv6wLUKQh8SdxAtc8,4387
 langgraph_api/store.py,sha256=srRI0fQXNFo_RSUs4apucr4BEp_KrIseJksZXs32MlQ,4635
-langgraph_api/stream.py,sha256=
+langgraph_api/stream.py,sha256=f22caNhIPeF67wBD3xtoaodyimya0fiBlmVOAznEkOY,13851
 langgraph_api/thread_ttl.py,sha256=-Ox8NFHqUH3wGNdEKMIfAXUubY5WGifIgCaJ7npqLgw,1762
 langgraph_api/traceblock.py,sha256=2aWS6TKGTcQ0G1fOtnjVrzkpeGvDsR0spDbfddEqgRU,594
 langgraph_api/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -29,11 +29,11 @@ langgraph_api/validation.py,sha256=zMuKmwUEBjBgFMwAaeLZmatwGVijKv2sOYtYg7gfRtc,4
 langgraph_api/webhook.py,sha256=VCJp4dI5E1oSJ15XP34cnPiOi8Ya8Q1BnBwVGadOpLI,1636
 langgraph_api/worker.py,sha256=LVvjvigurlDgpNjFcbAvRH7744fE01Lirrg2ZlHtORE,14245
 langgraph_api/api/__init__.py,sha256=WHy6oNLWtH1K7AxmmsU9RD-Vm6WP-Ov16xS8Ey9YCmQ,6090
-langgraph_api/api/assistants.py,sha256=
+langgraph_api/api/assistants.py,sha256=2ZIdtfiI1KJAHpDhu0a8kJtLebVBZwUZl8ky1k54-aA,15928
 langgraph_api/api/mcp.py,sha256=qe10ZRMN3f-Hli-9TI8nbQyWvMeBb72YB1PZVbyqBQw,14418
 langgraph_api/api/meta.py,sha256=fmc7btbtl5KVlU_vQ3Bj4J861IjlqmjBKNtnxSV-S-Q,4198
 langgraph_api/api/openapi.py,sha256=KToI2glOEsvrhDpwdScdBnL9xoLOqkTxx5zKq2pMuKQ,11957
-langgraph_api/api/runs.py,sha256=
+langgraph_api/api/runs.py,sha256=DxmGkRnZsQgr5bmflguDKXEvY3J9Q-bt7YwbuSFAMxU,21579
 langgraph_api/api/store.py,sha256=TSeMiuMfrifmEnEbL0aObC2DPeseLlmZvAMaMzPgG3Y,5535
 langgraph_api/api/threads.py,sha256=nQMlGnsrFD1F4S-ID_q0HZrF2GZ0Pm7aV04Sh1eYgds,9588
 langgraph_api/api/ui.py,sha256=17QrRy2XVzP7x_0RdRw7pmSv-n1lmnb54byHCGGeNhM,2490
@@ -48,14 +48,14 @@ langgraph_api/auth/langsmith/client.py,sha256=eKchvAom7hdkUXauD8vHNceBDDUijrFgdT
 langgraph_api/js/.gitignore,sha256=l5yI6G_V6F1600I1IjiUKn87f4uYIrBAYU1MOyBBhg4,59
 langgraph_api/js/.prettierrc,sha256=0es3ovvyNIqIw81rPQsdt1zCQcOdBqyR_DMbFE4Ifms,19
 langgraph_api/js/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/js/base.py,sha256=
+langgraph_api/js/base.py,sha256=GORqRDbGAOQX2ygT6dMcqBDCA9tdAp8EpG4bfqUPMg4,1198
 langgraph_api/js/build.mts,sha256=bRQo11cglDFXlLN7Y48CQPTSMLenp7MqIWuP1DkSIo0,3139
 langgraph_api/js/client.http.mts,sha256=AGA-p8J85IcNh2oXZjDxHQ4PnQdJmt-LPcpZp6j0Cws,4687
-langgraph_api/js/client.mts,sha256=
+langgraph_api/js/client.mts,sha256=wizpsqiM95KWnPw4ByT9_3nELrGHVvquQB2pABw597w,31957
 langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
 langgraph_api/js/global.d.ts,sha256=j4GhgtQSZ5_cHzjSPcHgMJ8tfBThxrH-pUOrrJGteOU,196
 langgraph_api/js/package.json,sha256=BpNAO88mbE-Gv4WzQfj1TLktCWGqm6XBqI892ObuOUw,1333
-langgraph_api/js/remote.py,sha256=
+langgraph_api/js/remote.py,sha256=hN-yVXln3qk0E3JXM6It89sIgRkbT_FpW0JDqQ2KN-g,37832
 langgraph_api/js/schema.py,sha256=M4fLtr50O1jck8H1hm_0W4cZOGYGdkrB7riLyCes4oY,438
 langgraph_api/js/sse.py,sha256=lsfp4nyJyA1COmlKG9e2gJnTttf_HGCB5wyH8OZBER8,4105
 langgraph_api/js/traceblock.mts,sha256=QtGSN5VpzmGqDfbArrGXkMiONY94pMQ5CgzetT_bKYg,761
@@ -70,11 +70,11 @@ langgraph_api/js/src/utils/importMap.mts,sha256=pX4TGOyUpuuWF82kXcxcv3-8mgusRezO
 langgraph_api/js/src/utils/pythonSchemas.mts,sha256=98IW7Z_VP7L_CHNRMb3_MsiV3BgLE2JsWQY_PQcRR3o,685
 langgraph_api/js/src/utils/serde.mts,sha256=D9o6MwTgwPezC_DEmsWS5NnLPnjPMVWIb1I1D4QPEPo,743
 langgraph_api/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/middleware/http_logger.py,sha256=
+langgraph_api/middleware/http_logger.py,sha256=L7ZhypmQjlHBfm93GqZaqUXzu0r-ieaoO1lY7t1jGb0,3701
 langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV7_aLBPw7_EgeqI,2111
 langgraph_api/middleware/request_id.py,sha256=SDj3Yi3WvTbFQ2ewrPQBjAV8sYReOJGeIiuoHeZpR9g,1242
 langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langgraph_api/models/run.py,sha256=
+langgraph_api/models/run.py,sha256=M4NfATGKkXIuB5MLh0CUSp3bP6iG83ZZVvlqE2A_n1E,14855
 langgraph_api/tunneling/cloudflare.py,sha256=iKb6tj-VWPlDchHFjuQyep2Dpb-w2NGfJKt-WJG9LH0,3650
 langgraph_api/utils/__init__.py,sha256=92mSti9GfGdMRRWyESKQW5yV-75Z9icGHnIrBYvdypU,3619
 langgraph_api/utils/cache.py,sha256=SrtIWYibbrNeZzLXLUGBFhJPkMVNQnVxR5giiYGHEfI,1810
@@ -93,9 +93,9 @@ langgraph_runtime/retry.py,sha256=V0duD01fO7GUQ_btQkp1aoXcEOFhXooGVP6q4yMfuyY,11
 langgraph_runtime/store.py,sha256=7mowndlsIroGHv3NpTSOZDJR0lCuaYMBoTnTrewjslw,114
 LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
-openapi.json,sha256=
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
-langgraph_api-0.2.
+openapi.json,sha256=jyQZW5U4V15zWciiIvaDPasYZd3k1iMiQ2vkPxf3zb4,145614
+langgraph_api-0.2.108.dist-info/METADATA,sha256=P6idskl_R8E9Oyv8ae9Ne9jQlZLNobIwbXmzN3ZxGoE,3890
+langgraph_api-0.2.108.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langgraph_api-0.2.108.dist-info/entry_points.txt,sha256=hGedv8n7cgi41PypMfinwS_HfCwA7xJIfS0jAp8htV8,78
+langgraph_api-0.2.108.dist-info/licenses/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.2.108.dist-info/RECORD,,
openapi.json
CHANGED
@@ -3035,6 +3035,11 @@
         "title": "Config",
         "description": "The assistant config."
       },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
       "created_at": {
         "type": "string",
         "format": "date-time",
@@ -3100,6 +3105,11 @@
         "title": "Config",
         "description": "Configuration to use for the graph. Useful when graph is configurable and you want to create different assistants based on different configurations."
       },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
       "metadata": {
         "type": "object",
         "title": "Metadata",
@@ -3148,6 +3158,11 @@
         "title": "Config",
         "description": "Configuration to use for the graph. Useful when graph is configurable and you want to update the assistant's configuration."
      },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
      "metadata": {
        "type": "object",
        "title": "Metadata",
@@ -3326,6 +3341,11 @@
         "title": "Config",
         "description": "The configuration for the assistant."
       },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
       "webhook": {
         "type": "string",
         "maxLength": 65536,
@@ -3466,13 +3486,17 @@
         "type": "object",
         "title": "Config Schema",
         "description": "The schema for the graph config. Missing if unable to generate JSON schema from graph."
+      },
+      "context_schema": {
+        "type": "object",
+        "title": "Context Schema",
+        "description": "The schema for the graph context. Missing if unable to generate JSON schema from graph."
       }
     },
     "type": "object",
     "required": [
       "graph_id",
-      "state_schema",
-      "config_schema"
+      "state_schema"
     ],
     "title": "GraphSchema",
     "description": "Defines the structure and properties of a graph."
@@ -3498,14 +3522,18 @@
         "type": "object",
         "title": "Config Schema",
         "description": "The schema for the graph config. Missing if unable to generate JSON schema from graph."
+      },
+      "context_schema": {
+        "type": "object",
+        "title": "Context Schema",
+        "description": "The schema for the graph context. Missing if unable to generate JSON schema from graph."
       }
     },
     "type": "object",
     "required": [
       "input_schema",
       "output_schema",
-      "state_schema",
-      "config_schema"
+      "state_schema"
     ],
     "title": "GraphSchemaNoId",
     "description": "Defines the structure and properties of a graph without an ID."
@@ -3516,7 +3544,7 @@
       "$ref": "#/components/schemas/GraphSchemaNoId"
     },
     "title": "Subgraphs",
-    "description": "Map of graph name to graph schema metadata (`input_schema`, `output_schema`, `state_schema`, `config_schema`)."
+    "description": "Map of graph name to graph schema metadata (`input_schema`, `output_schema`, `state_schema`, `config_schema`, `context_schema`)."
   },
   "Run": {
     "properties": {
@@ -3766,6 +3794,11 @@
         "title": "Config",
         "description": "The configuration for the assistant."
       },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
       "webhook": {
         "type": "string",
         "maxLength": 65536,
@@ -4005,6 +4038,11 @@
         "title": "Config",
         "description": "The configuration for the assistant."
       },
+      "context": {
+        "type": "object",
+        "title": "Context",
+        "description": "Static context added to the assistant."
+      },
       "webhook": {
         "type": "string",
         "maxLength": 65536,
@@ -4312,6 +4350,11 @@
         "title": "Metadata",
         "description": "The thread metadata."
       },
+      "config": {
+        "type": "object",
+        "title": "Config",
+        "description": "The thread config."
+      },
       "status": {
         "type": "string",
         "enum": [
@@ -4327,6 +4370,11 @@
         "type": "object",
         "title": "Values",
         "description": "The current state of the thread."
+      },
+      "interrupts": {
+        "type": "object",
+        "title": "Interrupts",
+        "description": "The current interrupts of the thread."
       }
     },
     "type": "object",
@@ -4476,7 +4524,9 @@
       },
       "interrupts": {
         "type": "array",
-        "items": {
+        "items": {
+          "$ref": "#/components/schemas/Interrupt"
+        }
       },
       "checkpoint": {
         "$ref": "#/components/schemas/CheckpointConfig",
@@ -4509,6 +4559,12 @@
       "parent_checkpoint": {
         "type": "object",
         "title": "Parent Checkpoint"
+      },
+      "interrupts": {
+        "type": "array",
+        "items": {
+          "$ref": "#/components/schemas/Interrupt"
+        }
       }
     },
     "type": "object",
@@ -4954,6 +5010,24 @@
           }
         }
       }
+      },
+      "Interrupt": {
+        "type": "object",
+        "properties": {
+          "id": {
+            "type": [
+              "string",
+              "null"
+            ]
+          },
+          "value": {
+            "type": "object"
+          }
+        },
+        "title": "Interrupt",
+        "required": [
+          "value"
+        ]
       }
     }
   }
{langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/WHEEL
File without changes
{langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/entry_points.txt
File without changes
{langgraph_api-0.2.100.dist-info → langgraph_api-0.2.108.dist-info}/licenses/LICENSE
File without changes