langgraph-api 0.2.102__tar.gz → 0.2.108__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langgraph-api might be problematic.
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/Makefile +4 -5
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/PKG-INFO +3 -3
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/constraints.txt +2 -2
- langgraph_api-0.2.108/langgraph_api/__init__.py +1 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/assistants.py +24 -27
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/cli.py +7 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/feature_flags.py +2 -1
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/graph.py +13 -7
- langgraph_api-0.2.108/langgraph_api/js/base.py +62 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/client.mts +16 -1
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/remote.py +43 -18
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/metadata.py +4 -3
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/middleware/http_logger.py +11 -2
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/models/run.py +8 -1
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/schema.py +34 -14
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/state.py +47 -18
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/stream.py +4 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/openapi.json +80 -6
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/pyproject.toml +2 -2
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/uv.lock +44 -44
- langgraph_api-0.2.102/langgraph_api/__init__.py +0 -1
- langgraph_api-0.2.102/langgraph_api/js/base.py +0 -32
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/.gitignore +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/LICENSE +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/README.md +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/.gitignore +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/Makefile +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/README.md +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/burst.js +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/ramp.js +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/benchmark/weather.js +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/forbidden.txt +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/healthcheck.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/mcp.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/meta.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/openapi.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/runs.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/store.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/threads.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/ui.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/asgi_transport.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/asyncio.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/custom.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/langsmith/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/langsmith/backend.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/langsmith/client.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/middleware.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/noop.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/auth/studio_user.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/command.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/config.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/cron_scheduler.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/errors.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/http.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/http_metrics.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/.gitignore +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/.prettierrc +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/build.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/client.http.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/errors.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/global.d.ts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/package.json +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/schema.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/graph.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/load.hooks.mjs +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/preload.mjs +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/utils/files.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/utils/importMap.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/src/utils/serde.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/sse.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/traceblock.mts +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/tsconfig.json +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/ui.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/yarn.lock +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/logging.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/middleware/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/middleware/private_network.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/middleware/request_id.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/models/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/patch.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/queue_entrypoint.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/route.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/serde.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/server.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/sse.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/store.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/thread_ttl.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/traceblock.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/tunneling/cloudflare.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/utils/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/utils/cache.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/utils/config.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/utils/future.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/utils.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/validation.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/webhook.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/worker.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_license/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_license/validation.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/__init__.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/checkpoint.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/database.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/lifespan.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/metrics.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/ops.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/queue.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/retry.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_runtime/store.py +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/logging.json +0 -0
- {langgraph_api-0.2.102 → langgraph_api-0.2.108}/scripts/create_license.py +0 -0
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/Makefile

@@ -36,7 +36,7 @@ test-license-oss:
 	LANGGRAPH_RUNTIME_EDITION=inmem LANGGRAPH_HTTP='$(HTTP_CONFIG)' LANGGRAPH_STORE='$(STORE_CONFIG)' REDIS_URI=_FAKE DATABASE_URI=:memory: MIGRATIONS_PATH=__inmem__ uv run pytest -v $(TEST)
 
 test-watch-oss:
-	LANGGRAPH_RUNTIME_EDITION=inmem LANGGRAPH_HTTP='$(HTTP_CONFIG)' REDIS_URI=_FAKE DATABASE_URI=:memory: MIGRATIONS_PATH=__inmem__ uv run ptw . -- -x -vv --ff --capture=no $(TEST)
+	LANGGRAPH_RUNTIME_EDITION=inmem LANGGRAPH_HTTP='$(HTTP_CONFIG)' REDIS_URI=_FAKE DATABASE_URI=:memory: MIGRATIONS_PATH=__inmem__ uv run --no-sync ptw . -- -x -vv --ff --capture=no $(TEST)
 
 test: test-license-oss
 test-watch: test-watch-oss
@@ -54,8 +54,7 @@ start:
 	LANGGRAPH_RUNTIME_EDITION=inmem \
 	LANGGRAPH_AES_KEY='$(LANGGRAPH_AES_KEY)' \
 	N_JOBS_PER_WORKER=2 \
-
-	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "single_node": "./tests/graphs/single_node.py:graph", "benchmark": "./tests/graphs/benchmark.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph"}' \
+	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "single_node": "./tests/graphs/single_node.py:graph", "benchmark": "./tests/graphs/benchmark.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph", "simple_runtime": "./tests/graphs/simple_runtime.py:graph"}' \
 	LANGGRAPH_STORE='$(STORE_CONFIG)' \
 	LANGGRAPH_CONFIG='{"agent": {"configurable": {"model_name": "openai"}}}' \
 	LANGSMITH_LANGGRAPH_API_VARIANT=test \
@@ -78,7 +77,7 @@ start-auth-jwt:
 	LANGGRAPH_RUNTIME_EDITION=inmem LANGGRAPH_HTTP='$(HTTP_CONFIG)' \
 	LANGGRAPH_AES_KEY='$(LANGGRAPH_AES_KEY)' \
 	N_JOBS_PER_WORKER=2 \
-	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph"}' \
+	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph", "simple_runtime": "./tests/graphs/simple_runtime.py:graph"}' \
 	LANGGRAPH_STORE='$(STORE_CONFIG)' \
 	LANGGRAPH_AUTH='{"path": "tests/graphs/jwt_auth.py:auth"}' \
 	LANGSMITH_LANGGRAPH_API_VARIANT=test \
@@ -96,7 +95,7 @@ start-auth-jwt:
 start-auth-fastapi-jwt:
 	LANGGRAPH_RUNTIME_EDITION=inmem LANGGRAPH_HTTP='$(HTTP_CONFIG)' \
 	N_JOBS_PER_WORKER=2 \
-	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph"}' \
+	LANGSERVE_GRAPHS='{"agent": "./tests/graphs/agent.py:graph", "other": "./tests/graphs/other.py:make_graph", "weather": "./tests/graphs/weather.py:mk_weather_graph", "searchy": "./tests/graphs/searchy.py:graph", "agent_simple": "./tests/graphs/agent_simple.py:graph", "simple_runtime": "./tests/graphs/simple_runtime.py:graph"}' \
 	LANGGRAPH_STORE='$(STORE_CONFIG)' \
 	LANGGRAPH_AUTH='{"path": "./tests/graphs/fastapi_jwt_auth.py:auth"}' \
 	LANGSMITH_LANGGRAPH_API_VARIANT=test \
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langgraph-api
-Version: 0.2.102
+Version: 0.2.108
 Author-email: Nuno Campos <nuno@langchain.dev>, Will Fu-Hinthorn <will@langchain.dev>
 License: Elastic-2.0
 License-File: LICENSE
@@ -12,8 +12,8 @@ Requires-Dist: jsonschema-rs<0.30,>=0.20.0
 Requires-Dist: langchain-core>=0.3.64
 Requires-Dist: langgraph-checkpoint>=2.0.23
 Requires-Dist: langgraph-runtime-inmem<0.7,>=0.6.0
-Requires-Dist: langgraph-sdk>=0.
-Requires-Dist: langgraph>=0.
+Requires-Dist: langgraph-sdk>=0.2.0
+Requires-Dist: langgraph>=0.4.0
 Requires-Dist: langsmith>=0.3.45
 Requires-Dist: orjson>=3.9.7
 Requires-Dist: pyjwt>=2.9.0
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/constraints.txt

@@ -1,8 +1,8 @@
 # When editing this file, also update docs pages:
 # https://github.com/langchain-ai/langgraph/blob/main/docs/docs/cloud/deployment/setup.md
 # https://github.com/langchain-ai/langgraph/blob/main/docs/docs/cloud/deployment/setup_pyproject.md
-langgraph>=0.
-langgraph-sdk>=0.
+langgraph>=0.4.0
+langgraph-sdk>=0.2.0
 langgraph-checkpoint>=2.0.23
 langchain-core>=0.3.64
 langsmith>=0.3.45
langgraph_api-0.2.108/langgraph_api/__init__.py

@@ -0,0 +1 @@
+__version__ = "0.2.108"
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/api/assistants.py

@@ -12,6 +12,7 @@ from starlette.responses import Response
 from starlette.routing import BaseRoute
 
 from langgraph_api import store as api_store
+from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
 from langgraph_api.graph import get_assistant_id, get_graph
 from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
@@ -55,6 +56,9 @@ def _get_configurable_jsonschema(graph: Pregel) -> dict:
 
     Returns:
         The JSON schema for the configurable part of the graph.
+
+    Whenever we no longer support langgraph < 0.6, we can remove this method
+    in favor of graph.get_context_jsonschema().
     """
     # Otherwise, use the config_schema method.
     config_schema = graph.config_schema()
@@ -112,6 +116,7 @@ def _graph_schemas(graph: Pregel) -> dict:
             f"Failed to get state schema for graph {graph.name} with error: `{str(e)}`"
         )
         state_schema = None
+
     try:
         config_schema = _get_configurable_jsonschema(graph)
     except Exception as e:
@@ -119,18 +124,31 @@
             f"Failed to get config schema for graph {graph.name} with error: `{str(e)}`"
         )
         config_schema = None
+
+    if USE_RUNTIME_CONTEXT_API:
+        try:
+            context_schema = graph.get_context_jsonschema()
+        except Exception as e:
+            logger.warning(
+                f"Failed to get context schema for graph {graph.name} with error: `{str(e)}`"
+            )
+            context_schema = graph.config_schema()
+    else:
+        context_schema = None
+
     return {
         "input_schema": input_schema,
         "output_schema": output_schema,
         "state_schema": state_schema,
         "config_schema": config_schema,
+        "context_schema": context_schema,
     }
 
 
 @retry_db
 async def create_assistant(request: ApiRequest) -> ApiResponse:
-    payload = await request.json(AssistantCreate)
     """Create an assistant."""
+    payload = await request.json(AssistantCreate)
     if assistant_id := payload.get("assistant_id"):
         validate_uuid(assistant_id, "Invalid assistant ID: must be a UUID")
     async with connect() as conn:
@@ -138,6 +156,7 @@ async def create_assistant(request: ApiRequest) -> ApiResponse:
             conn,
             assistant_id or str(uuid4()),
             config=payload.get("config") or {},
+            context=payload.get("context") or {},
             graph_id=payload["graph_id"],
             metadata=payload.get("metadata") or {},
             if_exists=payload.get("if_exists") or "raise",
@@ -309,39 +328,16 @@ async def get_assistant_schemas(
                 "output_schema": schemas.get("output"),
                 "state_schema": schemas.get("state"),
                 "config_schema": schemas.get("config"),
+                "context_schema": schemas.get("context"),
             }
         )
 
-    try:
-        input_schema = graph.get_input_jsonschema()
-    except Exception as e:
-        logger.warning(
-            f"Failed to get input schema for graph {graph.name} with error: `{str(e)}`"
-        )
-        input_schema = None
-    try:
-        output_schema = graph.get_output_jsonschema()
-    except Exception as e:
-        logger.warning(
-            f"Failed to get output schema for graph {graph.name} with error: `{str(e)}`"
-        )
-        output_schema = None
+    schemas = _graph_schemas(graph)
 
-    state_schema = _state_jsonschema(graph)
-    try:
-        config_schema = _get_configurable_jsonschema(graph)
-    except Exception as e:
-        config_schema = None
-        logger.warning(
-            f"Failed to get config schema for graph {graph.name} with error: `{str(e)}`"
-        )
     return ApiResponse(
         {
             "graph_id": assistant["graph_id"],
-            "input_schema": input_schema,
-            "output_schema": output_schema,
-            "state_schema": state_schema,
-            "config_schema": config_schema,
+            **schemas,
         }
     )
 
@@ -359,6 +355,7 @@ async def patch_assistant(
             conn,
             assistant_id,
             config=payload.get("config"),
+            context=payload.get("context"),
             graph_id=payload.get("graph_id"),
             metadata=payload.get("metadata"),
             name=payload.get("name"),
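The net effect of the assistants.py changes is that schema responses gain a `context_schema` entry next to the existing keys. A hedged sketch of the payload shape a client would now receive from the assistant schemas endpoint; the concrete schema bodies below are illustrative placeholders, not values from this diff:

```python
# Illustrative only: "context_schema" is the key this release adds; the schema
# bodies are made-up placeholders, and "context_schema" is None whenever the
# installed langgraph is older than 0.6 (USE_RUNTIME_CONTEXT_API is False).
example_schema_response = {
    "graph_id": "agent",
    "input_schema": {"type": "object", "properties": {"messages": {"type": "array"}}},
    "output_schema": {"type": "object", "properties": {"messages": {"type": "array"}}},
    "state_schema": {"type": "object"},
    "config_schema": {"type": "object", "properties": {"model_name": {"type": "string"}}},
    "context_schema": {"type": "object", "properties": {"user_id": {"type": "string"}}},
}
```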
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/cli.py

@@ -83,6 +83,12 @@ class SecurityConfig(TypedDict, total=False):
     paths: dict[str, dict[str, list]]
 
 
+class CacheConfig(TypedDict, total=False):
+    cache_keys: list[str]
+    ttl_seconds: int
+    max_size: int
+
+
 class AuthConfig(TypedDict, total=False):
     path: str
     """Path to the authentication function in a Python file."""
@@ -112,6 +118,7 @@ class AuthConfig(TypedDict, total=False):
         ]
     }
     """
+    cache: CacheConfig | None
 
 
 def _check_newer_version(pkg: str, timeout: float = 0.2) -> None:
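`AuthConfig` now carries an optional `cache` block typed by the new `CacheConfig`. A hedged example of an auth configuration exercising it: the key names come from the TypedDicts above, while the `authorization` header and the numbers are illustrative (60 and 1000 happen to match the fallbacks used by the auth backend later in this diff):

```python
# Sketch of a LANGGRAPH_AUTH configuration using the new cache block.
# "cache_keys" names request headers whose values form the cache key; the
# header chosen here is an assumption, not something this diff mandates.
auth_config = {
    "path": "./auth.py:auth",
    "cache": {
        "cache_keys": ["authorization"],
        "ttl_seconds": 60,   # how long a cached auth result stays valid
        "max_size": 1000,    # bound on the number of cached entries
    },
}
```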
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/feature_flags.py

@@ -4,4 +4,5 @@ from langgraph.version import __version__
 LANGGRAPH_PY_MINOR = tuple(map(int, __version__.split(".")[:2]))
 
 OMIT_PENDING_SENDS = LANGGRAPH_PY_MINOR >= (0, 5)
-
+USE_RUNTIME_CONTEXT_API = LANGGRAPH_PY_MINOR >= (0, 6)
+USE_NEW_INTERRUPTS = LANGGRAPH_PY_MINOR >= (0, 6)
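Both new flags are derived from the installed langgraph minor version, so callers can branch without importing 0.6-only APIs on older installs. A minimal sketch of the consumer pattern, simplified from the `_graph_schemas` change in api/assistants.py above:

```python
from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API


def context_schema_for(graph):
    """Sketch of the gating pattern: prefer the langgraph >= 0.6 context API,
    otherwise behave as before (simplified from api/assistants.py)."""
    if not USE_RUNTIME_CONTEXT_API:
        return None  # no context-schema concept before langgraph 0.6
    try:
        return graph.get_context_jsonschema()
    except Exception:
        # the diff falls back to the legacy config schema on failure
        return graph.config_schema()
```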
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/graph.py

@@ -23,7 +23,7 @@ from starlette.exceptions import HTTPException
 
 from langgraph_api import asyncio as lg_asyncio
 from langgraph_api import config
-from langgraph_api.feature_flags import
+from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
 from langgraph_api.js.base import BaseRemotePregel, is_js_path
 from langgraph_api.schema import Config
 from langgraph_api.utils.config import run_in_executor, var_child_runnable_config
@@ -78,6 +78,7 @@ async def register_graph(
         graph_id=graph_id,
         metadata={"created_by": "system"},
         config=config or {},
+        context={},
         if_exists="do_nothing",
         name=assistant_name,
         description=description,
@@ -131,16 +132,19 @@ async def get_graph(
     config = lg_config.ensure_config(config)
 
     if store is not None:
-        if
+        if USE_RUNTIME_CONTEXT_API:
             from langgraph._internal._constants import CONFIG_KEY_RUNTIME
             from langgraph.runtime import Runtime
 
-
-
-
-
+            runtime = config["configurable"].get(CONFIG_KEY_RUNTIME)
+            if runtime is None:
+                patched_runtime = Runtime(store=store)
+            elif runtime.store is None:
                 patched_runtime = cast(Runtime, runtime).override(store=store)
-
+            else:
+                patched_runtime = runtime
+
+            config["configurable"][CONFIG_KEY_RUNTIME] = patched_runtime
         else:
             from langgraph.constants import CONFIG_KEY_STORE
 
@@ -412,6 +416,8 @@ def _handle_exception(task: asyncio.Task) -> None:
         task.result()
     except asyncio.CancelledError:
         pass
+    except Exception as e:
+        logger.exception("Task failed", exc_info=e)
     finally:
         # if the task died either with exception or not, we should exit
         sys.exit(1)
langgraph_api-0.2.108/langgraph_api/js/base.py

@@ -0,0 +1,62 @@
+import os
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import Any, Literal
+
+from langchain_core.runnables import Runnable
+
+from langgraph_api.schema import Config
+
+_DC_KWARGS = {"kw_only": True, "slots": True, "frozen": True}
+
+JS_EXTENSIONS = (
+    ".ts",
+    ".mts",
+    ".cts",
+    ".js",
+    ".mjs",
+    ".cjs",
+)
+
+
+def is_js_path(path: str | None) -> bool:
+    if path is None:
+        return False
+    return os.path.splitext(path)[1] in JS_EXTENSIONS
+
+
+@dataclass(**_DC_KWARGS)
+class RemoteInterrupt:
+    raw: dict
+
+    @property
+    def id(self) -> str:
+        return self.raw["id"]
+
+    @property
+    def value(self) -> Any:
+        return self.raw["value"]
+
+    @property
+    def ns(self) -> Sequence[str] | None:
+        return self.raw.get("ns")
+
+    @property
+    def resumable(self) -> bool:
+        return self.raw.get("resumable", True)
+
+    @property
+    def when(self) -> Literal["during"]:
+        return self.raw.get("when", "during")
+
+
+class BaseRemotePregel(Runnable):
+    name: str = "LangGraph"
+
+    graph_id: str
+
+    # Config passed from get_graph()
+    config: Config
+
+    async def get_nodes_executed(self) -> int:
+        return 0
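`RemoteInterrupt` is a thin, frozen wrapper over the raw interrupt payload reported by the JS worker; remote.py below constructs it with `RemoteInterrupt(raw=...)`. A short usage sketch with an invented payload:

```python
from langgraph_api.js.base import RemoteInterrupt

# The payload shape mirrors the keys the properties read; the values are invented.
interrupt = RemoteInterrupt(
    raw={"id": "interrupt-1", "value": {"question": "Approve this step?"}, "ns": ["node:1"]}
)

assert interrupt.id == "interrupt-1"
assert interrupt.resumable is True   # defaults to True when the key is absent
assert interrupt.when == "during"    # defaults to "during" when the key is absent
```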
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/client.mts

@@ -57,6 +57,7 @@ import {
 } from "@langchain/langgraph-api/schema";
 import { filterValidExportPath } from "./src/utils/files.mts";
 import { patchFetch } from "./traceblock.mts";
+import { writeHeapSnapshot } from "node:v8";
 
 const injectConfigFormatter = format((info) => {
   const config = getConfig();
@@ -1058,7 +1059,6 @@ async function main()
     const headers = new Headers(rawHeaders);
     headers.delete("x-langgraph-auth-url");
     headers.delete("x-langgraph-auth-method");
-
     const context = await authenticate(
       new Request(authUrl, { headers, method }),
     );
@@ -1094,6 +1094,21 @@ async function main()
   }
 
   app.get("/ok", (c) => c.json({ ok: true }));
+
+  app.get("/debug/heapdump", async (c) => {
+    try {
+      const target =
+        `/tmp/heapdump-${Date.now()}.heapsnapshot`;
+      await fs.mkdir(path.dirname(target), { recursive: true });
+      const written = writeHeapSnapshot(target);
+      return c.json({ ok: true, written }); // 200
+    } catch (error) {
+      if (error instanceof HTTPException) {
+        return c.json(serializeError(error), error.status);
+      }
+      return c.json(serializeError(error), 500);
+    }
+  });
 
   app.onError((err, c) => {
     logger.error(err);
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/js/remote.py

@@ -25,7 +25,7 @@ from langchain_core.runnables.schema import (
 )
 from langgraph.checkpoint.serde.base import SerializerProtocol
 from langgraph.store.base import GetOp, Item, ListNamespacesOp, PutOp, SearchOp
-from langgraph.types import Command,
+from langgraph.types import Command, PregelTask, Send, StateSnapshot
 from langgraph_sdk import Auth
 from pydantic import BaseModel
 from starlette import types
@@ -42,8 +42,8 @@ from starlette.routing import Route
 
 from langgraph_api import store as api_store
 from langgraph_api.auth.custom import DotDict, ProxyUser
-from langgraph_api.config import LANGGRAPH_AUTH_TYPE
-from langgraph_api.js.base import BaseRemotePregel
+from langgraph_api.config import LANGGRAPH_AUTH, LANGGRAPH_AUTH_TYPE
+from langgraph_api.js.base import BaseRemotePregel, RemoteInterrupt
 from langgraph_api.js.errors import RemoteException
 from langgraph_api.js.sse import SSEDecoder, aiter_lines_raw
 from langgraph_api.route import ApiResponse
@@ -226,6 +226,10 @@ class RemotePregel(BaseRemotePregel):
             if state and isinstance(state, dict) and "config" in state:
                 state = self._convert_state_snapshot(state)
 
+            interrupts: list[RemoteInterrupt] = []
+            if task_interrupts := task.get("interrupts"):
+                interrupts = [RemoteInterrupt(raw=i) for i in task_interrupts]
+
             result.append(
                 PregelTask(
                     task["id"],
@@ -233,19 +237,7 @@
                     tuple(task["path"]) if task.get("path") else tuple(),
                     # TODO: figure out how to properly deserialise errors
                     task.get("error"),
-                    (
-                        tuple(
-                            Interrupt(
-                                value=interrupt["value"],
-                                when=interrupt["when"],
-                                resumable=interrupt.get("resumable", True),
-                                ns=interrupt.get("ns"),
-                            )
-                            for interrupt in task.get("interrupts")
-                        )
-                        if task.get("interrupts")
-                        else []
-                    ),
+                    tuple(interrupts),
                     state,
                 )
             )
@@ -339,6 +331,9 @@ class RemotePregel(BaseRemotePregel):
     def config_schema(self) -> type[BaseModel]:
         raise NotImplementedError()
 
+    def get_context_jsonschema(self) -> dict:
+        raise NotImplementedError()
+
     async def invoke(self, input: Any, config: RunnableConfig | None = None):
         raise NotImplementedError()
 
@@ -374,7 +369,7 @@ async def run_js_process(paths_str: str, watch: bool = False):
             client_file,
             "--skip-schema-cache",
         )
-        if
+        if False
         else ("tsx", "--import", client_preload_file, client_file)
     )
     try:
@@ -388,6 +383,7 @@ async def run_js_process(paths_str: str, watch: bool = False):
                 **os.environ,
             },
         )
+        logger.info("Started JS graphs process [%d]", process.pid)
         code = await process.wait()
         raise Exception(f"JS process exited with code {code}")
     except asyncio.CancelledError:
@@ -448,6 +444,7 @@ async def run_js_http_process(paths_str: str, http_config: dict, watch: bool = F
                 raise
             except Exception:
                 if attempt >= 3:
+                    logger.exception("JS HTTP process failed")
                     raise
                 else:
                     logger.warning(f"Retrying JS HTTP process {3 - attempt} more times...")
@@ -843,11 +840,26 @@ class CustomJsAuthBackend(AuthenticationBackend):
     ls_auth: AuthenticationBackend | None
 
     def __init__(self, disable_studio_auth: bool = False):
+        from langgraph_api.utils.cache import LRUCache
+
         self.ls_auth = None
         if not disable_studio_auth and LANGGRAPH_AUTH_TYPE == "langsmith":
             from langgraph_api.auth.langsmith.backend import LangsmithAuthBackend
 
             self.ls_auth = LangsmithAuthBackend()
+        self.ttl_cache: LRUCache | None = None
+        self.cache_keys: list[str] | None = None
+        if cache := LANGGRAPH_AUTH.get("cache"):
+            keys = cache.get("cache_keys", [])
+            if not isinstance(keys, list):
+                raise ValueError(
+                    f"LANGGRAPH_AUTH.cache.cache_keys must be a list. Got: {keys}"
+                )
+            self.cache_keys = keys
+            self.ttl_cache = LRUCache(
+                max_size=cache.get("max_size", 1000),
+                ttl=cache.get("ttl_seconds", 60),
+            )
 
     async def authenticate(
         self, conn: HTTPConnection
@@ -863,6 +875,16 @@ class CustomJsAuthBackend(AuthenticationBackend):
         headers.pop("content-length", None)
         headers["x-langgraph-auth-url"] = str(conn.url)
         headers["x-langgraph-auth-method"] = conn.scope.get("method")
+        cache_key = None
+        if self.cache_keys:
+            cache_key = tuple(
+                (k, headers.get(k)) for k in self.cache_keys if headers.get(k)
+            )
+        if cache_key:
+            if self.ttl_cache is not None:
+                cached = self.ttl_cache.get(cache_key)
+                if cached:
+                    return cached
 
         res = await _client.post("/auth/authenticate", headers=headers)
         data = res.json()
@@ -873,8 +895,11 @@
             message = data.get("message") or "Unauthorized"
 
             raise HTTPException(status_code=status, detail=message, headers=headers)
+        result = AuthCredentials(data["scopes"]), ProxyUser(DotDict(data["user"]))
+        if cache_key:
+            self.ttl_cache.set(cache_key, result)
 
-        return
+        return result
 
 
 async def handle_js_auth_event(
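The new auth caching relies on `LRUCache` from langgraph_api/utils/cache.py, which the file list above shows as unchanged, so its implementation is not part of this diff. Purely as an illustration of the `max_size`/`ttl` constructor and `get`/`set` surface the backend expects, a minimal TTL-bounded LRU sketch:

```python
import time
from collections import OrderedDict
from typing import Any, Hashable


class TTLLRUCache:
    """Illustrative stand-in for langgraph_api.utils.cache.LRUCache: only its
    get/set/max_size/ttl surface is visible in the hunks above."""

    def __init__(self, max_size: int = 1000, ttl: float = 60.0) -> None:
        self.max_size = max_size
        self.ttl = ttl
        self._data: OrderedDict[Hashable, tuple[float, Any]] = OrderedDict()

    def get(self, key: Hashable) -> Any | None:
        entry = self._data.get(key)
        if entry is None:
            return None
        expires_at, value = entry
        if expires_at < time.monotonic():   # expired: drop the entry and miss
            del self._data[key]
            return None
        self._data.move_to_end(key)         # refresh LRU order on hit
        return value

    def set(self, key: Hashable, value: Any) -> None:
        self._data[key] = (time.monotonic() + self.ttl, value)
        self._data.move_to_end(key)
        while len(self._data) > self.max_size:  # evict least recently used
            self._data.popitem(last=False)
```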
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/metadata.py

@@ -66,9 +66,10 @@ if LANGSMITH_AUTH_ENDPOINT:
     LANGCHAIN_METADATA_ENDPOINT = (
         LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/metadata/submit"
     )
-
-
-
+else:
+    LANGCHAIN_METADATA_ENDPOINT = (
+        LANGSMITH_AUTH_ENDPOINT.rstrip("/") + "/v1/metadata/submit"
+    )
 
 
 def incr_runs(*, incr: int = 1) -> None:
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/middleware/http_logger.py

@@ -12,6 +12,14 @@ asgi = structlog.stdlib.get_logger("asgi")
 PATHS_IGNORE = {"/ok", "/metrics"}
 
 
+def _get_level(status: int | None) -> int:
+    if status is None or status < 400:
+        return logging.INFO
+    if status < 500:
+        return logging.WARNING
+    return logging.ERROR
+
+
 class AccessLoggerMiddleware:
     def __init__(
         self,
@@ -46,6 +54,7 @@ class AccessLoggerMiddleware:
                     info["response"] = message
                 await send(message)
                 asgi.debug(f"ASGI send {message['type']}", **message)
+
         else:
             inner_receive = receive
 
@@ -74,8 +83,8 @@ class AccessLoggerMiddleware:
 
             if method and route and status:
                 HTTP_METRICS_COLLECTOR.record_request(method, route, status, latency)
-
-
+            self.logger.log(
+                _get_level(status),
                 f"{method} {path} {status} {latency}ms",
                 method=method,
                 path=path,
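`_get_level` picks the access-log level from the response status so 4xx and 5xx responses stand out; a quick illustration of the mapping (importing the private helper only for demonstration):

```python
import logging

from langgraph_api.middleware.http_logger import _get_level  # added in 0.2.108

assert _get_level(200) == logging.INFO      # success -> INFO
assert _get_level(None) == logging.INFO     # unknown status treated as INFO
assert _get_level(404) == logging.WARNING   # client errors -> WARNING
assert _get_level(503) == logging.ERROR     # server errors -> ERROR
```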
{langgraph_api-0.2.102 → langgraph_api-0.2.108}/langgraph_api/models/run.py

@@ -18,6 +18,7 @@ from langgraph_api.graph import GRAPHS, get_assistant_id
 from langgraph_api.schema import (
     All,
     Config,
+    Context,
     IfNotExists,
     MetadataInput,
     MultitaskStrategy,
@@ -52,6 +53,8 @@ class RunCreateDict(TypedDict):
     """Metadata for the run."""
     config: Config | None
     """Additional configuration for the run."""
+    context: Context | None
+    """Static context for the run."""
     webhook: str | None
     """Webhook to call when the run is complete."""
 
@@ -283,11 +286,13 @@ async def create_valid_run(
             detail="You must provide a thread_id when resuming.",
         )
     temporary = thread_id is None and payload.get("on_completion", "delete") == "delete"
+    stream_resumable = payload.get("stream_resumable", False)
     stream_mode, multitask_strategy, prevent_insert_if_inflight = assign_defaults(
         payload
     )
     # assign custom headers and checkpoint to config
     config = payload.get("config") or {}
+    context = payload.get("context") or {}
     configurable = config.setdefault("configurable", {})
     if checkpoint_id:
         configurable["checkpoint_id"] = str(checkpoint_id)
@@ -321,6 +326,7 @@ async def create_valid_run(
             "input": payload.get("input"),
             "command": payload.get("command"),
             "config": config,
+            "context": context,
             "stream_mode": stream_mode,
             "interrupt_before": payload.get("interrupt_before"),
             "interrupt_after": payload.get("interrupt_after"),
@@ -328,7 +334,7 @@
             "feedback_keys": payload.get("feedback_keys"),
             "temporary": temporary,
             "subgraphs": payload.get("stream_subgraphs", False),
-            "resumable":
+            "resumable": stream_resumable,
             "checkpoint_during": payload.get("checkpoint_during", True),
         },
         metadata=payload.get("metadata"),
@@ -367,6 +373,7 @@ async def create_valid_run(
         temporary=temporary,
         after_seconds=after_seconds,
         if_not_exists=if_not_exists,
+        stream_resumable=stream_resumable,
         run_create_ms=(
             int(time.time() * 1_000) - request_start_time
             if request_start_time