langgraph-api 0.4.1.tar.gz → 0.4.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of langgraph-api has been flagged as a potentially problematic release.
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/PKG-INFO +1 -1
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/ramp.js +22 -11
- langgraph_api-0.4.7/langgraph_api/__init__.py +1 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/runs.py +126 -146
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/threads.py +23 -1
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/asyncio.py +2 -1
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/feature_flags.py +1 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/logging.py +5 -2
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/models/run.py +8 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/schema.py +2 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/stream.py +9 -1
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/headers.py +8 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/openapi.json +45 -0
- langgraph_api-0.4.1/langgraph_api/__init__.py +0 -1
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/.gitignore +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/LICENSE +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/Makefile +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/README.md +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/.gitignore +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/Makefile +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/README.md +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/burst.js +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/clean.js +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/graphs.js +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/package.json +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/update-revision.js +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/weather.js +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/constraints.txt +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/forbidden.txt +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/healthcheck.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/assistants.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/mcp.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/meta.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/openapi.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/store.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/ui.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/asgi_transport.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/custom.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/langsmith/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/langsmith/backend.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/langsmith/client.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/middleware.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/noop.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/auth/studio_user.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/cli.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/command.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/config.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/cron_scheduler.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/errors.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/executor_entrypoint.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/graph.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/http.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/http_metrics.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/.gitignore +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/.prettierrc +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/base.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/build.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/client.http.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/client.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/errors.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/global.d.ts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/isolate-0x130008000-46649-46649-v8.log +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/isolate-0x138008000-44681-44681-v8.log +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/package-lock.json +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/package.json +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/remote.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/schema.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/graph.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/load.hooks.mjs +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/preload.mjs +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/utils/files.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/utils/importMap.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/utils/pythonSchemas.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/src/utils/serde.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/sse.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/traceblock.mts +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/tsconfig.json +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/ui.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/js/yarn.lock +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/metadata.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/middleware/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/middleware/http_logger.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/middleware/private_network.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/middleware/request_id.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/models/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/patch.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/queue_entrypoint.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/route.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/serde.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/server.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/sse.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/state.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/store.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/thread_ttl.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/traceblock.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/tunneling/cloudflare.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/cache.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/config.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/future.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/uuids.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/validation.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/webhook.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/worker.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_license/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_license/validation.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/__init__.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/checkpoint.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/database.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/lifespan.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/metrics.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/ops.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/queue.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/retry.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_runtime/store.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/logging.json +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/pyproject.toml +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/scripts/create_license.py +0 -0
- {langgraph_api-0.4.1 → langgraph_api-0.4.7}/uv.lock +0 -0
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/benchmark/ramp.js

@@ -10,6 +10,7 @@ const failedRuns = new Counter('failed_runs');
 const timeoutErrors = new Counter('timeout_errors');
 const connectionErrors = new Counter('connection_errors');
 const serverErrors = new Counter('server_errors');
+const missingMessageErrors = new Counter('missing_message_errors');
 const otherErrors = new Counter('other_errors');
 
 // URL of your LangGraph server

@@ -36,6 +37,13 @@ for (let i = 1; i <= LEVELS; i++) {
 stages.push({ duration: `${PLATEAU_DURATION}s`, target: LOAD_SIZE * LEVELS});
 stages.push({ duration: '60s', target: 0 }); // Ramp down
 
+// These are rough estimates from running in github actions. Actual results should be better so long as load is 1-1 with jobs available.
+const p95_run_duration = {
+  'sequential': 18000,
+  'parallel': 8500,
+  'single': 1500,
+}
+
 // Test configuration
 export let options = {
   scenarios: {

@@ -47,16 +55,16 @@ export let options = {
     },
   },
   thresholds: {
-
-
-
-    // 'http_req_failed': ['rate<0.05'], // Error rate should be less than 5%
+    'run_duration': [`p(95)<${p95_run_duration[MODE]}`],
+    'successful_runs': [`count>${(PLATEAU_DURATION / (p95_run_duration[MODE] / 1000)) * LOAD_SIZE * LEVELS * 2}`], // Number of expected successful runs per user worst caseduring plateau * max number of users * 2 cause that feels about right
+    'http_req_failed': ['rate<0.01'], // Error rate should be less than 1%
   },
 };
 
 // Main test function
 export default function() {
   const startTime = new Date().getTime();
+  let response;
 
   try {
     // Prepare the request payload

@@ -91,7 +99,7 @@ export default function() {
     }
 
     // Make a single request to the wait endpoint
-
+    response = http.post(url, payload, {
       headers,
       timeout: '120s' // k6 request timeout slightly longer than the server timeout
     });

@@ -110,9 +118,6 @@ export default function() {
       // Record success metrics
       runDuration.add(duration);
       successfulRuns.add(1);
-
-      // Optional: Log successful run details
-      console.log(`Run completed successfully in ${duration/1000}s`);
     } else {
       // Handle failure
       failedRuns.add(1);

@@ -124,12 +129,15 @@ export default function() {
      } else if (response.status === 408 || response.error === 'timeout') {
        timeoutErrors.add(1);
        console.log(`Timeout error: ${response.error}`);
+      } else if (response.status === 200 && response?.body?.messages?.length !== expected_length) {
+        missingMessageErrors.add(1);
+        console.log(response);
+        console.log(`Missing message error: Status ${response.status}, ${JSON.stringify(response.body)}`);
      } else {
        otherErrors.add(1);
-        console.log(`Other error: Status ${response.status}, ${JSON.stringify(response)}`);
+        console.log(`Other error: Status ${response.status}, ${JSON.stringify(response.body)}`);
      }
    }
-
  } catch (error) {
    // Handle exceptions (network errors, etc.)
    failedRuns.add(1);

@@ -142,7 +150,9 @@ export default function() {
      console.log(`Connection error: ${error.message}`);
    } else {
      otherErrors.add(1);
-
+      // Usually we end up with HTML error pages here
+      console.log(response);
+      console.log(`Unexpected error: ${error.message}, Response Body: ${response?.body}`);
    }
  }
 

@@ -180,6 +190,7 @@ export function handleSummary(data) {
      timeout: data.metrics.timeout_errors ? data.metrics.timeout_errors.values.count : 0,
      connection: data.metrics.connection_errors ? data.metrics.connection_errors.values.count : 0,
      server: data.metrics.server_errors ? data.metrics.server_errors.values.count : 0,
+     missingMessage: data.metrics.missing_message_errors ? data.metrics.missing_message_errors.values.count : 0,
      other: data.metrics.other_errors ? data.metrics.other_errors.values.count : 0
    }
  }
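The new `successful_runs` threshold is derived rather than hard-coded. As a rough worked example of the arithmetic it encodes (the MODE, PLATEAU_DURATION, LOAD_SIZE, and LEVELS values below are illustrative assumptions, not the benchmark's actual defaults):

    # Sketch of the successful_runs threshold computed in the diff above.
    # All concrete values here are assumptions for illustration only.
    p95_run_duration = {"sequential": 18000, "parallel": 8500, "single": 1500}  # ms, from the script

    MODE = "single"          # assumed
    PLATEAU_DURATION = 300   # seconds, assumed
    LOAD_SIZE = 10           # virtual users per ramp level, assumed
    LEVELS = 3               # assumed

    # Runs one user can complete during the plateau if every run takes the p95 duration.
    runs_per_user = PLATEAU_DURATION / (p95_run_duration[MODE] / 1000)  # 300 / 1.5 = 200

    # Threshold from the script: runs per user * max concurrent users * 2 (the script's own fudge factor).
    min_successful_runs = runs_per_user * LOAD_SIZE * LEVELS * 2
    print(min_successful_runs)  # 200 * 10 * 3 * 2 = 12000.0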
langgraph_api-0.4.7/langgraph_api/__init__.py (added file)

@@ -0,0 +1 @@
+__version__ = "0.4.7"
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/runs.py

@@ -4,11 +4,12 @@ from typing import Literal, cast
 from uuid import uuid4
 
 import orjson
+import structlog
 from starlette.exceptions import HTTPException
 from starlette.responses import Response, StreamingResponse
 
 from langgraph_api import config
-from langgraph_api.asyncio import ValueEvent
+from langgraph_api.asyncio import ValueEvent
 from langgraph_api.models.run import create_valid_run
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
 from langgraph_api.schema import CRON_FIELDS, RUN_FIELDS

@@ -34,6 +35,8 @@ from langgraph_runtime.database import connect
 from langgraph_runtime.ops import Crons, Runs, Threads
 from langgraph_runtime.retry import retry_db
 
+logger = structlog.stdlib.get_logger(__name__)
+
 
 @retry_db
 async def create_run(request: ApiRequest):

@@ -101,9 +104,7 @@ async def stream_run(
     payload = await request.json(RunCreateStateful)
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid7()
-
-
-    try:
+    async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
         async with connect() as conn:
             run = await create_valid_run(
                 conn,

@@ -113,25 +114,20 @@ async def stream_run(
                 run_id=run_id,
                 request_start_time=request.scope.get("request_start_time_ms"),
             )
-    except Exception:
-        if not sub.cancelled():
-            handle = await sub
-            await handle.__aexit__(None, None, None)
-        raise
 
- [13 removed lines not shown]
+        return EventSourceResponse(
+            Runs.Stream.join(
+                run["run_id"],
+                thread_id=thread_id,
+                cancel_on_disconnect=on_disconnect == "cancel",
+                stream_channel=sub,
+                last_event_id=None,
+            ),
+            headers={
+                "Location": f"/threads/{thread_id}/runs/{run['run_id']}/stream",
+                "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
+            },
+        )
 
 
 async def stream_run_stateless(

@@ -143,8 +139,7 @@ async def stream_run_stateless(
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid7()
     thread_id = uuid4()
-
-    try:
+    async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
         async with connect() as conn:
             run = await create_valid_run(
                 conn,

@@ -155,26 +150,21 @@ async def stream_run_stateless(
                 request_start_time=request.scope.get("request_start_time_ms"),
                 temporary=True,
             )
-    except Exception:
-        if not sub.cancelled():
-            handle = await sub
-            await handle.__aexit__(None, None, None)
-        raise
 
- [14 removed lines not shown]
+        return EventSourceResponse(
+            Runs.Stream.join(
+                run["run_id"],
+                thread_id=run["thread_id"],
+                ignore_404=True,
+                cancel_on_disconnect=on_disconnect == "cancel",
+                stream_channel=sub,
+                last_event_id=None,
+            ),
+            headers={
+                "Location": f"/runs/{run['run_id']}/stream",
+                "Content-Location": f"/runs/{run['run_id']}",
+            },
+        )
 
 
 @retry_db

@@ -184,9 +174,7 @@ async def wait_run(request: ApiRequest):
     payload = await request.json(RunCreateStateful)
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid7()
-
-
-    try:
+    async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
         async with connect() as conn:
             run = await create_valid_run(
                 conn,

@@ -196,25 +184,17 @@ async def wait_run(request: ApiRequest):
                 run_id=run_id,
                 request_start_time=request.scope.get("request_start_time_ms"),
             )
-    except Exception:
-        if not sub.cancelled():
-            handle = await sub
-            await handle.__aexit__(None, None, None)
-        raise
 
-
+        last_chunk = ValueEvent()
 
-
-
-
-            Runs.Stream.join(
+        async def consume():
+            vchunk: bytes | None = None
+            async for mode, chunk, _ in Runs.Stream.join(
                 run["run_id"],
                 thread_id=run["thread_id"],
-                stream_channel=
+                stream_channel=sub,
                 cancel_on_disconnect=on_disconnect == "cancel",
-            )
-        ) as stream:
-            async for mode, chunk, _ in stream:
+            ):
                 if (
                     mode == b"values"
                     or mode == b"updates"

@@ -223,43 +203,47 @@ async def wait_run(request: ApiRequest):
                     vchunk = chunk
                 elif mode == b"error":
                     vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
- [5 removed lines not shown]
+            if vchunk is not None:
+                last_chunk.set(vchunk)
+            else:
+                async with connect() as conn:
+                    thread_iter = await Threads.get(conn, thread_id)
+                    try:
+                        thread = await anext(thread_iter)
+                        last_chunk.set(thread["values"])
+                    except StopAsyncIteration:
+                        await logger.awarning(
+                            f"No checkpoint found for thread {thread_id}",
+                            thread_id=thread_id,
+                        )
+                        last_chunk.set(b"{}")
+
+        # keep the connection open by sending whitespace every 5 seconds
+        # leading whitespace will be ignored by json parsers
+        async def body() -> AsyncIterator[bytes]:
+            stream = asyncio.create_task(consume())
+            while True:
                 try:
- [3 removed lines not shown]
-                    last_chunk.
- [16 removed lines not shown]
-            await stream
-            raise
-
-    return StreamingResponse(
-        body(),
-        media_type="application/json",
-        headers={
-            "Location": f"/threads/{thread_id}/runs/{run['run_id']}/join",
-            "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
-        },
-    )
+                    if stream.done():
+                        # raise stream exception if any
+                        stream.result()
+                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                    break
+                except TimeoutError:
+                    yield b"\n"
+                except asyncio.CancelledError:
+                    stream.cancel()
+                    await stream
+                    raise
+
+        return StreamingResponse(
+            body(),
+            media_type="application/json",
+            headers={
+                "Location": f"/threads/{thread_id}/runs/{run['run_id']}/join",
+                "Content-Location": f"/threads/{thread_id}/runs/{run['run_id']}",
+            },
+        )
 
 
 @retry_db

@@ -270,9 +254,7 @@ async def wait_run_stateless(request: ApiRequest):
     on_disconnect = payload.get("on_disconnect", "continue")
     run_id = uuid7()
     thread_id = uuid4()
-
-
-    try:
+    async with await Runs.Stream.subscribe(run_id, thread_id) as sub:
         async with connect() as conn:
             run = await create_valid_run(
                 conn,

@@ -283,25 +265,18 @@ async def wait_run_stateless(request: ApiRequest):
                 request_start_time=request.scope.get("request_start_time_ms"),
                 temporary=True,
             )
- [7 removed lines not shown]
-    async def consume():
-        vchunk: bytes | None = None
-        async with aclosing(
-            Runs.Stream.join(
+
+        last_chunk = ValueEvent()
+
+        async def consume():
+            vchunk: bytes | None = None
+            async for mode, chunk, _ in Runs.Stream.join(
                 run["run_id"],
                 thread_id=run["thread_id"],
-                stream_channel=
+                stream_channel=sub,
                 ignore_404=True,
                 cancel_on_disconnect=on_disconnect == "cancel",
-            )
-        ) as stream:
-            async for mode, chunk, _ in stream:
+            ):
                 if (
                     mode == b"values"
                     or mode == b"updates"

@@ -310,38 +285,43 @@ async def wait_run_stateless(request: ApiRequest):
                     vchunk = chunk
                 elif mode == b"error":
                     vchunk = orjson.dumps({"__error__": orjson.Fragment(chunk)})
- [32 removed lines not shown]
+            if vchunk is not None:
+                last_chunk.set(vchunk)
+            else:
+                # we can't fetch the thread (it was deleted), so just return empty values
+                await logger.awarning(
+                    "No checkpoint emitted for stateless run",
+                    run_id=run["run_id"],
+                    thread_id=run["thread_id"],
+                )
+                last_chunk.set(b"{}")
+
+        # keep the connection open by sending whitespace every 5 seconds
+        # leading whitespace will be ignored by json parsers
+        async def body() -> AsyncIterator[bytes]:
+            stream = asyncio.create_task(consume())
+            while True:
+                try:
+                    if stream.done():
+                        # raise stream exception if any
+                        stream.result()
+                    yield await asyncio.wait_for(last_chunk.wait(), timeout=5)
+                    break
+                except TimeoutError:
+                    yield b"\n"
+                except asyncio.CancelledError:
+                    stream.cancel("Run stream cancelled")
+                    await stream
+                    raise
+
+        return StreamingResponse(
+            body(),
+            media_type="application/json",
+            headers={
+                "Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}/join",
+                "Content-Location": f"/threads/{run['thread_id']}/runs/{run['run_id']}",
+            },
+        )
 
 
 @retry_db
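All four rewritten handlers follow the same shape: the stream subscription is now opened with `async with await Runs.Stream.subscribe(...)`, so cleanup no longer needs the manual `except Exception` unwinding, and the wait endpoints run a background `consume()` task that resolves a `ValueEvent` while the response body emits a newline every five seconds until the final chunk is ready. A minimal, self-contained sketch of that keep-alive idea using only stdlib asyncio (it does not touch the real `Runs.Stream` or `ValueEvent` APIs):

    import asyncio
    from collections.abc import AsyncIterator


    async def heartbeat_until_done(result: asyncio.Future) -> AsyncIterator[bytes]:
        """Yield whitespace every 5s until `result` resolves, then yield the payload once.

        Leading whitespace is ignored by JSON parsers, so the client still ends up
        with a single valid JSON document once the final chunk arrives.
        """
        while True:
            try:
                yield await asyncio.wait_for(asyncio.shield(result), timeout=5)
                break
            except (TimeoutError, asyncio.TimeoutError):
                # Nothing yet: keep the HTTP connection alive with a newline.
                yield b"\n"


    async def main() -> None:
        loop = asyncio.get_running_loop()
        result: asyncio.Future = loop.create_future()
        # Simulate a run that finishes after 12 seconds.
        loop.call_later(12, result.set_result, b'{"status": "done"}')
        async for chunk in heartbeat_until_done(result):
            print(chunk)


    asyncio.run(main())

In the handlers themselves, `consume()` resolves the event from the run stream and falls back to the thread's latest checkpoint (or `b"{}"`) when no value chunk was emitted.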
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/api/threads.py

@@ -1,3 +1,4 @@
+from typing import get_args
 from uuid import uuid4
 
 from starlette.exceptions import HTTPException

@@ -5,7 +6,7 @@ from starlette.responses import Response
 from starlette.routing import BaseRoute
 
 from langgraph_api.route import ApiRequest, ApiResponse, ApiRoute
-from langgraph_api.schema import THREAD_FIELDS
+from langgraph_api.schema import THREAD_FIELDS, ThreadStreamMode
 from langgraph_api.sse import EventSourceResponse
 from langgraph_api.state import state_snapshot_to_thread_state
 from langgraph_api.utils import (

@@ -293,10 +294,31 @@ async def join_thread_stream(request: ApiRequest):
     validate_stream_id(
         last_event_id, "Invalid last-event-id: must be a valid Redis stream ID"
     )
+
+    # Parse stream_modes parameter - can be single string or comma-separated list
+    stream_modes_param = request.query_params.get("stream_modes")
+    if stream_modes_param:
+        if "," in stream_modes_param:
+            # Handle comma-separated list
+            stream_modes = [mode.strip() for mode in stream_modes_param.split(",")]
+        else:
+            # Handle single value
+            stream_modes = [stream_modes_param]
+        # Validate each mode
+        for mode in stream_modes:
+            if mode not in get_args(ThreadStreamMode):
+                raise HTTPException(
+                    status_code=422, detail=f"Invalid stream mode: {mode}"
+                )
+    else:
+        # Default to run_modes
+        stream_modes = ["run_modes"]
+
     return EventSourceResponse(
         Threads.Stream.join(
             thread_id,
             last_event_id=last_event_id,
+            stream_modes=stream_modes,
         ),
     )
 
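The added block accepts either a single value or a comma-separated list in `?stream_modes=` and validates each entry against the new `ThreadStreamMode` literal. A standalone sketch of the same parse-and-validate step (the literal is copied from the schema.py change below; the helper name is illustrative, and it raises `ValueError` where the route raises a 422):

    from typing import Literal, get_args

    ThreadStreamMode = Literal["lifecycle", "run_modes", "state_update"]


    def parse_stream_modes(raw: str | None) -> list[str]:
        """Parse a ?stream_modes= query value into a validated list of modes."""
        if not raw:
            # No parameter: keep the pre-0.4.7 behaviour of streaming all run events.
            return ["run_modes"]
        modes = [m.strip() for m in raw.split(",")] if "," in raw else [raw]
        for mode in modes:
            if mode not in get_args(ThreadStreamMode):
                raise ValueError(f"Invalid stream mode: {mode}")
        return modes


    print(parse_stream_modes(None))                      # ['run_modes']
    print(parse_stream_modes("lifecycle,state_update"))  # ['lifecycle', 'state_update']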
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/asyncio.py

@@ -162,7 +162,8 @@ class SimpleTaskGroup(AbstractAsyncContextManager["SimpleTaskGroup"]):
         taskset: set[asyncio.Task] | None = None,
         taskgroup_name: str | None = None,
     ) -> None:
-
+        # Copy the taskset to avoid modifying the original set unintentionally (like in lifespan)
+        self.tasks = taskset.copy() if taskset is not None else set()
         self.cancel = cancel
         self.wait = wait
         if taskset:
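The comment on the new line points at an aliasing hazard: if the caller's set (for example the lifespan task set) is stored directly, anything the task group does to its own bookkeeping also mutates the caller's set. A tiny illustration of the aliasing being avoided, using plain sets rather than the actual class (the previous behaviour is inferred from the comment, since the removed line is not shown in this view):

    # Shared reference: both names point at the same set object.
    lifespan_tasks = {"task_a", "task_b"}
    group_tasks = lifespan_tasks
    group_tasks.discard("task_a")       # the caller's set shrinks too
    print(lifespan_tasks)               # {'task_b'}

    # Copying (what SimpleTaskGroup.__init__ now does) keeps the caller's set intact.
    lifespan_tasks = {"task_a", "task_b"}
    group_tasks = lifespan_tasks.copy()
    group_tasks.discard("task_a")
    print(lifespan_tasks)               # {'task_a', 'task_b'} (order may vary)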
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/logging.py

@@ -69,9 +69,12 @@ class AddApiVersion:
     def __call__(
         self, logger: logging.Logger, method_name: str, event_dict: EventDict
     ) -> EventDict:
-
+        try:
+            from langgraph_api import __version__
 
-
+            event_dict["langgraph_api_version"] = __version__
+        except ImportError:
+            pass
         return event_dict
 
 
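`AddApiVersion` is a structlog processor: it receives every event dict and returns it with a `langgraph_api_version` key, skipping the tag if the package cannot be imported. A hedged sketch of wiring an equivalent processor into structlog (this is not the server's actual logging configuration, just the minimal shape):

    import structlog


    def add_api_version(logger, method_name, event_dict):
        """Same shape as AddApiVersion.__call__ in the diff above."""
        try:
            from langgraph_api import __version__

            event_dict["langgraph_api_version"] = __version__
        except ImportError:
            # Version tagging is best-effort; logging must keep working without it.
            pass
        return event_dict


    structlog.configure(
        processors=[
            add_api_version,
            structlog.processors.JSONRenderer(),
        ]
    )

    structlog.get_logger().info("server_started")
    # -> {"event": "server_started", "langgraph_api_version": "0.4.7"} (if langgraph_api is importable)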
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/models/run.py

@@ -106,6 +106,8 @@ class RunCreateDict(TypedDict):
     """Create the thread if it doesn't exist. If False, reply with 404."""
     langsmith_tracer: LangSmithTracer | None
     """Configuration for additional tracing with LangSmith."""
+    durability: str | None
+    """Durability level for the run. Must be one of 'sync', 'async', or 'exit'."""
 
 
 def ensure_ids(

@@ -322,6 +324,11 @@ async def create_valid_run(
     put_time_start = time.time()
     if_not_exists = payload.get("if_not_exists", "reject")
 
+    durability = payload.get("durability")
+    if durability is None:
+        checkpoint_during = payload.get("checkpoint_during")
+        durability = "async" if checkpoint_during in (None, True) else "exit"
+
     run_coro = Runs.put(
         conn,
         assistant_id,

@@ -339,6 +346,7 @@ async def create_valid_run(
             "subgraphs": payload.get("stream_subgraphs", False),
             "resumable": stream_resumable,
             "checkpoint_during": payload.get("checkpoint_during", True),
+            "durability": durability,
         },
         metadata=payload.get("metadata"),
         status="pending",
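The defaulting rule added to `create_valid_run` keeps the new `durability` field backward compatible with the older `checkpoint_during` flag: an explicit `durability` wins, otherwise `checkpoint_during=False` maps to "exit" and anything else to "async". A compact restatement (the function name is illustrative):

    def default_durability(payload: dict) -> str:
        """Mirror of the defaulting logic added to create_valid_run."""
        durability = payload.get("durability")
        if durability is not None:
            return durability  # explicit value always wins
        checkpoint_during = payload.get("checkpoint_during")
        return "async" if checkpoint_during in (None, True) else "exit"


    print(default_durability({}))                            # async
    print(default_durability({"checkpoint_during": False}))  # exit
    print(default_durability({"durability": "sync"}))        # sync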
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/schema.py

@@ -19,6 +19,8 @@ StreamMode = Literal[
     "values", "messages", "updates", "events", "debug", "tasks", "checkpoints", "custom"
 ]
 
+ThreadStreamMode = Literal["lifecycle", "run_modes", "state_update"]
+
 MultitaskStrategy = Literal["reject", "rollback", "interrupt", "enqueue"]
 
 OnConflictBehavior = Literal["raise", "do_nothing"]
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/stream.py

@@ -30,7 +30,7 @@ from langgraph_api import __version__
 from langgraph_api import store as api_store
 from langgraph_api.asyncio import ValueEvent, wait_if_not_done
 from langgraph_api.command import map_cmd
-from langgraph_api.feature_flags import USE_RUNTIME_CONTEXT_API
+from langgraph_api.feature_flags import USE_DURABILITY, USE_RUNTIME_CONTEXT_API
 from langgraph_api.graph import get_graph
 from langgraph_api.js.base import BaseRemotePregel
 from langgraph_api.metadata import HOST, PLAN, USER_API_URL, incr_nodes

@@ -134,6 +134,14 @@ async def astream_state(
     kwargs = run["kwargs"].copy()
     kwargs.pop("webhook", None)
     kwargs.pop("resumable", False)
+    if USE_DURABILITY:
+        checkpoint_during = kwargs.pop("checkpoint_during")
+        if not kwargs.get("durability") and checkpoint_during:
+            kwargs["durability"] = "async" if checkpoint_during else "exit"
+    else:
+        durability = kwargs.pop("durability")
+        if not kwargs.get("checkpoint_during") and durability in ("async", "exit"):
+            kwargs["checkpoint_during"] = durability == "async"
     subgraphs = kwargs.get("subgraphs", False)
     temporary = kwargs.pop("temporary", False)
     context = kwargs.pop("context", None)
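The shim in `astream_state` translates between the two representations at execution time, in whichever direction the `USE_DURABILITY` feature flag requires, so run records written by either an old or a new API server stay executable. A sketch that exercises both branches on plain dicts (it mirrors the hunk, with `.pop()` defaults added so it runs on bare dicts):

    def translate_kwargs(kwargs: dict, use_durability: bool) -> dict:
        """Mirror of the compatibility shim added to astream_state."""
        kwargs = dict(kwargs)
        if use_durability:
            checkpoint_during = kwargs.pop("checkpoint_during", None)
            if not kwargs.get("durability") and checkpoint_during:
                kwargs["durability"] = "async" if checkpoint_during else "exit"
        else:
            durability = kwargs.pop("durability", None)
            if not kwargs.get("checkpoint_during") and durability in ("async", "exit"):
                kwargs["checkpoint_during"] = durability == "async"
        return kwargs


    print(translate_kwargs({"checkpoint_during": True}, use_durability=True))
    # {'durability': 'async'}
    print(translate_kwargs({"durability": "exit"}, use_durability=False))
    # {'checkpoint_during': False}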
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/langgraph_api/utils/headers.py

@@ -59,6 +59,14 @@ def should_include_header(key: str) -> bool:
     Returns:
         True if the header should be included, False otherwise
     """
+    if (
+        key == "x-api-key"
+        or key == "x-service-key"
+        or key == "x-tenant-id"
+        or key == "authorization"
+    ):
+        return False
+
     include_patterns, exclude_patterns = get_header_patterns("configurable_headers")
 
     return pattern_matches(key, include_patterns, exclude_patterns)
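The effect of the new guard is that credential-bearing headers can no longer reach a run's configurable headers, no matter how broad the configured include patterns are; pattern matching only runs for keys that survive the hard-coded deny list. A simplified stand-in (the pattern-matching step is stubbed out, and the lowercase comparison is an assumption of this sketch):

    # Headers that 0.4.7 always strips, before any configured patterns are consulted.
    ALWAYS_EXCLUDED = {"x-api-key", "x-service-key", "x-tenant-id", "authorization"}


    def should_include_header(key: str) -> bool:
        """Simplified stand-in; the real function then defers to pattern_matches()."""
        if key.lower() in ALWAYS_EXCLUDED:
            return False
        return True  # placeholder for pattern_matches(key, include_patterns, exclude_patterns)


    print(should_include_header("authorization"))    # False
    print(should_include_header("x-custom-header"))  # True (subject to configured patterns)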
{langgraph_api-0.4.1 → langgraph_api-0.4.7}/openapi.json

@@ -1550,6 +1550,29 @@
           },
           "name": "Last-Event-ID",
           "in": "header"
+        },
+        {
+          "required": false,
+          "schema": {
+            "anyOf": [
+              {
+                "type": "string",
+                "enum": ["lifecycle", "run_modes", "state_update"]
+              },
+              {
+                "type": "array",
+                "items": {
+                  "type": "string",
+                  "enum": ["lifecycle", "run_modes", "state_update"]
+                }
+              }
+            ],
+            "default": ["run_modes"],
+            "title": "Stream Modes",
+            "description": "Stream modes to control which events are returned. 'lifecycle' returns only run start/end events, 'run_modes' returns all run events (default behavior), 'state_update' returns only state update events."
+          },
+          "name": "stream_modes",
+          "in": "query"
         }
       ],
       "responses": {

@@ -4413,6 +4436,17 @@
           "title": "Checkpoint During",
           "description": "Whether to checkpoint during the run.",
           "default": false
+        },
+        "durability": {
+          "type": "string",
+          "enum": [
+            "sync",
+            "async",
+            "exit"
+          ],
+          "title": "Durability",
+          "description": "Durability level for the run. Must be one of 'sync', 'async', or 'exit'.",
+          "default": "async"
         }
       },
       "type": "object",

@@ -4649,6 +4683,17 @@
           "title": "Checkpoint During",
           "description": "Whether to checkpoint during the run.",
           "default": false
+        },
+        "durability": {
+          "type": "string",
+          "enum": [
+            "sync",
+            "async",
+            "exit"
+          ],
+          "title": "Durability",
+          "description": "Durability level for the run. Must be one of 'sync', 'async', or 'exit'.",
+          "default": "async"
         }
       },
       "type": "object",
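From a client's perspective, the 0.4.7 additions are a `durability` field on run creation and a `stream_modes` query parameter when joining a thread's event stream. A hedged httpx example; the base URL, assistant name, thread ID, and the exact paths are placeholders assumed here — check your deployment's openapi.json for the authoritative routes:

    import httpx

    BASE = "http://localhost:2024"                        # placeholder deployment URL
    THREAD_ID = "00000000-0000-0000-0000-000000000000"    # placeholder thread id

    with httpx.Client(base_url=BASE) as client:
        # New "durability" field alongside the older checkpoint_during flag.
        run = client.post(
            f"/threads/{THREAD_ID}/runs/wait",
            json={
                "assistant_id": "agent",                  # placeholder assistant/graph name
                "input": {"messages": [{"role": "user", "content": "hi"}]},
                "durability": "exit",                     # one of "sync", "async", "exit"
            },
            timeout=120,
        )
        print(run.status_code)

        # New "stream_modes" query parameter on the thread stream join endpoint (path assumed).
        with client.stream(
            "GET",
            f"/threads/{THREAD_ID}/stream",
            params={"stream_modes": "lifecycle,state_update"},
            timeout=None,
        ) as events:
            for line in events.iter_lines():
                print(line)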
langgraph_api-0.4.1/langgraph_api/__init__.py (removed file)

@@ -1 +0,0 @@
-__version__ = "0.4.1"