ryeos-node 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ryeos_node/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """RYE Execution Node — CAS-native remote execution server."""
2
+
3
+ __version__ = "0.1.1"
ryeos_node/auth.py ADDED
@@ -0,0 +1,301 @@
1
+ """Authentication for ryeos-node.
2
+
3
+ Dual auth: signed-request (Ed25519) and HMAC (webhook).
4
+ - Signed-request: verifies X-Rye-Signature headers against authorized key files.
5
+ - Webhook: HMAC-SHA256 signature verification via webhook_bindings table.
6
+ """
7
+
8
+ import fnmatch
9
+ import hashlib
10
+ import hmac as hmac_mod
11
+ import logging
12
+ import time
13
+ from dataclasses import dataclass
14
+ from pathlib import Path
15
+ from typing import Optional
16
+
17
+ try:
18
+ import tomllib
19
+ except ModuleNotFoundError:
20
+ import tomli as tomllib # type: ignore[no-redef]
21
+
22
+ from fastapi import Depends, HTTPException, Request, status
23
+
24
+ from ryeos_node.config import Settings, get_settings
25
+
26
+ logger = logging.getLogger(__name__)
27
+
28
+
29
+ # ---------------------------------------------------------------------------
30
+ # Principal (replaces User)
31
+ # ---------------------------------------------------------------------------
32
+
33
+
34
@dataclass
class Principal:
    """Authenticated caller identity.

    fingerprint: Ed25519 key fingerprint (the identity)
    capabilities: fnmatch patterns from authorized key file
    owner: human-readable label from authorized key file
    """
    fingerprint: str         # the caller's Ed25519 key fingerprint
    capabilities: list[str]  # fnmatch patterns checked by require_capability()
    owner: str = ""          # optional display label; "" when not set in the key file
45
+
46
+
47
+ # ---------------------------------------------------------------------------
48
+ # Authorized key file loading + verification
49
+ # ---------------------------------------------------------------------------
50
+
51
+
52
def _load_authorized_key(fingerprint: str, settings: Settings) -> dict:
    """Load and verify an authorized key TOML file.

    The file must be signed by this node's key: its first line is a
    "# rye:signed:<timestamp>:<content_hash>:<sig_b64>:<signer_fp>" header
    covering everything after that line.

    Args:
        fingerprint: caller's key fingerprint; selects
            <authorized_keys_dir>/<fingerprint>.toml.
        settings: node settings (key and config directories).

    Returns:
        The parsed TOML dict for the authorized key.

    Raises:
        HTTPException: 401 on any verification failure; 500 when the node
            signing key is not configured.
    """
    auth_dir = settings.authorized_keys_dir()
    key_file = auth_dir / f"{fingerprint}.toml"

    if not key_file.exists():
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unknown principal")

    raw = key_file.read_text()

    # The signature header must be the very first line of the file.
    lines = raw.split("\n", 1)
    sig_line = lines[0].strip()
    if not sig_line.startswith("# rye:signed:"):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (unsigned)")

    # Header payload: <timestamp>:<content_hash>:<sig_b64>:<signer_fp>.
    # The ISO 8601 timestamp itself contains colons, so split from the right.
    remainder = sig_line[len("# rye:signed:"):]
    rparts = remainder.rsplit(":", 3)
    if len(rparts) != 4:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (malformed sig)")

    _sig_timestamp, content_hash, sig_b64, signer_fp = rparts

    # Only this node's own key may sign authorized key files.
    from rye.primitives.signing import load_keypair, compute_key_fingerprint, verify_signature

    try:
        _, node_pub = load_keypair(Path(settings.signing_key_dir))
        node_fp = compute_key_fingerprint(node_pub)
    except FileNotFoundError:
        raise HTTPException(
            status.HTTP_500_INTERNAL_SERVER_ERROR,
            "Node signing key not configured",
        )

    if signer_fp != node_fp:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (wrong signer)")

    # The signed content is everything after the signature line.
    body = lines[1] if len(lines) > 1 else ""
    actual_hash = hashlib.sha256(body.encode()).hexdigest()
    if actual_hash != content_hash:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (tampered)")

    if not verify_signature(content_hash, sig_b64, node_pub):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (bad signature)")

    # Parse the TOML body only after the signature checks out.
    try:
        data = tomllib.loads(body)
    except Exception:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (invalid TOML)")

    # The file must describe the fingerprint it is named after.
    if data.get("fingerprint") != fingerprint:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Unauthorized key file (fingerprint mismatch)")

    # Enforce optional expiry. tomllib may return either a string or a native
    # date/datetime for `expires_at`; previously a native datetime (or a naive
    # timestamp compared against an aware "now") raised an uncaught
    # TypeError and surfaced as a 500 instead of a clean 401.
    expires_at = data.get("expires_at")
    if expires_at:
        from datetime import datetime, timezone
        try:
            if isinstance(expires_at, datetime):
                exp = expires_at
            else:
                # Covers strings (with optional trailing "Z") and TOML dates.
                exp = datetime.fromisoformat(str(expires_at).replace("Z", "+00:00"))
            if exp.tzinfo is None:
                # Treat naive expiry timestamps as UTC.
                exp = exp.replace(tzinfo=timezone.utc)
        except (ValueError, AttributeError, TypeError):
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Authorized key file (bad expiry)")
        if datetime.now(timezone.utc) > exp:
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Authorized key expired")

    return data
128
+
129
+
130
def _verify_signed_request(request: Request, raw_body: bytes, settings: Settings) -> Principal:
    """Verify Ed25519 signed request headers.

    Extracts the caller's fingerprint from X-Rye-Key-Id, loads their
    authorized key file, verifies the request signature, checks replay,
    and returns a Principal.

    Raises HTTPException(401) on any failure: missing headers, malformed
    key id, stale timestamp, unknown principal, bad signature, or a
    replayed nonce.
    """
    from ryeos_node.replay import get_replay_guard

    # All four headers are required for a signed request.
    key_id = request.headers.get("x-rye-key-id", "")
    timestamp = request.headers.get("x-rye-timestamp", "")
    nonce = request.headers.get("x-rye-nonce", "")
    signature = request.headers.get("x-rye-signature", "")

    if not all([key_id, timestamp, nonce, signature]):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing auth headers")

    # Extract fingerprint from key_id (format: fp:<fingerprint>)
    if not key_id.startswith("fp:"):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid key ID format")
    fingerprint = key_id[3:]

    # Check timestamp freshness (integer epoch seconds).
    try:
        req_time = int(timestamp)
    except ValueError:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid timestamp")
    now = int(time.time())
    if abs(now - req_time) > 300:  # 5 minute window
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Request expired")

    # Load authorized key file (verifies node signature, expiry)
    auth_data = _load_authorized_key(fingerprint, settings)

    # Caller's public key: "ed25519:" prefix followed by base64. The decoded
    # bytes are handed straight to verify_signature, which owns the exact
    # key-encoding semantics.
    public_key_b64 = auth_data.get("public_key", "")
    if not public_key_b64.startswith("ed25519:"):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid public key format")
    import base64
    public_key_pem = base64.b64decode(public_key_b64[8:])  # len("ed25519:") == 8

    # Compute this node's audience (fp:<node_fingerprint>) — binding the
    # signature to an audience keeps it from being replayed against another node.
    from rye.primitives.signing import load_keypair, compute_key_fingerprint, verify_signature

    _, node_pub = load_keypair(Path(settings.signing_key_dir))
    node_fp = compute_key_fingerprint(node_pub)
    audience = f"fp:{node_fp}"

    # Reconstruct string_to_sign and verify
    body_hash = hashlib.sha256(raw_body or b"").hexdigest()

    # Canonical path: query parameters sorted so client and server agree
    # regardless of original parameter order.
    path = request.url.path
    query = str(request.url.query) if request.url.query else ""
    if query:
        from urllib.parse import parse_qsl, urlencode
        params = parse_qsl(query, keep_blank_values=True)
        params.sort()
        canon_path = f"{path}?{urlencode(params)}"
    else:
        canon_path = path

    # NOTE(review): this field order must match the client signer exactly —
    # confirm against the client implementation before changing anything here.
    string_to_sign = "\n".join([
        "ryeos-request-v1",
        request.method.upper(),
        canon_path,
        body_hash,
        timestamp,
        nonce,
        audience,
    ])

    content_hash = hashlib.sha256(string_to_sign.encode()).hexdigest()
    if not verify_signature(content_hash, signature, public_key_pem):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid signature")

    # Replay check runs after signature verification, so only authentic
    # requests can consume nonce-store entries.
    guard = get_replay_guard(settings.cas_base_path)
    if not guard.check_and_record(fingerprint, nonce):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Replayed request")

    return Principal(
        fingerprint=fingerprint,
        capabilities=auth_data.get("capabilities", []),
        owner=auth_data.get("owner", ""),
    )
218
+
219
+
220
def require_capability(principal: Principal, action: str) -> None:
    """Raise 403 if principal doesn't have a matching capability.

    Capabilities use fnmatch patterns (e.g., 'rye.execute.tool.*').
    Returns None when at least one pattern matches *action*.
    """
    granted = any(
        fnmatch.fnmatch(action, pattern)
        for pattern in principal.capabilities
    )
    if granted:
        return
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail=f"Missing required capability: {action}",
    )
232
+
233
+
234
async def get_current_principal(
    request: Request,
    settings: Settings = Depends(get_settings),
) -> Principal:
    """FastAPI dependency: authenticate via signed request headers.

    Awaits the raw request body (needed for the body-hash check) and
    delegates all verification to _verify_signed_request.
    NOTE(review): assumes Starlette caches request.body() so route handlers
    can still read the body afterwards — confirm.

    Raises:
        HTTPException: 401 on any authentication failure.
    """
    raw_body = await request.body()
    return _verify_signed_request(request, raw_body, settings)
241
+
242
+
243
+ # ---------------------------------------------------------------------------
244
+ # HMAC webhook verification (unchanged — external services don't have Ed25519)
245
+ # ---------------------------------------------------------------------------
246
+
247
# Webhook timestamps may be at most 30s in the future (clock skew allowance)
# and at most 5 minutes old.
WEBHOOK_TIMESTAMP_MAX_FUTURE_SECONDS = 30
WEBHOOK_TIMESTAMP_MAX_AGE_SECONDS = 300


def verify_timestamp(timestamp: str) -> None:
    """Reject stale or future webhook timestamps.

    Accepts integer epoch seconds within the skew/age window above.
    Raises HTTPException(401) for missing, non-integer, too-old, or
    too-far-in-the-future values.
    """
    try:
        ts = int(timestamp) if timestamp else None
    except ValueError:
        ts = None
    if ts is None:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid webhook auth")
    now = int(time.time())
    too_far_future = ts - now > WEBHOOK_TIMESTAMP_MAX_FUTURE_SECONDS
    too_old = now - ts > WEBHOOK_TIMESTAMP_MAX_AGE_SECONDS
    if too_far_future or too_old:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid webhook auth")
264
+
265
+
266
def verify_hmac(timestamp: str, raw_body: bytes, secret: str, signature: str) -> None:
    """Verify HMAC-SHA256 signature over timestamp.raw_body.

    Expects *signature* of the form "sha256=<64 hex chars>", computed over
    b"<timestamp>.<raw_body>" with *secret* as the key.
    Raises HTTPException(401) on any mismatch.
    """
    prefix = "sha256="
    if not signature or not signature.startswith(prefix):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid webhook auth")
    received = signature[len(prefix):]
    # A SHA-256 hex digest is exactly 64 characters; reject anything else early.
    if len(received) != 64:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid webhook auth")
    message = b".".join([timestamp.encode(), raw_body])
    expected = hmac_mod.new(secret.encode(), message, hashlib.sha256).hexdigest()
    # Constant-time comparison avoids leaking digest prefixes via timing.
    if not hmac_mod.compare_digest(expected, received):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid webhook auth")
281
+
282
+
283
+ # ---------------------------------------------------------------------------
284
+ # ResolvedExecution — normalized result from dual-auth
285
+ # ---------------------------------------------------------------------------
286
+
287
+
288
@dataclass
class ResolvedExecution:
    """Normalized execution request after auth resolution.

    Both signed-request and webhook paths produce this. The /execute handler
    doesn't know or care which auth path was used.
    """
    principal: Principal   # the authenticated caller
    item_type: str         # kind of item to execute
    item_id: str           # identifier of the item to execute
    project_path: str      # project the execution targets
    parameters: dict       # parameters passed to the execution
    thread: str            # execution thread identifier
    secret_envelope: dict | None = None  # optional secrets payload; None when absent
ryeos_node/config.py ADDED
@@ -0,0 +1,151 @@
1
+ """Configuration for ryeos-node server."""
2
+
3
import logging
from functools import lru_cache
from pathlib import Path
from typing import Any, Dict, Optional

import yaml
from pydantic import ConfigDict, model_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
def _load_node_yaml(node_config_dir: str, cas_base_path: str) -> Dict[str, Any]:
    """Load node.yaml from node config space, return flattened settings dict.

    Maps nested node.yaml fields to flat Settings field names.
    Returns empty dict if file doesn't exist or fails to parse.
    """
    root = Path(node_config_dir) if node_config_dir else Path(cas_base_path) / "config"
    yaml_path = root / ".ai" / "config" / "node" / "node.yaml"

    if not yaml_path.is_file():
        return {}

    try:
        content = yaml_path.read_text(encoding="utf-8")
        # Drop the "# rye:signed:" header line, if present, before parsing.
        pieces = content.splitlines(keepends=True)
        if pieces and pieces[0].startswith("# rye:signed:"):
            content = "".join(pieces[1:])
        parsed = yaml.safe_load(content)
    except Exception:
        logger.warning("Failed to load node.yaml from %s", yaml_path, exc_info=True)
        return {}

    if not isinstance(parsed, dict):
        return {}

    # (yaml section, yaml key) -> flat Settings field name
    field_map = {
        ("identity", "name"): "rye_remote_name",
        ("identity", "signing_key_dir"): "signing_key_dir",
        ("features", "registry"): "registry_enabled",
        ("limits", "max_concurrent"): "max_concurrent",
        ("limits", "max_request_bytes"): "max_request_bytes",
        ("limits", "max_user_storage_bytes"): "max_user_storage_bytes",
    }

    flattened: Dict[str, Any] = {}
    for (section, key), target in field_map.items():
        section_data = parsed.get(section)
        if isinstance(section_data, dict) and key in section_data:
            flattened[target] = section_data[key]
    return flattened
65
+
66
+
67
class Settings(BaseSettings):
    """Server settings.

    Resolution order (highest wins): environment variables / .env file,
    then node.yaml defaults (see _apply_node_yaml_defaults), then the
    field defaults below.
    """

    # SettingsConfigDict (not pydantic.ConfigDict) is the config type meant
    # for BaseSettings: env_file / env_file_encoding are pydantic-settings
    # keys and are not part of pydantic's ConfigDict schema.
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")

    # CAS storage
    cas_base_path: str = "/cas"

    # Remote signing key
    signing_key_dir: str = "/cas/signing"

    # Node config (authorized keys, node identity)
    node_config_dir: str = ""  # defaults to <cas_base_path>/config/

    # Remote identity (server-asserted, set via RYE_REMOTE_NAME env var)
    rye_remote_name: str = "default"

    # Registry
    registry_enabled: bool = False

    # Concurrency
    max_concurrent: int = 8

    # Server
    host: str = "0.0.0.0"
    port: int = 8000

    # Limits
    max_request_bytes: int = 50 * 1024 * 1024  # 50MB
    max_user_storage_bytes: int = 1024 * 1024 * 1024  # 1GB

    @model_validator(mode="before")
    @classmethod
    def _apply_node_yaml_defaults(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Load node.yaml values as defaults — env vars override."""
        node_config_dir = values.get("node_config_dir", "")
        cas_base_path = values.get("cas_base_path", "/cas")
        node_defaults = _load_node_yaml(node_config_dir, cas_base_path)

        # node.yaml provides defaults: only fill fields env/.env left unset.
        for key, val in node_defaults.items():
            if key not in values or values[key] is None:
                values[key] = val

        return values

    def _node_config(self) -> Path:
        """Root of the node config space (authorized keys, node.yaml)."""
        if self.node_config_dir:
            return Path(self.node_config_dir)
        return Path(self.cas_base_path) / "config"

    def authorized_keys_dir(self) -> Path:
        """Directory holding signed <fingerprint>.toml authorization files."""
        return self._node_config() / "authorized_keys"

    def node_yaml_path(self) -> Path:
        """Location of the (optionally signed) node.yaml file."""
        return self._node_config() / ".ai" / "config" / "node" / "node.yaml"

    def hardware_descriptors(self) -> Dict[str, Any]:
        """Read hardware section from node.yaml. Returns empty dict if unavailable."""
        path = self.node_yaml_path()
        if not path.is_file():
            return {}
        try:
            text = path.read_text(encoding="utf-8")
            # Strip the "# rye:signed:" header line, same as _load_node_yaml.
            lines = text.splitlines(keepends=True)
            if lines and lines[0].startswith("# rye:signed:"):
                text = "".join(lines[1:])
            data = yaml.safe_load(text)
            if isinstance(data, dict) and isinstance(data.get("hardware"), dict):
                return data["hardware"]
        except Exception:
            logger.debug("Failed to read hardware from node.yaml", exc_info=True)
        return {}

    def user_cas_root(self, fingerprint: str) -> Path:
        """Per-user content-addressed object store."""
        return Path(self.cas_base_path) / fingerprint / ".ai" / "objects"

    def cache_root(self, fingerprint: str) -> Path:
        """Per-user cache directory."""
        return Path(self.cas_base_path) / fingerprint / "cache"

    def exec_root(self, fingerprint: str) -> Path:
        """Per-user execution records directory."""
        return Path(self.cas_base_path) / fingerprint / "executions"
147
+
148
+
149
@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings instance.

    Cached with lru_cache so env/.env/node.yaml are read once per process
    and every Depends(get_settings) shares one object.
    """
    return Settings()
@@ -0,0 +1,197 @@
1
+ """Local filesystem execution record tracking.
2
+
3
+ Replaces Supabase threads table with running files, append-only
4
+ execution log, and by-id index for O(1) lookup.
5
+ """
6
+
7
+ import datetime
8
+ import json
9
+ import logging
10
+ import os
11
+ from pathlib import Path
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ def register_execution(
17
+ cas_base: str,
18
+ user_fp: str,
19
+ thread_id: str,
20
+ item_type: str,
21
+ item_id: str,
22
+ project_manifest_hash: str,
23
+ user_manifest_hash: str | None,
24
+ project_path: str | None,
25
+ remote_name: str,
26
+ system_version: str,
27
+ ) -> None:
28
+ try:
29
+ running_dir = Path(cas_base) / user_fp / "running"
30
+ running_dir.mkdir(parents=True, exist_ok=True)
31
+ record = {
32
+ "thread_id": thread_id,
33
+ "user_id": user_fp,
34
+ "item_type": item_type,
35
+ "item_id": item_id,
36
+ "execution_mode": "remote",
37
+ "remote_name": remote_name,
38
+ "project_path": project_path,
39
+ "project_manifest_hash": project_manifest_hash,
40
+ "user_manifest_hash": user_manifest_hash,
41
+ "system_version": system_version,
42
+ "state": "running",
43
+ "created_at": datetime.datetime.now(datetime.timezone.utc).isoformat(),
44
+ }
45
+ (running_dir / f"{thread_id}.json").write_text(json.dumps(record))
46
+ except Exception:
47
+ logger.warning("Failed to register execution %s", thread_id, exc_info=True)
48
+
49
+
50
+ def complete_execution(
51
+ cas_base: str,
52
+ user_fp: str,
53
+ thread_id: str,
54
+ state: str,
55
+ snapshot_hash: str | None = None,
56
+ runtime_outputs_bundle_hash: str | None = None,
57
+ merge_conflicts: dict | None = None,
58
+ unmerged_snapshot_hash: str | None = None,
59
+ ) -> None:
60
+ try:
61
+ base = Path(cas_base) / user_fp
62
+ running_file = base / "running" / f"{thread_id}.json"
63
+
64
+ # Read base metadata from running file
65
+ if running_file.exists():
66
+ record = json.loads(running_file.read_text())
67
+ else:
68
+ record = {"thread_id": thread_id}
69
+
70
+ completed_at = datetime.datetime.now(datetime.timezone.utc).isoformat()
71
+ record.update({
72
+ "state": state,
73
+ "completed_at": completed_at,
74
+ "snapshot_hash": snapshot_hash,
75
+ "runtime_outputs_bundle_hash": runtime_outputs_bundle_hash,
76
+ "merge_conflicts": merge_conflicts,
77
+ "unmerged_snapshot_hash": unmerged_snapshot_hash,
78
+ })
79
+
80
+ # Write by-id index
81
+ by_id_dir = base / "executions" / "by-id"
82
+ by_id_dir.mkdir(parents=True, exist_ok=True)
83
+ (by_id_dir / thread_id).write_text(json.dumps(record))
84
+
85
+ # Append to log
86
+ log_dir = base / "logs"
87
+ log_dir.mkdir(parents=True, exist_ok=True)
88
+ log_entry = {
89
+ "thread_id": thread_id,
90
+ "state": state,
91
+ "project_path": record.get("project_path"),
92
+ "completed_at": completed_at,
93
+ "snapshot_hash": snapshot_hash,
94
+ }
95
+ with open(log_dir / "executions.log", "a") as f:
96
+ f.write(json.dumps(log_entry) + "\n")
97
+
98
+ # Remove running file
99
+ if running_file.exists():
100
+ running_file.unlink()
101
+ except Exception:
102
+ logger.warning("Failed to complete execution %s", thread_id, exc_info=True)
103
+
104
+
105
+ def list_executions(
106
+ cas_base: str,
107
+ user_fp: str,
108
+ project_path: str | None = None,
109
+ limit: int = 20,
110
+ ) -> list[dict]:
111
+ base = Path(cas_base) / user_fp
112
+ results: list[dict] = []
113
+
114
+ # Collect in-flight executions from running dir
115
+ running_dir = base / "running"
116
+ if running_dir.is_dir():
117
+ for f in running_dir.iterdir():
118
+ if f.suffix == ".json":
119
+ try:
120
+ rec = json.loads(f.read_text())
121
+ if project_path is None or rec.get("project_path") == project_path:
122
+ results.append(rec)
123
+ except Exception:
124
+ logger.warning("Failed to read running file %s", f, exc_info=True)
125
+
126
+ # Read completed executions from log (most recent first)
127
+ log_file = base / "logs" / "executions.log"
128
+ if log_file.is_file():
129
+ try:
130
+ lines = log_file.read_text().splitlines()
131
+ for line in reversed(lines):
132
+ if len(results) >= limit:
133
+ break
134
+ if not line.strip():
135
+ continue
136
+ try:
137
+ entry = json.loads(line)
138
+ if project_path is None or entry.get("project_path") == project_path:
139
+ results.append(entry)
140
+ except json.JSONDecodeError:
141
+ continue
142
+ except Exception:
143
+ logger.warning("Failed to read execution log", exc_info=True)
144
+
145
+ return results[:limit]
146
+
147
+
148
+ def get_execution(
149
+ cas_base: str,
150
+ user_fp: str,
151
+ thread_id: str,
152
+ ) -> dict | None:
153
+ base = Path(cas_base) / user_fp
154
+
155
+ # Check by-id index first
156
+ by_id_file = base / "executions" / "by-id" / thread_id
157
+ if by_id_file.is_file():
158
+ try:
159
+ return json.loads(by_id_file.read_text())
160
+ except Exception:
161
+ logger.warning("Failed to read by-id record %s", thread_id, exc_info=True)
162
+
163
+ # Fall back to running dir
164
+ running_file = base / "running" / f"{thread_id}.json"
165
+ if running_file.is_file():
166
+ try:
167
+ return json.loads(running_file.read_text())
168
+ except Exception:
169
+ logger.warning("Failed to read running file %s", thread_id, exc_info=True)
170
+
171
+ return None
172
+
173
+
174
def store_conflict_record(
    cas_base: str,
    user_fp: str,
    thread_id: str,
    conflicts: dict,
    unmerged_snapshot: str,
) -> None:
    """Attach merge-conflict details to an execution's by-id record.

    Updates the existing record when present, otherwise creates a minimal
    one. Best-effort: failures are logged, never raised.
    """
    try:
        index_file = Path(cas_base) / user_fp / "executions" / "by-id" / thread_id
        if index_file.is_file():
            record = json.loads(index_file.read_text())
        else:
            index_file.parent.mkdir(parents=True, exist_ok=True)
            record = {"thread_id": thread_id}
        record["merge_conflicts"] = conflicts
        record["unmerged_snapshot_hash"] = unmerged_snapshot
        index_file.write_text(json.dumps(record))
    except Exception:
        logger.warning("Failed to store conflict record %s", thread_id, exc_info=True)