fraclab-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +1601 -0
- fraclab_sdk/__init__.py +34 -0
- fraclab_sdk/algorithm/__init__.py +13 -0
- fraclab_sdk/algorithm/export.py +1 -0
- fraclab_sdk/algorithm/library.py +378 -0
- fraclab_sdk/cli.py +381 -0
- fraclab_sdk/config.py +54 -0
- fraclab_sdk/devkit/__init__.py +25 -0
- fraclab_sdk/devkit/compile.py +342 -0
- fraclab_sdk/devkit/export.py +354 -0
- fraclab_sdk/devkit/validate.py +1043 -0
- fraclab_sdk/errors.py +124 -0
- fraclab_sdk/materialize/__init__.py +8 -0
- fraclab_sdk/materialize/fsops.py +125 -0
- fraclab_sdk/materialize/hash.py +28 -0
- fraclab_sdk/materialize/materializer.py +241 -0
- fraclab_sdk/models/__init__.py +52 -0
- fraclab_sdk/models/bundle_manifest.py +51 -0
- fraclab_sdk/models/dataspec.py +65 -0
- fraclab_sdk/models/drs.py +47 -0
- fraclab_sdk/models/output_contract.py +111 -0
- fraclab_sdk/models/run_output_manifest.py +119 -0
- fraclab_sdk/results/__init__.py +25 -0
- fraclab_sdk/results/preview.py +150 -0
- fraclab_sdk/results/reader.py +329 -0
- fraclab_sdk/run/__init__.py +10 -0
- fraclab_sdk/run/logs.py +42 -0
- fraclab_sdk/run/manager.py +403 -0
- fraclab_sdk/run/subprocess_runner.py +153 -0
- fraclab_sdk/runtime/__init__.py +11 -0
- fraclab_sdk/runtime/artifacts.py +303 -0
- fraclab_sdk/runtime/data_client.py +123 -0
- fraclab_sdk/runtime/runner_main.py +286 -0
- fraclab_sdk/runtime/snapshot_provider.py +1 -0
- fraclab_sdk/selection/__init__.py +11 -0
- fraclab_sdk/selection/model.py +247 -0
- fraclab_sdk/selection/validate.py +54 -0
- fraclab_sdk/snapshot/__init__.py +12 -0
- fraclab_sdk/snapshot/index.py +94 -0
- fraclab_sdk/snapshot/library.py +205 -0
- fraclab_sdk/snapshot/loader.py +217 -0
- fraclab_sdk/specs/manifest.py +89 -0
- fraclab_sdk/utils/io.py +32 -0
- fraclab_sdk-0.1.0.dist-info/METADATA +1622 -0
- fraclab_sdk-0.1.0.dist-info/RECORD +47 -0
- fraclab_sdk-0.1.0.dist-info/WHEEL +4 -0
- fraclab_sdk-0.1.0.dist-info/entry_points.txt +4 -0
|
@@ -0,0 +1,329 @@
|
|
|
1
|
+
"""Result reader implementation."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from urllib.parse import unquote, urlparse
|
|
7
|
+
|
|
8
|
+
from fraclab_sdk.errors import OutputContainmentError, ResultError
|
|
9
|
+
from fraclab_sdk.models import ArtifactInfo, RunOutputManifest
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def file_uri_to_path(uri: str) -> Path:
    """Translate a ``file://`` URI into a resolved filesystem path.

    Args:
        uri: A file:// URI string.

    Returns:
        The decoded, user-expanded, fully resolved Path.

    Raises:
        ValueError: If URI scheme is not file://.
    """
    parts = urlparse(uri)
    if parts.scheme != "file":
        raise ValueError(f"Expected file:// URI, got: {uri}")
    # Percent-decode the path component before touching the filesystem.
    raw_path = unquote(parts.path)
    candidate = Path(raw_path).expanduser()
    return candidate.resolve()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class ArtifactWithPath:
    """Artifact info with resolved file path."""

    # Manifest metadata for the artifact.
    artifact: ArtifactInfo
    # Local path resolved from the artifact's file URI; None when the
    # artifact has no file backing (e.g. an inline or scalar value).
    path: Path | None
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class ResultReader:
    """Reader for run output results.

    Provides access to the output manifest, artifact metadata and content,
    and the log files produced under ``<run_dir>/output``.
    """

    def __init__(self, run_dir: Path) -> None:
        """Initialize result reader.

        Args:
            run_dir: The run directory.
        """
        self._run_dir = run_dir
        self._output_dir = run_dir / "output"
        # Cached manifest; populated lazily by read_manifest().
        self._manifest: RunOutputManifest | None = None

    @property
    def output_dir(self) -> Path:
        """Get the output directory path."""
        return self._output_dir

    def has_manifest(self) -> bool:
        """Check if output manifest exists."""
        return (self._output_dir / "manifest.json").exists()

    def read_manifest(self) -> RunOutputManifest:
        """Read the output manifest (cached after the first successful read).

        Returns:
            Parsed RunOutputManifest.

        Raises:
            ResultError: If manifest not found or invalid.
        """
        if self._manifest is not None:
            return self._manifest

        manifest_path = self._output_dir / "manifest.json"
        if not manifest_path.exists():
            raise ResultError(f"Output manifest not found: {manifest_path}")

        content = manifest_path.read_text()
        try:
            self._manifest = RunOutputManifest.model_validate_json(content)
        except Exception:
            # Current-schema parse failed; try coercing legacy manifest
            # shapes before giving up.
            try:
                data = json.loads(content)
                data = self._coerce_legacy_manifest(data)
                self._manifest = RunOutputManifest.model_validate(data)
            except Exception as e:  # pragma: no cover - best effort
                raise ResultError(f"Failed to parse output manifest: {e}") from e

        return self._manifest

    def get_status(self) -> str:
        """Get run status from manifest.

        Returns:
            Status string (e.g., "completed", "failed"); "unknown" when the
            manifest carries no status.
        """
        manifest = self.read_manifest()
        return manifest.status or "unknown"

    def get_error(self) -> str | None:
        """Get error message if run failed.

        Returns:
            Error message or None.
        """
        manifest = self.read_manifest()
        return manifest.error

    def list_artifacts(self) -> list[ArtifactInfo]:
        """List all artifacts.

        Returns:
            List of ArtifactInfo objects.
        """
        manifest = self.read_manifest()
        return manifest.list_all_artifacts()

    def get_artifact(self, artifact_key: str) -> ArtifactInfo | None:
        """Get artifact by key.

        Args:
            artifact_key: The artifact key.

        Returns:
            ArtifactInfo or None if not found.
        """
        manifest = self.read_manifest()
        return manifest.get_artifact(artifact_key)

    def get_artifact_path(self, artifact_key: str) -> Path | None:
        """Get file path for an artifact.

        NOTE(review): unlike open_artifact(), this performs no containment
        validation; prefer open_artifact() when the path will be opened.

        Args:
            artifact_key: The artifact key.

        Returns:
            Path to artifact file or None if no file URI.
        """
        artifact = self.get_artifact(artifact_key)
        if artifact is None or artifact.fileUri is None:
            return None
        return file_uri_to_path(artifact.fileUri)

    def get_artifact_with_path(self, artifact_key: str) -> ArtifactWithPath | None:
        """Get artifact with resolved path.

        Args:
            artifact_key: The artifact key.

        Returns:
            ArtifactWithPath or None if artifact not found.
        """
        artifact = self.get_artifact(artifact_key)
        if artifact is None:
            return None

        path = file_uri_to_path(artifact.fileUri) if artifact.fileUri else None
        return ArtifactWithPath(artifact=artifact, path=path)

    def read_artifact_json(self, artifact_key: str) -> dict | list | None:
        """Read JSON artifact content.

        Args:
            artifact_key: The artifact key.

        Returns:
            Parsed JSON content or None if not a JSON artifact.

        Raises:
            ResultError: If the artifact file escapes the output directory.
        """
        artifact = self.get_artifact(artifact_key)
        if artifact is None:
            return None

        if artifact.artifactType not in {"json", "object"}:
            return None

        if artifact.fileUri:
            path = self._safe_artifact_path(file_uri_to_path(artifact.fileUri))
            return json.loads(path.read_text())

        # Inline payloads carry their value under the "data" key.
        if artifact.inline and "data" in artifact.inline:
            return artifact.inline.get("data")

        return None

    def _safe_artifact_path(self, path: Path) -> Path:
        """Ensure artifact path is within output dir.

        Args:
            path: Candidate artifact path.

        Returns:
            The path, unchanged, when containment holds.

        Raises:
            ResultError: If the resolved path escapes the output directory.
        """
        # Resolve BOTH sides: `path` arrives already resolved (from
        # file_uri_to_path) while self._output_dir may be relative or hold
        # symlinks, so comparing against the unresolved directory used to
        # reject perfectly valid paths.
        try:
            path.resolve().relative_to(self._output_dir.resolve())
        except (ValueError, OSError):
            raise ResultError(f"Artifact path escapes output dir: {path}") from None
        return path

    def _coerce_legacy_manifest(self, data: dict) -> dict:
        """Convert legacy manifest shapes (mutates and returns ``data``):

        - top-level artifacts[] -> dataset 'artifacts' with items
        - datasets[].artifacts[] -> datasets[].items with single artifact
        """
        new_datasets = []
        for ds in data.get("datasets", []):
            if "items" in ds:
                # Already in the current shape; keep as-is.
                new_datasets.append(ds)
                continue
            items = [
                {"itemKey": art.get("artifactKey") or art.get("key"), "artifact": art}
                for art in ds.get("artifacts", [])
            ]
            new_datasets.append({"datasetKey": ds.get("datasetKey") or ds.get("key"), "items": items})

        # Legacy top-level artifacts become a synthetic 'artifacts' dataset.
        top_artifacts = data.get("artifacts", [])
        if top_artifacts:
            items = [
                {"itemKey": art.get("artifactKey") or art.get("key"), "artifact": art}
                for art in top_artifacts
            ]
            new_datasets.append({"datasetKey": "artifacts", "items": items})

        data["datasets"] = new_datasets
        data.pop("artifacts", None)
        return data

    def read_artifact_scalar(self, artifact_key: str):
        """Read scalar artifact value.

        Args:
            artifact_key: The artifact key.

        Returns:
            Scalar value or None if not a scalar artifact.
        """
        artifact = self.get_artifact(artifact_key)
        if artifact is None:
            return None

        if artifact.artifactType != "scalar":
            return None

        return artifact.value

    def get_logs_dir(self) -> Path:
        """Get the logs directory path."""
        return self._output_dir / "_logs"

    def _read_log(self, name: str) -> str | None:
        """Return the contents of the named log file, or None when absent."""
        log_path = self.get_logs_dir() / name
        if log_path.exists():
            return log_path.read_text()
        return None

    def read_stdout(self) -> str | None:
        """Read stdout log if exists."""
        return self._read_log("stdout.log")

    def read_stderr(self) -> str | None:
        """Read stderr log if exists."""
        return self._read_log("stderr.log")

    def read_algorithm_log(self) -> str | None:
        """Read algorithm log if exists."""
        return self._read_log("algorithm.log")

    def open_artifact(self, artifact_key: str) -> Path:
        """Get validated file path for an artifact.

        This is the primary interface for UI/CLI to access artifact files.
        It validates that the artifact path is contained within the output
        directory to prevent path traversal attacks.

        Args:
            artifact_key: The artifact key.

        Returns:
            Validated Path to artifact file.

        Raises:
            ResultError: If artifact not found or has no file URI.
            OutputContainmentError: If artifact path is outside output directory.
        """
        artifact = self.get_artifact(artifact_key)
        if artifact is None:
            raise ResultError(f"Artifact not found: {artifact_key}")

        if artifact.fileUri is None:
            raise ResultError(f"Artifact '{artifact_key}' has no file URI (may be a scalar)")

        path = file_uri_to_path(artifact.fileUri)

        # Validate path is within output directory
        self._validate_path_containment(path)

        if not path.exists():
            raise ResultError(f"Artifact file not found: {path}")

        return path

    def _validate_path_containment(self, path: Path) -> Path:
        """Validate that path is contained within output directory.

        Args:
            path: Path to validate.

        Returns:
            Resolved path.

        Raises:
            OutputContainmentError: If path is outside output directory.
        """
        resolved = path.resolve()
        output_resolved = self._output_dir.resolve()

        try:
            resolved.relative_to(output_resolved)
        except ValueError:
            raise OutputContainmentError(str(resolved), str(output_resolved)) from None

        return resolved
|
fraclab_sdk/run/logs.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"""Log management for runs."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _tail(path: Path, max_lines: int, max_bytes: int) -> str:
    """Return the tail of a log file.

    Seeks to the last *max_bytes* of the file instead of loading the whole
    log into memory, then keeps the last *max_lines* lines. Undecodable
    bytes are replaced rather than raising.

    Args:
        path: Log file to read.
        max_lines: Maximum number of trailing lines to keep.
        max_bytes: Maximum number of trailing bytes to read.

    Returns:
        The tail text, or "" when the file does not exist.
    """
    if not path.exists():
        return ""
    size = path.stat().st_size
    with path.open("rb") as fh:
        if size > max_bytes:
            # Jump straight to the last max_bytes; avoids reading a
            # potentially huge log end-to-end just to discard most of it.
            fh.seek(size - max_bytes)
        data = fh.read()
    text = data.decode(errors="replace")
    return "\n".join(text.splitlines()[-max_lines:])
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def tail_stdout(run_dir: Path, max_lines: int = 200, max_bytes: int = 65_536) -> str:
    """Return the tail of the run's stdout log."""
    log_path = run_dir / "output" / "_logs" / "stdout.log"
    return _tail(log_path, max_lines, max_bytes)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def tail_stderr(run_dir: Path, max_lines: int = 200, max_bytes: int = 65_536) -> str:
    """Return the tail of the run's stderr log."""
    log_path = run_dir / "output" / "_logs" / "stderr.log"
    return _tail(log_path, max_lines, max_bytes)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def read_execute(run_dir: Path) -> dict | None:
    """Read execute metadata.

    Args:
        run_dir: The run directory.

    Returns:
        Parsed contents of ``output/_logs/execute.json``, or None when the
        file is missing, unreadable, or not valid JSON (best effort).
    """
    import json

    path = run_dir / "output" / "_logs" / "execute.json"
    if not path.exists():
        return None
    try:
        return json.loads(path.read_text())
    except (OSError, ValueError):
        # Best effort: a vanished or corrupt file yields None. ValueError
        # covers json.JSONDecodeError; anything else should surface.
        return None
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# Explicit public API for `from fraclab_sdk.run.logs import *` consumers.
__all__ = ["tail_stdout", "tail_stderr", "read_execute"]
|