fraclab-sdk 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. README.md +1601 -0
  2. fraclab_sdk/__init__.py +34 -0
  3. fraclab_sdk/algorithm/__init__.py +13 -0
  4. fraclab_sdk/algorithm/export.py +1 -0
  5. fraclab_sdk/algorithm/library.py +378 -0
  6. fraclab_sdk/cli.py +381 -0
  7. fraclab_sdk/config.py +54 -0
  8. fraclab_sdk/devkit/__init__.py +25 -0
  9. fraclab_sdk/devkit/compile.py +342 -0
  10. fraclab_sdk/devkit/export.py +354 -0
  11. fraclab_sdk/devkit/validate.py +1043 -0
  12. fraclab_sdk/errors.py +124 -0
  13. fraclab_sdk/materialize/__init__.py +8 -0
  14. fraclab_sdk/materialize/fsops.py +125 -0
  15. fraclab_sdk/materialize/hash.py +28 -0
  16. fraclab_sdk/materialize/materializer.py +241 -0
  17. fraclab_sdk/models/__init__.py +52 -0
  18. fraclab_sdk/models/bundle_manifest.py +51 -0
  19. fraclab_sdk/models/dataspec.py +65 -0
  20. fraclab_sdk/models/drs.py +47 -0
  21. fraclab_sdk/models/output_contract.py +111 -0
  22. fraclab_sdk/models/run_output_manifest.py +119 -0
  23. fraclab_sdk/results/__init__.py +25 -0
  24. fraclab_sdk/results/preview.py +150 -0
  25. fraclab_sdk/results/reader.py +329 -0
  26. fraclab_sdk/run/__init__.py +10 -0
  27. fraclab_sdk/run/logs.py +42 -0
  28. fraclab_sdk/run/manager.py +403 -0
  29. fraclab_sdk/run/subprocess_runner.py +153 -0
  30. fraclab_sdk/runtime/__init__.py +11 -0
  31. fraclab_sdk/runtime/artifacts.py +303 -0
  32. fraclab_sdk/runtime/data_client.py +123 -0
  33. fraclab_sdk/runtime/runner_main.py +286 -0
  34. fraclab_sdk/runtime/snapshot_provider.py +1 -0
  35. fraclab_sdk/selection/__init__.py +11 -0
  36. fraclab_sdk/selection/model.py +247 -0
  37. fraclab_sdk/selection/validate.py +54 -0
  38. fraclab_sdk/snapshot/__init__.py +12 -0
  39. fraclab_sdk/snapshot/index.py +94 -0
  40. fraclab_sdk/snapshot/library.py +205 -0
  41. fraclab_sdk/snapshot/loader.py +217 -0
  42. fraclab_sdk/specs/manifest.py +89 -0
  43. fraclab_sdk/utils/io.py +32 -0
  44. fraclab_sdk-0.1.0.dist-info/METADATA +1622 -0
  45. fraclab_sdk-0.1.0.dist-info/RECORD +47 -0
  46. fraclab_sdk-0.1.0.dist-info/WHEEL +4 -0
  47. fraclab_sdk-0.1.0.dist-info/entry_points.txt +4 -0
@@ -0,0 +1,342 @@
1
+ """Algorithm compilation: generate static artifacts from workspace.
2
+
3
+ Compile workflow:
4
+ 1. Import schema.inputspec:INPUT_SPEC → model_json_schema() → dist/params.schema.json
5
+ 2. Import schema.output_contract:OUTPUT_CONTRACT → model_dump() → dist/output_contract.json
6
+ 3. Copy drs.json from bundle → dist/drs.json
7
+ 4. Update manifest.json with files pointers
8
+ """
9
+
10
from __future__ import annotations

import hashlib
import json
import os
import shutil
import subprocess
import sys
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Any

from fraclab_sdk.errors import AlgorithmError
23
+
24
+
25
@dataclass
class CompileLog:
    """Log entry for a single compile subprocess invocation."""

    # Name of the compilation step (e.g. "params_schema", "output_contract").
    step: str
    # Captured standard output of the subprocess.
    stdout: str
    # Captured standard error of the subprocess.
    stderr: str
    # Subprocess exit code; non-zero indicates failure.
    returncode: int
    # ISO-8601 timestamp of when the step ran (local time).
    timestamp: str
34
+
35
+
36
@dataclass
class CompileResult:
    """Result of algorithm compilation."""

    # Path to the generated dist/params.schema.json.
    params_schema_path: Path
    # Path to the generated dist/output_contract.json.
    output_contract_path: Path
    # Path to dist/drs.json (copied from the bundle, or pre-existing).
    drs_path: Path
    # True when manifest.json was rewritten with the files pointers.
    manifest_updated: bool
    # Bundle spec-file hashes (drsSha256 / dsSha256) when a bundle was bound.
    bound_bundle: dict[str, str] | None = None
45
+
46
+
47
def _run_in_subprocess(
    workspace: Path,
    script: str,
    step_name: str,
    log_dir: Path | None = None,
) -> tuple[dict[str, Any], CompileLog]:
    """Run Python script in isolated subprocess with workspace on PYTHONPATH.

    Args:
        workspace: Algorithm workspace directory.
        script: Python script to execute.
        step_name: Name of the compilation step (for logging).
        log_dir: Directory to save logs (optional).

    Returns:
        Tuple of (parsed JSON output, compile log).

    Raises:
        AlgorithmError: If the subprocess fails, times out, or prints
            output that is not valid JSON.
    """
    # Put the workspace first on PYTHONPATH so `schema.*` imports resolve,
    # and disable buffering so captured output is complete even on crash.
    env = {
        "PYTHONPATH": str(workspace),
        "PYTHONUNBUFFERED": "1",
    }

    try:
        result = subprocess.run(
            [sys.executable, "-c", script],
            cwd=workspace,
            env={**os.environ, **env},
            capture_output=True,
            text=True,
            timeout=30,
        )
    except subprocess.TimeoutExpired as e:
        # The 30s timeout previously escaped as a raw TimeoutExpired;
        # surface it as the module's own error type instead.
        raise AlgorithmError(
            f"Compilation step '{step_name}' timed out after 30 seconds"
        ) from e

    # Create log entry
    log = CompileLog(
        step=step_name,
        stdout=result.stdout,
        stderr=result.stderr,
        returncode=result.returncode,
        timestamp=datetime.now().isoformat(),
    )

    # Save log to file if log_dir provided
    if log_dir is not None:
        log_file = log_dir / f"{step_name}.log"
        log_content = (
            f"Step: {step_name}\n"
            f"Timestamp: {log.timestamp}\n"
            f"Return code: {result.returncode}\n"
            f"\n=== STDOUT ===\n{result.stdout}\n"
            f"\n=== STDERR ===\n{result.stderr}\n"
        )
        log_file.write_text(log_content, encoding="utf-8")

    if result.returncode != 0:
        error_summary = result.stderr.strip() or result.stdout.strip() or "Unknown error"
        # Truncate for CLI display, full log is in file
        if len(error_summary) > 500:
            error_summary = error_summary[:500] + "..."
        log_path_hint = f" (full log: {log_dir / f'{step_name}.log'})" if log_dir else ""
        raise AlgorithmError(
            f"Compilation step '{step_name}' failed: {error_summary}{log_path_hint}"
        )

    try:
        return json.loads(result.stdout), log
    except json.JSONDecodeError as e:
        log_path_hint = f" (full log: {log_dir / f'{step_name}.log'})" if log_dir else ""
        raise AlgorithmError(
            f"Invalid JSON output from '{step_name}': {e}{log_path_hint}"
        ) from e
119
+
120
+
121
def _extract_params_schema(workspace: Path, log_dir: Path | None = None) -> dict[str, Any]:
    """Extract params JSON Schema from INPUT_SPEC.

    The import and schema generation run in a subprocess so the
    workspace's own dependencies stay isolated from the SDK process.

    Args:
        workspace: Algorithm workspace directory.
        log_dir: Directory to save logs (optional).

    Returns:
        JSON Schema dict (Pydantic ``model_json_schema()`` output).

    Raises:
        AlgorithmError: If the import fails or schema generation errors.
    """
    # NOTE: the script always exits 0 and reports failures as
    # {"error": ...} JSON so _run_in_subprocess can parse stdout either way.
    script = '''
import json
import sys

try:
    from schema.inputspec import INPUT_SPEC
    model = INPUT_SPEC
    schema = model.model_json_schema()
    print(json.dumps(schema))
except ImportError as e:
    print(json.dumps({"error": f"Failed to import INPUT_SPEC: {e}"}))
    sys.exit(0)
except Exception as e:
    print(json.dumps({"error": f"Failed to generate schema: {e}"}))
    sys.exit(0)
'''

    result, _ = _run_in_subprocess(workspace, script, "params_schema", log_dir)
    if "error" in result:
        raise AlgorithmError(result["error"])
    return result
152
+
153
+
154
def _extract_output_contract(workspace: Path, log_dir: Path | None = None) -> dict[str, Any]:
    """Extract OutputContract from OUTPUT_CONTRACT.

    Runs in a subprocess so the workspace's imports stay isolated from
    the SDK process.

    Args:
        workspace: Algorithm workspace directory.
        log_dir: Directory to save logs (optional).

    Returns:
        OutputContract dict.

    Raises:
        AlgorithmError: If the import or serialization fails.
    """
    # NOTE: the script exits 0 even on failure and reports errors as
    # {"error": ...} JSON so _run_in_subprocess can always parse stdout.
    script = '''
import json
import sys

try:
    from schema.output_contract import OUTPUT_CONTRACT
    # Use model_dump with mode="json" for JSON-serializable output
    if hasattr(OUTPUT_CONTRACT, 'model_dump'):
        data = OUTPUT_CONTRACT.model_dump(mode="json")
    else:
        data = OUTPUT_CONTRACT.dict()
    print(json.dumps(data))
except ImportError as e:
    print(json.dumps({"error": f"Failed to import OUTPUT_CONTRACT: {e}"}))
    sys.exit(0)
except Exception as e:
    print(json.dumps({"error": f"Failed to dump contract: {e}"}))
    sys.exit(0)
'''

    result, _ = _run_in_subprocess(workspace, script, "output_contract", log_dir)
    if "error" in result:
        raise AlgorithmError(result["error"])
    return result
188
+
189
+
190
+ def _compute_file_hash(path: Path) -> str:
191
+ """Compute SHA256 hash of file contents (raw bytes).
192
+
193
+ Args:
194
+ path: File path.
195
+
196
+ Returns:
197
+ Hex-encoded SHA256 hash.
198
+ """
199
+ hasher = hashlib.sha256()
200
+ hasher.update(path.read_bytes())
201
+ return hasher.hexdigest()
202
+
203
+
204
def compile_algorithm(
    workspace: Path,
    bundle_path: Path | None = None,
    skip_inputspec: bool = False,
    skip_output_contract: bool = False,
) -> CompileResult:
    """Compile algorithm workspace to generate static artifacts.

    This generates:
    - dist/params.schema.json (from schema.inputspec:INPUT_SPEC)
    - dist/output_contract.json (from schema.output_contract:OUTPUT_CONTRACT)
    - dist/drs.json (copied from bundle)

    And updates manifest.json with files pointers.

    Args:
        workspace: Path to algorithm workspace directory.
        bundle_path: Path to data bundle (for drs.json). If None, drs.json must exist.
        skip_inputspec: Skip InputSpec compilation (use existing params.schema.json).
        skip_output_contract: Skip OutputContract compilation.

    Returns:
        CompileResult with paths to generated artifacts.

    Raises:
        AlgorithmError: If compilation fails.
    """
    workspace = Path(workspace).resolve()

    if not workspace.is_dir():
        raise AlgorithmError(f"Workspace not found: {workspace}")

    # Validate workspace structure: both files are hard requirements.
    manifest_path = workspace / "manifest.json"
    algorithm_py_path = workspace / "main.py"

    if not manifest_path.exists():
        raise AlgorithmError(f"manifest.json not found in workspace: {workspace}")
    if not algorithm_py_path.exists():
        raise AlgorithmError(f"main.py not found in workspace: {workspace}")

    # Create dist directory and compile logs directory (idempotent).
    dist_dir = workspace / "dist"
    dist_dir.mkdir(exist_ok=True)

    log_dir = dist_dir / "_compile_logs"
    log_dir.mkdir(exist_ok=True)

    # 1. Generate params.schema.json from InputSpec
    params_schema_path = dist_dir / "params.schema.json"
    if not skip_inputspec:
        schema_dir = workspace / "schema"
        inputspec_path = schema_dir / "inputspec.py"
        if not inputspec_path.exists():
            raise AlgorithmError(
                "schema/inputspec.py not found. Required for params schema generation."
            )

        params_schema = _extract_params_schema(workspace, log_dir)
        params_schema_path.write_text(json.dumps(params_schema, indent=2), encoding="utf-8")

    # 2. Generate output_contract.json from OutputContract
    output_contract_path = dist_dir / "output_contract.json"
    if not skip_output_contract:
        schema_dir = workspace / "schema"
        output_contract_file = schema_dir / "output_contract.py"
        if not output_contract_file.exists():
            raise AlgorithmError(
                "schema/output_contract.py not found. Required for output contract generation."
            )

        output_contract = _extract_output_contract(workspace, log_dir)
        output_contract_path.write_text(
            json.dumps(output_contract, indent=2), encoding="utf-8"
        )

    # 3. Copy drs.json from bundle (or use existing)
    drs_path = dist_dir / "drs.json"
    bound_bundle: dict[str, str] | None = None

    if bundle_path is not None:
        bundle_path = Path(bundle_path).resolve()
        if not bundle_path.is_dir():
            raise AlgorithmError(f"Bundle path not found: {bundle_path}")

        bundle_drs = bundle_path / "drs.json"
        bundle_ds = bundle_path / "ds.json"
        bundle_manifest = bundle_path / "manifest.json"

        if not bundle_drs.exists():
            raise AlgorithmError(f"drs.json not found in bundle: {bundle_path}")

        # Copy drs.json (raw bytes to preserve hash)
        shutil.copy2(bundle_drs, drs_path)

        # Extract hash info from bundle manifest if available; fall back to
        # hashing the files ourselves when the manifest omits the digests.
        if bundle_manifest.exists():
            try:
                manifest = json.loads(bundle_manifest.read_text())
                spec_files = manifest.get("specFiles", {})
                bound_bundle = {
                    "drsSha256": spec_files.get("drsSha256") or _compute_file_hash(bundle_drs),
                }
                if bundle_ds.exists():
                    bound_bundle["dsSha256"] = spec_files.get("dsSha256") or _compute_file_hash(
                        bundle_ds
                    )
            except (json.JSONDecodeError, KeyError):
                # Best-effort: a malformed bundle manifest only loses the
                # bound-bundle hashes; it does not fail the compile.
                pass
    elif not drs_path.exists():
        raise AlgorithmError(
            "drs.json not found in dist/. Provide --bundle to copy from bundle."
        )

    # 4. Update manifest.json with files pointers
    manifest = json.loads(manifest_path.read_text())

    files = manifest.get("files", {})
    files["paramsSchemaPath"] = "dist/params.schema.json"
    files["outputContractPath"] = "dist/output_contract.json"
    files["drsPath"] = "dist/drs.json"
    manifest["files"] = files

    # Add bound bundle info if available
    if bound_bundle:
        manifest["boundBundle"] = bound_bundle

    manifest_path.write_text(json.dumps(manifest, indent=2), encoding="utf-8")

    return CompileResult(
        params_schema_path=params_schema_path,
        output_contract_path=output_contract_path,
        drs_path=drs_path,
        manifest_updated=True,
        bound_bundle=bound_bundle,
    )
340
+
341
+
342
# Public API of this module.
__all__ = ["compile_algorithm", "CompileResult"]
@@ -0,0 +1,354 @@
1
+ """Algorithm export: package workspace for distribution.
2
+
3
+ Export creates a distributable algorithm package containing:
4
+ - main.py (required)
5
+ - manifest.json (required)
6
+ - dist/params.schema.json (required)
7
+ - dist/output_contract.json (required)
8
+ - dist/drs.json (required)
9
+ - README.md (optional)
10
+ - schema/** (optional, source code)
11
+ - examples/** (optional)
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import shutil
17
+ import zipfile
18
+ from dataclasses import dataclass
19
+ from pathlib import Path
20
+
21
+ from fraclab_sdk.errors import AlgorithmError
22
+
23
# Files that must exist in dist/ before export (produced by
# 'fraclab-sdk algo compile').
REQUIRED_DIST_FILES = [
    "params.schema.json",
    "output_contract.json",
    "drs.json",
]

# Required workspace files
REQUIRED_WORKSPACE_FILES = [
    "main.py",
    "manifest.json",
]

# Patterns to include (relative to workspace)
INCLUDE_PATTERNS = [
    "main.py",
    "manifest.json",
    "dist/params.schema.json",
    "dist/output_contract.json",
    "dist/drs.json",
    "README.md",
    "schema/**",
    "examples/**",
]

# Patterns to always reject. "*." entries match file-name suffixes; the
# rest match names/path components (see _should_reject).
REJECT_PATTERNS = [
    "__pycache__",
    ".DS_Store",
    ".idea",
    ".git",
    ".gitignore",
    ".vscode",
    "*.pyc",
    "*.pyo",
    "*.egg-info",
    ".eggs",
    "*.tmp",
    "*.temp",
    "*.bak",
    ".pytest_cache",
    ".mypy_cache",
    ".ruff_cache",
]
67
+
68
+
69
@dataclass
class ExportResult:
    """Result of algorithm export."""

    # Final package location (.zip file or directory).
    output_path: Path
    # Workspace-relative paths that were packaged.
    files_included: list[str]
    # Workspace-relative paths that were skipped (unsafe symlink,
    # escapes workspace, or matched a rejection pattern).
    files_rejected: list[str]
76
+
77
+
78
def _should_reject(path: Path, workspace: Path) -> bool:
    """Check if a path should be rejected.

    Args:
        path: Path to check.
        workspace: Workspace root.

    Returns:
        True if path should be rejected.
    """
    rel_path = path.relative_to(workspace)
    name = path.name

    for pattern in REJECT_PATTERNS:
        if pattern.startswith("*."):
            # Extension pattern: match the file name's suffix.
            if name.endswith(pattern[1:]):
                return True
        # Exact name match, or the pattern appears as a whole path
        # component. (Previously this was a substring test on the full
        # path string, which wrongly rejected e.g. ".github/..." for the
        # ".git" pattern.)
        elif name == pattern or pattern in rel_path.parts:
            return True

    # Reject hidden files/directories
    return name.startswith(".")
100
+
101
+
102
+ def _is_path_contained(path: Path, root: Path) -> bool:
103
+ """Check if a path is contained within root (no directory traversal).
104
+
105
+ Args:
106
+ path: Path to check (will be resolved).
107
+ root: Root directory (will be resolved).
108
+
109
+ Returns:
110
+ True if path is within root.
111
+ """
112
+ try:
113
+ resolved = path.resolve()
114
+ root_resolved = root.resolve()
115
+ resolved.relative_to(root_resolved)
116
+ return True
117
+ except ValueError:
118
+ return False
119
+
120
+
121
def _check_symlink_safety(path: Path, workspace: Path) -> tuple[bool, str | None]:
    """Check if a symlink is safe to include.

    Non-symlinks are always considered safe; a symlink is safe only when
    its resolved target stays inside the workspace.

    Args:
        path: Path to check.
        workspace: Workspace root.

    Returns:
        Tuple of (is_safe, reason_if_unsafe).
    """
    if not path.is_symlink():
        return True, None

    # Resolve the symlink target. Resolution itself can fail (e.g. a
    # symlink loop raises RuntimeError/OSError) — treat that as unsafe too.
    try:
        target = path.resolve()
    except (OSError, RuntimeError) as e:
        return False, f"Failed to resolve symlink: {e}"

    # Check if target is within workspace
    if not _is_path_contained(target, workspace):
        return False, f"Symlink points outside workspace: {target}"

    return True, None
145
+
146
+
147
def _collect_files(workspace: Path) -> tuple[list[Path], list[Path]]:
    """Collect files to include in export.

    Args:
        workspace: Workspace root.

    Returns:
        Tuple of (included_files, rejected_files).

    Raises:
        AlgorithmError: If required files are missing or symlinks escape workspace.
    """
    included: list[Path] = []
    rejected: list[Path] = []

    def check_and_add(path: Path, required: bool = False) -> None:
        """Check a file and add to included/rejected lists.

        Args:
            path: Path to check.
            required: If True, raise error if file is unsafe or missing.
        """
        if not path.exists():
            if required:
                raise AlgorithmError(f"Required file not found: {path.relative_to(workspace)}")
            return

        # Symlinks must resolve to a target inside the workspace.
        is_safe, reason = _check_symlink_safety(path, workspace)
        if not is_safe:
            if required:
                raise AlgorithmError(f"Required file is unsafe: {path.relative_to(workspace)} - {reason}")
            rejected.append(path)
            return

        # Even non-symlinks must resolve inside the workspace (no traversal).
        if not _is_path_contained(path, workspace):
            if required:
                raise AlgorithmError(f"Required file escapes workspace: {path.relative_to(workspace)}")
            rejected.append(path)
            return

        included.append(path)

    # Required workspace files (main.py, manifest.json)
    for filename in REQUIRED_WORKSPACE_FILES:
        check_and_add(workspace / filename, required=True)

    # Required dist files (compiled artifacts)
    dist_dir = workspace / "dist"
    for filename in REQUIRED_DIST_FILES:
        path = dist_dir / filename
        if not path.exists():
            # Fix: the message previously lost its {filename} placeholder
            # and printed a literal "(unknown)".
            raise AlgorithmError(
                f"Required dist file not found: dist/{filename}. "
                f"Run 'fraclab-sdk algo compile' first."
            )
        check_and_add(path, required=True)

    # Optional files
    readme = workspace / "README.md"
    if readme.exists():
        check_and_add(readme, required=False)

    # Optional directories (schema, examples)
    for dir_name in ["schema", "examples"]:
        dir_path = workspace / dir_name
        if dir_path.is_dir():
            for file_path in dir_path.rglob("*"):
                if file_path.is_file():
                    # Check symlink and containment first
                    is_safe, _ = _check_symlink_safety(file_path, workspace)
                    if not is_safe:
                        rejected.append(file_path)
                        continue

                    if not _is_path_contained(file_path, workspace):
                        rejected.append(file_path)
                        continue

                    # Then check standard rejection patterns (caches, VCS, ...)
                    if _should_reject(file_path, workspace):
                        rejected.append(file_path)
                    else:
                        included.append(file_path)

    return included, rejected
234
+
235
+
236
def _validate_workspace(workspace: Path) -> None:
    """Validate workspace has required structure.

    Args:
        workspace: Workspace path.

    Raises:
        AlgorithmError: If validation fails.
    """
    if not workspace.is_dir():
        raise AlgorithmError(f"Workspace not found: {workspace}")

    # Fix: both messages below previously lost their {filename}
    # placeholder and printed a literal "(unknown)".
    for filename in REQUIRED_WORKSPACE_FILES:
        if not (workspace / filename).exists():
            raise AlgorithmError(f"Required file not found: {filename}")

    dist_dir = workspace / "dist"
    if not dist_dir.is_dir():
        raise AlgorithmError(
            "dist/ directory not found. Run 'fraclab-sdk algo compile' first."
        )

    for filename in REQUIRED_DIST_FILES:
        if not (dist_dir / filename).exists():
            raise AlgorithmError(
                f"dist/{filename} not found. Run 'fraclab-sdk algo compile' first."
            )
263
+
264
+
265
def export_algorithm_package(
    workspace: Path,
    output: Path,
    auto_compile: bool = False,
    bundle_path: Path | None = None,
) -> ExportResult:
    """Export algorithm workspace as a distributable package.

    Args:
        workspace: Path to algorithm workspace.
        output: Output path (.zip file or directory).
        auto_compile: If True, run compile before export if dist/ is missing.
        bundle_path: Bundle path for auto-compile.

    Returns:
        ExportResult with export details.

    Raises:
        AlgorithmError: If export fails.
    """
    workspace = Path(workspace).resolve()
    output = Path(output).resolve()

    # Compile first when requested and any required artifact is absent.
    if auto_compile:
        dist_dir = workspace / "dist"
        missing = [f for f in REQUIRED_DIST_FILES if not (dist_dir / f).exists()]
        if not dist_dir.exists() or missing:
            from fraclab_sdk.devkit.compile import compile_algorithm

            compile_algorithm(workspace, bundle_path=bundle_path)

    # Validate workspace structure before touching the output location.
    _validate_workspace(workspace)

    # Decide what goes into the package (and what gets filtered out).
    included, rejected = _collect_files(workspace)

    # A ".zip" suffix selects archive export; anything else is a directory.
    writer = _export_to_zip if output.suffix == ".zip" else _export_to_dir
    writer(workspace, output, included)

    return ExportResult(
        output_path=output,
        files_included=[str(p.relative_to(workspace)) for p in included],
        files_rejected=[str(p.relative_to(workspace)) for p in rejected],
    )
316
+
317
+
318
+ def _export_to_zip(workspace: Path, output: Path, files: list[Path]) -> None:
319
+ """Export files to a zip archive.
320
+
321
+ Args:
322
+ workspace: Workspace root.
323
+ output: Output zip path.
324
+ files: Files to include.
325
+ """
326
+ output.parent.mkdir(parents=True, exist_ok=True)
327
+
328
+ with zipfile.ZipFile(output, "w", zipfile.ZIP_DEFLATED) as zf:
329
+ for file_path in files:
330
+ rel_path = file_path.relative_to(workspace)
331
+ zf.write(file_path, rel_path)
332
+
333
+
334
+ def _export_to_dir(workspace: Path, output: Path, files: list[Path]) -> None:
335
+ """Export files to a directory.
336
+
337
+ Args:
338
+ workspace: Workspace root.
339
+ output: Output directory path.
340
+ files: Files to include.
341
+ """
342
+ if output.exists():
343
+ shutil.rmtree(output)
344
+
345
+ output.mkdir(parents=True)
346
+
347
+ for file_path in files:
348
+ rel_path = file_path.relative_to(workspace)
349
+ dest = output / rel_path
350
+ dest.parent.mkdir(parents=True, exist_ok=True)
351
+ shutil.copy2(file_path, dest)
352
+
353
+
354
# Public API of this module.
__all__ = ["export_algorithm_package", "ExportResult"]