yanex 0.1.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in those registries.
- yanex/__init__.py +74 -0
- yanex/api.py +507 -0
- yanex/cli/__init__.py +3 -0
- yanex/cli/_utils.py +114 -0
- yanex/cli/commands/__init__.py +3 -0
- yanex/cli/commands/archive.py +177 -0
- yanex/cli/commands/compare.py +320 -0
- yanex/cli/commands/confirm.py +198 -0
- yanex/cli/commands/delete.py +203 -0
- yanex/cli/commands/list.py +243 -0
- yanex/cli/commands/run.py +625 -0
- yanex/cli/commands/show.py +560 -0
- yanex/cli/commands/unarchive.py +177 -0
- yanex/cli/commands/update.py +282 -0
- yanex/cli/filters/__init__.py +8 -0
- yanex/cli/filters/base.py +286 -0
- yanex/cli/filters/time_utils.py +178 -0
- yanex/cli/formatters/__init__.py +7 -0
- yanex/cli/formatters/console.py +325 -0
- yanex/cli/main.py +45 -0
- yanex/core/__init__.py +3 -0
- yanex/core/comparison.py +549 -0
- yanex/core/config.py +587 -0
- yanex/core/constants.py +16 -0
- yanex/core/environment.py +146 -0
- yanex/core/git_utils.py +153 -0
- yanex/core/manager.py +555 -0
- yanex/core/storage.py +682 -0
- yanex/ui/__init__.py +1 -0
- yanex/ui/compare_table.py +524 -0
- yanex/utils/__init__.py +3 -0
- yanex/utils/exceptions.py +70 -0
- yanex/utils/validation.py +165 -0
- yanex-0.1.0.dist-info/METADATA +251 -0
- yanex-0.1.0.dist-info/RECORD +39 -0
- yanex-0.1.0.dist-info/WHEEL +5 -0
- yanex-0.1.0.dist-info/entry_points.txt +2 -0
- yanex-0.1.0.dist-info/licenses/LICENSE +21 -0
- yanex-0.1.0.dist-info/top_level.txt +1 -0
yanex/core/storage.py
ADDED
@@ -0,0 +1,682 @@
"""
Storage management for experiments.
"""

import json
import shutil
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

from ..utils.exceptions import StorageError
from .config import save_yaml_config


class ExperimentStorage:
    """Manages file storage for experiments."""

    def __init__(self, experiments_dir: Path = None):
        """
        Initialize experiment storage.

        Args:
            experiments_dir: Base directory for experiments, defaults to ./experiments
        """
        if experiments_dir is None:
            experiments_dir = Path.cwd() / "experiments"

        self.experiments_dir = experiments_dir
        self.experiments_dir.mkdir(parents=True, exist_ok=True)

    def create_experiment_directory(self, experiment_id: str) -> Path:
        """
        Create directory structure for experiment.

        Args:
            experiment_id: Unique experiment identifier

        Returns:
            Path to experiment directory

        Raises:
            StorageError: If directory creation fails
        """
        exp_dir = self.experiments_dir / experiment_id

        if exp_dir.exists():
            raise StorageError(f"Experiment directory already exists: {exp_dir}")

        try:
            exp_dir.mkdir(parents=True)
            (exp_dir / "artifacts").mkdir()
        except Exception as e:
            raise StorageError(f"Failed to create experiment directory: {e}") from e

        return exp_dir

    def get_experiment_directory(
        self, experiment_id: str, include_archived: bool = False
    ) -> Path:
        """
        Get path to experiment directory.

        Args:
            experiment_id: Experiment identifier
            include_archived: Whether to search archived experiments too

        Returns:
            Path to experiment directory

        Raises:
            StorageError: If experiment directory doesn't exist
        """
        exp_dir = self.experiments_dir / experiment_id

        if exp_dir.exists():
            return exp_dir

        if include_archived:
            archive_dir = self.experiments_dir / "archived"
            archive_path = archive_dir / experiment_id
            if archive_path.exists():
                return archive_path

        # If we get here, experiment not found
        locations = [f"{exp_dir}"]
        if include_archived:
            locations.append(f"{archive_path}")

        raise StorageError(f"Experiment directory not found in: {', '.join(locations)}")

    def get_experiment_dir(
        self, experiment_id: str, include_archived: bool = False
    ) -> Path:
        """
        Alias for get_experiment_directory to match show command usage.
        """
        return self.get_experiment_directory(experiment_id, include_archived)

    def save_metadata(
        self,
        experiment_id: str,
        metadata: Dict[str, Any],
        include_archived: bool = False,
    ) -> None:
        """
        Save experiment metadata.

        Args:
            experiment_id: Experiment identifier
            metadata: Metadata dictionary to save
            include_archived: Whether to search archived experiments too

        Raises:
            StorageError: If metadata cannot be saved
        """
        exp_dir = self.get_experiment_directory(experiment_id, include_archived)
        metadata_path = exp_dir / "metadata.json"

        # Add timestamp
        metadata_with_timestamp = metadata.copy()
        metadata_with_timestamp["saved_at"] = datetime.utcnow().isoformat()

        try:
            with metadata_path.open("w", encoding="utf-8") as f:
                json.dump(metadata_with_timestamp, f, indent=2, sort_keys=True)
        except Exception as e:
            raise StorageError(f"Failed to save metadata: {e}") from e

    def load_metadata(
        self, experiment_id: str, include_archived: bool = False
    ) -> Dict[str, Any]:
        """
        Load experiment metadata.

        Args:
            experiment_id: Experiment identifier
            include_archived: Whether to search archived experiments too

        Returns:
            Metadata dictionary

        Raises:
            StorageError: If metadata cannot be loaded
        """
        exp_dir = self.get_experiment_directory(experiment_id, include_archived)
        metadata_path = exp_dir / "metadata.json"

        if not metadata_path.exists():
            raise StorageError(f"Metadata file not found: {metadata_path}")

        try:
            with metadata_path.open("r", encoding="utf-8") as f:
                return json.load(f)
        except Exception as e:
            raise StorageError(f"Failed to load metadata: {e}") from e

    def save_config(self, experiment_id: str, config: Dict[str, Any]) -> None:
        """
        Save experiment configuration.

        Args:
            experiment_id: Experiment identifier
            config: Configuration dictionary to save

        Raises:
            StorageError: If configuration cannot be saved
        """
        exp_dir = self.get_experiment_directory(experiment_id)
        config_path = exp_dir / "config.yaml"

        try:
            save_yaml_config(config, config_path)
        except Exception as e:
            raise StorageError(f"Failed to save config: {e}") from e

    def load_config(
        self, experiment_id: str, include_archived: bool = False
    ) -> Dict[str, Any]:
        """
        Load experiment configuration.

        Args:
            experiment_id: Experiment identifier
            include_archived: Whether to search archived experiments too

        Returns:
            Configuration dictionary

        Raises:
            StorageError: If configuration cannot be loaded
        """
        exp_dir = self.get_experiment_directory(experiment_id, include_archived)
        config_path = exp_dir / "config.yaml"

        if not config_path.exists():
            return {}

        try:
            from .config import load_yaml_config

            return load_yaml_config(config_path)
        except Exception as e:
            raise StorageError(f"Failed to load config: {e}") from e

    def save_results(self, experiment_id: str, results: List[Dict[str, Any]]) -> None:
        """
        Save experiment results.

        Args:
            experiment_id: Experiment identifier
            results: List of result dictionaries

        Raises:
            StorageError: If results cannot be saved
        """
        exp_dir = self.get_experiment_directory(experiment_id)
        results_path = exp_dir / "results.json"

        try:
            with results_path.open("w", encoding="utf-8") as f:
                json.dump(results, f, indent=2)
        except Exception as e:
            raise StorageError(f"Failed to save results: {e}") from e

    def load_results(
        self, experiment_id: str, include_archived: bool = False
    ) -> List[Dict[str, Any]]:
        """
        Load experiment results.

        Args:
            experiment_id: Experiment identifier
            include_archived: Whether to search archived experiments too

        Returns:
            List of result dictionaries

        Raises:
            StorageError: If results cannot be loaded
        """
        exp_dir = self.get_experiment_directory(experiment_id, include_archived)
        results_path = exp_dir / "results.json"

        if not results_path.exists():
            return []

        try:
            with results_path.open("r", encoding="utf-8") as f:
                results = json.load(f)
            return results if isinstance(results, list) else []
        except Exception as e:
            raise StorageError(f"Failed to load results: {e}") from e

    def add_result_step(
        self,
        experiment_id: str,
        result_data: Dict[str, Any],
        step: Optional[int] = None,
    ) -> int:
        """
        Add a result step to experiment results.

        Args:
            experiment_id: Experiment identifier
            result_data: Result data for this step
            step: Step number, auto-incremented if None

        Returns:
            Step number that was used

        Raises:
            StorageError: If result cannot be added
        """
        results = self.load_results(experiment_id)

        # Determine step number
        if step is None:
            # Auto-increment: find highest step number and add 1
            max_step = -1
            for result in results:
                if "step" in result and isinstance(result["step"], int):
                    max_step = max(max_step, result["step"])
            step = max_step + 1

        # Check if step already exists
        existing_index = None
        for i, result in enumerate(results):
            if result.get("step") == step:
                existing_index = i
                break

        # Create result entry
        result_entry = result_data.copy()
        result_entry["step"] = step
        result_entry["timestamp"] = datetime.utcnow().isoformat()

        # Add or replace result
        if existing_index is not None:
            # Replace existing step (with warning - handled by caller)
            results[existing_index] = result_entry
        else:
            # Add new result
            results.append(result_entry)

        # Sort results by step
        results.sort(key=lambda x: x.get("step", 0))

        # Save updated results
        self.save_results(experiment_id, results)

        return step

    def save_artifact(
        self, experiment_id: str, artifact_name: str, source_path: Path
    ) -> Path:
        """
        Save an artifact file.

        Args:
            experiment_id: Experiment identifier
            artifact_name: Name for the artifact
            source_path: Path to source file

        Returns:
            Path where artifact was saved

        Raises:
            StorageError: If artifact cannot be saved
        """
        exp_dir = self.get_experiment_directory(experiment_id)
        artifacts_dir = exp_dir / "artifacts"
        artifact_path = artifacts_dir / artifact_name

        try:
            if source_path.is_file():
                shutil.copy2(source_path, artifact_path)
            else:
                raise StorageError(f"Source path is not a file: {source_path}")
        except Exception as e:
            raise StorageError(f"Failed to save artifact: {e}") from e

        return artifact_path

    def save_text_artifact(
        self, experiment_id: str, artifact_name: str, content: str
    ) -> Path:
        """
        Save text content as an artifact.

        Args:
            experiment_id: Experiment identifier
            artifact_name: Name for the artifact
            content: Text content to save

        Returns:
            Path where artifact was saved

        Raises:
            StorageError: If artifact cannot be saved
        """
        exp_dir = self.get_experiment_directory(experiment_id)
        artifacts_dir = exp_dir / "artifacts"
        artifact_path = artifacts_dir / artifact_name

        try:
            with artifact_path.open("w", encoding="utf-8") as f:
                f.write(content)
        except Exception as e:
            raise StorageError(f"Failed to save text artifact: {e}") from e

        return artifact_path

    def get_log_paths(self, experiment_id: str) -> Dict[str, Path]:
        """
        Get paths for log files.

        Args:
            experiment_id: Experiment identifier

        Returns:
            Dictionary with log file paths
        """
        exp_dir = self.get_experiment_directory(experiment_id)

        return {
            "stdout": exp_dir / "stdout.log",
            "stderr": exp_dir / "stderr.log",
        }

    def list_experiments(self, include_archived: bool = False) -> List[str]:
        """
        List all experiment IDs.

        Args:
            include_archived: Whether to include archived experiments

        Returns:
            List of experiment IDs
        """
        experiment_ids = []

        # List regular experiments
        if self.experiments_dir.exists():
            for item in self.experiments_dir.iterdir():
                if item.is_dir() and len(item.name) == 8 and item.name != "archived":
                    # Basic validation that it looks like an experiment ID
                    experiment_ids.append(item.name)

        # List archived experiments if requested
        if include_archived:
            archive_dir = self.experiments_dir / "archived"
            if archive_dir.exists():
                for item in archive_dir.iterdir():
                    if item.is_dir() and len(item.name) == 8:
                        # Basic validation that it looks like an experiment ID
                        experiment_ids.append(item.name)

        return sorted(experiment_ids)

    def experiment_exists(
        self, experiment_id: str, include_archived: bool = False
    ) -> bool:
        """
        Check if experiment exists.

        Args:
            experiment_id: Experiment identifier
            include_archived: Whether to check archived experiments too

        Returns:
            True if experiment exists
        """
        exp_dir = self.experiments_dir / experiment_id
        if exp_dir.exists() and exp_dir.is_dir():
            return True

        if include_archived:
            return self.archived_experiment_exists(experiment_id)

        return False

    def archive_experiment(
        self, experiment_id: str, archive_dir: Optional[Path] = None
    ) -> Path:
        """
        Archive an experiment by moving it to archive directory.

        Args:
            experiment_id: Experiment identifier
            archive_dir: Archive directory, defaults to ./experiments/archived

        Returns:
            Path where experiment was archived

        Raises:
            StorageError: If archiving fails
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        archive_dir.mkdir(parents=True, exist_ok=True)

        exp_dir = self.get_experiment_directory(experiment_id)
        archive_path = archive_dir / experiment_id

        if archive_path.exists():
            raise StorageError(f"Archive path already exists: {archive_path}")

        try:
            shutil.move(str(exp_dir), str(archive_path))
        except Exception as e:
            raise StorageError(f"Failed to archive experiment: {e}") from e

        return archive_path

    def unarchive_experiment(
        self, experiment_id: str, archive_dir: Optional[Path] = None
    ) -> Path:
        """
        Unarchive an experiment by moving it back to experiments directory.

        Args:
            experiment_id: Experiment identifier
            archive_dir: Archive directory, defaults to ./experiments/archived

        Returns:
            Path where experiment was unarchived

        Raises:
            StorageError: If unarchiving fails
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        archive_path = archive_dir / experiment_id
        if not archive_path.exists():
            raise StorageError(f"Archived experiment not found: {archive_path}")

        exp_dir = self.experiments_dir / experiment_id
        if exp_dir.exists():
            raise StorageError(f"Experiment directory already exists: {exp_dir}")

        try:
            shutil.move(str(archive_path), str(exp_dir))
        except Exception as e:
            raise StorageError(f"Failed to unarchive experiment: {e}") from e

        return exp_dir

    def delete_experiment(self, experiment_id: str) -> None:
        """
        Permanently delete an experiment directory.

        Args:
            experiment_id: Experiment identifier

        Raises:
            StorageError: If deletion fails
        """
        exp_dir = self.get_experiment_directory(experiment_id)

        try:
            shutil.rmtree(exp_dir)
        except Exception as e:
            raise StorageError(f"Failed to delete experiment: {e}") from e

    def delete_archived_experiment(
        self, experiment_id: str, archive_dir: Optional[Path] = None
    ) -> None:
        """
        Permanently delete an archived experiment directory.

        Args:
            experiment_id: Experiment identifier
            archive_dir: Archive directory, defaults to ./experiments/archived

        Raises:
            StorageError: If deletion fails
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        archive_path = archive_dir / experiment_id
        if not archive_path.exists():
            raise StorageError(f"Archived experiment not found: {archive_path}")

        try:
            shutil.rmtree(archive_path)
        except Exception as e:
            raise StorageError(f"Failed to delete archived experiment: {e}") from e

    def list_archived_experiments(
        self, archive_dir: Optional[Path] = None
    ) -> List[str]:
        """
        List all archived experiment IDs.

        Args:
            archive_dir: Archive directory, defaults to ./experiments/archived

        Returns:
            List of archived experiment IDs
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        if not archive_dir.exists():
            return []

        experiment_ids = []
        for item in archive_dir.iterdir():
            if item.is_dir() and len(item.name) == 8:
                # Basic validation that it looks like an experiment ID
                experiment_ids.append(item.name)

        return sorted(experiment_ids)

    def archived_experiment_exists(
        self, experiment_id: str, archive_dir: Optional[Path] = None
    ) -> bool:
        """
        Check if archived experiment exists.

        Args:
            experiment_id: Experiment identifier
            archive_dir: Archive directory, defaults to ./experiments/archived

        Returns:
            True if archived experiment exists
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        archive_path = archive_dir / experiment_id
        return archive_path.exists() and archive_path.is_dir()

    def get_archived_experiment_directory(
        self, experiment_id: str, archive_dir: Optional[Path] = None
    ) -> Path:
        """
        Get path to archived experiment directory.

        Args:
            experiment_id: Experiment identifier
            archive_dir: Archive directory, defaults to ./experiments/archived

        Returns:
            Path to archived experiment directory

        Raises:
            StorageError: If archived experiment directory doesn't exist
        """
        if archive_dir is None:
            archive_dir = self.experiments_dir / "archived"

        archive_path = archive_dir / experiment_id

        if not archive_path.exists():
            raise StorageError(
                f"Archived experiment directory not found: {archive_path}"
            )

        return archive_path

    def update_experiment_metadata(
        self,
        experiment_id: str,
        updates: Dict[str, Any],
        include_archived: bool = False,
    ) -> Dict[str, Any]:
        """
        Update experiment metadata with new values.

        Args:
            experiment_id: Experiment identifier
            updates: Dictionary of metadata updates to apply
            include_archived: Whether to search archived experiments too

        Returns:
            Updated metadata dictionary

        Raises:
            StorageError: If metadata cannot be updated
        """
        # Load current metadata
        current_metadata = self.load_metadata(experiment_id, include_archived)

        # Apply updates
        updated_metadata = current_metadata.copy()

        # Handle tag operations first (before the main loop)
        if "add_tags" in updates or "remove_tags" in updates:
            current_tags = set(updated_metadata.get("tags", []))

            if "add_tags" in updates:
                current_tags.update(updates["add_tags"])

            if "remove_tags" in updates:
                current_tags.difference_update(updates["remove_tags"])

            updated_metadata["tags"] = sorted(current_tags)

        # Handle other field updates
        for key, value in updates.items():
            if key in ["add_tags", "remove_tags"]:
                # Skip these as they're handled above
                continue
            elif key in ["name", "description", "status"]:
                # Direct field updates
                if value == "":
                    # Empty string means clear the field
                    updated_metadata[key] = None
                else:
                    updated_metadata[key] = value
            else:
                # Other fields - direct assignment
                updated_metadata[key] = value

        # Save updated metadata
        self.save_metadata(experiment_id, updated_metadata, include_archived)

        return updated_metadata
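For orientation, the following is a minimal usage sketch of the ExperimentStorage class added above, based only on the methods visible in this diff. The experiment ID, metadata, config values, and artifact contents are illustrative assumptions, not output produced by the package.

from pathlib import Path

from yanex.core.storage import ExperimentStorage

# Manage experiments under ./experiments (the default when no directory is given)
storage = ExperimentStorage(Path("experiments"))

experiment_id = "a1b2c3d4"  # hypothetical 8-character experiment ID
storage.create_experiment_directory(experiment_id)

# Persist metadata and configuration for the run
storage.save_metadata(experiment_id, {"name": "baseline", "status": "running"})
storage.save_config(experiment_id, {"learning_rate": 0.01, "epochs": 10})

# Record per-step results; the step number auto-increments when omitted
storage.add_result_step(experiment_id, {"loss": 0.42})
storage.add_result_step(experiment_id, {"loss": 0.31})

# Save a text artifact, then archive the finished experiment
storage.save_text_artifact(experiment_id, "notes.txt", "first baseline run")
storage.archive_experiment(experiment_id)

print(storage.list_experiments(include_archived=True))  # -> ["a1b2c3d4"]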
yanex/ui/__init__.py
ADDED
@@ -0,0 +1 @@
# UI components for yanex