experimaestro 1.11.1__py3-none-any.whl → 2.0.0rc0__py3-none-any.whl
This diff shows the content of publicly released package versions as published to their respective public registries; it is provided for informational purposes only and reflects the changes between the two versions.
Potentially problematic release.
This version of experimaestro might be problematic.
- experimaestro/annotations.py +1 -1
- experimaestro/cli/__init__.py +10 -11
- experimaestro/cli/progress.py +269 -0
- experimaestro/core/identifier.py +11 -2
- experimaestro/core/objects/config.py +64 -94
- experimaestro/core/types.py +35 -57
- experimaestro/launcherfinder/registry.py +3 -3
- experimaestro/mkdocs/base.py +6 -8
- experimaestro/notifications.py +12 -3
- experimaestro/progress.py +406 -0
- experimaestro/settings.py +4 -2
- experimaestro/tests/launchers/common.py +2 -2
- experimaestro/tests/restart.py +1 -1
- experimaestro/tests/test_checkers.py +2 -2
- experimaestro/tests/test_dependencies.py +12 -12
- experimaestro/tests/test_experiment.py +3 -3
- experimaestro/tests/test_file_progress.py +425 -0
- experimaestro/tests/test_file_progress_integration.py +477 -0
- experimaestro/tests/test_generators.py +61 -0
- experimaestro/tests/test_identifier.py +90 -81
- experimaestro/tests/test_instance.py +9 -9
- experimaestro/tests/test_objects.py +9 -32
- experimaestro/tests/test_outputs.py +6 -6
- experimaestro/tests/test_param.py +14 -14
- experimaestro/tests/test_progress.py +4 -4
- experimaestro/tests/test_serializers.py +5 -5
- experimaestro/tests/test_tags.py +15 -15
- experimaestro/tests/test_tasks.py +40 -36
- experimaestro/tests/test_tokens.py +8 -6
- experimaestro/tests/test_types.py +10 -10
- experimaestro/tests/test_validation.py +19 -19
- experimaestro/tests/token_reschedule.py +1 -1
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/METADATA +1 -1
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/RECORD +37 -32
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/LICENSE +0 -0
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/WHEEL +0 -0
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/entry_points.txt +0 -0
experimaestro/progress.py
ADDED

@@ -0,0 +1,406 @@
+"""File-based progress tracking system for experimaestro tasks."""
+
+import json
+import threading
+import time
+from dataclasses import dataclass, asdict
+from pathlib import Path
+from typing import Optional, List, Iterator, Dict, Any
+from datetime import datetime, timedelta
+import fcntl
+import os
+
+from .utils import logger
+
+DEFAULT_MAX_ENTRIES_PER_FILE = 10_000
+
+
+@dataclass
+class ProgressEntry:
+    """A single progress entry in the JSONL file"""
+
+    timestamp: float
+    level: int
+    progress: float
+    desc: Optional[str] = None
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for JSON serialization"""
+        return asdict(self)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "ProgressEntry":
+        """Create from dictionary"""
+        return cls(**data)
+
+
+class StateFile:
+    """Represents the state file for progress tracking.
+    Checks if the state must be written based on time and progress changes.
+    By default, it writes every second or when progress changes significantly (>1%)"""
+
+    def __init__(self, filename: Path):
+        self.filename = filename
+        self.state: Dict[int, ProgressEntry] = {}
+
+        # Write threshold to avoid too frequent writes
+        self._time_threshold = timedelta(seconds=1.0)
+        self._last_write_time: datetime = datetime.now()
+        # Minimum progress change to trigger write
+        self._progress_threshold = 0.01
+        self._last_write_progress: Optional[Dict[int, float]] = None
+
+        self.filename.parent.mkdir(parents=True, exist_ok=True)
+        self.load()
+
+    def _allow_write(self) -> bool:
+        """Check if the state should be written based on time and progress changes.
+        Allows writing if:
+        - BOTH: More than 1 second has passed since last write
+        - AND: Progress has changed significantly (>1%)
+        - OR: All entries are done (progress >= 1.0)"""
+        time_check = datetime.now() - self._last_write_time > self._time_threshold
+        progress_check = self._last_write_progress is None or any(
+            abs(entry.progress - self._last_write_progress.get(entry.level, 0.0))
+            > self._progress_threshold
+            for entry in self.state.values()
+        )
+        all_entries_done = all(entry.progress >= 1.0 for entry in self.state.values())
+        return all_entries_done or (time_check and progress_check)
+
+    def write(self, force: bool = False):
+        """Write the current state to the file."""
+        if self._allow_write() or force:
+            with open(self.filename, "w") as f:
+                json.dump({k: v.to_dict() for k, v in self.state.items()}, f)
+            self._last_write_time = datetime.now()
+            self._last_write_progress = {k: v.progress for k, v in self.state.items()}
+
+    def update(self, entry: ProgressEntry):
+        self.state[entry.level] = entry
+
+    def load(self):
+        """Load the state from the file"""
+        if self.filename.exists():
+            with self.filename.open("r") as f:
+                try:
+                    data = json.load(f)
+                    self.state = {
+                        int(k): ProgressEntry.from_dict(v) for k, v in data.items()
+                    }
+                except (json.JSONDecodeError, IOError):
+                    logger.warning(f"Failed to load state from {self.filename}")
+
+    def read(self) -> Dict[int, ProgressEntry]:
+        """Read the state from the file"""
+        self.load()
+        return self.state
+
+    # flush on exit
+    def __del__(self):
+        """Ensure state is written on exit"""
+        try:
+            self.write(force=True)
+        except Exception as e:
+            logger.error(f"Failed to write state on exit: {e}")
+
+
+class ProgressFileWriter:
+    # TODO: Implement buffering and flushing
+
+    def __init__(
+        self, task_path: Path, max_entries_per_file: int = DEFAULT_MAX_ENTRIES_PER_FILE
+    ):
+        self.task_path = task_path
+        self.progress_dir = task_path / ".experimaestro"
+        self.max_entries_per_file = max_entries_per_file
+        self.current_file_index = 0
+        self.current_file_entries = 0
+        self.lock = threading.Lock()
+
+        # Ensure directory exists
+        self.progress_dir.mkdir(exist_ok=True)
+
+        # State is the latest entry per level
+        self.state = StateFile(self.progress_dir / "progress_state.json")
+
+        # Find the latest file index
+        self._find_latest_file()
+
+    def _find_latest_file(self):
+        """Find the latest progress file and entry count"""
+        progress_files = list(self.progress_dir.glob("progress-*.jsonl"))
+        if not progress_files:
+            self.current_file_index = 0
+            self.current_file_entries = 0
+            return
+
+        # Sort by file index
+        max_index = None
+        for f in progress_files:
+            try:
+                index = int(f.stem.split("-")[1])
+                if max_index is None or index > max_index:
+                    max_index = index
+            except (ValueError, IndexError):
+                continue
+
+        if max_index is not None:
+            self.current_file_index = max_index
+            # Count entries in current file
+            current_file = self._get_current_file_path()
+            if current_file.exists():
+                with current_file.open("r") as f:
+                    self.current_file_entries = sum(1 for _ in f.readlines())
+            else:
+                self.current_file_entries = 0
+        else:
+            self.current_file_index = 0
+            self.current_file_entries = 0
+
+    def _get_current_file_path(self) -> Path:
+        """Get path to current progress file"""
+        return self.progress_dir / f"progress-{self.current_file_index:04d}.jsonl"
+
+    def _get_latest_symlink_path(self) -> Path:
+        """Get path to latest progress symlink"""
+        return self.progress_dir / "progress-latest.jsonl"
+
+    def _rotate_file_if_needed(self):
+        """Create new file if current one is full"""
+        if self.current_file_entries >= self.max_entries_per_file:
+            self.current_file_index += 1
+            self.current_file_entries = 0
+            logger.debug(f"Rotating to new progress file: {self.current_file_index}")
+
+    def _update_latest_symlink(self):
+        """Update symlink to point to latest file"""
+        current_file = self._get_current_file_path()
+        latest_symlink = self._get_latest_symlink_path()
+
+        # Remove existing symlink
+        if latest_symlink.exists() or latest_symlink.is_symlink():
+            latest_symlink.unlink()
+
+        # Create new symlink
+        latest_symlink.symlink_to(current_file.name)
+
+    def write_progress(self, level: int, progress: float, desc: Optional[str] = None):
+        """Write a progress entry to the file
+
+        Args:
+            level: Progress level (0 is top level)
+            progress: Progress value between 0.0 and 1.0
+            desc: Optional description
+        """
+        with self.lock:
+            # Eventually rotate internal state if needed
+            self._rotate_file_if_needed()
+
+            entry = ProgressEntry(
+                timestamp=time.time(), level=level, progress=progress, desc=desc
+            )
+            self.state.update(entry)
+            self.state.write(force=level == -1)  # Force write on EOJ
+
+            current_file = self._get_current_file_path()
+
+            # Write with file locking for concurrent access
+            with current_file.open("a") as f:
+                try:
+                    fcntl.flock(f.fileno(), fcntl.LOCK_EX)
+                    f.write(json.dumps(entry.to_dict()) + "\n")
+                    f.flush()  # Flush the file buffer
+                    os.fsync(f.fileno())  # Ensure data is written to disk
+                finally:
+                    fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+
+            self.current_file_entries += 1
+            self._update_latest_symlink()
+
+            logger.debug(
+                f"Progress written: level={level}, progress={progress}, desc={desc}"
+            )
+
+    def __del__(self):
+        """Ensure state is written on exit"""
+        try:
+            self.state.write(force=True)
+        except Exception as e:
+            logger.error(f"Failed to write state on exit: {e}")
+
+
+class ProgressFileReader:
+    """Reads progress entries from JSONL files"""
+
+    def __init__(self, task_path: Path):
+        """Initialize progress file reader
+
+        Args:
+            task_path: Path to the task directory
+        """
+        self.task_path = task_path
+        self.progress_dir = task_path / ".experimaestro"
+        self.max_entries_per_file: Optional[int] = None
+        self.state = StateFile(self.progress_dir / "progress_state.json")
+
+    def get_progress_files(self) -> List[Path]:
+        """Get all progress files sorted by index"""
+        if not self.progress_dir.exists():
+            return []
+
+        progress_files = list(self.progress_dir.glob("progress-*.jsonl"))
+
+        # Filter out symlinks to avoid duplicates
+        progress_files = [f for f in progress_files if not f.is_symlink()]
+
+        # Sort by file index
+        # Alternatively, we could simply sort by filename
+        def get_index(path: Path) -> int:
+            try:
+                return int(path.stem.split("-")[1])
+            except (ValueError, IndexError):
+                return 0
+
+        return sorted(progress_files, key=get_index)
+
+    def get_latest_file(self) -> Optional[Path]:
+        """Get the latest progress file via symlink"""
+        latest_symlink = self.progress_dir / "progress-latest.jsonl"
+        if latest_symlink.exists() and latest_symlink.is_symlink():
+            return latest_symlink.resolve()
+
+        # Fallback to finding latest manually
+        files = self.get_progress_files()
+        return files[-1] if files else None
+
+    def read_entries(self, file_path: Path) -> Iterator[ProgressEntry]:
+        """Read progress entries from a file
+
+        Args:
+            file_path: Path to progress file
+
+        Yields:
+            ProgressEntry objects
+        """
+        if not file_path.exists():
+            return
+
+        try:
+            with file_path.open("r") as f:
+                fcntl.flock(f.fileno(), fcntl.LOCK_SH)
+                try:
+                    for line in f:
+                        line = line.strip()
+                        if line:
+                            try:
+                                data = json.loads(line)
+                                yield ProgressEntry.from_dict(data)
+                            except json.JSONDecodeError as e:
+                                logger.warning(
+                                    f"Invalid JSON in progress file {file_path}: {e}"
+                                )
+                finally:
+                    fcntl.flock(f.fileno(), fcntl.LOCK_UN)
+        except IOError as e:
+            logger.warning(f"Could not read progress file {file_path}: {e}")
+
+    def read_all_entries(self) -> Iterator[ProgressEntry]:
+        """Read all progress entries from all files in order
+
+        Yields:
+            ProgressEntry objects in chronological order
+        """
+        logger.warning("Reading all progress entries, this may be slow for large jobs.")
+        for file_path in self.get_progress_files():
+            yield from self.read_entries(file_path)
+
+    def read_latest_entries(self, count: Optional[int] = None) -> List[ProgressEntry]:
+        """Read the latest N progress entries"""
+        entries = []
+
+        # Read files in reverse order to get latest entries first
+        files = self.get_progress_files()
+        # Fetch the max length of files, in lines
+        if files and count is None:
+            # Fetch the number of entries in the first file
+            # This is the most likely to be the longest file
+            count = sum(1 for _ in self.read_entries(files[0]))
+        if count is None:
+            count = DEFAULT_MAX_ENTRIES_PER_FILE
+
+        for file_path in reversed(files):
+            file_entries = list(self.read_entries(file_path))
+            entries.extend(reversed(file_entries))
+
+            if len(entries) >= count:
+                break
+
+        # Return latest entries in chronological order
+        return list(reversed(entries[:count]))
+
+    def get_current_progress(
+        self, count: Optional[int] = None
+    ) -> Dict[int, ProgressEntry]:
+        """Get the current progress for each level"""
+        logger.warning(
+            "Reading current progress from progress logs, this may be slow for large jobs."
+        )
+        return {entry.level: entry for entry in self.read_latest_entries(count)}
+
+    def get_current_state(self) -> Optional[Dict[int, ProgressEntry]]:
+        """Fetch the latest progress entry from the state file"""
+        current_state = self.state.read()
+        return current_state or self.get_current_progress()
+
+    def is_done(self) -> bool:
+        """Check if the task is done by looking for a special 'done' file.
+        Fallback to checking for end-of-job (EOJ) entries."""
+
+        task_name = self.task_path.parent.stem.split(".")[-1]
+        job_done_file = self.task_path / f"{task_name}.done"
+        if job_done_file.exists() and job_done_file.is_file():
+            return True
+
+        # Check if any progress file has a level -1 entry indicating EOJ
+        return any(entry.level == -1 for entry in self.read_all_entries())
+
+
+class FileBasedProgressReporter:
+    """File-based progress reporter that replaces the socket-based Reporter"""
+
+    def __init__(self, task_path: Path):
+        """Initialize file-based progress reporter
+
+        Args:
+            task_path: Path to the task directory
+        """
+        self.task_path = task_path
+        self.writer = ProgressFileWriter(task_path)
+        self.current_progress = {}  # level -> (progress, desc)
+        self.lock = threading.Lock()
+
+    def set_progress(self, progress: float, level: int = 0, desc: Optional[str] = None):
+        """Set progress for a specific level
+
+        Args:
+            progress: Progress value between 0.0 and 1.0
+            level: Progress level (0 is top level)
+            desc: Optional description
+        """
+        with self.lock:
+            # Check if progress has changed significantly
+            current = self.current_progress.get(level, (None, None))
+            if (
+                current[0] is None
+                or abs(progress - current[0]) > 0.01
+                or desc != current[1]
+            ):
+                self.current_progress[level] = (progress, desc)
+                self.writer.write_progress(level, progress, desc)
+
+    def eoj(self):
+        """End of job notification"""
+        with self.lock:
+            # Write a special end-of-job marker
+            self.writer.write_progress(-1, 1.0, "EOJ")
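The new module replaces the socket-based progress reporter with JSONL files written under the task's .experimaestro directory: a writer that appends entries (rotating files and maintaining a progress-latest.jsonl symlink) plus a small throttled state file, and a reader that recovers the latest progress per level. Below is a minimal usage sketch based only on the classes above; the task directory path is illustrative and would normally be managed by experimaestro.

from pathlib import Path

from experimaestro.progress import FileBasedProgressReporter, ProgressFileReader

task_path = Path("/tmp/xpm-task")  # hypothetical task directory
task_path.mkdir(parents=True, exist_ok=True)

# Writer side (inside the running task): report top-level progress, then EOJ
reporter = FileBasedProgressReporter(task_path)
for step in range(10):
    reporter.set_progress((step + 1) / 10, level=0, desc="processing")
reporter.eoj()  # appends the special level -1 end-of-job entry

# Reader side (e.g. a monitoring process): read back the latest state
reader = ProgressFileReader(task_path)
state = reader.get_current_state()  # maps level -> ProgressEntry
for level, entry in sorted(state.items()):
    print(level, f"{entry.progress:.0%}", entry.desc)
print("done:", reader.is_done())
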
experimaestro/settings.py
CHANGED
@@ -37,7 +37,7 @@ class WorkspaceSettings:

     alt_workspaces: List[str] = field(default_factory=list)
     """Alternative workspaces to find jobs or experiments"""
-
+
     def __post_init__(self):
         self.path = self.path.expanduser().resolve()


@@ -83,7 +83,9 @@ def get_workspace(id: Optional[str] = None) -> Optional[WorkspaceSettings]:
     return None


-def find_workspace(
+def find_workspace(
+    *, workspace: Optional[str] = None, workdir: Optional[Path] = None
+) -> WorkspaceSettings:
     """Find workspace"""
     workdir = Path(workdir) if workdir else None

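find_workspace now takes keyword-only arguments and is annotated to return a WorkspaceSettings. A call-site sketch, assuming a workspace can be resolved from the user settings; the working directory below is illustrative.

from pathlib import Path

from experimaestro.settings import find_workspace

# Arguments must now be passed by keyword; both are optional.
ws = find_workspace(workdir=Path("~/experiments/my-run"))
print(ws.path)
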
experimaestro/tests/launchers/common.py
CHANGED

@@ -80,7 +80,7 @@ def takeback(launcher, datapath, txp1, txp2):
     waiting = datapath / "waiting"

     with txp1:
-        task: WaitUntilTouched = WaitUntilTouched(
+        task: WaitUntilTouched = WaitUntilTouched.C(
             touching=touching, waiting=waiting
         ).submit(launcher=launcher)


@@ -91,7 +91,7 @@ def takeback(launcher, datapath, txp1, txp2):
         time.sleep(0.01)

     with txp2:
-        result = WaitUntilTouched(touching=touching, waiting=waiting).submit(
+        result = WaitUntilTouched.C(touching=touching, waiting=waiting).submit(
             launcher=launcher
         )

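The test updates above and below all follow the same 2.0.0rc0 pattern: configurations and tasks are instantiated through the .C(...) class attribute instead of calling the class directly, e.g. WaitUntilTouched(...) becomes WaitUntilTouched.C(...). A minimal sketch of the pattern; the Example task and its parameter are hypothetical.

from experimaestro import Param, Task


class Example(Task):
    x: Param[int] = 1

    def execute(self):
        print(self.x)


# 1.11 style construction: Example(x=2)
# 2.0.0rc0 style, as used in the updated tests:
task = Example.C(x=2)
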
experimaestro/tests/restart.py
CHANGED
@@ -64,7 +64,7 @@ def restart(terminate: Callable, experiment):
     try:
         with TemporaryExperiment("restart", maxwait=20) as xp:
             # Create the task with dry_run and so we can get the file paths
-            task = Restart()
+            task = Restart.C()
             task.submit(run_mode=RunMode.DRY_RUN)

             # Start the experiment with another process, and kill the job

experimaestro/tests/test_checkers.py
CHANGED

@@ -10,7 +10,7 @@ def test_choices():
     class TestChoices(Config):
         a: Annotated[str, Choices(["a", "b"])]

-    TestChoices(a="a").__xpm__.validate()
+    TestChoices.C(a="a").__xpm__.validate()

     with pytest.raises((ValueError, KeyError)):
-        TestChoices(a="c").__xpm__.validate()
+        TestChoices.C(a="c").__xpm__.validate()

experimaestro/tests/test_dependencies.py
CHANGED

@@ -35,21 +35,21 @@ class TaskB(Task):


 def test_dependencies_simple(xp):
-    a = TaskA().submit()
-    b = TaskB(a=a).submit()
+    a = TaskA.C().submit()
+    b = TaskB.C(a=a).submit()
     check_dependencies(b, a)


 def test_dependencies_implicit(xp):
-    a = TaskA().submit()
-    b = TaskB(a=a)
+    a = TaskA.C().submit()
+    b = TaskB.C(a=a)
     b.submit()
     check_dependencies(b, a)


 class TaskC(Task):
     def task_outputs(self, dep: Callable[[Config], None]) -> Any:
-        return dep(ConfigC(param_c=self))
+        return dep(ConfigC.C(param_c=self))


 class ConfigC(Config):

@@ -61,15 +61,15 @@ class TaskD(Task):


 def test_dependencies_task_output(xp):
-    task_c = TaskC()
+    task_c = TaskC.C()
     c = task_c.submit()
-    d = TaskD(param_c=c).submit()
+    d = TaskD.C(param_c=c).submit()
     check_dependencies(d, task_c)


 class Inner_TaskA(Task):
     def task_outputs(self, dep: Callable[[Config], None]) -> Any:
-        return dep(Inner_OutputTaskA())
+        return dep(Inner_OutputTaskA.C())


 class Inner_OutputTaskA(Config):

@@ -81,13 +81,13 @@ class Inner_TaskB(Task):


 def test_dependencies_inner_task_output(xp):
-    task_a = Inner_TaskA()
+    task_a = Inner_TaskA.C()
     a = task_a.submit()
-    b = Inner_TaskB(param_a=a).submit()
+    b = Inner_TaskB.C(param_a=a).submit()
     check_dependencies(b, task_a)


 def test_dependencies_pre_task(xp):
-    a = TaskA().submit()
-    a2 = TaskA().add_pretasks(a).submit()
+    a = TaskA.C().submit()
+    a2 = TaskA.C().add_pretasks(a).submit()
     check_dependencies(a2, a)

experimaestro/tests/test_experiment.py
CHANGED

@@ -38,8 +38,8 @@ def test_experiment_history():
     """Test retrieving experiment history"""
     with TemporaryDirectory() as workdir:
         with TemporaryExperiment("experiment", workdir=workdir):
-            task_a = TaskA().submit()
-            TaskB(task_a=task_a, x=tag(1)).submit()
+            task_a = TaskA.C().submit()
+            TaskB.C(task_a=task_a, x=tag(1)).submit()

         # Look at the experiment
         xp = get_experiment("experiment", workdir=workdir)

@@ -66,7 +66,7 @@ def test_experiment_events():

     flag = FlagHandler()
     with TemporaryExperiment("experiment"):
-        task_a = TaskA()
+        task_a = TaskA.C()
         task_a.submit()
         task_a.on_completed(flag.set)
