experimaestro 1.11.1__py3-none-any.whl → 2.0.0rc0__py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of experimaestro has been flagged as potentially problematic.
- experimaestro/annotations.py +1 -1
- experimaestro/cli/__init__.py +10 -11
- experimaestro/cli/progress.py +269 -0
- experimaestro/core/identifier.py +11 -2
- experimaestro/core/objects/config.py +64 -94
- experimaestro/core/types.py +35 -57
- experimaestro/launcherfinder/registry.py +3 -3
- experimaestro/mkdocs/base.py +6 -8
- experimaestro/notifications.py +12 -3
- experimaestro/progress.py +406 -0
- experimaestro/settings.py +4 -2
- experimaestro/tests/launchers/common.py +2 -2
- experimaestro/tests/restart.py +1 -1
- experimaestro/tests/test_checkers.py +2 -2
- experimaestro/tests/test_dependencies.py +12 -12
- experimaestro/tests/test_experiment.py +3 -3
- experimaestro/tests/test_file_progress.py +425 -0
- experimaestro/tests/test_file_progress_integration.py +477 -0
- experimaestro/tests/test_generators.py +61 -0
- experimaestro/tests/test_identifier.py +90 -81
- experimaestro/tests/test_instance.py +9 -9
- experimaestro/tests/test_objects.py +9 -32
- experimaestro/tests/test_outputs.py +6 -6
- experimaestro/tests/test_param.py +14 -14
- experimaestro/tests/test_progress.py +4 -4
- experimaestro/tests/test_serializers.py +5 -5
- experimaestro/tests/test_tags.py +15 -15
- experimaestro/tests/test_tasks.py +40 -36
- experimaestro/tests/test_tokens.py +8 -6
- experimaestro/tests/test_types.py +10 -10
- experimaestro/tests/test_validation.py +19 -19
- experimaestro/tests/token_reschedule.py +1 -1
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/METADATA +1 -1
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/RECORD +37 -32
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/LICENSE +0 -0
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/WHEEL +0 -0
- {experimaestro-1.11.1.dist-info → experimaestro-2.0.0rc0.dist-info}/entry_points.txt +0 -0
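
The dominant change visible in the test diffs below is the constructor convention: where experimaestro 1.11.1 instantiated configurations and tasks by calling the class directly, 2.0.0rc0 calls a `.C(...)` class method instead. A minimal before/after sketch of that pattern follows; the class, its parameter, and the top-level import are illustrative assumptions, only `Config`, `Param`, and `.C()` come from the diffs.

```python
# Illustrative sketch only: `MyConfig` and the import path are assumptions,
# not part of the experimaestro diff shown on this page.
from experimaestro import Config, Param


class MyConfig(Config):
    x: Param[int]


# experimaestro 1.11.1 style (the "-" lines in the diffs below)
# config = MyConfig(x=5)

# experimaestro 2.0.0rc0 style (the "+" lines in the diffs below)
config = MyConfig.C(x=5)
```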

@@ -72,7 +72,7 @@ def test_progress_basic():
         listener = ProgressListener()
         xp.scheduler.addlistener(listener)
 
-        out = ProgressingTask().submit()
+        out = ProgressingTask.C().submit()
         path = out.path  # type: Path
         job = out.__xpm__.job
 
@@ -103,7 +103,7 @@ def test_progress_multiple():
         listener1 = ProgressListener()
         xp1.scheduler.addlistener(listener1)
 
-        out = ProgressingTask().submit()
+        out = ProgressingTask.C().submit()
         path = out.path  # type: Path
         job = out.__xpm__.job
 
@@ -122,7 +122,7 @@ def test_progress_multiple():
         listener2 = ProgressListener()
         xp2.scheduler.addlistener(listener2)
 
-        out = ProgressingTask().submit()
+        out = ProgressingTask.C().submit()
         job = out.__xpm__.job  # type: CommandLineJob
         logger.info("Waiting for job to start (2)")
         while job.state.notstarted():
@@ -217,7 +217,7 @@ def test_progress_nested():
         listener = ProgressListener()
         xp.scheduler.addlistener(listener)
 
-        out = NestedProgressingTask().submit()
+        out = NestedProgressingTask.C().submit()
         job = out.__xpm__.job
         path = out.path  # type: Path
 
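The progress-test hunks above (experimaestro/tests/test_progress.py, per the changed-files list) only touch how the task object is built; the submit-and-monitor pattern around it is unchanged. Below is a rough sketch of that pattern under the new constructor, assuming an experiment context is already open and using a hypothetical task class; only `.C()`, `.submit()`, `__xpm__.job`, and `job.state.notstarted()` are taken from the hunks.

```python
# Hypothetical task; everything except the calls named above is an assumption.
import time

from experimaestro import Param, Task


class ProgressingTask(Task):
    steps: Param[int]

    def execute(self):
        ...  # do the work, reporting progress as it goes


# Inside an experiment context (not shown), 2.0.0rc0 style:
out = ProgressingTask.C(steps=10).submit()
job = out.__xpm__.job                # scheduler-side job handle
while job.state.notstarted():        # as polled by the progress tests above
    time.sleep(0.1)
```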
experimaestro/tests/test_serializers.py CHANGED

@@ -36,7 +36,7 @@ class Trainer(Task):
 
     def task_outputs(self, dep):
         model = copyconfig(self.model)
-        return model.add_pretasks(dep(LoadModel(value=model)))
+        return model.add_pretasks(dep(LoadModel.C(value=model)))
 
     def execute(self):
         assert not self.model.initialized, "Model not initialized"
@@ -56,14 +56,14 @@ class Evaluate(Task):
 
 def test_serializers_xp():
     with TemporaryExperiment("serializers", maxwait=20, port=0):
-        model = Model(submodel=SubModel())
-        trained_model: Model = Trainer(model=model).submit()
+        model = Model.C(submodel=SubModel.C())
+        trained_model: Model = Trainer.C(model=model).submit()
 
         # Use the model itself
-        Evaluate(model=trained_model).submit()
+        Evaluate.C(model=trained_model).submit()
 
         # Use a submodel
-        Evaluate(model=trained_model.submodel, is_submodel=True).add_pretasks_from(
+        Evaluate.C(model=trained_model.submodel, is_submodel=True).add_pretasks_from(
             trained_model
         ).submit()
 
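Besides the `.C()` renaming, the serializer hunks keep the pre-task chaining calls (`add_pretasks`, `add_pretasks_from`) exactly as they were. A schematic use of `add_pretasks` with made-up classes follows; only the method names and the `.C()`/`.submit()` calls come from the diffs, and the snippet is assumed to run inside an experiment context.

```python
# Made-up classes; add_pretasks() and .C()/.submit() are the calls exercised
# by the serializer and lightweight-task tests in this diff.
from experimaestro import Config, Param, Task


class Model(Config):
    pass


class LoadModel(Task):
    value: Param[Model]

    def execute(self):
        ...  # e.g. load weights into self.value


class Evaluate(Task):
    model: Param[Model]

    def execute(self):
        ...  # use self.model once its pre-tasks have run


# Inside an experiment context (not shown):
model = Model.C()
loader = LoadModel.C(value=model)
Evaluate.C(model=model).add_pretasks(loader).submit()
```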
experimaestro/tests/test_tags.py CHANGED

@@ -21,20 +21,20 @@ class Config2(Config):
 
 
 def test_tag():
-    c = Config1(x=5)
+    c = Config1.C(x=5)
     c.tag("x", 5)
     assert c.tags() == {"x": 5}
 
 
 def test_taggedvalue():
-    c = Config1(x=tag(5))
+    c = Config1.C(x=tag(5))
     assert c.tags() == {"x": 5}
 
 
 def test_tagcontain():
     """Test that tags are not propagated to the upper configurations"""
-    c1 = Config1(x=5)
-    c2 = Config2(c=c1, x=tag(3)).tag("out", 1)
+    c1 = Config1.C(x=5)
+    c2 = Config2.C(c=c1, x=tag(3)).tag("out", 1)
     assert c1.tags() == {}
     assert c2.tags() == {"x": 3, "out": 1}
 
@@ -50,17 +50,17 @@ def test_inneroutput():
     class Evaluate(Task):
         task: Param[MyTask]
 
-    output = Output().tag("hello", "world")
-    task = MyTask(outputs={}, mainoutput=output)
+    output = Output.C().tag("hello", "world")
+    task = MyTask.C(outputs={}, mainoutput=output)
     task.submit(run_mode=RunMode.DRY_RUN)
     assert output.tags() == {"hello": "world"}
 
-    output = Output().tag("hello", "world")
-    task = MyTask(outputs={"a": output}, mainoutput=Output())
+    output = Output.C().tag("hello", "world")
+    task = MyTask.C(outputs={"a": output}, mainoutput=Output.C())
     task.submit(run_mode=RunMode.DRY_RUN)
     assert output.tags() == {"hello": "world"}
 
-    evaluate = Evaluate(task=task).submit(run_mode=RunMode.DRY_RUN)
+    evaluate = Evaluate.C(task=task).submit(run_mode=RunMode.DRY_RUN)
     assert evaluate.__xpm__.tags() == {"hello": "world"}
 
 
@@ -80,21 +80,21 @@ def test_tags_init_tasks():
         x: Param[MyConfig]
 
         def task_outputs(self, dep) -> MyConfig:
-            return dep(MyConfig())
+            return dep(MyConfig.C())
 
-    init_task = InitTask().tag("hello", "world")
-    task = MyTask()
+    init_task = InitTask.C().tag("hello", "world")
+    task = MyTask.C()
     result = task.submit(run_mode=RunMode.DRY_RUN, init_tasks=[init_task])
     assert result.tags() == {"hello": "world"}
 
-    other_task = TaskWithOutput(x=MyConfig().tag("hello", "world"))
+    other_task = TaskWithOutput.C(x=MyConfig.C().tag("hello", "world"))
     assert other_task.tags() == {"hello": "world"}
 
     result = other_task.submit(run_mode=RunMode.DRY_RUN)
     assert isinstance(result, MyConfig)
     assert result.tags() == {"hello": "world"}
 
-    result = MyTask().submit(run_mode=RunMode.DRY_RUN, init_tasks=[result])
+    result = MyTask.C().submit(run_mode=RunMode.DRY_RUN, init_tasks=[result])
     assert result.tags() == {"hello": "world"}
 
 
@@ -115,6 +115,6 @@ def test_objects_tags():
         x: Param[int]
 
     context = DirectoryContext(Path("/__fakepath__"))
-    a = A(x=tag(1))
+    a = A.C(x=tag(1))
     a.__xpm__.seal(context)
     assert a.__xpm__.tags() == {"x": 1}
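The tag hunks above show the two tagging styles side by side, unchanged apart from `.C()`: wrapping a value in `tag(...)` at construction time, or calling `.tag(name, value)` on the built object, with `tags()` collecting the result. A condensed sketch follows; the classes and the import path are hypothetical, the tagging calls come from the hunks.

```python
# Hypothetical classes; tag()/.tag()/.tags() are the calls used in the tests above.
from experimaestro import Config, Param, tag


class Inner(Config):
    x: Param[int]


class Outer(Config):
    inner: Param[Inner]
    x: Param[int]


c1 = Inner.C(x=5)
c2 = Outer.C(inner=c1, x=tag(3)).tag("out", 1)

# Mirrors test_tagcontain above: tags set on the outer config do not show up
# on the inner one.
assert c1.tags() == {}
assert c2.tags() == {"x": 3, "out": 1}
```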
experimaestro/tests/test_tasks.py CHANGED

@@ -29,8 +29,8 @@ from .definitions_types import IntegerTask, FloatTask
 
 def test_task_types():
     with TemporaryExperiment("simple"):
-        assert IntegerTask(value=5).submit().__xpm__.job.wait() == JobState.DONE
-        assert FloatTask(value=5.1).submit().__xpm__.job.wait() == JobState.DONE
+        assert IntegerTask.C(value=5).submit().__xpm__.job.wait() == JobState.DONE
+        assert FloatTask.C(value=5.1).submit().__xpm__.job.wait() == JobState.DONE
 
 
 def test_simple_task():
@@ -38,11 +38,11 @@ def test_simple_task():
     assert isinstance(workdir, Path)
     with TemporaryExperiment("helloworld", workdir=workdir, maxwait=20):
         # Submit the tasks
-        hello = Say(word="hello").submit()
-        world = Say(word="world").submit()
+        hello = Say.C(word="hello").submit()
+        world = Say.C(word="world").submit()
 
         # Concat will depend on the two first tasks
-        concat = Concat(strings=[hello, world]).submit()
+        concat = Concat.C(strings=[hello, world]).submit()
 
         assert concat.__xpm__.job.state == JobState.DONE
         assert Path(concat.stdout()).read_text() == "HELLO WORLD\n"
@@ -51,16 +51,16 @@ def test_simple_task():
 
 def test_not_submitted():
     """A not submitted task should not be accepted as an argument"""
     with TemporaryExperiment("helloworld", maxwait=2):
-        hello = Say(word="hello")
+        hello = Say.C(word="hello")
         with pytest.raises(ValueError):
-            Concat(strings=[hello])
+            Concat.C(strings=[hello])
 
 
 def test_fail_simple():
     """Failing task... should fail"""
     with pytest.raises(FailedExperiment):
         with TemporaryExperiment("failing", maxwait=20):
-            fail = Fail().submit()
+            fail = Fail.C().submit()
             fail.touch()
 
 
@@ -70,8 +70,8 @@ def test_foreign_type():
         # Submit the tasks
         from .tasks.foreign import ForeignClassB2
 
-        b = ForeignClassB2(x=1, y=2)
-        a = ForeignTaskA(b=b).submit()
+        b = ForeignClassB2.C(x=1, y=2)
+        a = ForeignTaskA.C(b=b).submit()
 
         assert a.__xpm__.job.wait() == JobState.DONE
         assert a.stdout().read_text().strip() == "1"
@@ -81,8 +81,8 @@ def test_fail_dep():
     """Failing task... should cancel dependent"""
     with pytest.raises(FailedExperiment):
         with TemporaryExperiment("failingdep"):
-            fail = Fail().submit()
-            dep = FailConsumer(fail=fail).submit()
+            fail = Fail.C().submit()
+            dep = FailConsumer.C(fail=fail).submit()
             fail.touch()
 
     assert fail.__xpm__.job.state == JobState.ERROR
@@ -92,14 +92,14 @@ def test_fail_dep():
 def test_unknown_attribute():
     """No check when setting attributes while executing"""
     with TemporaryExperiment("unknown"):
-        method = SetUnknown().submit()
+        method = SetUnknown.C().submit()
 
         assert method.__xpm__.job.wait() == JobState.DONE
 
 
 def test_function():
     with TemporaryExperiment("function"):
-        method = Method(a=1).submit()
+        method = Method.C(a=1).submit()
 
         assert method.__xpm__.job.wait() == JobState.DONE
 
@@ -111,7 +111,7 @@ def test_done():
 
 
 def restart_function(xp):
-    restart.Restart().submit()
+    restart.Restart.C().submit()
 
 
 @pytest.mark.parametrize("terminate", restart.TERMINATES_FUNC)
@@ -123,8 +123,8 @@ def test_restart(terminate):
 def test_submitted_twice():
     """Check that a job cannot be submitted twice within the same experiment"""
     with TemporaryExperiment("duplicate", maxwait=20):
-        task1 = SimpleTask(x=1).submit()
-        task2 = SimpleTask(x=1).submit()
+        task1 = SimpleTask.C(x=1).submit()
+        task2 = SimpleTask.C(x=1).submit()
         assert task1 is task2, f"{id(task1)} != {id(task2)}"
 
 
@@ -132,7 +132,7 @@ def test_configcache():
     """Test a configuration cache"""
 
     with TemporaryExperiment("configcache", maxwait=20):
-        task = CacheConfigTask(data=CacheConfig()).submit()
+        task = CacheConfigTask.C(data=CacheConfig.C()).submit()
 
         assert task.__xpm__.job.wait() == JobState.DONE
 
@@ -174,9 +174,13 @@ def test_tasks_deprecated_inner():
     the new class"""
     with TemporaryExperiment("deprecated", maxwait=0) as xp:
         # --- Check that paths are really different first
-        task_new = TaskWithDeprecated(p=NewConfig()).submit(
-
-
+        task_new = TaskWithDeprecated.C(p=NewConfig.C()).submit(
+            run_mode=RunMode.DRY_RUN
+        )
+        task_old = TaskWithDeprecated.C(p=OldConfig.C()).submit(
+            run_mode=RunMode.DRY_RUN
+        )
+        task_deprecated = TaskWithDeprecated.C(p=DeprecatedConfig.C()).submit(
             run_mode=RunMode.DRY_RUN
         )
 
@@ -197,7 +201,7 @@ def test_tasks_deprecated_inner():
         # --- Now check that automatic linking is performed
 
         # Run old task with deprecated configuration
-        task_old = TaskWithDeprecated(p=OldConfig()).submit()
+        task_old = TaskWithDeprecated.C(p=OldConfig.C()).submit()
         task_old.wait()
         task_old_path = task_old.stdout().parent
 
@@ -229,9 +233,9 @@ def test_tasks_deprecated():
     the new class"""
     with TemporaryExperiment("deprecated", maxwait=20) as xp:
         # Check that paths are really different first
-        task_new = NewTask(x=1).submit(run_mode=RunMode.DRY_RUN)
-        task_old = OldTask(x=1).submit(run_mode=RunMode.DRY_RUN)
-        task_deprecated = DeprecatedTask(x=1).submit(run_mode=RunMode.DRY_RUN)
+        task_new = NewTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
+        task_old = OldTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
+        task_deprecated = DeprecatedTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
 
         assert (
             task_new.stdout() != task_old.stdout()
@@ -241,7 +245,7 @@ def test_tasks_deprecated():
         ), "Deprecated path should be the same as non deprecated"
 
         # OK, now check that automatic linking is performed
-        task_old = OldTask(x=1).submit()
+        task_old = OldTask.C(x=1).submit()
         task_old.wait()
         task_old_path = task_old.stdout().parent
 
@@ -270,7 +274,7 @@ class HookedTask(Task):
 
 
 def test_task_submit_hook():
-    result = HookedTask().submit(run_mode=RunMode.DRY_RUN)
+    result = HookedTask.C().submit(run_mode=RunMode.DRY_RUN)
     assert (
         result.__xpm__.task.__xpm__.job.environ.get("JAVA_HOME", None)
         == "THE_JAVA_HOME"
@@ -301,17 +305,17 @@ class MyLightweightTask(Task):
 
 def test_task_lightweight():
     with TemporaryExperiment("lightweight", maxwait=20):
-        x = LightweightConfig()
-        lwtask = LightweightTask(x=x)
+        x = LightweightConfig.C()
+        lwtask = LightweightTask.C(x=x)
         assert (
-            MyLightweightTask(x=x).add_pretasks(lwtask).submit().__xpm__.job.wait()
+            MyLightweightTask.C(x=x).add_pretasks(lwtask).submit().__xpm__.job.wait()
             == JobState.DONE
         ), "Pre-tasks should be executed"
 
-        x_2 = LightweightConfig()
-        lwtask_2 = LightweightTask(x=x)
+        x_2 = LightweightConfig.C()
+        lwtask_2 = LightweightTask.C(x=x)
         assert (
-            MyLightweightTask(x=x_2.add_pretasks(lwtask_2))
+            MyLightweightTask.C(x=x_2.add_pretasks(lwtask_2))
             .add_pretasks(lwtask_2)
             .submit()
             .__xpm__.job.wait()
@@ -321,9 +325,9 @@ def test_task_lightweight():
 
 def test_task_lightweight_init():
     with TemporaryExperiment("lightweight_init", maxwait=20):
-        x = LightweightConfig()
-        lwtask = LightweightTask(x=x)
+        x = LightweightConfig.C()
+        lwtask = LightweightTask.C(x=x)
         assert (
-            MyLightweightTask(x=x).submit(init_tasks=[lwtask]).__xpm__.job.wait()
+            MyLightweightTask.C(x=x).submit(init_tasks=[lwtask]).__xpm__.job.wait()
            == JobState.DONE
        ), "Init tasks should be executed"
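Two patterns recur in the test_tasks.py hunks above and are unchanged apart from `.C()`: a submitted task's result can be passed as a parameter to another task (an unsubmitted task is rejected, per test_not_submitted), and `run_mode=RunMode.DRY_RUN` builds the job without executing it. The sketch below uses hypothetical stand-ins for the Say/Concat tasks; only `.C()`, `.submit()`, and the `RunMode` name come from the diffs, and the import paths are assumptions.

```python
# Hypothetical tasks standing in for Say/Concat; their definitions and the
# import paths are assumptions, the submit calls mirror the tests above.
from typing import List

from experimaestro import Param, RunMode, Task


class Say(Task):
    word: Param[str]

    def execute(self):
        print(self.word.upper())


class Concat(Task):
    strings: Param[List[Say]]

    def execute(self):
        ...  # read and join the outputs of the Say tasks


# Inside an experiment context (not shown), 2.0.0rc0 style:
hello = Say.C(word="hello").submit()
world = Say.C(word="world").submit()
concat = Concat.C(strings=[hello, world]).submit()   # depends on both Say jobs

# As in the tests above, a dry-run submit builds the job without running it:
probe = Say.C(word="hello").submit(run_mode=RunMode.DRY_RUN)
```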
experimaestro/tests/test_tokens.py CHANGED

@@ -32,7 +32,7 @@ def token_experiment(xp, token, ntasks=3):
 
     tasks = []
     for it in range(ntasks):
-        task = TokenTask(path=path, x=it)
+        task = TokenTask.C(path=path, x=it)
         if token:
             task.add_dependencies(token.dependency(1))
         tasks.append(task.submit())
@@ -86,7 +86,7 @@ def test_token_cleanup():
     with TemporaryExperiment("token_cleanup", maxwait=20) as xp:
         token = CounterToken("token-cleanup", xp.workdir / "token-cleanup", 1)
 
-        task = dummy_task(x=1)
+        task = dummy_task.C(x=1)
         dependency = token.dependency(1)
         task.add_dependencies(dependency)
         # Just to create the directory
@@ -98,7 +98,7 @@ def test_token_cleanup():
         # The absence of process should be detected right away
         logging.info("Lock without process")
         TokenFile.create(dependency)
-        task2 = dummy_task(x=2)
+        task2 = dummy_task.C(x=2)
         task2.add_dependencies(token.dependency(1)).submit()
         xp.wait()
 
@@ -118,7 +118,7 @@ def test_token_cleanup():
         p1 = subprocess.Popen(command)
         job.pidpath.write_text(json.dumps({"pid": p1.pid, "type": "local"}))
 
-        task3 = dummy_task(x=3)
+        task3 = dummy_task.C(x=3)
         task3.add_dependencies(token.dependency(1)).submit()
 
         # Ends the script "waitforfile.py"
@@ -136,7 +136,9 @@ def test_token_monitor():
 
     def run(xp, x, path):
         token = xp.workspace.connector.createtoken("test-token-monitor", 1)
-        task =
+        task = (
+            TokenTask.C(path=path, x=x).add_dependencies(token.dependency(1)).submit()
+        )
         return task
 
     with TemporaryExperiment("tokens1", maxwait=20, port=0) as xp1, TemporaryExperiment(
@@ -241,7 +243,7 @@ def test_token_process():
 
 def restart_function(xp):
     token = CounterToken("restart-token", xp.workdir / "token", 1)
-    token(1, restart.Restart()).submit()
+    token(1, restart.Restart.C()).submit()
 
 
 @pytest.mark.parametrize("terminate", restart.TERMINATES_FUNC)
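The token hunks keep the dependency API as it was: a token yields a dependency via `token.dependency(n)`, which is attached to a task with `add_dependencies(...)` before submission. A minimal sketch under the new constructor follows; the task class and the way the token is obtained are assumptions, the dependency calls come from the hunks.

```python
# Hypothetical task; token.dependency()/add_dependencies() mirror the tests above.
from experimaestro import Param, Task


class TokenTask(Task):
    x: Param[int]

    def execute(self):
        ...  # work that must respect the token's capacity


# `token` is assumed to have been created beforehand (e.g. via a CounterToken
# or the workspace connector, as in the tests above), inside an experiment.
def submit_with_token(token, x: int):
    return TokenTask.C(x=x).add_dependencies(token.dependency(1)).submit()
```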
experimaestro/tests/test_types.py CHANGED

@@ -26,17 +26,17 @@ def test_multiple_inheritance():
 
     for C in (C1, C2):
         logging.info("Testing %s", C)
-        ctype = C.
+        ctype = C.__getxpmtype__()
         assert issubclass(C, A)
         assert issubclass(C, B)
         assert issubclass(C, B1)
 
-        assert ctype.
+        assert ctype.value_type == C.__getxpmtype__().value_type
 
-        assert issubclass(C.
-        assert issubclass(C.
-        assert issubclass(C.
-        assert not issubclass(C.
+        assert issubclass(C.__getxpmtype__().value_type, B1.__getxpmtype__().value_type)
+        assert issubclass(C.__getxpmtype__().value_type, B.__getxpmtype__().value_type)
+        assert issubclass(C.__getxpmtype__().value_type, A.__getxpmtype__().value_type)
+        assert not issubclass(C.__getxpmtype__().value_type, ConfigMixin)
 
 
 def test_missing_hierarchy():
@@ -49,7 +49,7 @@ def test_missing_hierarchy():
     class B(A1):
         pass
 
-    B.
+    B.__getxpmtype__()
 
     assert issubclass(B, A)
     assert issubclass(B, A1)
@@ -59,7 +59,7 @@ def test_types_union():
     class A(Config):
         x: Param[Union[int, str]]
 
-    A(x=1)
-    A(x="hello")
+    A.C(x=1)
+    A.C(x="hello")
     with pytest.raises(ValueError):
-        A(x=[])
+        A.C(x=[])
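Beyond the `.C()` renaming, the test_types hunks switch the type-introspection calls to `__getxpmtype__()` and its `value_type` attribute (the removed lines are truncated in the diff above, so the 1.x accessor they replace is not fully visible). A short sketch of the new-style introspection with a hypothetical hierarchy:

```python
# Hypothetical configs; __getxpmtype__() and value_type are the accessors
# used by the updated tests above, the import path is an assumption.
from experimaestro import Config


class A(Config):
    pass


class B(A):
    pass


btype = B.__getxpmtype__()          # experimaestro type descriptor for B
# value_type relates the descriptors back to Python classes, as asserted above:
assert issubclass(btype.value_type, A.__getxpmtype__().value_type)
```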
experimaestro/tests/test_validation.py CHANGED

@@ -36,22 +36,22 @@ class C(Config):
 
 
 def test_validation_simple():
-    expect_validate(A(value=1))
+    expect_validate(A.C(value=1))
 
 
 def test_validation_missing():
-    expect_notvalidate(A())
+    expect_notvalidate(A.C())
 
 
 def test_validation_simple_nested():
-    b = B()
-    b.a = A(value=1)
+    b = B.C()
+    b.a = A.C(value=1)
     expect_validate(b)
 
 
 def test_validation_missing_nested():
-    b = B()
-    b.a = A()
+    b = B.C()
+    b.a = A.C()
     expect_notvalidate(b)
 
 
@@ -68,11 +68,11 @@ def test_validation_type():
         __xpmid__ = valns.type.c
 
     with pytest.raises(ValueError):
-        C(a=B())
+        C.C(a=B.C())
 
     with pytest.raises(ValueError):
-        c = C()
-        c.a = B()
+        c = C.C()
+        c.a = B.C()
 
 
 def test_validation_subtype():
@@ -86,7 +86,7 @@ def test_validation_subtype():
         __xpmid__ = valns.subtype.b
         a: Param[A]
 
-    expect_validate(B(a=A1()))
+    expect_validate(B.C(a=A1.C()))
 
 
 def test_validation_path_generator():
@@ -96,7 +96,7 @@ def test_validation_path_generator():
         __xpmid__ = valns.path.a
         value: Meta[Path] = field(default_factory=PathGenerator("file.txt"))
 
-    a = A()
+    a = A.C()
     a.__xpm__.validate()
     with TemporaryExperiment("constant") as xp:
         jobcontext = Job(a)
@@ -116,7 +116,7 @@ def test_validation_constant():
         __xpmid__ = valns.constant.a
         value: Constant[int] = 1
 
-    a = A()
+    a = A.C()
     a.__xpm__.validate()
     with TemporaryExperiment("constant"):
         joba = Job(a)
@@ -133,7 +133,7 @@ class Child(Parent):
 
 
 def test_validation_child():
-    expect_validate(Child(x=1))
+    expect_validate(Child.C(x=1))
 
 
 # --- Path argument checks
@@ -144,7 +144,7 @@ class PathParent(Config):
 
 
 def test_validation_path_option():
-    c = PathParent()
+    c = PathParent.C()
     expect_validate(c)
 
 
@@ -157,7 +157,7 @@ def test_validation_seal():
     class A(Config):
         a: Param[int]
 
-    a = A(a=2)
+    a = A.C(a=2)
     a.__xpm__.seal(EmptyContext())
 
     with pytest.raises(AttributeError):
@@ -174,10 +174,10 @@ def test_validation_validation_enum():
     class EnumConfig(Config):
         a: Param[EnumParam]
 
-    expect_validate(EnumConfig(a=EnumParam.FIRST))
+    expect_validate(EnumConfig.C(a=EnumParam.FIRST))
 
     try:
-        EnumConfig(a=1)
+        EnumConfig.C(a=1)
         assert False, "Enum value should be rejected"
     except AssertionError:
         pass
@@ -199,7 +199,7 @@ class TaskConfigConsumer(Config):
 
 
 def test_validation_taskargument():
-    x = taskconfig()
+    x = taskconfig.C()
     with TemporaryExperiment("fake"):
         x.submit(run_mode=RunMode.DRY_RUN)
-        expect_validate(TaskConfigConsumer(x=x))
+        expect_validate(TaskConfigConsumer.C(x=x))
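The validation hunks change only the construction side; validation itself is still driven through `__xpm__.validate()` (directly, or via the `expect_validate`/`expect_notvalidate` test helpers). A small sketch with a hypothetical config follows; how a validation failure surfaces is not shown in this diff, so only the passing call is made here.

```python
# Hypothetical config; .C() and __xpm__.validate() are the calls used above,
# the import path is an assumption.
from experimaestro import Config, Param


class A(Config):
    value: Param[int]


a = A.C(value=1)
a.__xpm__.validate()   # all required parameters are set

incomplete = A.C()     # `value` is missing; the tests above expect this to be
                       # rejected by validation (via expect_notvalidate)
```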
experimaestro/tests/token_reschedule.py CHANGED

@@ -31,7 +31,7 @@ if __name__ == "__main__":
         logging.info("Reschedule with token [%s]: starting task in %s", x, workdir)
         token = xp.workspace.connector.createtoken("test-token-reschedule", 1)
         task = (
-            TokenTask(path=lockingpath, x=int(x))
+            TokenTask.C(path=lockingpath, x=int(x))
             .add_dependencies(token.dependency(1))
             .submit()
         )