experimaestro 1.11.1__py3-none-any.whl → 2.0.0a8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of experimaestro might be problematic; see the package registry's advisory page for more details.

Files changed (52)
  1. experimaestro/annotations.py +1 -1
  2. experimaestro/cli/__init__.py +10 -11
  3. experimaestro/cli/progress.py +269 -0
  4. experimaestro/connectors/__init__.py +2 -2
  5. experimaestro/core/arguments.py +20 -1
  6. experimaestro/core/identifier.py +21 -7
  7. experimaestro/core/objects/config.py +174 -274
  8. experimaestro/core/objects/config_walk.py +4 -6
  9. experimaestro/core/objects.pyi +2 -6
  10. experimaestro/core/serializers.py +1 -8
  11. experimaestro/core/types.py +35 -57
  12. experimaestro/launcherfinder/registry.py +3 -3
  13. experimaestro/mkdocs/base.py +6 -8
  14. experimaestro/notifications.py +12 -3
  15. experimaestro/progress.py +406 -0
  16. experimaestro/scheduler/__init__.py +18 -1
  17. experimaestro/scheduler/base.py +87 -906
  18. experimaestro/scheduler/experiment.py +387 -0
  19. experimaestro/scheduler/jobs.py +475 -0
  20. experimaestro/scheduler/signal_handler.py +32 -0
  21. experimaestro/scheduler/state.py +1 -1
  22. experimaestro/server/__init__.py +36 -5
  23. experimaestro/settings.py +4 -2
  24. experimaestro/tests/launchers/common.py +2 -2
  25. experimaestro/tests/restart.py +1 -1
  26. experimaestro/tests/tasks/all.py +7 -0
  27. experimaestro/tests/test_checkers.py +2 -2
  28. experimaestro/tests/test_dependencies.py +11 -17
  29. experimaestro/tests/test_experiment.py +3 -3
  30. experimaestro/tests/test_file_progress.py +425 -0
  31. experimaestro/tests/test_file_progress_integration.py +477 -0
  32. experimaestro/tests/test_generators.py +93 -0
  33. experimaestro/tests/test_identifier.py +155 -135
  34. experimaestro/tests/test_instance.py +13 -18
  35. experimaestro/tests/test_objects.py +9 -32
  36. experimaestro/tests/test_outputs.py +6 -6
  37. experimaestro/tests/test_param.py +14 -14
  38. experimaestro/tests/test_progress.py +4 -4
  39. experimaestro/tests/test_serializers.py +0 -59
  40. experimaestro/tests/test_tags.py +15 -15
  41. experimaestro/tests/test_tasks.py +42 -51
  42. experimaestro/tests/test_tokens.py +8 -6
  43. experimaestro/tests/test_types.py +10 -10
  44. experimaestro/tests/test_validation.py +19 -19
  45. experimaestro/tests/token_reschedule.py +1 -1
  46. experimaestro/tools/diff.py +8 -1
  47. experimaestro/typingutils.py +11 -2
  48. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0a8.dist-info}/METADATA +3 -2
  49. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0a8.dist-info}/RECORD +52 -44
  50. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0a8.dist-info}/WHEEL +1 -1
  51. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0a8.dist-info}/entry_points.txt +0 -0
  52. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0a8.dist-info/licenses}/LICENSE +0 -0
@@ -1,11 +1,9 @@
1
- import logging
2
1
  from pathlib import Path
3
2
 
4
3
  import pytest
5
4
  from experimaestro import Config, Task, Annotated, copyconfig, default
6
5
  from experimaestro.core.arguments import Param
7
6
  from experimaestro.core.objects import ConfigMixin
8
- from experimaestro.core.types import XPMValue
9
7
  from experimaestro.generators import pathgenerator
10
8
  from experimaestro.scheduler.workspace import RunMode
11
9
  from experimaestro.tests.utils import TemporaryExperiment
@@ -28,7 +26,7 @@ def test_object_default():
28
26
 
29
27
 
30
28
  class B(Config):
31
- a: Param[A] = A(x=3)
29
+ a: Param[A] = A.C(x=3)
32
30
 
33
31
 
34
32
  class C(B):
@@ -40,27 +38,26 @@ class D(B, A):
40
38
 
41
39
 
42
40
  class DefaultAnnotationConfig(Config):
43
- a: Annotated[A, default(A(x=3))]
41
+ a: Annotated[A, default(A.C(x=3))]
44
42
 
45
43
 
46
44
  def test_object_config_default():
47
45
  """Test default configurations as default values"""
48
- b = B()
46
+ b = B.C()
49
47
  assert b.a.x == 3
50
48
 
51
- c = C()
49
+ c = C.C()
52
50
  assert c.a.x == 3
53
51
 
54
- annotationConfig = DefaultAnnotationConfig()
52
+ annotationConfig = DefaultAnnotationConfig.C()
55
53
  assert annotationConfig.a.x == 3
56
54
 
57
55
 
58
56
  def test_hierarchy():
59
57
  """Test if the object hierarchy is OK"""
60
- OA = A.__getxpmtype__().objecttype
61
- OB = B.__getxpmtype__().objecttype
62
- OC = C.__getxpmtype__().objecttype
63
- OD = D.__getxpmtype__().objecttype
58
+ OA = A.__getxpmtype__().value_type
59
+ OB = B.__getxpmtype__().value_type
60
+ OC = C.__getxpmtype__().value_type
64
61
 
65
62
  assert issubclass(A, Config)
66
63
  assert issubclass(B, Config)
@@ -72,11 +69,6 @@ def test_hierarchy():
72
69
 
73
70
  assert issubclass(C, B)
74
71
 
75
- assert OA.__bases__ == (A, XPMValue)
76
- assert OB.__bases__ == (B, XPMValue)
77
- assert OC.__bases__ == (C, B.XPMValue)
78
- assert OD.__bases__ == (D, B.XPMValue, A.XPMValue)
79
-
80
72
 
81
73
  class CopyConfig(Task):
82
74
  path: Annotated[Path, pathgenerator("hello.txt")]
@@ -84,7 +76,7 @@ class CopyConfig(Task):
84
76
 
85
77
 
86
78
  def test_copyconfig(xp):
87
- b = CopyConfig(x=2)
79
+ b = CopyConfig.C(x=2)
88
80
 
89
81
  b.submit()
90
82
 
@@ -92,18 +84,3 @@ def test_copyconfig(xp):
92
84
 
93
85
  assert copy_b.x == b.x
94
86
  assert "path" not in copy_b.__xpm__.values
95
-
96
-
97
- def test_direct_config_warns(caplog):
98
- """Test that using a building Config directly raises a warning"""
99
- message = "Config.__new__ is deprecated"
100
-
101
- with caplog.at_level(logging.WARNING):
102
- A(x=3)
103
- assert message in caplog.text
104
-
105
- caplog.clear()
106
-
107
- with caplog.at_level(logging.WARNING):
108
- A.C(x=3)
109
- assert message not in caplog.text
@@ -32,17 +32,17 @@ class MainB(Task):
32
32
 
33
33
 
34
34
  def test_output_taskoutput():
35
- a = A(b=B())
36
- output, ioutput = Main(a=a).submit(run_mode=RunMode.DRY_RUN)
35
+ a = A.C(b=B.C())
36
+ output, ioutput = Main.C(a=a).submit(run_mode=RunMode.DRY_RUN)
37
37
 
38
38
  # Direct
39
- Main(a=output)
39
+ Main.C(a=output)
40
40
 
41
41
  # Via getattr
42
- Main(a=A(b=output.b))
42
+ Main.C(a=A.C(b=output.b))
43
43
 
44
44
  # Via getitem
45
- Main(a=ioutput["a"])
45
+ Main.C(a=ioutput["a"])
46
46
 
47
47
  # Now, submits
48
- Main(a=output).submit(run_mode=RunMode.DRY_RUN)
48
+ Main.C(a=output).submit(run_mode=RunMode.DRY_RUN)
@@ -148,7 +148,7 @@ def test_generatedpath():
148
148
  b: Param[B]
149
149
 
150
150
  basepath = Path("/tmp/testconflict")
151
- c = C(b=B(a=A())).instance(DirectoryContext(basepath))
151
+ c = C.C(b=B.C(a=A.C())).instance(DirectoryContext(basepath))
152
152
  assert c.b.a.path.relative_to(basepath) == Path("out/b/a/test.txt")
153
153
 
154
154
 
@@ -158,13 +158,13 @@ def test_config_class():
158
158
  class A(Config):
159
159
  x: Param[int]
160
160
 
161
- a = A(x=1)
161
+ a = A.C(x=1)
162
162
  assert a.x == 1
163
163
 
164
164
  class B(A):
165
165
  y: Param[int]
166
166
 
167
- b = B(x=1, y=2)
167
+ b = B.C(x=1, y=2)
168
168
  assert b.x == 1
169
169
  assert b.y == 2
170
170
 
@@ -174,7 +174,7 @@ def test_config_class():
174
174
  class C(Config):
175
175
  d: Param[D]
176
176
 
177
- c = C(d=D(x=1))
177
+ c = C.C(d=D.C(x=1))
178
178
  assert c.d.x == 1
179
179
 
180
180
 
@@ -182,7 +182,7 @@ def test_constant():
182
182
  class A(Config):
183
183
  x: Constant[int] = 2
184
184
 
185
- a = A()
185
+ a = A.C()
186
186
  assert a.x == 2, "Constant value not set"
187
187
 
188
188
  # We should not be able to change the value
@@ -202,7 +202,7 @@ class EnumConfig(Config):
202
202
  def test_param_enum():
203
203
  """Test for enum values"""
204
204
 
205
- a = EnumConfig(x=EnumParam.OTHER)
205
+ a = EnumConfig.C(x=EnumParam.OTHER)
206
206
  _a = serializeCycle(a)
207
207
 
208
208
  assert isinstance(_a, EnumConfig)
@@ -216,7 +216,7 @@ def test_inheritance():
216
216
  class B(A):
217
217
  y: Param[int] = 3
218
218
 
219
- b = B()
219
+ b = B.C()
220
220
  b.x = 2
221
221
  assert b.__xpm__.values["y"] == 3
222
222
  assert b.__xpm__.values["x"] == 2
@@ -247,7 +247,7 @@ def test_param_dict():
247
247
  assert isinstance(xarg.keytype, StrType)
248
248
  assert isinstance(xarg.valuetype, IntType)
249
249
 
250
- A(x={"OK": 1})
250
+ A.C(x={"OK": 1})
251
251
 
252
252
  with pytest.raises(TypeError):
253
253
  A(x={"wrong": "string"})
@@ -263,7 +263,7 @@ class ConfigWithDefault(Config):
263
263
 
264
264
 
265
265
  def test_param_default():
266
- assert ConfigWithDefault().x == 1
266
+ assert ConfigWithDefault.C().x == 1
267
267
 
268
268
 
269
269
  class ConfigWithDefaultFactory(Config):
@@ -271,7 +271,7 @@ class ConfigWithDefaultFactory(Config):
271
271
 
272
272
 
273
273
  def test_param_default_factory():
274
- value = ConfigWithDefaultFactory()
274
+ value = ConfigWithDefaultFactory.C()
275
275
  context = DirectoryContext(Path("/__fakepath__"))
276
276
  value.__xpm__.seal(context)
277
277
  assert value.x == 1
@@ -299,15 +299,15 @@ def test_param_default_set():
299
299
  class A0(Config):
300
300
  x: Param[int] = 2
301
301
 
302
- assert A0().instance().x == 2
303
- assert A0(x=3).instance().x == 3
302
+ assert A0.C().instance().x == 2
303
+ assert A0.C(x=3).instance().x == 3
304
304
 
305
305
  class A(Config):
306
306
  x: Param[int] = field(default_factory=lambda: 2)
307
307
 
308
- assert A().instance().x == 2
308
+ assert A.C().instance().x == 2
309
309
 
310
- assert A(x=3).instance().x == 3
310
+ assert A.C(x=3).instance().x == 3
311
311
 
312
312
 
313
313
  # --- Handling help annotations
@@ -72,7 +72,7 @@ def test_progress_basic():
72
72
  listener = ProgressListener()
73
73
  xp.scheduler.addlistener(listener)
74
74
 
75
- out = ProgressingTask().submit()
75
+ out = ProgressingTask.C().submit()
76
76
  path = out.path # type: Path
77
77
  job = out.__xpm__.job
78
78
 
@@ -103,7 +103,7 @@ def test_progress_multiple():
103
103
  listener1 = ProgressListener()
104
104
  xp1.scheduler.addlistener(listener1)
105
105
 
106
- out = ProgressingTask().submit()
106
+ out = ProgressingTask.C().submit()
107
107
  path = out.path # type: Path
108
108
  job = out.__xpm__.job
109
109
 
@@ -122,7 +122,7 @@ def test_progress_multiple():
122
122
  listener2 = ProgressListener()
123
123
  xp2.scheduler.addlistener(listener2)
124
124
 
125
- out = ProgressingTask().submit()
125
+ out = ProgressingTask.C().submit()
126
126
  job = out.__xpm__.job # type: CommandLineJob
127
127
  logger.info("Waiting for job to start (2)")
128
128
  while job.state.notstarted():
@@ -217,7 +217,7 @@ def test_progress_nested():
217
217
  listener = ProgressListener()
218
218
  xp.scheduler.addlistener(listener)
219
219
 
220
- out = NestedProgressingTask().submit()
220
+ out = NestedProgressingTask.C().submit()
221
221
  job = out.__xpm__.job
222
222
  path = out.path # type: Path
223
223
 
@@ -1,71 +1,12 @@
1
1
  from typing import Optional
2
2
  from experimaestro import (
3
3
  Config,
4
- Task,
5
4
  Param,
6
- SerializationLWTask,
7
- copyconfig,
8
5
  state_dict,
9
6
  from_state_dict,
10
7
  )
11
8
  from experimaestro.core.context import SerializationContext
12
9
  from experimaestro.core.objects import ConfigMixin
13
- from experimaestro.tests.utils import TemporaryExperiment
14
-
15
-
16
- class SubModel(Config):
17
- pass
18
-
19
-
20
- class Model(Config):
21
- submodel: Param[SubModel]
22
-
23
- def __post_init__(self):
24
- self.initialized = False
25
- self.submodel.initialized = False
26
-
27
-
28
- class LoadModel(SerializationLWTask):
29
- def execute(self):
30
- self.value.initialized = True
31
- self.value.submodel.initialized = True
32
-
33
-
34
- class Trainer(Task):
35
- model: Param[Config]
36
-
37
- def task_outputs(self, dep):
38
- model = copyconfig(self.model)
39
- return model.add_pretasks(dep(LoadModel(value=model)))
40
-
41
- def execute(self):
42
- assert not self.model.initialized, "Model not initialized"
43
-
44
-
45
- class Evaluate(Task):
46
- model: Param[Config]
47
- is_submodel: Param[bool] = False
48
-
49
- def execute(self):
50
- assert self.model.initialized, "Model not initialized"
51
- if self.is_submodel:
52
- assert isinstance(self.model, SubModel)
53
- else:
54
- assert isinstance(self.model, Model)
55
-
56
-
57
- def test_serializers_xp():
58
- with TemporaryExperiment("serializers", maxwait=20, port=0):
59
- model = Model(submodel=SubModel())
60
- trained_model: Model = Trainer(model=model).submit()
61
-
62
- # Use the model itself
63
- Evaluate(model=trained_model).submit()
64
-
65
- # Use a submodel
66
- Evaluate(model=trained_model.submodel, is_submodel=True).add_pretasks_from(
67
- trained_model
68
- ).submit()
69
10
 
70
11
 
71
12
  class Object1(Config):
@@ -21,20 +21,20 @@ class Config2(Config):
21
21
 
22
22
 
23
23
  def test_tag():
24
- c = Config1(x=5)
24
+ c = Config1.C(x=5)
25
25
  c.tag("x", 5)
26
26
  assert c.tags() == {"x": 5}
27
27
 
28
28
 
29
29
  def test_taggedvalue():
30
- c = Config1(x=tag(5))
30
+ c = Config1.C(x=tag(5))
31
31
  assert c.tags() == {"x": 5}
32
32
 
33
33
 
34
34
  def test_tagcontain():
35
35
  """Test that tags are not propagated to the upper configurations"""
36
- c1 = Config1(x=5)
37
- c2 = Config2(c=c1, x=tag(3)).tag("out", 1)
36
+ c1 = Config1.C(x=5)
37
+ c2 = Config2.C(c=c1, x=tag(3)).tag("out", 1)
38
38
  assert c1.tags() == {}
39
39
  assert c2.tags() == {"x": 3, "out": 1}
40
40
 
@@ -50,17 +50,17 @@ def test_inneroutput():
50
50
  class Evaluate(Task):
51
51
  task: Param[MyTask]
52
52
 
53
- output = Output().tag("hello", "world")
54
- task = MyTask(outputs={}, mainoutput=output)
53
+ output = Output.C().tag("hello", "world")
54
+ task = MyTask.C(outputs={}, mainoutput=output)
55
55
  task.submit(run_mode=RunMode.DRY_RUN)
56
56
  assert output.tags() == {"hello": "world"}
57
57
 
58
- output = Output().tag("hello", "world")
59
- task = MyTask(outputs={"a": output}, mainoutput=Output())
58
+ output = Output.C().tag("hello", "world")
59
+ task = MyTask.C(outputs={"a": output}, mainoutput=Output.C())
60
60
  task.submit(run_mode=RunMode.DRY_RUN)
61
61
  assert output.tags() == {"hello": "world"}
62
62
 
63
- evaluate = Evaluate(task=task).submit(run_mode=RunMode.DRY_RUN)
63
+ evaluate = Evaluate.C(task=task).submit(run_mode=RunMode.DRY_RUN)
64
64
  assert evaluate.__xpm__.tags() == {"hello": "world"}
65
65
 
66
66
 
@@ -80,21 +80,21 @@ def test_tags_init_tasks():
80
80
  x: Param[MyConfig]
81
81
 
82
82
  def task_outputs(self, dep) -> MyConfig:
83
- return dep(MyConfig())
83
+ return dep(MyConfig.C())
84
84
 
85
- init_task = InitTask().tag("hello", "world")
86
- task = MyTask()
85
+ init_task = InitTask.C().tag("hello", "world")
86
+ task = MyTask.C()
87
87
  result = task.submit(run_mode=RunMode.DRY_RUN, init_tasks=[init_task])
88
88
  assert result.tags() == {"hello": "world"}
89
89
 
90
- other_task = TaskWithOutput(x=MyConfig().tag("hello", "world"))
90
+ other_task = TaskWithOutput.C(x=MyConfig.C().tag("hello", "world"))
91
91
  assert other_task.tags() == {"hello": "world"}
92
92
 
93
93
  result = other_task.submit(run_mode=RunMode.DRY_RUN)
94
94
  assert isinstance(result, MyConfig)
95
95
  assert result.tags() == {"hello": "world"}
96
96
 
97
- result = MyTask().submit(run_mode=RunMode.DRY_RUN, init_tasks=[result])
97
+ result = MyTask.C().submit(run_mode=RunMode.DRY_RUN, init_tasks=[result])
98
98
  assert result.tags() == {"hello": "world"}
99
99
 
100
100
 
@@ -115,6 +115,6 @@ def test_objects_tags():
115
115
  x: Param[int]
116
116
 
117
117
  context = DirectoryContext(Path("/__fakepath__"))
118
- a = A(x=tag(1))
118
+ a = A.C(x=tag(1))
119
119
  a.__xpm__.seal(context)
120
120
  assert a.__xpm__.tags() == {"x": 1}
@@ -29,8 +29,8 @@ from .definitions_types import IntegerTask, FloatTask
29
29
 
30
30
  def test_task_types():
31
31
  with TemporaryExperiment("simple"):
32
- assert IntegerTask(value=5).submit().__xpm__.job.wait() == JobState.DONE
33
- assert FloatTask(value=5.1).submit().__xpm__.job.wait() == JobState.DONE
32
+ assert IntegerTask.C(value=5).submit().__xpm__.job.wait() == JobState.DONE
33
+ assert FloatTask.C(value=5.1).submit().__xpm__.job.wait() == JobState.DONE
34
34
 
35
35
 
36
36
  def test_simple_task():
@@ -38,11 +38,11 @@ def test_simple_task():
38
38
  assert isinstance(workdir, Path)
39
39
  with TemporaryExperiment("helloworld", workdir=workdir, maxwait=20):
40
40
  # Submit the tasks
41
- hello = Say(word="hello").submit()
42
- world = Say(word="world").submit()
41
+ hello = Say.C(word="hello").submit()
42
+ world = Say.C(word="world").submit()
43
43
 
44
44
  # Concat will depend on the two first tasks
45
- concat = Concat(strings=[hello, world]).submit()
45
+ concat = Concat.C(strings=[hello, world]).submit()
46
46
 
47
47
  assert concat.__xpm__.job.state == JobState.DONE
48
48
  assert Path(concat.stdout()).read_text() == "HELLO WORLD\n"
@@ -51,16 +51,16 @@ def test_simple_task():
51
51
  def test_not_submitted():
52
52
  """A not submitted task should not be accepted as an argument"""
53
53
  with TemporaryExperiment("helloworld", maxwait=2):
54
- hello = Say(word="hello")
54
+ hello = Say.C(word="hello")
55
55
  with pytest.raises(ValueError):
56
- Concat(strings=[hello])
56
+ Concat.C(strings=[hello])
57
57
 
58
58
 
59
59
  def test_fail_simple():
60
60
  """Failing task... should fail"""
61
61
  with pytest.raises(FailedExperiment):
62
62
  with TemporaryExperiment("failing", maxwait=20):
63
- fail = Fail().submit()
63
+ fail = Fail.C().submit()
64
64
  fail.touch()
65
65
 
66
66
 
@@ -70,8 +70,8 @@ def test_foreign_type():
70
70
  # Submit the tasks
71
71
  from .tasks.foreign import ForeignClassB2
72
72
 
73
- b = ForeignClassB2(x=1, y=2)
74
- a = ForeignTaskA(b=b).submit()
73
+ b = ForeignClassB2.C(x=1, y=2)
74
+ a = ForeignTaskA.C(b=b).submit()
75
75
 
76
76
  assert a.__xpm__.job.wait() == JobState.DONE
77
77
  assert a.stdout().read_text().strip() == "1"
@@ -81,8 +81,8 @@ def test_fail_dep():
81
81
  """Failing task... should cancel dependent"""
82
82
  with pytest.raises(FailedExperiment):
83
83
  with TemporaryExperiment("failingdep"):
84
- fail = Fail().submit()
85
- dep = FailConsumer(fail=fail).submit()
84
+ fail = Fail.C().submit()
85
+ dep = FailConsumer.C(fail=fail).submit()
86
86
  fail.touch()
87
87
 
88
88
  assert fail.__xpm__.job.state == JobState.ERROR
@@ -92,14 +92,14 @@ def test_fail_dep():
92
92
  def test_unknown_attribute():
93
93
  """No check when setting attributes while executing"""
94
94
  with TemporaryExperiment("unknown"):
95
- method = SetUnknown().submit()
95
+ method = SetUnknown.C().submit()
96
96
 
97
97
  assert method.__xpm__.job.wait() == JobState.DONE
98
98
 
99
99
 
100
100
  def test_function():
101
101
  with TemporaryExperiment("function"):
102
- method = Method(a=1).submit()
102
+ method = Method.C(a=1).submit()
103
103
 
104
104
  assert method.__xpm__.job.wait() == JobState.DONE
105
105
 
@@ -111,7 +111,7 @@ def test_done():
111
111
 
112
112
 
113
113
  def restart_function(xp):
114
- restart.Restart().submit()
114
+ restart.Restart.C().submit()
115
115
 
116
116
 
117
117
  @pytest.mark.parametrize("terminate", restart.TERMINATES_FUNC)
@@ -123,16 +123,23 @@ def test_restart(terminate):
123
123
  def test_submitted_twice():
124
124
  """Check that a job cannot be submitted twice within the same experiment"""
125
125
  with TemporaryExperiment("duplicate", maxwait=20):
126
- task1 = SimpleTask(x=1).submit()
127
- task2 = SimpleTask(x=1).submit()
128
- assert task1 is task2, f"{id(task1)} != {id(task2)}"
126
+
127
+ task1 = SimpleTask.C(x=1)
128
+ o1 = task1.submit()
129
+
130
+ task2 = SimpleTask.C(x=1)
131
+ o2 = task2.submit()
132
+
133
+ print(o1)
134
+ assert o1.task is not o2.task
135
+ assert task1.__xpm__.job is task2.__xpm__.job, f"{id(task1)} != {id(task2)}"
129
136
 
130
137
 
131
138
  def test_configcache():
132
139
  """Test a configuration cache"""
133
140
 
134
141
  with TemporaryExperiment("configcache", maxwait=20):
135
- task = CacheConfigTask(data=CacheConfig()).submit()
142
+ task = CacheConfigTask.C(data=CacheConfig.C()).submit()
136
143
 
137
144
  assert task.__xpm__.job.wait() == JobState.DONE
138
145
 
@@ -174,9 +181,13 @@ def test_tasks_deprecated_inner():
174
181
  the new class"""
175
182
  with TemporaryExperiment("deprecated", maxwait=0) as xp:
176
183
  # --- Check that paths are really different first
177
- task_new = TaskWithDeprecated(p=NewConfig()).submit(run_mode=RunMode.DRY_RUN)
178
- task_old = TaskWithDeprecated(p=OldConfig()).submit(run_mode=RunMode.DRY_RUN)
179
- task_deprecated = TaskWithDeprecated(p=DeprecatedConfig()).submit(
184
+ task_new = TaskWithDeprecated.C(p=NewConfig.C()).submit(
185
+ run_mode=RunMode.DRY_RUN
186
+ )
187
+ task_old = TaskWithDeprecated.C(p=OldConfig.C()).submit(
188
+ run_mode=RunMode.DRY_RUN
189
+ )
190
+ task_deprecated = TaskWithDeprecated.C(p=DeprecatedConfig.C()).submit(
180
191
  run_mode=RunMode.DRY_RUN
181
192
  )
182
193
 
@@ -197,7 +208,7 @@ def test_tasks_deprecated_inner():
197
208
  # --- Now check that automatic linking is performed
198
209
 
199
210
  # Run old task with deprecated configuration
200
- task_old = TaskWithDeprecated(p=OldConfig()).submit()
211
+ task_old = TaskWithDeprecated.C(p=OldConfig.C()).submit()
201
212
  task_old.wait()
202
213
  task_old_path = task_old.stdout().parent
203
214
 
@@ -229,9 +240,9 @@ def test_tasks_deprecated():
229
240
  the new class"""
230
241
  with TemporaryExperiment("deprecated", maxwait=20) as xp:
231
242
  # Check that paths are really different first
232
- task_new = NewTask(x=1).submit(run_mode=RunMode.DRY_RUN)
233
- task_old = OldTask(x=1).submit(run_mode=RunMode.DRY_RUN)
234
- task_deprecated = DeprecatedTask(x=1).submit(run_mode=RunMode.DRY_RUN)
243
+ task_new = NewTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
244
+ task_old = OldTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
245
+ task_deprecated = DeprecatedTask.C(x=1).submit(run_mode=RunMode.DRY_RUN)
235
246
 
236
247
  assert (
237
248
  task_new.stdout() != task_old.stdout()
@@ -241,7 +252,7 @@ def test_tasks_deprecated():
241
252
  ), "Deprecated path should be the same as non deprecated"
242
253
 
243
254
  # OK, now check that automatic linking is performed
244
- task_old = OldTask(x=1).submit()
255
+ task_old = OldTask.C(x=1).submit()
245
256
  task_old.wait()
246
257
  task_old_path = task_old.stdout().parent
247
258
 
@@ -270,7 +281,7 @@ class HookedTask(Task):
270
281
 
271
282
 
272
283
  def test_task_submit_hook():
273
- result = HookedTask().submit(run_mode=RunMode.DRY_RUN)
284
+ result = HookedTask.C().submit(run_mode=RunMode.DRY_RUN)
274
285
  assert (
275
286
  result.__xpm__.task.__xpm__.job.environ.get("JAVA_HOME", None)
276
287
  == "THE_JAVA_HOME"
@@ -299,31 +310,11 @@ class MyLightweightTask(Task):
299
310
  assert self.x.data == 1
300
311
 
301
312
 
302
- def test_task_lightweight():
303
- with TemporaryExperiment("lightweight", maxwait=20):
304
- x = LightweightConfig()
305
- lwtask = LightweightTask(x=x)
306
- assert (
307
- MyLightweightTask(x=x).add_pretasks(lwtask).submit().__xpm__.job.wait()
308
- == JobState.DONE
309
- ), "Pre-tasks should be executed"
310
-
311
- x_2 = LightweightConfig()
312
- lwtask_2 = LightweightTask(x=x)
313
- assert (
314
- MyLightweightTask(x=x_2.add_pretasks(lwtask_2))
315
- .add_pretasks(lwtask_2)
316
- .submit()
317
- .__xpm__.job.wait()
318
- == JobState.DONE
319
- ), "Pre-tasks should be run just once"
320
-
321
-
322
313
  def test_task_lightweight_init():
323
314
  with TemporaryExperiment("lightweight_init", maxwait=20):
324
- x = LightweightConfig()
325
- lwtask = LightweightTask(x=x)
315
+ x = LightweightConfig.C()
316
+ lwtask = LightweightTask.C(x=x)
326
317
  assert (
327
- MyLightweightTask(x=x).submit(init_tasks=[lwtask]).__xpm__.job.wait()
318
+ MyLightweightTask.C(x=x).submit(init_tasks=[lwtask]).__xpm__.job.wait()
328
319
  == JobState.DONE
329
320
  ), "Init tasks should be executed"
@@ -32,7 +32,7 @@ def token_experiment(xp, token, ntasks=3):
32
32
 
33
33
  tasks = []
34
34
  for it in range(ntasks):
35
- task = TokenTask(path=path, x=it)
35
+ task = TokenTask.C(path=path, x=it)
36
36
  if token:
37
37
  task.add_dependencies(token.dependency(1))
38
38
  tasks.append(task.submit())
@@ -86,7 +86,7 @@ def test_token_cleanup():
86
86
  with TemporaryExperiment("token_cleanup", maxwait=20) as xp:
87
87
  token = CounterToken("token-cleanup", xp.workdir / "token-cleanup", 1)
88
88
 
89
- task = dummy_task(x=1)
89
+ task = dummy_task.C(x=1)
90
90
  dependency = token.dependency(1)
91
91
  task.add_dependencies(dependency)
92
92
  # Just to create the directory
@@ -98,7 +98,7 @@ def test_token_cleanup():
98
98
  # The absence of process should be detected right away
99
99
  logging.info("Lock without process")
100
100
  TokenFile.create(dependency)
101
- task2 = dummy_task(x=2)
101
+ task2 = dummy_task.C(x=2)
102
102
  task2.add_dependencies(token.dependency(1)).submit()
103
103
  xp.wait()
104
104
 
@@ -118,7 +118,7 @@ def test_token_cleanup():
118
118
  p1 = subprocess.Popen(command)
119
119
  job.pidpath.write_text(json.dumps({"pid": p1.pid, "type": "local"}))
120
120
 
121
- task3 = dummy_task(x=3)
121
+ task3 = dummy_task.C(x=3)
122
122
  task3.add_dependencies(token.dependency(1)).submit()
123
123
 
124
124
  # Ends the script "waitforfile.py"
@@ -136,7 +136,9 @@ def test_token_monitor():
136
136
 
137
137
  def run(xp, x, path):
138
138
  token = xp.workspace.connector.createtoken("test-token-monitor", 1)
139
- task = TokenTask(path=path, x=x).add_dependencies(token.dependency(1)).submit()
139
+ task = (
140
+ TokenTask.C(path=path, x=x).add_dependencies(token.dependency(1)).submit()
141
+ )
140
142
  return task
141
143
 
142
144
  with TemporaryExperiment("tokens1", maxwait=20, port=0) as xp1, TemporaryExperiment(
@@ -241,7 +243,7 @@ def test_token_process():
241
243
 
242
244
  def restart_function(xp):
243
245
  token = CounterToken("restart-token", xp.workdir / "token", 1)
244
- token(1, restart.Restart()).submit()
246
+ token(1, restart.Restart.C()).submit()
245
247
 
246
248
 
247
249
  @pytest.mark.parametrize("terminate", restart.TERMINATES_FUNC)