experimaestro 1.11.1__py3-none-any.whl → 2.0.0b4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of experimaestro might be problematic; see the package registry's advisory page for more details.

Files changed (133):
  1. experimaestro/__init__.py +10 -11
  2. experimaestro/annotations.py +167 -206
  3. experimaestro/cli/__init__.py +140 -16
  4. experimaestro/cli/filter.py +42 -74
  5. experimaestro/cli/jobs.py +157 -106
  6. experimaestro/cli/progress.py +269 -0
  7. experimaestro/cli/refactor.py +249 -0
  8. experimaestro/click.py +0 -1
  9. experimaestro/commandline.py +19 -3
  10. experimaestro/connectors/__init__.py +22 -3
  11. experimaestro/connectors/local.py +12 -0
  12. experimaestro/core/arguments.py +192 -37
  13. experimaestro/core/identifier.py +127 -12
  14. experimaestro/core/objects/__init__.py +6 -0
  15. experimaestro/core/objects/config.py +702 -285
  16. experimaestro/core/objects/config_walk.py +24 -6
  17. experimaestro/core/serialization.py +91 -34
  18. experimaestro/core/serializers.py +1 -8
  19. experimaestro/core/subparameters.py +164 -0
  20. experimaestro/core/types.py +198 -83
  21. experimaestro/exceptions.py +26 -0
  22. experimaestro/experiments/cli.py +107 -25
  23. experimaestro/generators.py +50 -9
  24. experimaestro/huggingface.py +3 -1
  25. experimaestro/launcherfinder/parser.py +29 -0
  26. experimaestro/launcherfinder/registry.py +3 -3
  27. experimaestro/launchers/__init__.py +26 -1
  28. experimaestro/launchers/direct.py +12 -0
  29. experimaestro/launchers/slurm/base.py +154 -2
  30. experimaestro/mkdocs/base.py +6 -8
  31. experimaestro/mkdocs/metaloader.py +0 -1
  32. experimaestro/mypy.py +452 -7
  33. experimaestro/notifications.py +75 -16
  34. experimaestro/progress.py +404 -0
  35. experimaestro/rpyc.py +0 -1
  36. experimaestro/run.py +19 -6
  37. experimaestro/scheduler/__init__.py +18 -1
  38. experimaestro/scheduler/base.py +504 -959
  39. experimaestro/scheduler/dependencies.py +43 -28
  40. experimaestro/scheduler/dynamic_outputs.py +259 -130
  41. experimaestro/scheduler/experiment.py +582 -0
  42. experimaestro/scheduler/interfaces.py +474 -0
  43. experimaestro/scheduler/jobs.py +485 -0
  44. experimaestro/scheduler/services.py +186 -12
  45. experimaestro/scheduler/signal_handler.py +32 -0
  46. experimaestro/scheduler/state.py +1 -1
  47. experimaestro/scheduler/state_db.py +388 -0
  48. experimaestro/scheduler/state_provider.py +2345 -0
  49. experimaestro/scheduler/state_sync.py +834 -0
  50. experimaestro/scheduler/workspace.py +52 -10
  51. experimaestro/scriptbuilder.py +7 -0
  52. experimaestro/server/__init__.py +153 -32
  53. experimaestro/server/data/index.css +0 -125
  54. experimaestro/server/data/index.css.map +1 -1
  55. experimaestro/server/data/index.js +194 -58
  56. experimaestro/server/data/index.js.map +1 -1
  57. experimaestro/settings.py +47 -6
  58. experimaestro/sphinx/__init__.py +3 -3
  59. experimaestro/taskglobals.py +20 -0
  60. experimaestro/tests/conftest.py +80 -0
  61. experimaestro/tests/core/test_generics.py +2 -2
  62. experimaestro/tests/identifier_stability.json +45 -0
  63. experimaestro/tests/launchers/bin/sacct +6 -2
  64. experimaestro/tests/launchers/bin/sbatch +4 -2
  65. experimaestro/tests/launchers/common.py +2 -2
  66. experimaestro/tests/launchers/test_slurm.py +80 -0
  67. experimaestro/tests/restart.py +1 -1
  68. experimaestro/tests/tasks/all.py +7 -0
  69. experimaestro/tests/tasks/test_dynamic.py +231 -0
  70. experimaestro/tests/test_checkers.py +2 -2
  71. experimaestro/tests/test_cli_jobs.py +615 -0
  72. experimaestro/tests/test_dependencies.py +11 -17
  73. experimaestro/tests/test_deprecated.py +630 -0
  74. experimaestro/tests/test_environment.py +200 -0
  75. experimaestro/tests/test_experiment.py +3 -3
  76. experimaestro/tests/test_file_progress.py +425 -0
  77. experimaestro/tests/test_file_progress_integration.py +477 -0
  78. experimaestro/tests/test_forward.py +3 -3
  79. experimaestro/tests/test_generators.py +93 -0
  80. experimaestro/tests/test_identifier.py +520 -169
  81. experimaestro/tests/test_identifier_stability.py +458 -0
  82. experimaestro/tests/test_instance.py +16 -21
  83. experimaestro/tests/test_multitoken.py +442 -0
  84. experimaestro/tests/test_mypy.py +433 -0
  85. experimaestro/tests/test_objects.py +314 -30
  86. experimaestro/tests/test_outputs.py +8 -8
  87. experimaestro/tests/test_param.py +22 -26
  88. experimaestro/tests/test_partial_paths.py +231 -0
  89. experimaestro/tests/test_progress.py +2 -50
  90. experimaestro/tests/test_resumable_task.py +480 -0
  91. experimaestro/tests/test_serializers.py +141 -60
  92. experimaestro/tests/test_state_db.py +434 -0
  93. experimaestro/tests/test_subparameters.py +160 -0
  94. experimaestro/tests/test_tags.py +151 -15
  95. experimaestro/tests/test_tasks.py +137 -160
  96. experimaestro/tests/test_token_locking.py +252 -0
  97. experimaestro/tests/test_tokens.py +25 -19
  98. experimaestro/tests/test_types.py +133 -11
  99. experimaestro/tests/test_validation.py +19 -19
  100. experimaestro/tests/test_workspace_triggers.py +158 -0
  101. experimaestro/tests/token_reschedule.py +5 -3
  102. experimaestro/tests/utils.py +2 -2
  103. experimaestro/tokens.py +154 -57
  104. experimaestro/tools/diff.py +8 -1
  105. experimaestro/tui/__init__.py +8 -0
  106. experimaestro/tui/app.py +2303 -0
  107. experimaestro/tui/app.tcss +353 -0
  108. experimaestro/tui/log_viewer.py +228 -0
  109. experimaestro/typingutils.py +11 -2
  110. experimaestro/utils/__init__.py +23 -0
  111. experimaestro/utils/environment.py +148 -0
  112. experimaestro/utils/git.py +129 -0
  113. experimaestro/utils/resources.py +1 -1
  114. experimaestro/version.py +34 -0
  115. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0b4.dist-info}/METADATA +70 -39
  116. experimaestro-2.0.0b4.dist-info/RECORD +181 -0
  117. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0b4.dist-info}/WHEEL +1 -1
  118. experimaestro-2.0.0b4.dist-info/entry_points.txt +16 -0
  119. experimaestro/compat.py +0 -6
  120. experimaestro/core/objects.pyi +0 -225
  121. experimaestro/server/data/0c35d18bf06992036b69.woff2 +0 -0
  122. experimaestro/server/data/219aa9140e099e6c72ed.woff2 +0 -0
  123. experimaestro/server/data/3a4004a46a653d4b2166.woff +0 -0
  124. experimaestro/server/data/3baa5b8f3469222b822d.woff +0 -0
  125. experimaestro/server/data/4d73cb90e394b34b7670.woff +0 -0
  126. experimaestro/server/data/4ef4218c522f1eb6b5b1.woff2 +0 -0
  127. experimaestro/server/data/5d681e2edae8c60630db.woff +0 -0
  128. experimaestro/server/data/6f420cf17cc0d7676fad.woff2 +0 -0
  129. experimaestro/server/data/c380809fd3677d7d6903.woff2 +0 -0
  130. experimaestro/server/data/f882956fd323fd322f31.woff +0 -0
  131. experimaestro-1.11.1.dist-info/RECORD +0 -158
  132. experimaestro-1.11.1.dist-info/entry_points.txt +0 -17
  133. {experimaestro-1.11.1.dist-info → experimaestro-2.0.0b4.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,252 @@
1
+ """Unit tests for token locking mechanism
2
+
3
+ Tests the CounterToken condition variable-based synchronization
4
+ without requiring full scheduler integration.
5
+ """
6
+
7
+ import asyncio
8
+ import pytest
9
+ import tempfile
10
+ from pathlib import Path
11
+ import time
12
+
13
+ from experimaestro.tokens import CounterToken
14
+ from experimaestro.locking import LockError
15
+
16
+ pytestmark = pytest.mark.anyio
17
+
18
+
19
+ async def test_token_acquire_release():
20
+ """Test basic token acquire and release"""
21
+ with tempfile.TemporaryDirectory() as tmpdir:
22
+ token = CounterToken("test-basic", Path(tmpdir) / "token", count=1)
23
+
24
+ # Create a mock job target
25
+ class MockJob:
26
+ @property
27
+ def identifier(self):
28
+ return "mock-job-1"
29
+
30
+ @property
31
+ def basepath(self):
32
+ return Path(tmpdir) / "job1"
33
+
34
+ job = MockJob()
35
+
36
+ # Create dependency
37
+ dep = token.dependency(1)
38
+ dep.target = job
39
+
40
+ # Should be able to acquire
41
+ lock = await dep.aio_lock(timeout=1.0)
42
+ assert lock is not None
43
+ assert token.available == 0
44
+
45
+ # Release
46
+ lock.release()
47
+ assert token.available == 1
48
+
49
+
50
+ async def test_token_blocking():
51
+ """Test that acquiring blocks when no tokens available"""
52
+ with tempfile.TemporaryDirectory() as tmpdir:
53
+ token = CounterToken("test-blocking", Path(tmpdir) / "token", count=1)
54
+
55
+ class MockJob:
56
+ def __init__(self, name):
57
+ self.name = name
58
+
59
+ @property
60
+ def identifier(self):
61
+ return f"mock-job-{self.name}"
62
+
63
+ @property
64
+ def basepath(self):
65
+ return Path(tmpdir) / self.name
66
+
67
+ job1 = MockJob("1")
68
+ job2 = MockJob("2")
69
+
70
+ dep1 = token.dependency(1)
71
+ dep1.target = job1
72
+
73
+ dep2 = token.dependency(1)
74
+ dep2.target = job2
75
+
76
+ # Acquire with first dependency
77
+ lock1 = await dep1.aio_lock(timeout=0.5)
78
+ assert token.available == 0
79
+
80
+ # Second acquire should timeout
81
+ start = time.time()
82
+ with pytest.raises(LockError, match="Timeout"):
83
+ await dep2.aio_lock(timeout=0.5)
84
+ elapsed = time.time() - start
85
+ assert 0.4 < elapsed < 0.7 # Should timeout around 0.5s
86
+
87
+ # Release first lock
88
+ lock1.release()
89
+ assert token.available == 1
90
+
91
+ # Now second should succeed
92
+ lock2 = await dep2.aio_lock(timeout=0.5)
93
+ assert lock2 is not None
94
+ lock2.release()
95
+
96
+
97
+ async def test_token_notification():
98
+ """Test that condition notification wakes up waiting tasks"""
99
+ with tempfile.TemporaryDirectory() as tmpdir:
100
+ token = CounterToken("test-notify", Path(tmpdir) / "token", count=1)
101
+
102
+ class MockJob:
103
+ def __init__(self, name):
104
+ self.name = name
105
+
106
+ @property
107
+ def identifier(self):
108
+ return f"mock-job-{self.name}"
109
+
110
+ @property
111
+ def basepath(self):
112
+ return Path(tmpdir) / self.name
113
+
114
+ job1 = MockJob("1")
115
+ job2 = MockJob("2")
116
+
117
+ dep1 = token.dependency(1)
118
+ dep1.target = job1
119
+
120
+ dep2 = token.dependency(1)
121
+ dep2.target = job2
122
+
123
+ # Acquire with first dependency
124
+ lock1 = await dep1.aio_lock(timeout=0.5)
125
+
126
+ # Start second acquisition in background
127
+ async def acquire_second():
128
+ lock = await dep2.aio_lock(timeout=5.0) # Long timeout
129
+ return lock
130
+
131
+ task = asyncio.create_task(acquire_second())
132
+
133
+ # Give it time to start waiting
134
+ await asyncio.sleep(0.1)
135
+
136
+ # Release first lock - should notify waiting task
137
+ start = time.time()
138
+ lock1.release()
139
+
140
+ # Second task should complete quickly (not timeout)
141
+ lock2 = await task
142
+ elapsed = time.time() - start
143
+
144
+ assert lock2 is not None
145
+ assert elapsed < 1.0 # Should wake up immediately, not wait 5s
146
+ lock2.release()
147
+
148
+
149
+ async def test_token_multiple_waiting():
150
+ """Test multiple tasks waiting for tokens"""
151
+ with tempfile.TemporaryDirectory() as tmpdir:
152
+ token = CounterToken("test-multiple", Path(tmpdir) / "token", count=1)
153
+
154
+ class MockJob:
155
+ def __init__(self, name):
156
+ self.name = name
157
+
158
+ @property
159
+ def identifier(self):
160
+ return f"mock-job-{self.name}"
161
+
162
+ @property
163
+ def basepath(self):
164
+ return Path(tmpdir) / self.name
165
+
166
+ # Acquire the token
167
+ job1 = MockJob("1")
168
+ dep1 = token.dependency(1)
169
+ dep1.target = job1
170
+ lock1 = await dep1.aio_lock(timeout=0.5)
171
+
172
+ # Start multiple waiting tasks
173
+ acquired_order = []
174
+
175
+ async def acquire_task(name):
176
+ job = MockJob(name)
177
+ dep = token.dependency(1)
178
+ dep.target = job
179
+ lock = await dep.aio_lock(timeout=10.0)
180
+ acquired_order.append(name)
181
+ await asyncio.sleep(0.05) # Hold briefly
182
+ lock.release()
183
+
184
+ tasks = [
185
+ asyncio.create_task(acquire_task("2")),
186
+ asyncio.create_task(acquire_task("3")),
187
+ asyncio.create_task(acquire_task("4")),
188
+ ]
189
+
190
+ # Give tasks time to start waiting
191
+ await asyncio.sleep(0.1)
192
+
193
+ # Release first lock
194
+ lock1.release()
195
+
196
+ # Wait for all tasks to complete
197
+ await asyncio.gather(*tasks)
198
+
199
+ # All tasks should have acquired the lock
200
+ assert len(acquired_order) == 3
201
+ assert set(acquired_order) == {"2", "3", "4"}
202
+
203
+
204
+ async def test_token_timeout_zero():
205
+ """Test that timeout=0 waits indefinitely"""
206
+ with tempfile.TemporaryDirectory() as tmpdir:
207
+ token = CounterToken("test-timeout-zero", Path(tmpdir) / "token", count=1)
208
+
209
+ class MockJob:
210
+ def __init__(self, name):
211
+ self.name = name
212
+
213
+ @property
214
+ def identifier(self):
215
+ return f"mock-job-{self.name}"
216
+
217
+ @property
218
+ def basepath(self):
219
+ return Path(tmpdir) / self.name
220
+
221
+ job1 = MockJob("1")
222
+ job2 = MockJob("2")
223
+
224
+ dep1 = token.dependency(1)
225
+ dep1.target = job1
226
+
227
+ dep2 = token.dependency(1)
228
+ dep2.target = job2
229
+
230
+ # Acquire with first
231
+ lock1 = await dep1.aio_lock(timeout=0.5)
232
+
233
+ # Start waiting with timeout=0 (infinite)
234
+ async def acquire_infinite():
235
+ return await dep2.aio_lock(timeout=0) # Should wait forever
236
+
237
+ task = asyncio.create_task(acquire_infinite())
238
+
239
+ # Give it time to start waiting
240
+ await asyncio.sleep(0.1)
241
+
242
+ # Wait a bit more - task should still be waiting
243
+ await asyncio.sleep(0.5)
244
+ assert not task.done()
245
+
246
+ # Release first lock
247
+ lock1.release()
248
+
249
+ # Now task should complete
250
+ lock2 = await asyncio.wait_for(task, timeout=2.0)
251
+ assert lock2 is not None
252
+ lock2.release()
@@ -32,7 +32,7 @@ def token_experiment(xp, token, ntasks=3):
32
32
 
33
33
  tasks = []
34
34
  for it in range(ntasks):
35
- task = TokenTask(path=path, x=it)
35
+ task = TokenTask.C(path=path, x=it)
36
36
  if token:
37
37
  task.add_dependencies(token.dependency(1))
38
38
  tasks.append(task.submit())
@@ -59,9 +59,12 @@ def token_experiment(xp, token, ntasks=3):
59
59
  assert (times[i - 1] > times[i]) or (times[i] > times[i - 1])
60
60
 
61
61
 
62
- @pytest.mark.xfail(strict=True)
62
+ @pytest.mark.xfail(
63
+ strict=False,
64
+ reason="Timing-dependent: tasks may run sequentially even without token",
65
+ )
63
66
  def test_token_fail():
64
- """Simple token test: should fail without token"""
67
+ """Simple token test: should fail without token (but may pass due to timing)"""
65
68
  with TemporaryExperiment("tokens", maxwait=20) as xp:
66
69
  token_experiment(xp, None)
67
70
 
@@ -71,6 +74,7 @@ def test_token_ok():
71
74
  with TemporaryExperiment("tokens", maxwait=20) as xp:
72
75
  token = CounterToken("token-ok", xp.workdir / "token", 1)
73
76
  token_experiment(xp, token)
77
+
74
78
  logging.info("Finished token_ok test")
75
79
 
76
80
 
@@ -86,7 +90,7 @@ def test_token_cleanup():
86
90
  with TemporaryExperiment("token_cleanup", maxwait=20) as xp:
87
91
  token = CounterToken("token-cleanup", xp.workdir / "token-cleanup", 1)
88
92
 
89
- task = dummy_task(x=1)
93
+ task = dummy_task.C(x=1)
90
94
  dependency = token.dependency(1)
91
95
  task.add_dependencies(dependency)
92
96
  # Just to create the directory
@@ -98,7 +102,7 @@ def test_token_cleanup():
98
102
  # The absence of process should be detected right away
99
103
  logging.info("Lock without process")
100
104
  TokenFile.create(dependency)
101
- task2 = dummy_task(x=2)
105
+ task2 = dummy_task.C(x=2)
102
106
  task2.add_dependencies(token.dependency(1)).submit()
103
107
  xp.wait()
104
108
 
@@ -118,7 +122,7 @@ def test_token_cleanup():
118
122
  p1 = subprocess.Popen(command)
119
123
  job.pidpath.write_text(json.dumps({"pid": p1.pid, "type": "local"}))
120
124
 
121
- task3 = dummy_task(x=3)
125
+ task3 = dummy_task.C(x=3)
122
126
  task3.add_dependencies(token.dependency(1)).submit()
123
127
 
124
128
  # Ends the script "waitforfile.py"
@@ -129,28 +133,30 @@ def test_token_cleanup():
129
133
 
130
134
 
131
135
  def test_token_monitor():
132
- """Two different schedulers (within the same process)
136
+ """Two different experiments (within the same process and workspace)
133
137
 
134
138
  Test the ability of the token to monitor the filesystem
135
139
  """
136
140
 
137
141
  def run(xp, x, path):
138
142
  token = xp.workspace.connector.createtoken("test-token-monitor", 1)
139
- task = TokenTask(path=path, x=x).add_dependencies(token.dependency(1)).submit()
143
+ task = (
144
+ TokenTask.C(path=path, x=x).add_dependencies(token.dependency(1)).submit()
145
+ )
140
146
  return task
141
147
 
142
- with TemporaryExperiment("tokens1", maxwait=20, port=0) as xp1, TemporaryExperiment(
143
- "tokens2", maxwait=20
144
- ) as xp2:
145
- path = xp1.workspace.path / "test_token.file"
146
- task1 = run(xp1, 1, path)
147
- task2 = run(xp2, 2, path)
148
+ with TemporaryExperiment("tokens1", maxwait=20, port=0) as xp1:
149
+ # Use the same workspace for both experiments
150
+ with TemporaryExperiment(
151
+ "tokens2", workdir=xp1.workspace.path, maxwait=20
152
+ ) as xp2:
153
+ path = xp1.workspace.path / "test_token.file"
154
+ task1 = run(xp1, 1, path)
155
+ task2 = run(xp2, 2, path)
148
156
 
149
- time.sleep(0.5)
150
- path.write_text("Hello world")
157
+ time.sleep(0.5)
158
+ path.write_text("Hello world")
151
159
 
152
- xp1.wait()
153
- xp2.wait()
154
160
  time1 = get_times(task1)
155
161
  time2 = get_times(task2)
156
162
 
@@ -241,7 +247,7 @@ def test_token_process():
241
247
 
242
248
  def restart_function(xp):
243
249
  token = CounterToken("restart-token", xp.workdir / "token", 1)
244
- token(1, restart.Restart()).submit()
250
+ token(1, restart.Restart.C()).submit()
245
251
 
246
252
 
247
253
  @pytest.mark.parametrize("terminate", restart.TERMINATES_FUNC)
@@ -1,7 +1,7 @@
1
1
  # --- Task and types definitions
2
2
 
3
3
  import logging
4
- from experimaestro import Config, Param
4
+ from experimaestro import Config, Param, field
5
5
  from typing import Union
6
6
 
7
7
  import pytest
@@ -26,17 +26,17 @@ def test_multiple_inheritance():
26
26
 
27
27
  for C in (C1, C2):
28
28
  logging.info("Testing %s", C)
29
- ctype = C.__xpmtype__
29
+ ctype = C.__getxpmtype__()
30
30
  assert issubclass(C, A)
31
31
  assert issubclass(C, B)
32
32
  assert issubclass(C, B1)
33
33
 
34
- assert ctype.objecttype == C.__xpmtype__.objecttype
34
+ assert ctype.value_type == C.__getxpmtype__().value_type
35
35
 
36
- assert issubclass(C.__xpmtype__.objecttype, B1.__xpmtype__.basetype)
37
- assert issubclass(C.__xpmtype__.objecttype, B.__xpmtype__.basetype)
38
- assert issubclass(C.__xpmtype__.objecttype, A.__xpmtype__.basetype)
39
- assert not issubclass(C.__xpmtype__.objecttype, ConfigMixin)
36
+ assert issubclass(C.__getxpmtype__().value_type, B1.__getxpmtype__().value_type)
37
+ assert issubclass(C.__getxpmtype__().value_type, B.__getxpmtype__().value_type)
38
+ assert issubclass(C.__getxpmtype__().value_type, A.__getxpmtype__().value_type)
39
+ assert not issubclass(C.__getxpmtype__().value_type, ConfigMixin)
40
40
 
41
41
 
42
42
  def test_missing_hierarchy():
@@ -49,7 +49,7 @@ def test_missing_hierarchy():
49
49
  class B(A1):
50
50
  pass
51
51
 
52
- B.__xpmtype__
52
+ B.__getxpmtype__()
53
53
 
54
54
  assert issubclass(B, A)
55
55
  assert issubclass(B, A1)
@@ -59,7 +59,129 @@ def test_types_union():
59
59
  class A(Config):
60
60
  x: Param[Union[int, str]]
61
61
 
62
- A(x=1)
63
- A(x="hello")
62
+ A.C(x=1)
63
+ A.C(x="hello")
64
64
  with pytest.raises(ValueError):
65
- A(x=[])
65
+ A.C(x=[])
66
+
67
+
68
+ def test_override_warning_without_flag(caplog):
69
+ """Test that overriding a parameter without overrides=True produces a warning"""
70
+
71
+ class Parent(Config):
72
+ value: Param[int]
73
+
74
+ with caplog.at_level(logging.WARNING, logger="xpm"):
75
+ # Child overrides value without overrides=True
76
+ class Child(Parent):
77
+ value: Param[int]
78
+
79
+ # Force initialization to trigger the warning
80
+ Child.__getxpmtype__().arguments
81
+
82
+ assert "overrides parent parameter" in caplog.text
83
+ assert "Child" in caplog.text
84
+ assert "value" in caplog.text
85
+
86
+
87
+ def test_override_no_warning_with_flag(caplog):
88
+ """Test that overriding with overrides=True suppresses the warning"""
89
+
90
+ class Parent(Config):
91
+ value: Param[int]
92
+
93
+ with caplog.at_level(logging.WARNING, logger="xpm"):
94
+ # Child overrides value with overrides=True
95
+ class Child(Parent):
96
+ value: Param[int] = field(overrides=True)
97
+
98
+ # Force initialization
99
+ Child.__getxpmtype__().arguments
100
+
101
+ # No warning should be issued
102
+ assert "overrides parent parameter" not in caplog.text
103
+
104
+
105
+ def test_override_type_check_subtype_config():
106
+ """Test that overriding Config type with subtype is allowed"""
107
+
108
+ class BaseValue(Config):
109
+ x: Param[int]
110
+
111
+ class DerivedValue(BaseValue):
112
+ y: Param[int]
113
+
114
+ class Parent(Config):
115
+ value: Param[BaseValue]
116
+
117
+ # Should succeed - DerivedValue is subtype of BaseValue
118
+ class Child(Parent):
119
+ value: Param[DerivedValue] = field(overrides=True)
120
+
121
+ Child.__getxpmtype__().arguments
122
+
123
+
124
+ def test_override_type_check_incompatible_config():
125
+ """Test that overriding Config type with incompatible type raises error"""
126
+
127
+ class ValueA(Config):
128
+ x: Param[int]
129
+
130
+ class ValueB(Config):
131
+ y: Param[int]
132
+
133
+ class Parent(Config):
134
+ value: Param[ValueA]
135
+
136
+ # Should fail - ValueB is not a subtype of ValueA
137
+ with pytest.raises(TypeError, match="is not a subtype"):
138
+
139
+ class Child(Parent):
140
+ value: Param[ValueB] = field(overrides=True)
141
+
142
+ Child.__getxpmtype__().arguments
143
+
144
+
145
+ def test_override_type_check_primitive_incompatible():
146
+ """Test that overriding primitive type with incompatible type raises error"""
147
+
148
+ class Parent(Config):
149
+ value: Param[int]
150
+
151
+ # Should fail - str is not a subtype of int
152
+ with pytest.raises(TypeError, match="is not compatible"):
153
+
154
+ class Child(Parent):
155
+ value: Param[str] = field(overrides=True)
156
+
157
+ Child.__getxpmtype__().arguments
158
+
159
+
160
+ def test_override_type_check_same_type():
161
+ """Test that overriding with the same type is allowed"""
162
+
163
+ class Parent(Config):
164
+ value: Param[int]
165
+
166
+ # Should succeed - same type
167
+ class Child(Parent):
168
+ value: Param[int] = field(overrides=True)
169
+
170
+ Child.__getxpmtype__().arguments
171
+
172
+
173
+ def test_no_override_warning_for_new_param(caplog):
174
+ """Test that defining a new parameter doesn't produce a warning"""
175
+
176
+ class Parent(Config):
177
+ x: Param[int]
178
+
179
+ with caplog.at_level(logging.WARNING, logger="xpm"):
180
+ # Child defines a new parameter y, doesn't override x
181
+ class Child(Parent):
182
+ y: Param[int]
183
+
184
+ Child.__getxpmtype__().arguments
185
+
186
+ # No warning should be issued for new parameter
187
+ assert "overrides parent parameter" not in caplog.text
@@ -36,22 +36,22 @@ class C(Config):
36
36
 
37
37
 
38
38
  def test_validation_simple():
39
- expect_validate(A(value=1))
39
+ expect_validate(A.C(value=1))
40
40
 
41
41
 
42
42
  def test_validation_missing():
43
- expect_notvalidate(A())
43
+ expect_notvalidate(A.C())
44
44
 
45
45
 
46
46
  def test_validation_simple_nested():
47
- b = B()
48
- b.a = A(value=1)
47
+ b = B.C()
48
+ b.a = A.C(value=1)
49
49
  expect_validate(b)
50
50
 
51
51
 
52
52
  def test_validation_missing_nested():
53
- b = B()
54
- b.a = A()
53
+ b = B.C()
54
+ b.a = A.C()
55
55
  expect_notvalidate(b)
56
56
 
57
57
 
@@ -68,11 +68,11 @@ def test_validation_type():
68
68
  __xpmid__ = valns.type.c
69
69
 
70
70
  with pytest.raises(ValueError):
71
- C(a=B())
71
+ C.C(a=B.C())
72
72
 
73
73
  with pytest.raises(ValueError):
74
- c = C()
75
- c.a = B()
74
+ c = C.C()
75
+ c.a = B.C()
76
76
 
77
77
 
78
78
  def test_validation_subtype():
@@ -86,7 +86,7 @@ def test_validation_subtype():
86
86
  __xpmid__ = valns.subtype.b
87
87
  a: Param[A]
88
88
 
89
- expect_validate(B(a=A1()))
89
+ expect_validate(B.C(a=A1.C()))
90
90
 
91
91
 
92
92
  def test_validation_path_generator():
@@ -96,7 +96,7 @@ def test_validation_path_generator():
96
96
  __xpmid__ = valns.path.a
97
97
  value: Meta[Path] = field(default_factory=PathGenerator("file.txt"))
98
98
 
99
- a = A()
99
+ a = A.C()
100
100
  a.__xpm__.validate()
101
101
  with TemporaryExperiment("constant") as xp:
102
102
  jobcontext = Job(a)
@@ -116,7 +116,7 @@ def test_validation_constant():
116
116
  __xpmid__ = valns.constant.a
117
117
  value: Constant[int] = 1
118
118
 
119
- a = A()
119
+ a = A.C()
120
120
  a.__xpm__.validate()
121
121
  with TemporaryExperiment("constant"):
122
122
  joba = Job(a)
@@ -133,7 +133,7 @@ class Child(Parent):
133
133
 
134
134
 
135
135
  def test_validation_child():
136
- expect_validate(Child(x=1))
136
+ expect_validate(Child.C(x=1))
137
137
 
138
138
 
139
139
  # --- Path argument checks
@@ -144,7 +144,7 @@ class PathParent(Config):
144
144
 
145
145
 
146
146
  def test_validation_path_option():
147
- c = PathParent()
147
+ c = PathParent.C()
148
148
  expect_validate(c)
149
149
 
150
150
 
@@ -157,7 +157,7 @@ def test_validation_seal():
157
157
  class A(Config):
158
158
  a: Param[int]
159
159
 
160
- a = A(a=2)
160
+ a = A.C(a=2)
161
161
  a.__xpm__.seal(EmptyContext())
162
162
 
163
163
  with pytest.raises(AttributeError):
@@ -174,10 +174,10 @@ def test_validation_validation_enum():
174
174
  class EnumConfig(Config):
175
175
  a: Param[EnumParam]
176
176
 
177
- expect_validate(EnumConfig(a=EnumParam.FIRST))
177
+ expect_validate(EnumConfig.C(a=EnumParam.FIRST))
178
178
 
179
179
  try:
180
- EnumConfig(a=1)
180
+ EnumConfig.C(a=1)
181
181
  assert False, "Enum value should be rejected"
182
182
  except AssertionError:
183
183
  pass
@@ -199,7 +199,7 @@ class TaskConfigConsumer(Config):
199
199
 
200
200
 
201
201
  def test_validation_taskargument():
202
- x = taskconfig()
202
+ x = taskconfig.C()
203
203
  with TemporaryExperiment("fake"):
204
204
  x.submit(run_mode=RunMode.DRY_RUN)
205
- expect_validate(TaskConfigConsumer(x=x))
205
+ expect_validate(TaskConfigConsumer.C(x=x))