hpcflow-new2 0.2.0a69__py3-none-any.whl → 0.2.0a71__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
@@ -8,6 +8,7 @@ from hpcflow.sdk.core.errors import (
     TaskTemplateMultipleSchemaObjectives,
     TaskTemplateUnexpectedInput,
 )
+from hpcflow.sdk.core.parameters import NullDefault
 from hpcflow.sdk.core.test_utils import make_schemas, make_tasks, make_workflow
 
 
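Throughout these hunks, bare None defaults in schemas_spec are replaced by an explicit NullDefault.NULL sentinel, which frees None to act as a genuine default value. The snippet below is a minimal sketch of that sentinel-enum pattern; the real definition lives in hpcflow.sdk.core.parameters and may differ:

import enum

class NullDefault(enum.Enum):
    # Sentinel meaning "no default was supplied", so that None can be a
    # legitimate default value in its own right (sketch only).
    NULL = 0

def describe_default(default=NullDefault.NULL):
    # Only the sentinel means "unset"; None is treated as a real default.
    if default is NullDefault.NULL:
        return "no default"
    return f"default is {default!r}"

assert describe_default() == "no default"
assert describe_default(None) == "default is None"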
@@ -211,7 +212,10 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     tmp_path,
 ):
     t1, t2 = make_tasks(
-        schemas_spec=[[{"p1": None}, ("p2",), "t1"], [{"p2": None}, (), "t2"]],
+        schemas_spec=[
+            [{"p1": NullDefault.NULL}, ("p2",), "t1"],
+            [{"p2": NullDefault.NULL}, (), "t2"],
+        ],
         local_inputs={0: ("p1",)},
     )
     wk = hf.Workflow.from_template(
@@ -219,7 +223,7 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     )
     available = t2.get_available_task_input_sources(
         element_set=t2.element_sets[0],
-        source_tasks=[wk.tasks.t1.template],
+        source_tasks=[wk.tasks.t1],
     )
     available_exp = {
         "p2": [
@@ -246,7 +250,7 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     )
     available = t2.get_available_task_input_sources(
         element_set=t2.element_sets[0],
-        source_tasks=[wk.tasks.t1.template],
+        source_tasks=[wk.tasks.t1],
     )
     available_exp = {
         "p2": [
@@ -266,7 +270,10 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     tmp_path,
 ):
     t1, t2 = make_tasks(
-        schemas_spec=[[{"p1": None}, ("p2",), "t1"], [{"p2": None}, (), "t2"]],
+        schemas_spec=[
+            [{"p1": NullDefault.NULL}, ("p2",), "t1"],
+            [{"p2": NullDefault.NULL}, (), "t2"],
+        ],
         local_inputs={0: ("p1",), 1: ("p2",)},
     )
     wk = hf.Workflow.from_template(
@@ -274,7 +281,7 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     )
     available = t2.get_available_task_input_sources(
         element_set=t2.element_sets[0],
-        source_tasks=[wk.tasks.t1.template],
+        source_tasks=[wk.tasks.t1],
     )
     available_exp = {
         "p2": [
@@ -302,7 +309,7 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
     )
     available = t2.get_available_task_input_sources(
         element_set=t2.element_sets[0],
-        source_tasks=[wk.tasks.t1.template],
+        source_tasks=[wk.tasks.t1],
     )
     available_exp = {
         "p2": [
@@ -324,9 +331,9 @@ def test_task_get_available_task_input_sources_expected_return_one_param_two_out
 ):
     t1, t2, t3 = make_tasks(
         schemas_spec=[
-            [{"p1": None}, ("p2", "p3"), "t1"],
-            [{"p2": None}, ("p3", "p4"), "t2"],
-            [{"p3": None}, (), "t3"],
+            [{"p1": NullDefault.NULL}, ("p2", "p3"), "t1"],
+            [{"p2": NullDefault.NULL}, ("p3", "p4"), "t2"],
+            [{"p3": NullDefault.NULL}, (), "t3"],
         ],
         local_inputs={0: ("p1",), 1: ("p2",)},
     )
@@ -335,7 +342,7 @@ def test_task_get_available_task_input_sources_expected_return_one_param_two_out
     )
     available = t3.get_available_task_input_sources(
         element_set=t3.element_sets[0],
-        source_tasks=[wk.tasks.t1.template, wk.tasks.t2.template],
+        source_tasks=[wk.tasks.t1, wk.tasks.t2],
     )
     available_exp = {
         "p3": [
@@ -361,8 +368,8 @@ def test_task_get_available_task_input_sources_expected_return_two_params_one_ou
 ):
     t1, t2 = make_tasks(
         schemas_spec=[
-            [{"p1": None}, ("p2", "p3"), "t1"],
-            [{"p2": None, "p3": None}, (), "t2"],
+            [{"p1": NullDefault.NULL}, ("p2", "p3"), "t1"],
+            [{"p2": NullDefault.NULL, "p3": NullDefault.NULL}, (), "t2"],
         ],
         local_inputs={0: ("p1",)},
     )
@@ -371,7 +378,7 @@ def test_task_get_available_task_input_sources_expected_return_two_params_one_ou
     )
     available = t2.get_available_task_input_sources(
         element_set=t2.element_sets[0],
-        source_tasks=[wk.tasks.t1.template],
+        source_tasks=[wk.tasks.t1],
     )
     available_exp = {
         "p2": [
@@ -402,9 +409,9 @@ def test_task_get_available_task_input_sources_input_source_excluded_if_not_loca
 
     t1, t2, t3 = make_tasks(
         schemas_spec=[
-            [{"p1": None}, ("p1",), "t1"],  # sources for t3: input + output
-            [{"p1": None}, ("p1",), "t2"],  # sources fot t3: output only
-            [{"p1": None}, ("p1",), "t3"],
+            [{"p1": NullDefault.NULL}, ("p1",), "t1"],  # sources for t3: input + output
+            [{"p1": NullDefault.NULL}, ("p1",), "t2"],  # sources for t3: output only
+            [{"p1": NullDefault.NULL}, ("p1",), "t3"],
         ],
         local_inputs={0: ("p1",)},
     )
@@ -413,7 +420,7 @@ def test_task_get_available_task_input_sources_input_source_excluded_if_not_loca
     )
     available = t3.get_available_task_input_sources(
         element_set=t3.element_sets[0],
-        source_tasks=[wk.tasks.t1.template, wk.tasks.t2.template],
+        source_tasks=[wk.tasks.t1, wk.tasks.t2],
     )
     available_exp = {
         "p1": [
@@ -492,6 +499,42 @@ def test_raise_on_multiple_input_values(param_p1):
     )
 
 
+def test_raise_on_multiple_input_values_same_label(param_p1):
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter="p1", labels={"0": {}})],
+    )
+
+    with pytest.raises(TaskTemplateMultipleInputValues):
+        hf.Task(
+            schemas=s1,
+            inputs=[
+                hf.InputValue(param_p1, value=101, label=0),
+                hf.InputValue(param_p1, value=101, label=0),
+            ],
+        )
+
+
+def test_multiple_input_values_different_labels(param_p1):
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(
+                parameter="p1",
+                labels={"0": {}, "1": {}},
+                multiple=True,
+            )
+        ],
+    )
+    hf.Task(
+        schemas=s1,
+        inputs=[
+            hf.InputValue(param_p1, value=101, label=0),
+            hf.InputValue(param_p1, value=101, label=1),
+        ],
+    )
+
+
 def test_expected_return_defined_and_undefined_input_types(param_p1, param_p2):
     s1 = make_schemas([[{"p1": None, "p2": None}, ()]])
 
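The two tests added above pin down labelled multi-input behaviour: two InputValue objects carrying the same label raise TaskTemplateMultipleInputValues, while distinct labels under a SchemaInput declared with multiple=True are accepted. A minimal sketch of such a duplicate-label check, with all names assumed rather than taken from hpcflow:

from collections import Counter

def check_input_labels(labels):
    # Reject any label supplied more than once (illustrative only).
    duplicates = [lab for lab, n in Counter(labels).items() if n > 1]
    if duplicates:
        raise ValueError(f"multiple input values for label(s): {duplicates}")

check_input_labels(["0", "1"])  # distinct labels: accepted
try:
    check_input_labels(["0", "0"])  # same label twice: rejected
except ValueError:
    pass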
@@ -1276,13 +1319,13 @@ def test_task_add_elements_simple_dependence_three_tasks(tmp_path, param_p1):
 
 def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path):
     wk = make_workflow(
-        schemas_spec=[[{"p1": None}, (), "t1"]],
+        schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
         local_inputs={0: ("p1",)},
         path=tmp_path,
     )
     tasks_meta = copy.deepcopy(wk._store.get_tasks())
 
-    s2 = make_schemas([[{"p1": None, "p3": None}, ()]])
+    s2 = make_schemas([[{"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()]])
     t2 = hf.Task(schemas=s2)
     with pytest.raises(MissingInputs) as exc_info:
         wk.add_task(t2)
@@ -1292,12 +1335,14 @@ def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path):
 
 def test_no_change_to_parameter_data_on_add_task_failure(tmp_path, param_p2, param_p3):
     wk = make_workflow(
-        schemas_spec=[[{"p1": None}, (), "t1"]],
+        schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
         local_inputs={0: ("p1",)},
         path=tmp_path,
     )
     param_data = copy.deepcopy(wk.get_all_parameters())
-    s2 = make_schemas([[{"p1": None, "p2": None, "p3": None}, ()]])
+    s2 = make_schemas(
+        [[{"p1": NullDefault.NULL, "p2": NullDefault.NULL, "p3": NullDefault.NULL}, ()]]
+    )
     t2 = hf.Task(schemas=s2, inputs=[hf.InputValue(param_p2, 201)])
     with pytest.raises(MissingInputs) as exc_info:
         wk.add_task(t2)
@@ -1307,13 +1352,13 @@ def test_no_change_to_parameter_data_on_add_task_failure(tmp_path, param_p2, par
 
 def test_expected_additional_parameter_data_on_add_task(tmp_path, param_p3):
     wk = make_workflow(
-        schemas_spec=[[{"p1": None}, (), "t1"]],
+        schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
         local_inputs={0: ("p1",)},
         path=tmp_path,
     )
     param_data = copy.deepcopy(wk.get_all_parameter_data())
 
-    s2 = make_schemas([[{"p1": None, "p3": None}, ()]])
+    s2 = make_schemas([[{"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()]])
     t2 = hf.Task(schemas=s2, inputs=[hf.InputValue(param_p3, 301)])
     wk.add_task(t2)
 
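The final hunk adds a new test module (its path is not shown in this rendering) covering hf.ValueSequence: label handling in inputs paths, the derived path attributes, and round-tripping of object values through workflow initialisation under both the json and zarr stores.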
@@ -0,0 +1,219 @@
+import pytest
+
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.test_utils import P1_parameter_cls as P1
+
+
+@pytest.fixture
+def null_config(tmp_path):
+    if not hf.is_config_loaded:
+        hf.load_config(config_dir=tmp_path)
+
+
+def test_inputs_value_sequence_label_added_to_path():
+    seq = hf.ValueSequence(path="inputs.p1.a", values=[0, 1], nesting_order=0, label=0)
+    assert seq.path == "inputs.p1[0].a"
+
+
+def test_inputs_value_sequence_no_label_added_to_path():
+    seq = hf.ValueSequence(path="inputs.p1.a", values=[0, 1], nesting_order=0, label="")
+    assert seq.path == "inputs.p1.a"
+
+
+def test_inputs_value_sequence_label_attr_added():
+    seq = hf.ValueSequence(path="inputs.p1[1].a", values=[0, 1], nesting_order=0)
+    assert seq.label == "1"
+
+
+def test_inputs_value_sequence_label_path_unmodified():
+    path = "inputs.p1[1].a"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0)
+    assert seq.path == path
+
+
+def test_raise_on_inputs_value_sequence_label_path_unequal():
+    with pytest.raises(ValueError):
+        hf.ValueSequence(path="inputs.p1[1].a", values=[0, 1], nesting_order=0, label="2")
+
+
+def test_no_raise_on_inputs_value_sequence_label_path_equal():
+    hf.ValueSequence(path="inputs.p1[1].a", values=[0, 1], nesting_order=0, label="1")
+
+
+def test_no_raise_on_inputs_value_sequence_label_path_cast_equal():
+    hf.ValueSequence(path="inputs.p1[1].a", values=[0, 1], nesting_order=0, label=1)
+
+
+def test_raise_on_resources_value_sequence_with_path_label():
+    with pytest.raises(ValueError):
+        hf.ValueSequence(path="resources.main[1]", values=[0, 1], nesting_order=0)
+
+
+def test_raise_on_resources_value_sequence_with_label_arg():
+    with pytest.raises(ValueError):
+        hf.ValueSequence(path="resources.main", values=[0, 1], nesting_order=0, label=1)
+
+
+def test_inputs_value_sequence_simple_path_attributes():
+    path = "inputs.p1"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0)
+    assert seq.path == path
+    assert seq.labelled_type == "p1"
+    assert seq.normalised_path == "inputs.p1"
+    assert seq.normalised_inputs_path == "p1"
+    assert seq.path_type == "inputs"
+    assert seq.input_type == "p1"
+    assert seq.input_path == ""
+    assert seq.resource_scope is None
+
+
+def test_inputs_value_sequence_path_attributes():
+    path = "inputs.p1.a.b"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0)
+    assert seq.path == path
+    assert seq.labelled_type == "p1"
+    assert seq.normalised_path == "inputs.p1.a.b"
+    assert seq.normalised_inputs_path == "p1.a.b"
+    assert seq.path_type == "inputs"
+    assert seq.input_type == "p1"
+    assert seq.input_path == "a.b"
+    assert seq.resource_scope is None
+
+
+def test_inputs_value_sequence_with_path_label_path_attributes():
+    path = "inputs.p1[1].a.b"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0)
+    assert seq.path == path
+    assert seq.labelled_type == "p1[1]"
+    assert seq.normalised_path == "inputs.p1[1].a.b"
+    assert seq.normalised_inputs_path == "p1[1].a.b"
+    assert seq.path_type == "inputs"
+    assert seq.input_type == "p1"
+    assert seq.input_path == "a.b"
+    assert seq.resource_scope is None
+
+
+def test_inputs_value_sequence_with_arg_label_path_attributes():
+    path = "inputs.p1.a.b"
+    new_path = "inputs.p1[1].a.b"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0, label=1)
+    assert seq.path == new_path
+    assert seq.labelled_type == "p1[1]"
+    assert seq.normalised_path == "inputs.p1[1].a.b"
+    assert seq.normalised_inputs_path == "p1[1].a.b"
+    assert seq.path_type == "inputs"
+    assert seq.input_type == "p1"
+    assert seq.input_path == "a.b"
+    assert seq.resource_scope is None
+
+
+def test_resources_value_sequence_path_attributes():
+    path = "resources.main.num_cores"
+    seq = hf.ValueSequence(path=path, values=[0, 1], nesting_order=0)
+    assert seq.path == path
+    assert seq.labelled_type is None
+    assert seq.normalised_path == "resources.main.num_cores"
+    assert seq.normalised_inputs_path is None
+    assert seq.path_type == "resources"
+    assert seq.input_type is None
+    assert seq.input_path is None
+    assert seq.resource_scope == "main"
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_value_sequence_object_values_during_workflow_init(null_config, tmp_path, store):
+    p1 = hf.Parameter("p1")
+    s1 = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput(parameter=p1)])
+    obj = P1(a=101)
+    seq = hf.ValueSequence(path="inputs.p1", values=[obj], nesting_order=0)
+    values_exp = [P1(a=101, d=None)]
+
+    t1 = hf.Task(
+        schemas=[s1],
+        sequences=[seq],
+    )
+    # before workflow initialisation:
+    assert seq.values == values_exp
+
+    wk = hf.Workflow.from_template_data(
+        tasks=[],
+        path=tmp_path,
+        template_name="temp",
+        store=store,
+    )
+
+    with wk.batch_update():
+        wk.add_task(t1)
+        # after workflow initialisation but before store commit:
+        assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
+
+    # after initialisation and store commit:
+    assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_value_sequence_object_values_class_method_during_workflow_init(
+    null_config, tmp_path, store
+):
+    p1 = hf.Parameter("p1")
+    s1 = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput(parameter=p1)])
+    obj = P1.from_data(b=50, c=51)
+    seq = hf.ValueSequence(path="inputs.p1", values=[obj], nesting_order=0)
+    values_exp = [P1(a=101, d=None)]
+
+    t1 = hf.Task(
+        schemas=[s1],
+        sequences=[seq],
+    )
+    # before workflow initialisation:
+    assert seq.values == values_exp
+
+    wk = hf.Workflow.from_template_data(
+        tasks=[],
+        path=tmp_path,
+        template_name="temp",
+        store=store,
+    )
+
+    with wk.batch_update():
+        wk.add_task(t1)
+        # after workflow initialisation but before store commit:
+        assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
+
+    # after initialisation and store commit:
+    assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_value_sequence_object_values_named_class_method_during_workflow_init(
+    null_config, tmp_path, store
+):
+    p1 = hf.Parameter("p1")
+    s1 = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput(parameter=p1)])
+    data = {"b": 50, "c": 51}
+    seq = hf.ValueSequence(
+        path="inputs.p1", values=[data], nesting_order=0, value_class_method="from_data"
+    )
+    values_exp = [data]
+
+    t1 = hf.Task(
+        schemas=[s1],
+        sequences=[seq],
+    )
+    # before workflow initialisation:
+    assert seq.values == values_exp
+
+    wk = hf.Workflow.from_template_data(
+        tasks=[],
+        path=tmp_path,
+        template_name="temp",
+        store=store,
+    )
+
+    with wk.batch_update():
+        wk.add_task(t1)
+        # after workflow initialisation but before store commit:
+        assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
+
+    # after initialisation and store commit:
+    assert wk.tasks[0].template.element_sets[0].sequences[0].values == values_exp
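
The label tests at the top of the new module suggest the normalisation rules: a label argument is spliced into the path as p1[<label>], a label already present in the path is extracted to the label attribute, a mismatch between the two raises ValueError, and resources paths reject labels entirely. A rough sketch of those rules, with the regex and function name assumed rather than taken from hpcflow:

import re

def apply_label(path, label=None):
    # Splice an optional label into "inputs.<name>[<label>]..." paths and
    # enforce the consistency rules exercised above (sketch only).
    kind, name, path_label, rest = re.match(
        r"(inputs|resources)\.(\w+)(?:\[(\w+)\])?(.*)", path
    ).groups()
    has_arg_label = label is not None and label != ""
    if kind == "resources" and (path_label or has_arg_label):
        raise ValueError("resource paths cannot be labelled")
    if path_label and has_arg_label and str(label) != path_label:
        raise ValueError("label argument conflicts with label in path")
    final = path_label or (str(label) if has_arg_label else "")
    return f"{kind}.{name}[{final}]{rest}" if final else path

assert apply_label("inputs.p1.a", label=0) == "inputs.p1[0].a"
assert apply_label("inputs.p1.a", label="") == "inputs.p1.a"
assert apply_label("inputs.p1[1].a") == "inputs.p1[1].a"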