hpcflow-new2 0.2.0a175__py3-none-any.whl → 0.2.0a176__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hpcflow/_version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.0a175"
1
+ __version__ = "0.2.0a176"
hpcflow/sdk/core/loop.py CHANGED
@@ -438,7 +438,26 @@ class WorkflowLoop:
438
438
  cur_loop_idx = self.num_added_iterations[iters_key] - 1
439
439
  all_new_data_idx = {} # keys are (task.insert_ID and element.index)
440
440
 
441
+ # initialise a new `num_added_iterations` key on each child loop:
442
+ for child in child_loops:
443
+ iters_key_dct = {
444
+ **parent_loop_indices,
445
+ self.name: cur_loop_idx + 1,
446
+ }
447
+ added_iters_key_chd = tuple([iters_key_dct.get(j, 0) for j in child.parents])
448
+ child._initialise_pending_added_iters(added_iters_key_chd)
449
+
441
450
  for task in self.task_objects:
451
+
452
+ new_loop_idx = {
453
+ **parent_loop_indices,
454
+ self.name: cur_loop_idx + 1,
455
+ **{
456
+ child.name: 0
457
+ for child in child_loops
458
+ if task.insert_ID in child.task_insert_IDs
459
+ },
460
+ }
442
461
  for elem_idx in range(task.num_elements):
443
462
  # element needs to take into account changes made in this code
444
463
  element = task.elements[elem_idx]
@@ -587,10 +606,23 @@ class WorkflowLoop:
587
606
 
588
607
  elif orig_inp_src.source_type is InputSourceType.TASK:
589
608
  if orig_inp_src.task_ref not in self.task_insert_IDs:
590
- # source task not part of the loop; copy existing data idx:
591
- inp_dat_idx = element.iterations[0].get_data_idx()[
592
- inp_key
593
- ]
609
+ # TODO: what about groups?
610
+ # source the data_idx from the iteration with same parent
611
+ # loop indices as the new iteration to add:
612
+ src_iters = []
613
+ for iter_i in element.iterations:
614
+ skip_iter = False
615
+ for p_k, p_v in parent_loop_indices.items():
616
+ if iter_i.loop_idx.get(p_k) != p_v:
617
+ skip_iter = True
618
+ break
619
+ if not skip_iter:
620
+ src_iters.append(iter_i)
621
+
622
+ # could be multiple, but they should all have the same
623
+ # data index for this parameter:
624
+ src_iter = src_iters[0]
625
+ inp_dat_idx = src_iter.get_data_idx()[inp_key]
594
626
  else:
595
627
  is_group = False
596
628
  if (
@@ -673,20 +705,6 @@ class WorkflowLoop:
673
705
  )
674
706
  all_new_data_idx[(task.insert_ID, element.index)] = new_data_idx
675
707
 
676
- new_loop_idx = {
677
- **parent_loop_indices,
678
- self.name: cur_loop_idx + 1,
679
- **{
680
- child.name: 0
681
- for child in child_loops
682
- if task.insert_ID in child.task_insert_IDs
683
- },
684
- }
685
- # increment num_added_iterations on child loop for this parent loop index:
686
- for i in child_loops:
687
- added_iters_key_chd = tuple([new_loop_idx[j] for j in i.parents])
688
- i._initialise_pending_added_iters(added_iters_key_chd)
689
-
690
708
  iter_ID_i = self.workflow._store.add_element_iteration(
691
709
  element_ID=element.id_,
692
710
  data_idx=new_data_idx,
@@ -1159,3 +1159,130 @@ def test_multi_nested_loops(null_config, tmp_path):
1159
1159
  (0, {"inner": 0, "middle_1": 2, "middle_2": 1, "outer": 1}, (22,)),
1160
1160
  (0, {"inner": 1, "middle_1": 2, "middle_2": 1, "outer": 1}, (23,)),
1161
1161
  ]
1162
+
1163
+
1164
+ def test_nested_loop_input_from_parent_loop_task(null_config, tmp_path):
1165
+ """Test that an input in a nested-loop task is correctly sourced from latest
1166
+ iteration of the parent loop."""
1167
+ wk = make_workflow(
1168
+ schemas_spec=[
1169
+ [{"p1": None}, ("p2", "p3")],
1170
+ [{"p2": None}, ("p4",)],
1171
+ [{"p4": None, "p3": None}, ("p2", "p1")], # testing p3 source
1172
+ ],
1173
+ path=tmp_path,
1174
+ local_inputs={0: {"p1": 101}},
1175
+ loops=[
1176
+ hf.Loop(name="inner", tasks=[1, 2], num_iterations=3),
1177
+ hf.Loop(name="outer", tasks=[0, 1, 2], num_iterations=2),
1178
+ ],
1179
+ )
1180
+ pathway = wk.get_iteration_task_pathway(ret_data_idx=True)
1181
+ assert len(pathway) == 14
1182
+ p3_out_idx = [i[2][0]["outputs.p3"] for i in pathway if i[0] == 0]
1183
+ p3_inp_idx = [i[2][0]["inputs.p3"] for i in pathway if i[0] == 2]
1184
+ assert len(p3_out_idx) == 2 # 2 outer iterations
1185
+ assert len(p3_inp_idx) == 6 # 2 * 3 iterations
1186
+ assert p3_inp_idx == [p3_out_idx[0]] * 3 + [p3_out_idx[1]] * 3
1187
+
1188
+
1189
+ def test_doubly_nested_loop_input_from_parent_loop_task(null_config, tmp_path):
1190
+ """Test that an input in a doubly-nested-loop task is correctly sourced from latest
1191
+ iteration of the parent loop."""
1192
+ # test source of p6 in final task:
1193
+ wk = make_workflow(
1194
+ schemas_spec=[
1195
+ [{"p5": None}, ("p6", "p1")],
1196
+ [{"p1": None}, ("p2", "p3")],
1197
+ [{"p2": None}, ("p4",)],
1198
+ [{"p4": None, "p3": None, "p6": None}, ("p2", "p1", "p5")],
1199
+ ],
1200
+ path=tmp_path,
1201
+ local_inputs={0: {"p5": 101}},
1202
+ loops=[
1203
+ hf.Loop(name="inner", tasks=[2, 3], num_iterations=3),
1204
+ hf.Loop(name="middle", tasks=[1, 2, 3], num_iterations=3),
1205
+ hf.Loop(name="outer", tasks=[0, 1, 2, 3], num_iterations=3),
1206
+ ],
1207
+ overwrite=True,
1208
+ )
1209
+ pathway = wk.get_iteration_task_pathway(ret_data_idx=True)
1210
+ assert len(pathway) == 66
1211
+
1212
+ p6_out_idx = [i[2][0]["outputs.p6"] for i in pathway if i[0] == 0]
1213
+ p6_inp_idx = [i[2][0]["inputs.p6"] for i in pathway if i[0] == 3]
1214
+ assert len(p6_out_idx) == 3 # 3 outer iterations
1215
+ assert len(p6_inp_idx) == 27 # 3 * 3 * 3 iterations
1216
+ assert p6_inp_idx == [p6_out_idx[0]] * 9 + [p6_out_idx[1]] * 9 + [p6_out_idx[2]] * 9
1217
+
1218
+
1219
+ def test_loop_non_input_task_input_from_element_group(null_config, tmp_path):
1220
+ """Test correct sourcing of an element group input within a loop, for a task that is
1221
+ not that loop's "input task" with respect to that parameter."""
1222
+ s1 = hf.TaskSchema(
1223
+ objective="t1",
1224
+ inputs=[hf.SchemaInput("p1")],
1225
+ outputs=[hf.SchemaOutput("p2"), hf.SchemaOutput("p3")],
1226
+ actions=[
1227
+ hf.Action(
1228
+ commands=[
1229
+ hf.Command(
1230
+ command="echo $((<<parameter:p1>> + 1))",
1231
+ stdout="<<parameter:p2>>",
1232
+ stderr="<<parameter:p3>>",
1233
+ )
1234
+ ]
1235
+ )
1236
+ ],
1237
+ )
1238
+ s2 = hf.TaskSchema(
1239
+ objective="t2",
1240
+ inputs=[hf.SchemaInput("p2", group="my_group")],
1241
+ outputs=[hf.SchemaOutput("p4")],
1242
+ actions=[
1243
+ hf.Action(
1244
+ commands=[
1245
+ hf.Command(
1246
+ command="echo $((<<sum(parameter:p2)>> + 1))",
1247
+ stdout="<<parameter:p4>>",
1248
+ )
1249
+ ]
1250
+ )
1251
+ ],
1252
+ )
1253
+ s3 = hf.TaskSchema(
1254
+ objective="t3",
1255
+ inputs=[hf.SchemaInput("p3", group="my_group"), hf.SchemaInput("p4")],
1256
+ outputs=[hf.SchemaOutput("p2")],
1257
+ actions=[
1258
+ hf.Action(
1259
+ commands=[
1260
+ hf.Command(
1261
+ command="echo $((<<sum(parameter:p3)>> + <<parameter:p4>>))",
1262
+ stdout="<<parameter:p2>>",
1263
+ )
1264
+ ]
1265
+ )
1266
+ ],
1267
+ )
1268
+ wk = hf.Workflow.from_template_data(
1269
+ template_name="test_loop",
1270
+ path=tmp_path,
1271
+ tasks=[
1272
+ hf.Task(
1273
+ schema=s1,
1274
+ sequences=[hf.ValueSequence("inputs.p1", values=[1, 2, 3])],
1275
+ groups=[hf.ElementGroup("my_group")],
1276
+ ),
1277
+ hf.Task(schema=s2),
1278
+ hf.Task(schema=s3), # test source of p3 (should be group from t1)
1279
+ ],
1280
+ loops=[hf.Loop(name="inner", tasks=[1, 2], num_iterations=2)],
1281
+ )
1282
+ pathway = wk.get_iteration_task_pathway(ret_data_idx=True)
1283
+ assert len(pathway) == 5
1284
+
1285
+ expected = [i["outputs.p3"] for i in pathway[0][2]]
1286
+ for i in pathway:
1287
+ if i[0] == 2: # task 3
1288
+ assert i[2][0]["inputs.p3"] == expected
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hpcflow-new2
3
- Version: 0.2.0a175
3
+ Version: 0.2.0a176
4
4
  Summary: Computational workflow management
5
5
  License: MIT
6
6
  Author: aplowman
@@ -1,7 +1,7 @@
1
1
  hpcflow/__init__.py,sha256=WIETuRHeOp2SqUqHUzpjQ-lk9acbYv-6aWOhZPRdlhs,64
2
2
  hpcflow/__pyinstaller/__init__.py,sha256=YOzBlPSck6slucv6lJM9K80JtsJWxXRL00cv6tRj3oc,98
3
3
  hpcflow/__pyinstaller/hook-hpcflow.py,sha256=SeMopsPkhCyd9gqIrzwFNRj3ZlkUlUYl-74QYz61mo4,1089
4
- hpcflow/_version.py,sha256=qS9M42Fmpevc84bybtkqyQeAZhWacKKmZ1vAOYpl2WE,26
4
+ hpcflow/_version.py,sha256=SYxreQlS-GrGdoMoqVR3gpikj6-V4k7yVrXGBKl-Xxg,26
5
5
  hpcflow/app.py,sha256=d-kgfnZNlqlCi2H8bK26714brD_u3ibN3FaEZgjF9aA,1332
6
6
  hpcflow/cli.py,sha256=G2J3D9v6MnMWOWMMWK6UEKLn_6wnV9lT_qygEBBxg-I,66
7
7
  hpcflow/data/demo_data_manifest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -52,7 +52,7 @@ hpcflow/sdk/core/element.py,sha256=hTAR2kxfGSRf4vFgWwrnyuP5z5RnKYOd2X6c6Xd70zo,4
52
52
  hpcflow/sdk/core/environment.py,sha256=DGUz1NvliKh6opP0IueGHD69rn_8wFLhDsq6kAmEgM4,4849
53
53
  hpcflow/sdk/core/errors.py,sha256=ku4wwsrmxBpJBFflUeZD6vrmAqgC7H02VdlRG4aAGqQ,9292
54
54
  hpcflow/sdk/core/json_like.py,sha256=LRZsUd1tn8zXC8fESeiXs7Eko-VdnB8zcXiqixKVcZM,18874
55
- hpcflow/sdk/core/loop.py,sha256=7WHif9U3KenNqICN5Ibq76WCwEn9xzBo0jnROwv4z-4,30051
55
+ hpcflow/sdk/core/loop.py,sha256=SYlkmjvxifBfykhjgXCbxnCvn37BfXGMRIbyOholrEE,30936
56
56
  hpcflow/sdk/core/object_list.py,sha256=HASx7AMniX82bTlROIgIvrjE_DupmwDgxfkfROmI3GA,20168
57
57
  hpcflow/sdk/core/parallel.py,sha256=LI-g-qOuOR1oaEUWVT0qW0hmiP9hsJyUP8_IfSTKYYo,95
58
58
  hpcflow/sdk/core/parameters.py,sha256=0h1M-fXqOVgruyM0Au7Fo38cUbHgDNEPd1Alb1FULxE,65588
@@ -129,7 +129,7 @@ hpcflow/tests/unit/test_group.py,sha256=J7Gx6BdzD2uoRUnBow7L_OfdfLUZi7sv5AZd1yss
129
129
  hpcflow/tests/unit/test_input_source.py,sha256=pYm1V4kBsBIFstKDcrbwUiDLb82rr7ITcUEFJES2dI0,39194
130
130
  hpcflow/tests/unit/test_input_value.py,sha256=H6GX1ee7fuq5f-OsfkfiSW8eye_pWwVLUCYUSnj1-Gg,5731
131
131
  hpcflow/tests/unit/test_json_like.py,sha256=aGCiGfT-tNiFu3yzW6d_T-oDc5QLwSUgq3pN3jFhyF0,29939
132
- hpcflow/tests/unit/test_loop.py,sha256=6SPRzqxk0X3aWPiUJJTu6lKODsPGNUOdDgzdN1dFs0k,40438
132
+ hpcflow/tests/unit/test_loop.py,sha256=GNKMGAXP0AaB7fl1TShluyh-PoG7O96X7SGUg6vnK6Y,45149
133
133
  hpcflow/tests/unit/test_object_list.py,sha256=nDpbRpCu4XqoYxKMr1_QmDS1s2_6nQOpIEBRHSAXoVg,3049
134
134
  hpcflow/tests/unit/test_parameter.py,sha256=39CVido_NJGX-Xj9NDSlazpGzWqMG4zp0GmIKwzO7lI,6659
135
135
  hpcflow/tests/unit/test_persistence.py,sha256=DPXFiuY2v8vj0zZ7299nf-KtgYT8LhHI52fq7UPoa6Y,8128
@@ -149,7 +149,7 @@ hpcflow/tests/unit/test_workflow_template.py,sha256=fF7LNveMwCledgncNCRfD9Nd9dL9
149
149
  hpcflow/tests/workflows/test_jobscript.py,sha256=9sp1o0g72JZbv2QlOl5v7wCZEFjotxiIKGNUxVaFgaA,724
150
150
  hpcflow/tests/workflows/test_workflows.py,sha256=xai6FRtGqG4lStJk6KmsqPUSuvqs9FrsBOxMVALshIs,13400
151
151
  hpcflow/viz_demo.ipynb,sha256=1QdnVsk72vihv2L6hOGyk318uEa22ZSgGxQCa7hW2oo,6238
152
- hpcflow_new2-0.2.0a175.dist-info/METADATA,sha256=43yDTsgpidfr8FPVgUgIDwGX4GJz9qZ9Owf5POXxBQ0,2466
153
- hpcflow_new2-0.2.0a175.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
154
- hpcflow_new2-0.2.0a175.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
155
- hpcflow_new2-0.2.0a175.dist-info/RECORD,,
152
+ hpcflow_new2-0.2.0a176.dist-info/METADATA,sha256=Xg97HSWIATWbQGGHQ-ORIDzX1dRhvQ2O0mmtxDluQto,2466
153
+ hpcflow_new2-0.2.0a176.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
154
+ hpcflow_new2-0.2.0a176.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
155
+ hpcflow_new2-0.2.0a176.dist-info/RECORD,,