hpcflow-new2 0.2.0a50__py3-none-any.whl → 0.2.0a52__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/sdk/__init__.py +1 -1
- hpcflow/sdk/api.py +1 -1
- hpcflow/sdk/app.py +20 -11
- hpcflow/sdk/cli.py +34 -59
- hpcflow/sdk/core/__init__.py +13 -1
- hpcflow/sdk/core/actions.py +235 -126
- hpcflow/sdk/core/command_files.py +32 -24
- hpcflow/sdk/core/element.py +110 -114
- hpcflow/sdk/core/errors.py +57 -0
- hpcflow/sdk/core/loop.py +18 -34
- hpcflow/sdk/core/parameters.py +5 -3
- hpcflow/sdk/core/task.py +135 -131
- hpcflow/sdk/core/task_schema.py +11 -4
- hpcflow/sdk/core/utils.py +110 -2
- hpcflow/sdk/core/workflow.py +964 -676
- hpcflow/sdk/data/template_components/environments.yaml +0 -44
- hpcflow/sdk/data/template_components/task_schemas.yaml +52 -10
- hpcflow/sdk/persistence/__init__.py +21 -33
- hpcflow/sdk/persistence/base.py +1340 -458
- hpcflow/sdk/persistence/json.py +424 -546
- hpcflow/sdk/persistence/pending.py +563 -0
- hpcflow/sdk/persistence/store_resource.py +131 -0
- hpcflow/sdk/persistence/utils.py +57 -0
- hpcflow/sdk/persistence/zarr.py +852 -841
- hpcflow/sdk/submission/jobscript.py +133 -112
- hpcflow/sdk/submission/shells/bash.py +62 -16
- hpcflow/sdk/submission/shells/powershell.py +87 -16
- hpcflow/sdk/submission/submission.py +59 -35
- hpcflow/tests/unit/test_element.py +4 -9
- hpcflow/tests/unit/test_persistence.py +218 -0
- hpcflow/tests/unit/test_task.py +11 -12
- hpcflow/tests/unit/test_utils.py +82 -0
- hpcflow/tests/unit/test_workflow.py +3 -1
- {hpcflow_new2-0.2.0a50.dist-info → hpcflow_new2-0.2.0a52.dist-info}/METADATA +3 -1
- {hpcflow_new2-0.2.0a50.dist-info → hpcflow_new2-0.2.0a52.dist-info}/RECORD +38 -34
- {hpcflow_new2-0.2.0a50.dist-info → hpcflow_new2-0.2.0a52.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a50.dist-info → hpcflow_new2-0.2.0a52.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/loop.py
CHANGED
@@ -330,14 +330,12 @@ class WorkflowLoop:
                 f"`parent_loop_indices`."
             )
         all_new_data_idx = {}  # keys are (task.insert_ID and element.index)
-        added_iters = 0
 
         for task in self.task_objects:
-
-
+            for elem_idx in range(task.num_elements):
+                # element needs to take into account changes made in this code
+                element = task.elements[elem_idx]
 
-            for element in task.elements:
-                elem_iters_idx[element.index] = []
                 new_data_idx = {}
 
                 # copy resources from zeroth iteration:
@@ -406,6 +404,7 @@
 
                     else:
                         inp_key = f"inputs.{inp.typ}"
+
                         orig_inp_src = element.input_sources[inp_key]
                         inp_dat_idx = None
 
@@ -429,8 +428,7 @@
                             )
                             if (
                                 len(src_elems_i) == 1
-                                and src_elems_i[0].
-                                == element.element_ID
+                                and src_elems_i[0].id_ == element.id_
                             ):
                                 inp_dat_idx = prev_dat_idx[
                                     f"{orig_inp_src.task_source_type.name.lower()}s.{inp.typ}"
@@ -448,41 +446,27 @@
                     path_i = f"outputs.{out.typ}"
                     p_src = {
                         "type": "EAR_output",
-                        "task_insert_ID": task.insert_ID,
-                        "element_idx": element.index,
-                        "run_idx": 0,
+                        # "task_insert_ID": task.insert_ID,
+                        # "element_idx": element.index,
+                        # "run_idx": 0,
                     }
                     new_data_idx[path_i] = self.workflow._add_unset_parameter_data(p_src)
 
                 schema_params = set(
                     i for i in new_data_idx.keys() if len(i.split(".")) == 2
                 )
-                new_iter = {
-                    "global_idx": self.workflow.num_element_iterations + added_iters,
-                    "data_idx": new_data_idx,
-                    "EARs_initialised": False,
-                    "actions": {},
-                    "schema_parameters": list(schema_params),
-                    "loop_idx": {**parent_loop_indices, self.name: cur_loop_idx + 1},
-                }
                 all_new_data_idx[(task.insert_ID, element.index)] = new_data_idx
-
-
-
-
-
-
-                self.workflow._store.add_element_iterations(
-                    task_idx=task.index,
-                    task_insert_ID=task.insert_ID,
-                    element_iterations=new_iters,
-                    element_iters_idx=elem_iters_idx,
-                )
+                iter_ID_i = self.workflow._store.add_element_iteration(
+                    element_ID=element.id_,
+                    data_idx=new_data_idx,
+                    schema_parameters=list(schema_params),
+                    loop_idx={**parent_loop_indices, self.name: cur_loop_idx + 1},
+                )
 
-
+            task.initialise_EARs()
 
         self._pending_num_added_iterations += 1
-        self.workflow._store.
-
-
+        self.workflow._store.update_loop_num_iters(
+            index=self.index,
+            num_iters=self.num_added_iterations,
         )
hpcflow/sdk/core/parameters.py
CHANGED
@@ -101,7 +101,9 @@ class Parameter(JSONLike):
 
     def __deepcopy__(self, memo):
         kwargs = self.to_dict()
+        _validation = kwargs.pop("_validation")
         obj = self.__class__(**copy.deepcopy(kwargs, memo))
+        obj._validation = _validation
         return obj
 
     def to_dict(self):
@@ -471,7 +473,7 @@ class ValueSequence(JSONLike):
         if self._values_group_idx is not None:
             vals = []
             for pg_idx_i in self._values_group_idx:
-
+                val = self.workflow.get_parameter_data(pg_idx_i)
                 if self.parameter._value_class:
                     val = self.parameter._value_class(**val)
                 vals.append(val)
@@ -572,7 +574,7 @@ class AbstractInputValue(JSONLike):
     @property
     def value(self):
         if self._value_group_idx is not None:
-
+            val = self.workflow.get_parameter_data(self._value_group_idx)
             if self.parameter._value_class:
                 val = self.parameter._value_class(**val)
         else:
@@ -895,7 +897,7 @@ class ResourceSpec(JSONLike):
 
     def _get_value(self, value_name=None):
         if self._value_group_idx is not None:
-
+            val = self.workflow.get_parameter_data(self._value_group_idx)
         else:
             val = self._get_members()
         if value_name:
hpcflow/sdk/core/task.py
CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 import copy
 from dataclasses import dataclass
+from pathlib import Path
 from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
 
 from valida.rules import Rule
@@ -382,8 +383,8 @@ class ElementSet(JSONLike):
         return [j for i in self.elements for j in i.iterations]
 
     @property
-    def 
-        return [i.
+    def elem_iter_IDs(self):
+        return [i.id_ for i in self.element_iterations]
 
     def get_task_dependencies(self, as_objects=False):
         """Get upstream tasks that this element set depends on."""
@@ -502,6 +503,9 @@ class Task(JSONLike):
             sourceable_elem_iters=sourceable_elem_iters,
         )
 
+        # appended to when new element sets are added and reset on dump to disk:
+        self._pending_element_sets = []
+
         self._validate()
         self._name = self._get_name()
 
@@ -511,6 +515,13 @@
 
         self._set_parent_refs()
 
+    def _reset_pending_element_sets(self):
+        self._pending_element_sets = []
+
+    def _accept_pending_element_sets(self):
+        self._element_sets += self._pending_element_sets
+        self._reset_pending_element_sets()
+
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return False
@@ -520,9 +531,9 @@
 
     def _add_element_set(self, element_set: app.ElementSet):
         """Invoked by WorkflowTask._add_element_set."""
-        self.
+        self._pending_element_sets.append(element_set)
         self.workflow_template.workflow._store.add_element_set(
-            self.
+            self.insert_ID, element_set.to_json_like()[0]
         )
 
     @classmethod
@@ -542,11 +553,13 @@
         kwargs = self.to_dict()
         _insert_ID = kwargs.pop("insert_ID")
         _dir_name = kwargs.pop("dir_name")
+        # _pending_element_sets = kwargs.pop("pending_element_sets")
         obj = self.__class__(**copy.deepcopy(kwargs, memo))
        obj._insert_ID = _insert_ID
         obj._dir_name = _dir_name
         obj._name = self._name
         obj.workflow_template = self.workflow_template
+        obj._pending_element_sets = self._pending_element_sets
         return obj
 
     def to_persistent(self, workflow, insert_ID):
@@ -563,7 +576,11 @@
 
     def to_dict(self):
         out = super().to_dict()
-        return {
+        return {
+            k.lstrip("_"): v
+            for k, v in out.items()
+            if k not in ("_name", "_pending_element_sets")
+        }
 
     def set_sequence_parameters(self, element_set):
         # set ValueSequence Parameter objects:
@@ -641,9 +658,9 @@
             # `initialise_EARs`:
             param_src = {
                 "type": "EAR_output",
-                "task_insert_ID": self.insert_ID,
-                "element_idx": idx,
-                "run_idx": 0,
+                # "task_insert_ID": self.insert_ID,
+                # "element_idx": idx,
+                # "run_idx": 0,
             }
             data_ref = workflow._add_unset_parameter_data(param_src)
             output_data_indices[path].append(data_ref)
@@ -732,7 +749,7 @@
                 src_elem_iters = []
                 for es_idx_i in es_idx:
                     es_i = src_task_i.element_sets[es_idx_i]
-                    src_elem_iters += es_i.
+                    src_elem_iters += es_i.elem_iter_IDs
 
                 if element_set.sourceable_elem_iters is not None:
                     # can only use a subset of element iterations (this is the
@@ -765,11 +782,11 @@
 
     @property
     def element_sets(self):
-        return self._element_sets
+        return self._element_sets + self._pending_element_sets
 
     @property
     def num_element_sets(self):
-        return len(self.
+        return len(self.element_sets)
 
     @property
     def insert_ID(self):
@@ -980,38 +997,27 @@ class WorkflowTask:
         workflow: app.Workflow,
         template: app.Task,
         index: int,
-
-        num_element_iterations: int,
-        num_EARs: int,
+        element_IDs: List[int],
     ):
         self._workflow = workflow
         self._template = template
         self._index = index
-        self.
-        self._num_element_iterations = num_element_iterations
-        self._num_EARs = num_EARs
+        self._element_IDs = element_IDs
 
-        #
-
-        self._pending_num_elements = 0
-        self._pending_num_element_iterations = 0
-        self._pending_num_EARs = 0
+        # appended to when new elements are added and reset on dump to disk:
+        self._pending_element_IDs = []
 
         self._elements = None  # assigned on `elements` first access
 
     def __repr__(self) -> str:
         return f"{self.__class__.__name__}(name={self.unique_name!r})"
 
-    def 
-        self.
-        self._pending_num_element_iterations = 0
-        self._pending_num_EARs = 0
+    def _reset_pending_element_IDs(self):
+        self._pending_element_IDs = []
 
-    def 
-        self.
-        self.
-        self._num_EARs = self.num_EARs
-        self._reset_pending_elements()
+    def _accept_pending_element_IDs(self):
+        self._element_IDs += self._pending_element_IDs
+        self._reset_pending_element_IDs()
 
     @classmethod
     def new_empty_task(cls, workflow: app.Workflow, template: app.Task, index: int):
@@ -1019,9 +1025,7 @@
             workflow=workflow,
             template=template,
             index=index,
-
-            num_element_iterations=0,
-            num_EARs=0,
+            element_IDs=[],
         )
         return obj
 
@@ -1038,16 +1042,12 @@
         return self._index
 
     @property
-    def 
-        return self.
-
-    @property
-    def num_element_iterations(self):
-        return self._num_element_iterations + self._pending_num_element_iterations
+    def element_IDs(self):
+        return self._element_IDs + self._pending_element_IDs
 
     @property
-    def 
-        return self.
+    def num_elements(self):
+        return len(self.element_IDs)
 
     @property
     def num_actions(self):
@@ -1154,9 +1154,7 @@
                 if inp_src.element_iters:
                     # only include "sourceable" element iterations:
                     src_elem_iters = [
-                        i
-                        for i in src_elem_iters
-                        if i.global_idx in inp_src.element_iters
+                        i for i in src_elem_iters if i.id_ in inp_src.element_iters
                     ]
 
                 if not src_elem_iters:
@@ -1427,7 +1425,7 @@
             if not iter_i.EARs_initialised:
                 try:
                     self._initialise_element_iter_EARs(iter_i)
-                    initialised.append(iter_i.
+                    initialised.append(iter_i.id_)
                 except UnsetParameterDataError:
                     # raised by `test_action_rule`; cannot yet initialise EARs
                     pass
@@ -1440,26 +1438,21 @@
         all_data_idx = {}
         action_runs = {}
 
+        # keys are parameter indices, values are EAR_IDs to update those sources to
         param_src_updates = {}
+
         count = 0
         for act_idx, action in self.template.all_schema_actions():
             if all(self.test_action_rule(i, schema_data_idx) for i in action.rules):
-
-                # note: indices below `EAR_idx` are redundant and can be derived from
-                # `EAR_idx`:
+                EAR_ID = self.workflow.num_EARs + count
                 param_source = {
                     "type": "EAR_output",
-                    "
-                    "EAR_idx": EAR_idx,
-                    "element_idx": element_iter.element.index,
-                    "iteration_idx": element_iter.index,
-                    "action_idx": act_idx,
-                    "run_idx": 0,
+                    "EAR_ID": EAR_ID,
                 }
                 psrc_update = (
                     action.generate_data_index(  # adds an item to `all_data_idx`
                         act_idx=act_idx,
-
+                        EAR_ID=EAR_ID,
                         schema_data_idx=schema_data_idx,
                         all_data_idx=all_data_idx,
                         workflow=self.workflow,
@@ -1469,39 +1462,27 @@
                 # with EARs initialised, we can update the pre-allocated schema-level
                 # parameters with the correct EAR reference:
                 for i in psrc_update:
-                    param_src_updates[i] = {
-                        "action_idx": act_idx,
-                        "EAR_idx": EAR_idx,
-                        "iteration_idx": element_iter.index,
-                    }
+                    param_src_updates[i] = {"EAR_ID": EAR_ID}
                 run_0 = {
-                    "
-                    "
-
-                    "success": None,
-                    "start_time": None,
-                    "end_time": None,
-                    },
+                    "elem_iter_ID": element_iter.id_,
+                    "action_idx": act_idx,
+                    "metadata": {},
                 }
-                action_runs[(act_idx,
+                action_runs[(act_idx, EAR_ID)] = run_0
                 count += 1
 
         # `generate_data_index` can modify data index for previous actions, so only assign
         # this at the end:
-
-
-
-
-
-
-
-
-
-            EARs=EARs,
-            param_src_updates=param_src_updates,
-        )
-        return EARs
+        for (act_idx, EAR_ID_i), run in action_runs.items():
+            self.workflow._store.add_EAR(
+                elem_iter_ID=element_iter.id_,
+                action_idx=act_idx,
+                data_idx=all_data_idx[(act_idx, EAR_ID_i)],
+                metadata={},
+            )
+
+        for pid, src in param_src_updates.items():
+            self.workflow._store.update_param_source(pid, src)
 
     def _add_element_set(self, element_set):
         """
@@ -1529,10 +1510,10 @@
 
         element_inp_data_idx = self.resolve_element_data_indices(multiplicities)
 
-        global_element_iter_idx_range = [
-
-
-        ]
+        # global_element_iter_idx_range = [
+        #     self.workflow.num_element_iterations,
+        #     self.workflow.num_element_iterations + len(element_inp_data_idx),
+        # ]
         local_element_idx_range = [
             self.num_elements,
             self.num_elements + len(element_inp_data_idx),
@@ -1554,41 +1535,56 @@
             src_idx,
         )
 
-
-
+        iter_IDs = []
+        elem_IDs = []
         for elem_idx, data_idx in enumerate(element_data_idx):
             schema_params = set(i for i in data_idx.keys() if len(i.split(".")) == 2)
-            elements.append(
-
-
-
-
-
-
+            # elements.append(
+            #     {
+            #         "iterations_idx": [self.num_elements + elem_idx],
+            #         "es_idx": self.num_element_sets - 1,
+            #         "seq_idx": ,
+            #         "src_idx": ,
+            #     }
+            # )
+            elem_ID_i = self.workflow._store.add_element(
+                task_ID=self.insert_ID,
+                es_idx=self.num_element_sets - 1,
+                seq_idx={k: v[elem_idx] for k, v in element_seq_idx.items()},
+                src_idx={k: v[elem_idx] for k, v in element_src_idx.items()},
             )
-
-
-
-
-                    "EARs_initialised": False,
-                    "actions": {},
-                    "schema_parameters": list(schema_params),
-                    "loop_idx": {},
-                }
+            iter_ID_i = self.workflow._store.add_element_iteration(
+                element_ID=elem_ID_i,
+                data_idx=data_idx,
+                schema_parameters=list(schema_params),
             )
-
-
-
-
-
-
-
-
-
+            iter_IDs.append(iter_ID_i)
+            elem_IDs.append(elem_ID_i)
+
+            # element_iterations.append(
+            #     {
+            #         "global_idx": self.workflow.num_element_iterations + elem_idx,
+            #         "data_idx": data_idx,
+            #         "EARs_initialised": False,
+            #         "actions": {},
+            #         "schema_parameters": list(schema_params),
+            #         "loop_idx": {},
+            #     }
+            # )
+
+        # self.workflow._store.add_elements(
+        #     self.index,
+        #     self.insert_ID,
+        #     elements,
+        #     element_iterations,
+        # )
+        self._pending_element_IDs += elem_IDs
+        # self._pending_num_elements += len(element_data_idx)
+        # self._pending_num_element_iterations += len(element_data_idx)
 
         self.initialise_EARs()
 
-        return
+        return iter_IDs
 
     def add_elements(
         self,
@@ -1723,16 +1719,18 @@
     def get_element_dependencies(
         self,
         as_objects: bool = False,
-    ) -> List[Union[
-        """Get elements from upstream tasks
-        depends on."""
+    ) -> List[Union[int, app.Element]]:
+        """Get elements from upstream tasks that this task depends on."""
 
         deps = []
-        for element in self.elements:
+        for element in self.elements[:]:
             for iter_i in element.iterations:
-                for
-                if
-
+                for dep_elem_i in iter_i.get_element_dependencies(as_objects=True):
+                    if (
+                        dep_elem_i.task.insert_ID != self.insert_ID
+                        and dep_elem_i not in deps
+                    ):
+                        deps.append(dep_elem_i.id_)
 
         deps = sorted(deps)
         if as_objects:
@@ -1772,17 +1770,15 @@
     def get_dependent_elements(
         self,
         as_objects: bool = False,
-    ) -> List[Union[
-        """Get elements from downstream tasks
-        on this task."""
+    ) -> List[Union[int, app.Element]]:
+        """Get elements from downstream tasks that depend on this task."""
         deps = []
         for task in self.downstream_tasks:
-            for element in task.elements:
-                elem_ID = element.element_ID
+            for element in task.elements[:]:
                 for iter_i in element.iterations:
                     for dep_i in iter_i.get_task_dependencies(as_objects=False):
-                        if dep_i == self.insert_ID and
-                            deps.append(
+                        if dep_i == self.insert_ID and element.id_ not in deps:
+                            deps.append(element.id_)
 
         deps = sorted(deps)
         if as_objects:
@@ -1879,8 +1875,16 @@
                 # no intersection between paths
                 continue
 
-
-            if
+            param = self.workflow.get_parameter(data_idx_i)
+            if param.file:
+                if param.file["store_contents"]:
+                    data = Path(self.workflow.path) / param.file["path"]
+                else:
+                    data = Path(param.file["path"])
+                data = data.as_posix()
+            else:
+                data = param.data
+            if raise_on_unset and not param.is_set:
                 raise UnsetParameterDataError(
                     f"Element data path {path!r} resolves to unset data for (at least) "
                     f"data index path: {path_i!r}."
hpcflow/sdk/core/task_schema.py
CHANGED
@@ -197,10 +197,17 @@ class TaskSchema(JSONLike):
         if extra_ins and not has_script:
             # TODO: bit of a hack, need to consider script ins/outs later
             # i.e. are all schema inputs "consumed" by an action?
-
-
-
-
+
+            # consider OFP inputs:
+            for act_i in self.actions:
+                for OFP_j in act_i.output_file_parsers:
+                    extra_ins = extra_ins - set(OFP_j.inputs or [])
+
+            if extra_ins:
+                raise ValueError(
+                    f"Schema {self.name!r} inputs {tuple(extra_ins)!r} are not used by "
+                    f"any actions."
+                )
 
         missing_outs = set(self.output_types) - set(all_outs)
         if missing_outs and not has_script: