hpcflow-new2 0.2.0a175__py3-none-any.whl → 0.2.0a177__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/sdk/core/cache.py +142 -0
- hpcflow/sdk/core/element.py +7 -0
- hpcflow/sdk/core/loop.py +134 -95
- hpcflow/sdk/core/loop_cache.py +140 -0
- hpcflow/sdk/core/task.py +29 -24
- hpcflow/sdk/core/utils.py +11 -1
- hpcflow/sdk/core/workflow.py +65 -22
- hpcflow/sdk/persistence/base.py +16 -3
- hpcflow/sdk/persistence/json.py +11 -4
- hpcflow/sdk/persistence/pending.py +2 -0
- hpcflow/sdk/persistence/zarr.py +8 -1
- hpcflow/tests/unit/test_loop.py +127 -0
- hpcflow/tests/unit/test_utils.py +21 -0
- {hpcflow_new2-0.2.0a175.dist-info → hpcflow_new2-0.2.0a177.dist-info}/METADATA +1 -1
- {hpcflow_new2-0.2.0a175.dist-info → hpcflow_new2-0.2.0a177.dist-info}/RECORD +18 -16
- {hpcflow_new2-0.2.0a175.dist-info → hpcflow_new2-0.2.0a177.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a175.dist-info → hpcflow_new2-0.2.0a177.dist-info}/entry_points.txt +0 -0
hpcflow/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.0a175"
+__version__ = "0.2.0a177"
hpcflow/sdk/core/cache.py
ADDED
@@ -0,0 +1,142 @@
+from collections import defaultdict
+from dataclasses import dataclass
+from typing import Set, Dict
+
+from hpcflow.sdk.log import TimeIt
+
+
+@dataclass
+class DependencyCache:
+    """Class to bulk-retrieve dependencies between elements, iterations, and runs."""
+
+    run_dependencies: Dict[int, Set]
+    run_dependents: Dict[int, Set]
+    iter_run_dependencies: Dict[int, Set]
+    iter_iter_dependencies: Dict[int, Set]
+    elem_iter_dependencies: Dict[int, Set]
+    elem_elem_dependencies: Dict[int, Set]
+    elem_elem_dependents: Dict[int, Set]
+    elem_elem_dependents_rec: Dict[int, Set]
+
+    elements: Dict
+    iterations: Dict
+
+    @classmethod
+    @TimeIt.decorator
+    def build(cls, workflow):
+        num_iters = workflow.num_element_iterations
+        num_elems = workflow.num_elements
+        num_runs = workflow.num_EARs
+
+        all_store_runs = workflow._store.get_EARs(list(range(num_runs)))
+        all_store_iters = workflow._store.get_element_iterations(list(range(num_iters)))
+        all_store_elements = workflow._store.get_elements(list(range(num_elems)))
+        all_param_sources = workflow.get_all_parameter_sources()
+        all_data_idx = [
+            {
+                k: v if isinstance(v, list) else [v]
+                for k, v in i.data_idx.items()
+                if k not in ("repeats.",)
+            }
+            for i in all_store_runs
+        ]
+
+        # run dependencies and dependents
+        run_dependencies = {}
+        run_dependents = defaultdict(set)
+        for idx, i in enumerate(all_data_idx):
+            run_i_sources = set()
+            for j in i.values():
+                for k in j:
+                    run_k = all_param_sources[k].get("EAR_ID")
+                    if run_k is not None and run_k != idx:
+                        run_i_sources.add(run_k)
+            run_dependencies[idx] = run_i_sources
+            for m in run_i_sources:
+                run_dependents[m].add(idx)
+
+        # add missing:
+        for k in range(num_runs):
+            run_dependents[k]
+
+        run_dependents = dict(run_dependents)
+
+        # iteration dependencies
+        all_iter_run_IDs = {
+            i.id_: [k for j in i.EAR_IDs.values() for k in j] for i in all_store_iters
+        }
+        # for each iteration, which runs does it depend on?
+        iter_run_dependencies = {
+            k: set(j for i in v for j in run_dependencies[i])
+            for k, v in all_iter_run_IDs.items()
+        }
+
+        # for each run, which iteration does it belong to?
+        all_run_iter_IDs = {}
+        for iter_ID, run_IDs in all_iter_run_IDs.items():
+            for run_ID in run_IDs:
+                all_run_iter_IDs[run_ID] = iter_ID
+
+        # for each iteration, which iterations does it depend on?
+        iter_iter_dependencies = {
+            k: set(all_run_iter_IDs[i] for i in v)
+            for k, v in iter_run_dependencies.items()
+        }
+
+        all_elem_iter_IDs = {i.id_: i.iteration_IDs for i in all_store_elements}
+
+        elem_iter_dependencies = {
+            k: set(j for i in v for j in iter_iter_dependencies[i])
+            for k, v in all_elem_iter_IDs.items()
+        }
+
+        # for each iteration, which element does it belong to?
+        all_iter_elem_IDs = {}
+        for elem_ID, iter_IDs in all_elem_iter_IDs.items():
+            for iter_ID in iter_IDs:
+                all_iter_elem_IDs[iter_ID] = elem_ID
+
+        # element dependencies
+        elem_elem_dependencies = {
+            k: set(all_iter_elem_IDs[i] for i in v)
+            for k, v in elem_iter_dependencies.items()
+        }
+
+        # for each element, which elements depend on it (directly)?
+        elem_elem_dependents = defaultdict(set)
+        for k, v in elem_elem_dependencies.items():
+            for i in v:
+                elem_elem_dependents[i].add(k)
+
+        # for each element, which elements depend on it (recursively)?
+        elem_elem_dependents_rec = defaultdict(set)
+        for k in list(elem_elem_dependents):
+            for i in elem_elem_dependents[k]:
+                elem_elem_dependents_rec[k].add(i)
+                elem_elem_dependents_rec[k].update(
+                    {m for m in elem_elem_dependents[i] if m != k}
+                )
+
+        # add missing keys:
+        for k in range(num_elems):
+            elem_elem_dependents[k]
+            elem_elem_dependents_rec[k]
+
+        elem_elem_dependents = dict(elem_elem_dependents)
+        elem_elem_dependents_rec = dict(elem_elem_dependents_rec)
+
+        elements = workflow.get_all_elements()
+        iterations = workflow.get_all_element_iterations()
+
+        return cls(
+            run_dependencies=run_dependencies,
+            run_dependents=run_dependents,
+            iter_run_dependencies=iter_run_dependencies,
+            iter_iter_dependencies=iter_iter_dependencies,
+            elem_iter_dependencies=elem_iter_dependencies,
+            elem_elem_dependencies=elem_elem_dependencies,
+            elem_elem_dependents=elem_elem_dependents,
+            elem_elem_dependents_rec=elem_elem_dependents_rec,
+            elements=elements,
+            iterations=iterations,
+        )
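The new `DependencyCache.build` method inverts a "depends on" mapping into a "depended on by" mapping, then widens each entry by one further level of dependents; the bare `run_dependents[k]`-style statements deliberately touch a `defaultdict` so every ID has an entry before the maps are frozen to plain dicts. A minimal self-contained sketch of the same pattern, using made-up element IDs rather than hpcflow store objects:

from collections import defaultdict

# hypothetical direct dependencies: element ID -> set of element IDs it depends on
elem_deps = {0: set(), 1: {0}, 2: {1}, 3: {1}}

# invert to direct dependents, as build() does for runs and elements
elem_dependents = defaultdict(set)
for elem, deps in elem_deps.items():
    for dep in deps:
        elem_dependents[dep].add(elem)

# propagate dependents one extra level per key, as for elem_elem_dependents_rec
elem_dependents_rec = defaultdict(set)
for k in list(elem_dependents):
    for i in elem_dependents[k]:
        elem_dependents_rec[k].add(i)
        elem_dependents_rec[k].update(m for m in elem_dependents[i] if m != k)

# "touch" every key so later lookups cannot raise KeyError, then freeze
for k in elem_deps:
    elem_dependents[k]
    elem_dependents_rec[k]

print(dict(elem_dependents))      # {0: {1}, 1: {2, 3}, 2: set(), 3: set()}
print(dict(elem_dependents_rec))  # {0: {1, 2, 3}, 1: {2, 3}, 2: set(), 3: set()}

On this small example the single propagation pass already yields the full set of downstream elements; build() applies the identical idiom to runs, iterations, and elements in bulk, which is what lets loop.py below avoid per-element store queries.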
hpcflow/sdk/core/element.py
CHANGED
@@ -675,6 +675,7 @@ class ElementIteration:
             default=default,
         )
 
+    @TimeIt.decorator
     def get_EAR_dependencies(
         self,
         as_objects: Optional[bool] = False,
@@ -708,6 +709,7 @@ class ElementIteration:
             out = self.workflow.get_EARs_from_IDs(out)
         return out
 
+    @TimeIt.decorator
     def get_element_iteration_dependencies(
         self, as_objects: bool = False
     ) -> List[Union[int, app.ElementIteration]]:
@@ -719,6 +721,7 @@ class ElementIteration:
             out = self.workflow.get_element_iterations_from_IDs(out)
         return out
 
+    @TimeIt.decorator
     def get_element_dependencies(
         self,
         as_objects: Optional[bool] = False,
@@ -769,6 +772,7 @@ class ElementIteration:
 
         return out
 
+    @TimeIt.decorator
     def get_dependent_EARs(
         self, as_objects: bool = False
     ) -> List[Union[int, app.ElementActionRun]]:
@@ -793,6 +797,7 @@ class ElementIteration:
 
         return deps
 
+    @TimeIt.decorator
    def get_dependent_element_iterations(
         self, as_objects: bool = False
     ) -> List[Union[int, app.ElementIteration]]:
@@ -816,6 +821,7 @@ class ElementIteration:
 
         return deps
 
+    @TimeIt.decorator
     def get_dependent_elements(
         self,
         as_objects: bool = False,
@@ -1246,6 +1252,7 @@ class Element:
         """Get tasks that depend on the most recent iteration of this element."""
         return self.latest_iteration.get_dependent_tasks(as_objects=as_objects)
 
+    @TimeIt.decorator
     def get_dependent_elements_recursively(self, task_insert_ID=None):
         """Get downstream elements that depend on this element, including recursive
         dependencies.
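Every change in this file is instrumentation: each dependency-query method gains `@TimeIt.decorator` so its call durations can be profiled. Purely as an illustration of the shape such a decorator takes (a hypothetical stand-in, not the implementation in `hpcflow.sdk.log`):

import time
from functools import wraps


class TimeIt:
    # illustrative stand-in, not hpcflow's actual TimeIt
    timers = {}  # qualified function name -> accumulated seconds

    @classmethod
    def decorator(cls, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start = time.perf_counter()
            try:
                return func(*args, **kwargs)
            finally:
                cls.timers[func.__qualname__] = (
                    cls.timers.get(func.__qualname__, 0.0)
                    + time.perf_counter()
                    - start
                )

        return wrapper

Wrapped methods keep their return values; the decorator only accumulates wall-clock timings, which is why it can be applied so liberally across element.py and loop.py.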
hpcflow/sdk/core/loop.py
CHANGED
@@ -6,9 +6,11 @@ from typing import Dict, List, Optional, Tuple, Union
 from hpcflow.sdk import app
 from hpcflow.sdk.core.errors import LoopTaskSubsetError
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
+from hpcflow.sdk.core.loop_cache import LoopCache
 from hpcflow.sdk.core.parameters import InputSourceType
 from hpcflow.sdk.core.task import WorkflowTask
-from hpcflow.sdk.core.utils import check_valid_py_identifier
+from hpcflow.sdk.core.utils import check_valid_py_identifier, nth_key, nth_value
+from hpcflow.sdk.log import TimeIt
 
 # from .parameters import Parameter
 
@@ -198,6 +200,7 @@ class WorkflowLoop:
 
         self._validate()
 
+    @TimeIt.decorator
     def _validate(self):
         # task subset must be a contiguous range of task indices:
         task_indices = self.task_indices
@@ -328,6 +331,7 @@ class WorkflowLoop:
         return self.workflow.tasks[: self.task_objects[0].index]
 
     @staticmethod
+    @TimeIt.decorator
     def _find_iterable_parameters(loop_template: app.Loop):
         all_inputs_first_idx = {}
         all_outputs_idx = {}
@@ -355,18 +359,19 @@ class WorkflowLoop:
         return iterable_params
 
     @classmethod
+    @TimeIt.decorator
     def new_empty_loop(
         cls,
         index: int,
         workflow: app.Workflow,
         template: app.Loop,
-
+        iter_loop_idx: List[Dict],
     ) -> Tuple[app.WorkflowLoop, List[Dict[str, int]]]:
         parent_loops = cls._get_parent_loops(index, workflow, template)
         parent_names = [i.name for i in parent_loops]
         num_added_iters = {}
-        for
-        num_added_iters[tuple([
+        for i in iter_loop_idx:
+            num_added_iters[tuple([i[j] for j in parent_names])] = 1
 
         obj = cls(
             index=index,
@@ -379,6 +384,7 @@ class WorkflowLoop:
         return obj
 
     @classmethod
+    @TimeIt.decorator
     def _get_parent_loops(
         cls,
         index: int,
@@ -399,12 +405,14 @@
                 parents.append(loop_i)
         return parents
 
+    @TimeIt.decorator
     def get_parent_loops(self) -> List[app.WorkflowLoop]:
         """Get loops whose task subset is a superset of this loop's task subset. If two
         loops have identical task subsets, the first loop in the workflow loop list is
         considered the child."""
         return self._get_parent_loops(self.index, self.workflow, self.template)
 
+    @TimeIt.decorator
     def get_child_loops(self) -> List[app.WorkflowLoop]:
         """Get loops whose task subset is a subset of this loop's task subset. If two
         loops have identical task subsets, the first loop in the workflow loop list is
@@ -426,10 +434,12 @@
         children = sorted(children, key=lambda x: len(next(iter(x.num_added_iterations))))
         return children
 
-    def add_iteration(self, parent_loop_indices=None):
+    @TimeIt.decorator
+    def add_iteration(self, parent_loop_indices=None, cache: Optional[LoopCache] = None):
+        if not cache:
+            cache = LoopCache.build(self.workflow)
         parent_loops = self.get_parent_loops()
         child_loops = self.get_child_loops()
-        child_loop_names = [i.name for i in child_loops]
         parent_loop_indices = parent_loop_indices or {}
         if parent_loops and not parent_loop_indices:
             parent_loop_indices = {i.name: 0 for i in parent_loops}
@@ -438,25 +448,39 @@
         cur_loop_idx = self.num_added_iterations[iters_key] - 1
         all_new_data_idx = {}  # keys are (task.insert_ID and element.index)
 
+        # initialise a new `num_added_iterations` key on each child loop:
+        for child in child_loops:
+            iters_key_dct = {
+                **parent_loop_indices,
+                self.name: cur_loop_idx + 1,
+            }
+            added_iters_key_chd = tuple([iters_key_dct.get(j, 0) for j in child.parents])
+            child._initialise_pending_added_iters(added_iters_key_chd)
+
         for task in self.task_objects:
+
+            new_loop_idx = {
+                **parent_loop_indices,
+                self.name: cur_loop_idx + 1,
+                **{
+                    child.name: 0
+                    for child in child_loops
+                    if task.insert_ID in child.task_insert_IDs
+                },
+            }
+            added_iter_IDs = []
             for elem_idx in range(task.num_elements):
-
-
-
+
+                elem_ID = task.element_IDs[elem_idx]
+
                 new_data_idx = {}
-                existing_inners = []
-                for iter_i in element.iterations:
-                    if iter_i.loop_idx[self.name] == cur_loop_idx:
-                        existing_inner_i = {
-                            k: v
-                            for k, v in iter_i.loop_idx.items()
-                            if k in child_loop_names
-                        }
-                        if existing_inner_i:
-                            existing_inners.append(existing_inner_i)
 
                 # copy resources from zeroth iteration:
-
+                zeroth_iter_ID, zi_iter_data_idx = cache.zeroth_iters[elem_ID]
+                zi_elem_ID, zi_idx = cache.iterations[zeroth_iter_ID]
+                zi_data_idx = nth_value(cache.data_idx[zi_elem_ID], zi_idx)
+
+                for key, val in zi_data_idx.items():
                     if key.startswith("resources."):
                         new_data_idx[key] = val
 
@@ -474,41 +498,47 @@
                         # identify element(s) from which this iterable input should be
                         # parametrised:
                         if task.insert_ID == iter_dat["output_tasks"][-1]:
-
+                            src_elem_ID = elem_ID
                             grouped_elems = None
                         else:
-
-
-
+                            src_elem_IDs_all = cache.element_dependents[elem_ID]
+                            src_elem_IDs = {
+                                k: v
+                                for k, v in src_elem_IDs_all.items()
+                                if cache.elements[k]["task_insert_ID"]
+                                == iter_dat["output_tasks"][-1]
+                            }
                             # consider groups
                             inp_group_name = inp.single_labelled_data.get("group")
                             grouped_elems = []
-                            for
+                            for src_elem_j_ID, src_elem_j_dat in src_elem_IDs.items():
                                 i_in_group = any(
-
+                                    k == inp_group_name
+                                    for k in src_elem_j_dat["group_names"]
                                 )
                                 if i_in_group:
-                                    grouped_elems.append(
+                                    grouped_elems.append(src_elem_j_ID)
 
-                            if not grouped_elems and len(
+                            if not grouped_elems and len(src_elem_IDs) > 1:
                                 raise NotImplementedError(
-                                    f"Multiple elements found in the iterable parameter
-                                    f" latest output task (insert ID: "
-                                    f"{iter_dat['output_tasks'][-1]}) that can be used
-                                    f"parametrise the next iteration:
+                                    f"Multiple elements found in the iterable parameter "
+                                    f"{inp!r}'s latest output task (insert ID: "
+                                    f"{iter_dat['output_tasks'][-1]}) that can be used "
+                                    f"to parametrise the next iteration: "
+                                    f"{list(src_elem_IDs.keys())!r}."
                                 )
 
-                            elif not
+                            elif not src_elem_IDs:
                                 # TODO: maybe OK?
                                 raise NotImplementedError(
-                                    f"No elements found in the iterable parameter
-                                    f" latest output task (insert ID: "
-                                    f"{iter_dat['output_tasks'][-1]}) that can be used
-                                    f"parametrise the next iteration."
+                                    f"No elements found in the iterable parameter "
+                                    f"{inp!r}'s latest output task (insert ID: "
+                                    f"{iter_dat['output_tasks'][-1]}) that can be used "
+                                    f"to parametrise the next iteration."
                                 )
 
                             else:
-
+                                src_elem_ID = nth_key(src_elem_IDs, 0)
 
                         child_loop_max_iters = {}
                         parent_loop_same_iters = {
@@ -534,63 +564,69 @@
 
                         # identify the ElementIteration from which this input should be
                         # parametrised:
-
+                        loop_idx_key = tuple(sorted(source_iter_loop_idx.items()))
                         if grouped_elems:
-
-                            for
-
-
-
-                            break
+                            src_data_idx = []
+                            for src_elem_ID in grouped_elems:
+                                src_data_idx.append(
+                                    cache.data_idx[src_elem_ID][loop_idx_key]
+                                )
                         else:
-
-                            if iter_i.loop_idx == source_iter_loop_idx:
-                                source_iter = iter_i
-                                break
+                            src_data_idx = cache.data_idx[src_elem_ID][loop_idx_key]
 
-                        if not
+                        if not src_data_idx:
                            raise RuntimeError(
                                 f"Could not find a source iteration with loop_idx: "
                                 f"{source_iter_loop_idx!r}."
                             )
 
                         if grouped_elems:
-                            inp_dat_idx = [
-                                i.get_data_idx()[f"outputs.{inp.typ}"]
-                                for i in source_iter
-                            ]
+                            inp_dat_idx = [i[f"outputs.{inp.typ}"] for i in src_data_idx]
                         else:
-                            inp_dat_idx =
+                            inp_dat_idx = src_data_idx[f"outputs.{inp.typ}"]
                         new_data_idx[f"inputs.{inp.typ}"] = inp_dat_idx
 
                     else:
                         inp_key = f"inputs.{inp.typ}"
 
-                        orig_inp_src =
+                        orig_inp_src = cache.elements[elem_ID]["input_sources"][inp_key]
                         inp_dat_idx = None
 
                         if orig_inp_src.source_type is InputSourceType.LOCAL:
                             # keep locally defined inputs from original element
-                            inp_dat_idx =
+                            inp_dat_idx = zi_data_idx[inp_key]
 
                         elif orig_inp_src.source_type is InputSourceType.DEFAULT:
                             # keep default value from original element
-                            inp_dat_idx_iter_0 = element.iterations[0].get_data_idx()
                             try:
-                                inp_dat_idx =
+                                inp_dat_idx = zi_data_idx[inp_key]
                             except KeyError:
                                 # if this input is required by a conditional action, and
                                 # that condition is not met, then this input will not
                                 # exist in the action-run data index, so use the initial
                                 # iteration data index:
-                                inp_dat_idx =
+                                inp_dat_idx = zi_iter_data_idx[inp_key]
 
                         elif orig_inp_src.source_type is InputSourceType.TASK:
                             if orig_inp_src.task_ref not in self.task_insert_IDs:
-                                # source
-
-
-                                ]
+                                # source the data_idx from the iteration with same parent
+                                # loop indices as the new iteration to add:
+                                # src_iters = []
+                                src_data_idx = []
+                                for li_k, di_k in cache.data_idx[elem_ID].items():
+                                    skip_iter = False
+                                    li_k_dct = dict(li_k)
+                                    for p_k, p_v in parent_loop_indices.items():
+                                        if li_k_dct.get(p_k) != p_v:
+                                            skip_iter = True
+                                            break
+                                    if not skip_iter:
+                                        src_data_idx.append(di_k)
+
+                                # could be multiple, but they should all have the same
+                                # data index for this parameter:
+                                src_data_idx = src_data_idx[0]
+                                inp_dat_idx = src_data_idx[inp_key]
                             else:
                                 is_group = False
                                 if (
@@ -613,19 +649,24 @@
                                     # find which element in that task `element`
                                     # depends on:
                                     task_i = self.workflow.tasks.get(insert_ID=tiID)
-
-
-
-
-
-
-
-
-
+                                    elem_i_ID = task_i.element_IDs[e_idx]
+                                    src_elem_IDs_all = cache.element_dependents[
+                                        elem_i_ID
+                                    ]
+                                    src_elem_IDs_i = {
+                                        k: v
+                                        for k, v in src_elem_IDs_all.items()
+                                        if cache.elements[k]["task_insert_ID"]
+                                        == task.insert_ID
+                                    }
+
+                                    # filter src_elem_IDs_i for matching element IDs:
+                                    src_elem_IDs_i = [
+                                        i for i in src_elem_IDs_i if i == elem_ID
                                     ]
                                     if (
-                                        len(
-                                        and
+                                        len(src_elem_IDs_i) == 1
+                                        and src_elem_IDs_i[0] == elem_ID
                                     ):
                                         new_sources.append((tiID, e_idx))
 
@@ -648,10 +689,11 @@
                         new_data_idx[inp_key] = inp_dat_idx
 
                 # add any locally defined sub-parameters:
+                inp_statuses = cache.elements[elem_ID]["input_statuses"]
                 inp_status_inps = set([f"inputs.{i}" for i in inp_statuses])
                 sub_params = inp_status_inps - set(new_data_idx.keys())
                 for sub_param_i in sub_params:
-                    sub_param_data_idx_iter_0 =
+                    sub_param_data_idx_iter_0 = zi_data_idx
                     try:
                         sub_param_data_idx = sub_param_data_idx_iter_0[sub_param_i]
                     except KeyError:
@@ -659,7 +701,7 @@
                         # and that condition is not met, then this input will not exist in
                         # the action-run data index, so use the initial iteration data
                         # index:
-                        sub_param_data_idx =
+                        sub_param_data_idx = zi_data_idx[sub_param_i]
 
                     new_data_idx[sub_param_i] = sub_param_data_idx
 
@@ -671,30 +713,26 @@
                 schema_params = set(
                     i for i in new_data_idx.keys() if len(i.split(".")) == 2
                 )
-                all_new_data_idx[(task.insert_ID,
-
-                new_loop_idx = {
-                    **parent_loop_indices,
-                    self.name: cur_loop_idx + 1,
-                    **{
-                        child.name: 0
-                        for child in child_loops
-                        if task.insert_ID in child.task_insert_IDs
-                    },
-                }
-                # increment num_added_iterations on child loop for this parent loop index:
-                for i in child_loops:
-                    added_iters_key_chd = tuple([new_loop_idx[j] for j in i.parents])
-                    i._initialise_pending_added_iters(added_iters_key_chd)
+                all_new_data_idx[(task.insert_ID, elem_idx)] = new_data_idx
 
                 iter_ID_i = self.workflow._store.add_element_iteration(
-                    element_ID=
+                    element_ID=elem_ID,
                     data_idx=new_data_idx,
                     schema_parameters=list(schema_params),
                     loop_idx=new_loop_idx,
                 )
+                if cache:
+                    cache.add_iteration(
+                        iter_ID=iter_ID_i,
+                        task_insert_ID=task.insert_ID,
+                        element_ID=elem_ID,
+                        loop_idx=new_loop_idx,
+                        data_idx=new_data_idx,
+                    )
+
+
                 added_iter_IDs.append(iter_ID_i)
 
-
+            task.initialise_EARs(iter_IDs=added_iter_IDs)
 
         added_iters_key = tuple(parent_loop_indices[k] for k in self.parents)
         self._increment_pending_added_iters(added_iters_key)
@@ -713,7 +751,8 @@
                     **par_idx,
                     **parent_loop_indices,
                     self.name: cur_loop_idx + 1,
-                }
+                },
+                cache=cache,
             )
 
     def test_termination(self, element_iter):
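Two patterns recur in the rewritten `add_iteration`: per-iteration data indices are looked up from the cache via a hashable, order-independent key, `tuple(sorted(loop_idx.items()))`, and the same `LoopCache` instance is threaded through nested calls (`cache=cache`) so it is built at most once per top-level call instead of once per added iteration. A small self-contained sketch of the key pattern, with invented loop names and data-index values:

# per-element cache mapping a loop-index key to a data index
data_idx_cache = {}

def loop_idx_key(loop_idx: dict) -> tuple:
    # sorting makes the key independent of dict insertion order
    return tuple(sorted(loop_idx.items()))

# store two hypothetical iterations of one element
data_idx_cache[loop_idx_key({"inner": 0, "outer": 0})] = {"outputs.p1": 10}
data_idx_cache[loop_idx_key({"outer": 0, "inner": 1})] = {"outputs.p1": 11}

# retrieve the iteration matching a given loop index
assert data_idx_cache[loop_idx_key({"inner": 1, "outer": 0})] == {"outputs.p1": 11}

Because the tuple is sorted by loop name, `{"inner": 1, "outer": 0}` and `{"outer": 0, "inner": 1}` resolve to the same cache entry, which is what allows the new code to replace the old per-element scans over `element.iterations` with direct dictionary lookups.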
|