hpcflow-new2 0.2.0a164__py3-none-any.whl → 0.2.0a167__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hpcflow/_version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.0a164"
1
+ __version__ = "0.2.0a167"
@@ -55,6 +55,18 @@ class ExtraInputs(Exception):
55
55
  super().__init__(message)
56
56
 
57
57
 
58
+ class UnavailableInputSource(ValueError):
59
+ pass
60
+
61
+
62
+ class InapplicableInputSourceElementIters(ValueError):
63
+ pass
64
+
65
+
66
+ class NoCoincidentInputSources(ValueError):
67
+ pass
68
+
69
+
58
70
  class TaskTemplateInvalidNesting(ValueError):
59
71
  pass
60
72
 
@@ -131,6 +143,10 @@ class MalformedParameterPathError(ValueError):
131
143
  pass
132
144
 
133
145
 
146
+ class MalformedNestingOrderPath(ValueError):
147
+ pass
148
+
149
+
134
150
  class UnknownResourceSpecItemError(ValueError):
135
151
  pass
136
152
 
@@ -869,6 +869,11 @@ class ValueSequence(JSONLike):
869
869
  vals = (vals.T).tolist()
870
870
  return vals
871
871
 
872
+ @classmethod
873
+ def _values_from_random_uniform(cls, num, low=0.0, high=1.0, seed=None):
874
+ rng = np.random.default_rng(seed)
875
+ return rng.uniform(low=low, high=high, size=num).tolist()
876
+
872
877
  @classmethod
873
878
  def from_linear_space(
874
879
  cls,
@@ -1027,6 +1032,25 @@ class ValueSequence(JSONLike):
1027
1032
  obj._values_method_args = args
1028
1033
  return obj
1029
1034
 
1035
+ @classmethod
1036
+ def from_random_uniform(
1037
+ cls,
1038
+ path,
1039
+ num,
1040
+ low=0.0,
1041
+ high=1.0,
1042
+ seed=None,
1043
+ nesting_order=0,
1044
+ label=None,
1045
+ **kwargs,
1046
+ ):
1047
+ args = {"low": low, "high": high, "num": num, "seed": seed, **kwargs}
1048
+ values = cls._values_from_random_uniform(**args)
1049
+ obj = cls(values=values, path=path, nesting_order=nesting_order, label=label)
1050
+ obj._values_method = "from_random_uniform"
1051
+ obj._values_method_args = args
1052
+ return obj
1053
+
1030
1054
 
1031
1055
  @dataclass
1032
1056
  class AbstractInputValue(JSONLike):
@@ -1793,7 +1817,7 @@ class InputSource(JSONLike):
1793
1817
  f"task_source_type={self.task_source_type.name.lower()!r}",
1794
1818
  )
1795
1819
 
1796
- if self.element_iters:
1820
+ if self.element_iters is not None:
1797
1821
  args_lst.append(f"element_iters={self.element_iters}")
1798
1822
 
1799
1823
  if self.where is not None:
@@ -1831,7 +1855,7 @@ class InputSource(JSONLike):
1831
1855
  out = [self.source_type.name.lower()]
1832
1856
  if self.source_type is InputSourceType.TASK:
1833
1857
  out += [str(self.task_ref), self.task_source_type.name.lower()]
1834
- if self.element_iters:
1858
+ if self.element_iters is not None:
1835
1859
  out += ["[" + ",".join(f"{i}" for i in self.element_iters) + "]"]
1836
1860
  elif self.source_type is InputSourceType.IMPORT:
1837
1861
  out += [str(self.import_ref)]
hpcflow/sdk/core/task.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
+ from collections import defaultdict
2
3
  import copy
3
4
  from dataclasses import dataclass
4
- import os
5
5
  from pathlib import Path
6
6
  from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Union
7
7
 
@@ -11,20 +11,23 @@ from valida.rules import Rule
11
11
  from hpcflow.sdk import app
12
12
  from hpcflow.sdk.core.task_schema import TaskSchema
13
13
  from hpcflow.sdk.log import TimeIt
14
- from hpcflow.sdk.submission.shells import DEFAULT_SHELL_NAMES
15
14
  from .json_like import ChildObjectSpec, JSONLike
16
15
  from .element import ElementGroup
17
16
  from .errors import (
18
17
  ContainerKeyError,
19
18
  ExtraInputs,
19
+ InapplicableInputSourceElementIters,
20
+ MalformedNestingOrderPath,
20
21
  MayNeedObjectError,
21
22
  MissingInputs,
22
23
  NoAvailableElementSetsError,
24
+ NoCoincidentInputSources,
23
25
  TaskTemplateInvalidNesting,
24
26
  TaskTemplateMultipleInputValues,
25
27
  TaskTemplateMultipleSchemaObjectives,
26
28
  TaskTemplateUnexpectedInput,
27
29
  TaskTemplateUnexpectedSequenceInput,
30
+ UnavailableInputSource,
28
31
  UnknownEnvironmentPresetError,
29
32
  UnrequiredInputSources,
30
33
  UnsetParameterDataError,
@@ -269,6 +272,16 @@ class ElementSet(JSONLike):
269
272
  }
270
273
  ]
271
274
 
275
+ # check `nesting_order` paths:
276
+ allowed_nesting_paths = ("inputs", "resources", "repeats")
277
+ for k in self.nesting_order:
278
+ if k.split(".")[0] not in allowed_nesting_paths:
279
+ raise MalformedNestingOrderPath(
280
+ f"Element set: nesting order path {k!r} not understood. Each key in "
281
+ f"`nesting_order` must be start with one of "
282
+ f"{allowed_nesting_paths!r}."
283
+ )
284
+
272
285
  inp_paths = [i.normalised_inputs_path for i in self.inputs]
273
286
  dup_inp_paths = get_duplicate_items(inp_paths)
274
287
  if dup_inp_paths:
@@ -358,6 +371,7 @@ class ElementSet(JSONLike):
358
371
  nesting_order=None,
359
372
  env_preset=None,
360
373
  environments=None,
374
+ allow_non_coincident_task_sources=None,
361
375
  element_sets=None,
362
376
  sourceable_elem_iters=None,
363
377
  ):
@@ -381,10 +395,22 @@ class ElementSet(JSONLike):
381
395
  "If providing an `element_set`, no other arguments are allowed."
382
396
  )
383
397
  else:
384
- element_sets = [cls(*args, sourceable_elem_iters=sourceable_elem_iters)]
398
+ element_sets = [
399
+ cls(
400
+ *args,
401
+ sourceable_elem_iters=sourceable_elem_iters,
402
+ allow_non_coincident_task_sources=allow_non_coincident_task_sources,
403
+ )
404
+ ]
385
405
  else:
386
406
  if element_sets is None:
387
- element_sets = [cls(*args, sourceable_elem_iters=sourceable_elem_iters)]
407
+ element_sets = [
408
+ cls(
409
+ *args,
410
+ sourceable_elem_iters=sourceable_elem_iters,
411
+ allow_non_coincident_task_sources=allow_non_coincident_task_sources,
412
+ )
413
+ ]
388
414
 
389
415
  return element_sets
390
416
 
@@ -552,6 +578,7 @@ class Task(JSONLike):
552
578
  nesting_order: Optional[List] = None,
553
579
  env_preset: Optional[str] = None,
554
580
  environments: Optional[Dict[str, Dict[str, Any]]] = None,
581
+ allow_non_coincident_task_sources: Optional[bool] = False,
555
582
  element_sets: Optional[List[app.ElementSet]] = None,
556
583
  output_labels: Optional[List[app.OutputLabel]] = None,
557
584
  sourceable_elem_iters: Optional[List[int]] = None,
@@ -565,6 +592,10 @@ class Task(JSONLike):
565
592
  schema names that uniquely identify a task schema. If strings are provided,
566
593
  the `TaskSchema` object will be fetched from the known task schemas loaded by
567
594
  the app configuration.
595
+ allow_non_coincident_task_sources
596
+ If True, if more than one parameter is sourced from the same task, then allow
597
+ these sources to come from distinct element sub-sets. If False (default),
598
+ only the intersection of element sub-sets for all parameters are included.
568
599
  merge_envs
569
600
  If True, merge environment presets (set via the element set `env_preset` key)
570
601
  into `resources` using the "any" scope. If False, these presets are ignored.
@@ -616,6 +647,7 @@ class Task(JSONLike):
616
647
  env_preset=env_preset,
617
648
  environments=environments,
618
649
  element_sets=element_sets,
650
+ allow_non_coincident_task_sources=allow_non_coincident_task_sources,
619
651
  sourceable_elem_iters=sourceable_elem_iters,
620
652
  )
621
653
  self._output_labels = output_labels or []
@@ -821,10 +853,6 @@ class Task(JSONLike):
821
853
 
822
854
  return names
823
855
 
824
- def _get_nesting_order(self, seq):
825
- """Find the nesting order for a task sequence."""
826
- return self.nesting_order[seq.normalised_path] if len(seq.values) > 1 else -1
827
-
828
856
  @TimeIt.decorator
829
857
  def _prepare_persistent_outputs(self, workflow, local_element_idx_range):
830
858
  # TODO: check that schema is present when adding task? (should this be here?)
@@ -904,6 +932,9 @@ class Task(JSONLike):
904
932
  def _get_task_source_element_iters(
905
933
  self, in_or_out: str, src_task, labelled_path, element_set
906
934
  ) -> List[int]:
935
+ """Get a sorted list of element iteration IDs that provide either inputs or
936
+ outputs from the provided source task."""
937
+
907
938
  if in_or_out == "input":
908
939
  # input parameter might not be provided e.g. if it is only used
909
940
  # to generate an input file, and that input file is passed
@@ -921,7 +952,7 @@ class Task(JSONLike):
921
952
  src_elem_iters = []
922
953
  for es_idx_i in es_idx:
923
954
  es_i = src_task.element_sets[es_idx_i]
924
- src_elem_iters += es_i.elem_iter_IDs
955
+ src_elem_iters += es_i.elem_iter_IDs # should be sorted already
925
956
 
926
957
  if element_set.sourceable_elem_iters is not None:
927
958
  # can only use a subset of element iterations (this is the
@@ -929,8 +960,8 @@ class Task(JSONLike):
929
960
  # element set, in which case we only want to consider newly
930
961
  # added upstream elements when adding elements from this
931
962
  # element set):
932
- src_elem_iters = list(
933
- set(element_set.sourceable_elem_iters) & set(src_elem_iters)
963
+ src_elem_iters = sorted(
964
+ list(set(element_set.sourceable_elem_iters) & set(src_elem_iters))
934
965
  )
935
966
 
936
967
  return src_elem_iters
@@ -1269,10 +1300,14 @@ class Task(JSONLike):
1269
1300
  for es_i in self.element_sets:
1270
1301
  for inp_j in es_i.inputs:
1271
1302
  if inp_j.is_sub_value:
1272
- out.append(("input", inp_j.normalised_inputs_path))
1303
+ val_j = ("input", inp_j.normalised_inputs_path)
1304
+ if val_j not in out:
1305
+ out.append(val_j)
1273
1306
  for seq_j in es_i.sequences:
1274
1307
  if seq_j.is_sub_value:
1275
- out.append(("input", seq_j.normalised_inputs_path))
1308
+ val_j = ("input", seq_j.normalised_inputs_path)
1309
+ if val_j not in out:
1310
+ out.append(val_j)
1276
1311
 
1277
1312
  return tuple(out)
1278
1313
 
@@ -1399,10 +1434,12 @@ class WorkflowTask:
1399
1434
  return self.dir_name
1400
1435
  return self.dir_name + "_" + "_".join((f"{k}-{v}" for k, v in loop_idx.items()))
1401
1436
 
1402
- def get_all_element_iterations(self):
1403
- return [j for i in self.elements[:] for j in i.iterations]
1437
+ def get_all_element_iterations(self) -> Dict[int, app.ElementIteration]:
1438
+ return {j.id_: j for i in self.elements for j in i.iterations}
1404
1439
 
1405
- def _make_new_elements_persistent(self, element_set, element_set_idx):
1440
+ def _make_new_elements_persistent(
1441
+ self, element_set, element_set_idx, padded_elem_iters
1442
+ ):
1406
1443
  """Save parameter data to the persistent workflow."""
1407
1444
 
1408
1445
  # TODO: rewrite. This method is a little hard to follow and results in somewhat
@@ -1496,9 +1533,8 @@ class WorkflowTask:
1496
1533
  if inp_src.element_iters:
1497
1534
  # only include "sourceable" element iterations:
1498
1535
  src_elem_iters = [
1499
- i for i in src_elem_iters if i.id_ in inp_src.element_iters
1536
+ src_elem_iters[i] for i in inp_src.element_iters
1500
1537
  ]
1501
-
1502
1538
  src_elem_set_idx = [
1503
1539
  i.element.element_set_idx for i in src_elem_iters
1504
1540
  ]
@@ -1512,8 +1548,14 @@ class WorkflowTask:
1512
1548
  else:
1513
1549
  src_key = f"{task_source_type}s.{labelled_path_i}"
1514
1550
 
1551
+ padded_iters = padded_elem_iters.get(labelled_path_i, [])
1515
1552
  grp_idx = [
1516
- iter_i.get_data_idx()[src_key] for iter_i in src_elem_iters
1553
+ (
1554
+ iter_i.get_data_idx()[src_key]
1555
+ if iter_i_idx not in padded_iters
1556
+ else -1
1557
+ )
1558
+ for iter_i_idx, iter_i in enumerate(src_elem_iters)
1517
1559
  ]
1518
1560
 
1519
1561
  if inp_group_name:
@@ -1523,7 +1565,6 @@ class WorkflowTask:
1523
1565
  ):
1524
1566
  src_es = src_task.template.element_sets[src_set_idx_i]
1525
1567
  if inp_group_name in [i.name for i in src_es.groups or []]:
1526
- # print(f"IN GROUP; {dat_idx_i}; {src_set_idx_i=}")
1527
1568
  group_dat_idx.append(dat_idx_i)
1528
1569
  else:
1529
1570
  # if for any recursive iteration dependency, this group is
@@ -1570,7 +1611,7 @@ class WorkflowTask:
1570
1611
  input_data_idx[key] += grp_idx
1571
1612
  source_idx[key] += [inp_src_idx] * len(grp_idx)
1572
1613
 
1573
- else:
1614
+ else: # BUG: doesn't work for multiple task inputs sources
1574
1615
  # overwrite existing local source (if it exists):
1575
1616
  input_data_idx[key] = grp_idx
1576
1617
  source_idx[key] = [inp_src_idx] * len(grp_idx)
@@ -1594,7 +1635,7 @@ class WorkflowTask:
1594
1635
 
1595
1636
  return (input_data_idx, sequence_idx, source_idx)
1596
1637
 
1597
- def ensure_input_sources(self, element_set):
1638
+ def ensure_input_sources(self, element_set) -> Dict[str, List[int]]:
1598
1639
  """Check valid input sources are specified for a new task to be added to the
1599
1640
  workflow in a given position. If none are specified, set them according to the
1600
1641
  default behaviour.
@@ -1649,29 +1690,38 @@ class WorkflowTask:
1649
1690
  specified_source, self.unique_name
1650
1691
  )
1651
1692
  avail_idx = specified_source.is_in(avail_i)
1652
- available_source = avail_i[avail_idx]
1653
- if avail_idx is None:
1654
- raise ValueError(
1693
+ try:
1694
+ available_source = avail_i[avail_idx]
1695
+ except TypeError:
1696
+ raise UnavailableInputSource(
1655
1697
  f"The input source {specified_source.to_string()!r} is not "
1656
1698
  f"available for input path {path_i!r}. Available "
1657
1699
  f"input sources are: {[i.to_string() for i in avail_i]}."
1658
- )
1659
- else:
1660
- # overwrite with the source from available_sources, since it may have
1661
- # the `element_iters` attribute assigned, but first check if we need
1662
- # to filter:
1663
- filtered_IDs = None
1664
- if specified_source.where:
1665
- elem_iters = self.workflow.get_element_iterations_from_IDs(
1666
- available_source.element_iters
1700
+ ) from None
1701
+
1702
+ elem_iters_IDs = available_source.element_iters
1703
+ if specified_source.element_iters:
1704
+ # user-specified iter IDs; these must be a subset of available
1705
+ # element_iters:
1706
+ if not set(specified_source.element_iters).issubset(elem_iters_IDs):
1707
+ raise InapplicableInputSourceElementIters(
1708
+ f"The specified `element_iters` for input source "
1709
+ f"{specified_source.to_string()!r} are not all applicable. "
1710
+ f"Applicable element iteration IDs for this input source "
1711
+ f"are: {elem_iters_IDs!r}."
1667
1712
  )
1668
- filtered = specified_source.where.filter(elem_iters)
1669
- filtered_IDs = [i.id_ for i in filtered]
1713
+ elem_iters_IDs = specified_source.element_iters
1670
1714
 
1671
- if filtered_IDs is not None:
1672
- available_source.element_iters = filtered_IDs
1715
+ if specified_source.where:
1716
+ # filter iter IDs by user-specified rules, maintaining order:
1717
+ elem_iters = self.workflow.get_element_iterations_from_IDs(
1718
+ elem_iters_IDs
1719
+ )
1720
+ filtered = specified_source.where.filter(elem_iters)
1721
+ elem_iters_IDs = [i.id_ for i in filtered]
1673
1722
 
1674
- element_set.input_sources[path_i][s_idx] = available_source
1723
+ available_source.element_iters = elem_iters_IDs
1724
+ element_set.input_sources[path_i][s_idx] = available_source
1675
1725
 
1676
1726
  # sorting ensures that root parameters come before sub-parameters, which is
1677
1727
  # necessary when considering if we want to include a sub-parameter, when setting
@@ -1729,23 +1779,98 @@ class WorkflowTask:
1729
1779
  if not has_root_param:
1730
1780
  set_root_params.append(input_type)
1731
1781
 
1782
+ # for task sources that span multiple element sets, pad out sub-parameter
1783
+ # `element_iters` to include the element iterations from other element sets in
1784
+ # which the "root" parameter is defined:
1785
+ sources_by_task = defaultdict(dict)
1786
+ elem_iter_by_task = defaultdict(dict)
1787
+ all_elem_iters = set()
1788
+ for inp_type, sources in element_set.input_sources.items():
1789
+ source = sources[0]
1790
+ if source.source_type is InputSourceType.TASK:
1791
+ sources_by_task[source.task_ref][inp_type] = source
1792
+ all_elem_iters.update(source.element_iters)
1793
+ elem_iter_by_task[source.task_ref][inp_type] = source.element_iters
1794
+
1795
+ all_elem_iter_objs = self.workflow.get_element_iterations_from_IDs(all_elem_iters)
1796
+ all_elem_iters_by_ID = {i.id_: i for i in all_elem_iter_objs}
1797
+
1798
+ # element set indices:
1799
+ padded_elem_iters = defaultdict(list)
1800
+ es_idx_by_task = defaultdict(dict)
1801
+ for task_ref, task_iters in elem_iter_by_task.items():
1802
+ for inp_type, inp_iters in task_iters.items():
1803
+ es_indices = [
1804
+ all_elem_iters_by_ID[i].element.element_set_idx for i in inp_iters
1805
+ ]
1806
+ es_idx_by_task[task_ref][inp_type] = (es_indices, set(es_indices))
1807
+ root_params = {k for k in task_iters if "." not in k}
1808
+ root_param_nesting = {
1809
+ k: element_set.nesting_order.get(f"inputs.{k}", None) for k in root_params
1810
+ }
1811
+ for root_param_i in root_params:
1812
+ sub_params = {
1813
+ k
1814
+ for k in task_iters
1815
+ if k.split(".")[0] == root_param_i and k != root_param_i
1816
+ }
1817
+ rp_elem_sets = es_idx_by_task[task_ref][root_param_i][0]
1818
+ rp_elem_sets_uniq = es_idx_by_task[task_ref][root_param_i][1]
1819
+
1820
+ for sub_param_j in sub_params:
1821
+ sub_param_nesting = element_set.nesting_order.get(
1822
+ f"inputs.{sub_param_j}", None
1823
+ )
1824
+ if sub_param_nesting == root_param_nesting[root_param_i]:
1825
+
1826
+ sp_elem_sets_uniq = es_idx_by_task[task_ref][sub_param_j][1]
1827
+
1828
+ if sp_elem_sets_uniq != rp_elem_sets_uniq:
1829
+
1830
+ # replace elem_iters in sub-param sequence with those from the
1831
+ # root parameter, but re-order the elem iters to match their
1832
+ # original order:
1833
+ iters_copy = elem_iter_by_task[task_ref][root_param_i][:]
1834
+
1835
+ # "mask" iter IDs corresponding to the sub-parameter's element
1836
+ # sets, and keep track of the extra indices so they can be
1837
+ # ignored later:
1838
+ sp_iters_new = []
1839
+ for idx, (i, j) in enumerate(zip(iters_copy, rp_elem_sets)):
1840
+ if j in sp_elem_sets_uniq:
1841
+ sp_iters_new.append(None)
1842
+ else:
1843
+ sp_iters_new.append(i)
1844
+ padded_elem_iters[sub_param_j].append(idx)
1845
+
1846
+ # fill in sub-param elem_iters in their specified order
1847
+ sub_iters_it = iter(elem_iter_by_task[task_ref][sub_param_j])
1848
+ sp_iters_new = [
1849
+ i if i is not None else next(sub_iters_it)
1850
+ for i in sp_iters_new
1851
+ ]
1852
+
1853
+ # update sub-parameter element iters:
1854
+ for src_idx, src in enumerate(
1855
+ element_set.input_sources[sub_param_j]
1856
+ ):
1857
+ if src.source_type is InputSourceType.TASK:
1858
+ element_set.input_sources[sub_param_j][
1859
+ src_idx
1860
+ ].element_iters = sp_iters_new
1861
+ # assumes only a single task-type source for this
1862
+ # parameter
1863
+ break
1864
+
1732
1865
  # TODO: collate all input sources separately, then can fall back to a different
1733
1866
  # input source (if it was not specified manually) and if the "top" input source
1734
1867
  # results in no available elements due to `allow_non_coincident_task_sources`.
1735
1868
 
1736
1869
  if not element_set.allow_non_coincident_task_sources:
1737
- sources_by_task = {}
1738
- for inp_type, sources in element_set.input_sources.items():
1739
- source = sources[0]
1740
- if source.source_type is InputSourceType.TASK:
1741
- if source.task_ref not in sources_by_task:
1742
- sources_by_task[source.task_ref] = {}
1743
- sources_by_task[source.task_ref][inp_type] = source
1744
-
1745
1870
  # if multiple parameters are sourced from the same upstream task, only use
1746
1871
  # element iterations for which all parameters are available (the set
1747
1872
  # intersection):
1748
- for sources in sources_by_task.values():
1873
+ for task_ref, sources in sources_by_task.items():
1749
1874
  # if a parameter has multiple labels, disregard from this by removing all
1750
1875
  # parameters:
1751
1876
  seen_labelled = {}
@@ -1773,12 +1898,24 @@ class WorkflowTask:
1773
1898
  intersect_task_i = set(first_src.element_iters)
1774
1899
  for src_i in sources.values():
1775
1900
  intersect_task_i.intersection_update(src_i.element_iters)
1901
+ if not intersect_task_i:
1902
+ raise NoCoincidentInputSources(
1903
+ f"Task {self.name!r}: input sources from task {task_ref!r} have "
1904
+ f"no coincident applicable element iterations. Consider setting "
1905
+ f"the element set (or task) argument "
1906
+ f"`allow_non_coincident_task_sources` to `True`, which will "
1907
+ f"allow for input sources from the same task to use different "
1908
+ f"(non-coinciding) subsets of element iterations from the "
1909
+ f"source task."
1910
+ )
1776
1911
 
1777
- # now change elements for the affected input sources:
1912
+ # now change elements for the affected input sources.
1913
+ # sort by original order of first_src.element_iters
1914
+ int_task_i_lst = [
1915
+ i for i in first_src.element_iters if i in intersect_task_i
1916
+ ]
1778
1917
  for inp_type in sources.keys():
1779
- element_set.input_sources[inp_type][0].element_iters = list(
1780
- intersect_task_i
1781
- )
1918
+ element_set.input_sources[inp_type][0].element_iters = int_task_i_lst
1782
1919
 
1783
1920
  if missing:
1784
1921
  missing_str = ", ".join(f"{i!r}" for i in missing)
@@ -1787,6 +1924,8 @@ class WorkflowTask:
1787
1924
  missing_inputs=missing,
1788
1925
  )
1789
1926
 
1927
+ return padded_elem_iters
1928
+
1790
1929
  def generate_new_elements(
1791
1930
  self,
1792
1931
  input_data_indices,
@@ -1799,7 +1938,11 @@ class WorkflowTask:
1799
1938
  element_sequence_indices = {}
1800
1939
  element_src_indices = {}
1801
1940
  for i_idx, i in enumerate(element_data_indices):
1802
- elem_i = {k: input_data_indices[k][v] for k, v in i.items()}
1941
+ elem_i = {
1942
+ k: input_data_indices[k][v]
1943
+ for k, v in i.items()
1944
+ if input_data_indices[k][v] != -1
1945
+ }
1803
1946
  elem_i.update({k: v[i_idx] for k, v in output_data_indices.items()})
1804
1947
  new_elements.append(elem_i)
1805
1948
 
@@ -1815,7 +1958,11 @@ class WorkflowTask:
1815
1958
  if k in source_indices:
1816
1959
  if k not in element_src_indices:
1817
1960
  element_src_indices[k] = []
1818
- element_src_indices[k].append(source_indices[k][v])
1961
+ if input_data_indices[k][v] != -1:
1962
+ src_idx_k = source_indices[k][v]
1963
+ else:
1964
+ src_idx_k = -1
1965
+ element_src_indices[k].append(src_idx_k)
1819
1966
 
1820
1967
  return new_elements, element_sequence_indices, element_src_indices
1821
1968
 
@@ -2003,13 +2150,14 @@ class WorkflowTask:
2003
2150
 
2004
2151
  self.template.set_sequence_parameters(element_set)
2005
2152
 
2006
- self.ensure_input_sources(element_set) # may modify element_set.input_sources
2153
+ # may modify element_set.input_sources:
2154
+ padded_elem_iters = self.ensure_input_sources(element_set)
2007
2155
 
2008
2156
  (input_data_idx, seq_idx, src_idx) = self._make_new_elements_persistent(
2009
2157
  element_set=element_set,
2010
2158
  element_set_idx=self.num_element_sets,
2159
+ padded_elem_iters=padded_elem_iters,
2011
2160
  )
2012
-
2013
2161
  element_set.task_template = self.template # may modify element_set.nesting_order
2014
2162
 
2015
2163
  multiplicities = self.template.prepare_element_resolution(
@@ -2047,7 +2195,7 @@ class WorkflowTask:
2047
2195
  task_ID=self.insert_ID,
2048
2196
  es_idx=self.num_element_sets - 1,
2049
2197
  seq_idx={k: v[elem_idx] for k, v in element_seq_idx.items()},
2050
- src_idx={k: v[elem_idx] for k, v in element_src_idx.items()},
2198
+ src_idx={k: v[elem_idx] for k, v in element_src_idx.items() if v != -1},
2051
2199
  )
2052
2200
  iter_ID_i = self.workflow._store.add_element_iteration(
2053
2201
  element_ID=elem_ID_i,
@@ -1,5 +1,6 @@
1
1
  import pytest
2
2
  from hpcflow.app import app as hf
3
+ from hpcflow.sdk.core.errors import MalformedNestingOrderPath
3
4
 
4
5
 
5
6
  @pytest.fixture
@@ -101,3 +102,12 @@ def test_merge_envs_no_envs_with_resource_envs(null_config):
101
102
  def test_raise_env_and_envs_specified(null_config):
102
103
  with pytest.raises(ValueError):
103
104
  hf.ElementSet(env_preset="my_preset", environments={"my_env": {"version": 1}})
105
+
106
+
107
+ def test_nesting_order_paths_raise(null_config):
108
+ with pytest.raises(MalformedNestingOrderPath):
109
+ hf.ElementSet(nesting_order={"bad_path.p1": 1})
110
+
111
+
112
+ def test_nesting_order_paths_no_raise(null_config):
113
+ hf.ElementSet(nesting_order={"inputs.p1": 1, "resources.any": 2, "repeats": 3})
@@ -1,10 +1,16 @@
1
1
  import numpy as np
2
2
  import pytest
3
3
  from hpcflow.app import app as hf
4
- from hpcflow.sdk.core.errors import MissingInputs
4
+ from hpcflow.sdk.core.errors import (
5
+ InapplicableInputSourceElementIters,
6
+ MissingInputs,
7
+ NoCoincidentInputSources,
8
+ UnavailableInputSource,
9
+ )
5
10
  from hpcflow.sdk.core.test_utils import (
6
11
  P1_parameter_cls as P1,
7
12
  P1_sub_parameter_cls as P1_sub,
13
+ make_schemas,
8
14
  )
9
15
 
10
16
 
@@ -660,3 +666,597 @@ def test_sub_parameter_task_input_source_allowed_when_root_parameter_is_task_out
660
666
  ],
661
667
  "p2": [hf.InputSource.local()],
662
668
  }
669
+
670
+
671
+ def test_raise_unavailable_input_source(null_config, tmp_path):
672
+ t1 = hf.Task(schema=hf.task_schemas.test_t1_ps, inputs={"p1": 1})
673
+ t2 = hf.Task(
674
+ schema=hf.task_schemas.test_t1_ps,
675
+ input_sources={"p1": [hf.InputSource.local()]},
676
+ )
677
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
678
+ with pytest.raises(UnavailableInputSource):
679
+ hf.Workflow.from_template(wkt, path=tmp_path)
680
+
681
+
682
+ def test_input_source_specify_element_iters(null_config, tmp_path):
683
+ t1 = hf.Task(
684
+ schema=hf.task_schemas.test_t1_ps,
685
+ sequences=[
686
+ hf.ValueSequence(
687
+ path="inputs.p1",
688
+ values=[{"a": 1}, {"a": 2}, {"a": 3}],
689
+ ),
690
+ ],
691
+ )
692
+ t2 = hf.Task(
693
+ schema=hf.task_schemas.test_t1_ps,
694
+ input_sources={
695
+ "p1": [
696
+ hf.InputSource.task(
697
+ task_ref=0, task_source_type="input", element_iters=[0, 2]
698
+ )
699
+ ]
700
+ },
701
+ )
702
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
703
+ wk = hf.Workflow.from_template(wkt, path=tmp_path)
704
+ assert len(wk.tasks[1].elements) == 2
705
+ assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [1, 3]
706
+
707
+
708
+ def test_input_source_raise_on_inapplicable_specified_element_iters(
709
+ null_config, tmp_path
710
+ ):
711
+ t1 = hf.Task(
712
+ schema=hf.task_schemas.test_t1_ps,
713
+ sequences=[
714
+ hf.ValueSequence(
715
+ path="inputs.p1",
716
+ values=[{"a": 1}, {"a": 2}, {"a": 3}],
717
+ ),
718
+ ],
719
+ )
720
+ t2 = hf.Task(
721
+ schema=hf.task_schemas.test_t1_ps,
722
+ input_sources={
723
+ "p1": [
724
+ hf.InputSource.task(
725
+ task_ref=0, task_source_type="input", element_iters=[0, 4]
726
+ )
727
+ ]
728
+ },
729
+ )
730
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
731
+ with pytest.raises(InapplicableInputSourceElementIters):
732
+ hf.Workflow.from_template(wkt, path=tmp_path)
733
+
734
+
735
+ def test_input_source_specify_element_iters_and_where(null_config, tmp_path):
736
+ """Test the where argument further filters the element_iters argument."""
737
+ t1 = hf.Task(
738
+ schema=hf.task_schemas.test_t1_ps,
739
+ sequences=[
740
+ hf.ValueSequence(
741
+ path="inputs.p1",
742
+ values=[{"a": 1}, {"a": 2}, {"a": 3}],
743
+ ),
744
+ ],
745
+ )
746
+ t2 = hf.Task(
747
+ schema=hf.task_schemas.test_t1_ps,
748
+ input_sources={
749
+ "p1": [
750
+ hf.InputSource.task(
751
+ task_ref=0,
752
+ task_source_type="input",
753
+ element_iters=[0, 2],
754
+ where=hf.Rule(path="inputs.p1.a", condition={"value.equal_to": 3}),
755
+ )
756
+ ]
757
+ },
758
+ )
759
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
760
+ wk = hf.Workflow.from_template(wkt, path=tmp_path)
761
+ assert len(wk.tasks[1].elements) == 1
762
+ assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [3]
763
+
764
+
765
+ def test_element_iters_order_with_allow_non_coincident_task_sources_False(
766
+ null_config, tmp_path
767
+ ):
768
+ t1 = hf.Task(
769
+ schema=hf.task_schemas.test_t1_ps,
770
+ sequences=[
771
+ hf.ValueSequence(
772
+ path="inputs.p1",
773
+ values=[11, 12, 13],
774
+ ),
775
+ ],
776
+ )
777
+ t2 = hf.Task(
778
+ schema=hf.task_schemas.test_t1_ps,
779
+ input_sources={
780
+ "p1": [
781
+ hf.InputSource.task(
782
+ task_ref=0, task_source_type="input", element_iters=[2, 0, 1]
783
+ )
784
+ ],
785
+ },
786
+ allow_non_coincident_task_sources=False,
787
+ )
788
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
789
+ wk = hf.Workflow.from_template(wkt, path=tmp_path)
790
+
791
+ assert len(wk.tasks[1].elements) == 3
792
+ assert [i.value for i in wk.tasks[1].inputs.p1] == [13, 11, 12]
793
+
794
+
795
+ def test_element_iters_order_with_allow_non_coincident_task_sources_True(
796
+ null_config, tmp_path
797
+ ):
798
+ t1 = hf.Task(
799
+ schema=hf.task_schemas.test_t1_ps,
800
+ sequences=[
801
+ hf.ValueSequence(
802
+ path="inputs.p1",
803
+ values=[11, 12, 13],
804
+ ),
805
+ ],
806
+ )
807
+ t2 = hf.Task(
808
+ schema=hf.task_schemas.test_t1_ps,
809
+ input_sources={
810
+ "p1": [
811
+ hf.InputSource.task(
812
+ task_ref=0, task_source_type="input", element_iters=[2, 0, 1]
813
+ )
814
+ ],
815
+ },
816
+ allow_non_coincident_task_sources=True,
817
+ )
818
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
819
+ wk = hf.Workflow.from_template(wkt, path=tmp_path)
820
+
821
+ assert len(wk.tasks[1].elements) == 3
822
+ assert [i.value for i in wk.tasks[1].inputs.p1] == [13, 11, 12]
823
+
824
+
825
+ def test_element_iters_order_with_allow_non_coincident_task_sources_True_multiple_sources(
826
+ null_config, tmp_path
827
+ ):
828
+ """Test no-reordering of specified element iterations of sources from the same task."""
829
+ s1 = make_schemas([[{"p1": None, "p2": None}, ("p3",), "t1"]])
830
+
831
+ t1 = hf.Task(
832
+ schema=s1,
833
+ sequences=[
834
+ hf.ValueSequence(
835
+ path="inputs.p1",
836
+ values=[11, 12, 13],
837
+ ),
838
+ hf.ValueSequence(
839
+ path="inputs.p2",
840
+ values=[21, 22, 23],
841
+ ),
842
+ ],
843
+ )
844
+ t2 = hf.Task(
845
+ schema=s1,
846
+ input_sources={
847
+ "p1": [
848
+ hf.InputSource.task(
849
+ task_ref=0, task_source_type="input", element_iters=[0, 1]
850
+ )
851
+ ],
852
+ "p2": [
853
+ hf.InputSource.task(
854
+ task_ref=0, task_source_type="input", element_iters=[1, 0]
855
+ )
856
+ ],
857
+ },
858
+ allow_non_coincident_task_sources=True,
859
+ )
860
+ wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
861
+ wk = hf.Workflow.from_template(wkt, path=tmp_path)
862
+
863
+ assert len(wk.tasks[1].elements) == 2
864
+ assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
865
+ assert [i.value for i in wk.tasks[1].inputs.p2] == [22, 21]
866
+
867
+
868
def test_element_iters_order_with_allow_non_coincident_task_sources_False_multiple_sources(
    null_config, tmp_path
):
    """Test reordering of specified element iterations of sources from the same task."""
    s1 = make_schemas([[{"p1": None, "p2": None}, ("p3",), "t1"]])

    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
            hf.ValueSequence(
                path="inputs.p2",
                values=[21, 22, 23],
            ),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            # p2 requests the same two iterations but in reversed order:
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 0]
                )
            ],
        },
        # coincident sources required, so the requested [1, 0] ordering for p2
        # is re-sorted to agree with p1's [0, 1] ordering:
        allow_non_coincident_task_sources=False,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 2
    assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
    # p2 values follow the re-sorted [0, 1] iteration order:
    assert [i.value for i in wk.tasks[1].inputs.p2] == [21, 22]
911
def test_not_allow_non_coincident_task_sources(null_config, tmp_path):
    """Test only one coincident element from the two input sources"""
    s1 = make_schemas([[{"p1": None, "p2": None}, ("p3",), "t1"]])
    t1 = hf.Task(
        schema=s1,
        inputs={"p1": 1},
        sequences=[
            hf.ValueSequence(path="inputs.p2", values=[21, 22, 23]),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 2]
                )
            ],
        },
        # only iteration 1 appears in both sources' element_iters, so with
        # coincident sources required, only that element feeds the new task:
        allow_non_coincident_task_sources=False,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 1
    # iteration 1 of t1 carries p2 == 22:
    assert [i.value for i in wk.tasks[1].inputs.p2] == [22]
944
def test_allow_non_coincident_task_sources(null_config, tmp_path):
    """Test can combine inputs from non-coincident element iterations of the same task."""
    s1 = make_schemas([[{"p1": None, "p2": None}, ("p3",), "t1"]])
    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
            hf.ValueSequence(
                path="inputs.p2",
                values=[21, 22, 23],
            ),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            # a different (shifted) pair of iterations for p2:
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 2]
                )
            ],
        },
        # the two overlapping-but-different iteration sets are combined
        # pairwise rather than being reduced to their intersection:
        allow_non_coincident_task_sources=True,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 2
    # p1 from iterations [0, 1]; p2 from iterations [1, 2]:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
    assert [i.value for i in wk.tasks[1].inputs.p2] == [22, 23]
984
def test_input_source_task_input_from_multiple_element_sets_with_param_sequence(
    null_config, tmp_path
):
    """Inputs sourced from a task whose elements span two element sets — the second
    using a sequence over the whole parameter — all feed the downstream task."""
    schema = hf.task_schemas.test_t1_ps
    whole_param_seq = hf.ValueSequence(
        path="inputs.p1",
        values=[{"a": 2}, {"a": 3}],
    )
    upstream = hf.Task(
        schema=schema,
        element_sets=[
            hf.ElementSet(inputs={"p1": {"a": 1}}),
            hf.ElementSet(sequences=[whole_param_seq]),
        ],
    )
    downstream = hf.Task(schema=schema)
    template = hf.WorkflowTemplate(name="test", tasks=[upstream, downstream])
    workflow = hf.Workflow.from_template(template, path=tmp_path)
    # one element from the first set plus two from the sequence:
    assert len(workflow.tasks[1].elements) == 3
    assert [elem.value["a"] for elem in workflow.tasks[1].inputs.p1] == [1, 2, 3]
1008
def test_raise_no_coincident_input_sources(null_config, tmp_path):
    """When coincident task sources are required, disjoint `element_iters` for the
    two input sources leave no common elements, raising `NoCoincidentInputSources`."""
    s1 = make_schemas([[{"p1": None, "p2": None}, ("p3",), "t1"]])
    t1 = hf.Task(
        schema=s1,
        inputs={"p1": 100},
        sequences=[
            hf.ValueSequence.from_range(path="inputs.p2", start=0, stop=4),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        allow_non_coincident_task_sources=False,
        input_sources={
            # iterations [0, 1] and [2, 3] are disjoint — no shared elements:
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 3]
                )
            ],
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    with pytest.raises(NoCoincidentInputSources):
        hf.Workflow.from_template(wkt, path=tmp_path)
1038
def test_input_source_task_input_from_multiple_element_sets_with_sub_param_sequence(
    null_config, tmp_path
):
    """Sub-parameter sequence values (`p1.a`) from an upstream element set are
    merged into the root parameter values sourced by the downstream task."""
    schema = hf.task_schemas.test_t1_ps
    sub_param_seq = hf.ValueSequence(
        path="inputs.p1.a",
        values=[2, 3],
    )
    upstream = hf.Task(
        schema=schema,
        element_sets=[
            hf.ElementSet(inputs={"p1": {"a": 1}}),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[sub_param_seq],
            ),
        ],
    )
    downstream = hf.Task(schema=schema)
    template = hf.WorkflowTemplate(name="test", tasks=[upstream, downstream])
    workflow = hf.Workflow.from_template(template, path=tmp_path)
    # one element from the first set plus two from the sub-parameter sequence:
    assert len(workflow.tasks[1].elements) == 3
    assert [elem.value["a"] for elem in workflow.tasks[1].inputs.p1] == [1, 2, 3]
1063
def test_input_source_task_input_from_multiple_element_sets_with_sub_param_sequence_manual_sources_root_param(
    null_config, tmp_path
):
    """An explicit root-parameter (`p1`) input source restricted to a subset of
    element iterations limits which upstream elements feed the downstream task."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            hf.ElementSet(inputs={"p1": {"a": 1}}),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2, 3],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            # only the first two of t1's three element iterations:
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ]
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)
    assert len(wk.tasks[1].elements) == 2
    # iteration 0 has p1.a == 1; iteration 1 takes the first sequence value, 2:
    assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [1, 2]
1097
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_complex(
    null_config, tmp_path
):
    """Sub-parameter sequences over different sub-paths (`p1.a`, `p1.b`, `p1.c`)
    across three upstream element sets are merged per-element into the root `p1`
    values sourced by the downstream task."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            # set 0: one element; sequence overwrites p1.a:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2],
                    ),
                ],
            ),
            # set 1: two elements; sequence adds p1.c:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.c",
                        values=[2, 3],
                    ),
                ],
            ),
            # set 2: two elements; sequences add p1.b and overwrite p1.a:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22, 33],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(schema=hf.task_schemas.test_t1_ps)
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 5
    # each downstream element's p1 reflects its upstream element's merged sub-values:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 2},
        {"a": 1, "c": 2},
        {"a": 1, "c": 3},
        {"a": 4, "b": 22},
        {"a": 5, "b": 33},
    ]
1150
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_complex_reordered_iters(
    null_config, tmp_path
):
    """Sub-parameter sequences across three upstream element sets, with a custom
    (reversed) element-iteration order for the `p1.c` sub-parameter source; the
    custom order is maintained because non-coincident task sources are allowed."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            # set 0: one element; sequence overwrites p1.a:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2],
                    ),
                ],
            ),
            # set 1: two elements; sequence adds p1.c:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.c",
                        values=[2, 3],
                    ),
                ],
            ),
            # set 2: two elements; sequences add p1.b and overwrite p1.a:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22, 33],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            # reordered p1.c elem iters:
            "p1.c": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 1]
                )
            ]
        },
        allow_non_coincident_task_sources=True,  # to maintain custom ordering
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 5
    # same as the non-reordered case except the "c" values are swapped:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 2},
        {"a": 1, "c": 3},
        {"a": 1, "c": 2},
        {"a": 4, "b": 22},
        {"a": 5, "b": 33},
    ]
1214
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_mixed_padding(
    null_config, tmp_path
):
    """A sub-parameter (`p1.b`) given a higher nesting order than its root parameter
    is nested against all other elements rather than padded to the root parameter's
    multiplicity."""

    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            # set 0: one element, no p1.b value:
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
            ),
            # set 1: sequences over p1.a (two values) and p1.b (one value):
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                nesting_order={"inputs.p1.a": 0, "inputs.p1.b": 1},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        # `p1.b` has a different nesting order to the root param `p1`, so it will not be
        # "padded" to have the same multiplicity as `p1`/`p1.a`. With a higher nesting
        # order, it will be "applied" to all other elements, meaning we'll gain a value
        # for `p1.b` for all elements (including from the first element set, which didn't
        # have a value for `p1.b`):
        nesting_order={
            "inputs.p1": 0,
            "inputs.p1.a": 0,
            "inputs.p1.b": 1,
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 4
    # every element now carries the single p1.b value (22):
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 1, "b": 22},
        {"a": 1, "b": 22},
        {"a": 5, "b": 22},
        {"a": 5, "b": 22},
    ]
@@ -721,7 +721,7 @@ def test_expected_name_two_schemas_both_with_method_and_implementation():
721
721
def test_raise_on_negative_nesting_order():
    """A negative nesting order is invalid and must raise `TaskTemplateInvalidNesting`
    at `Task` construction time."""
    s1 = make_schemas([[{"p1": None}, ()]])
    with pytest.raises(TaskTemplateInvalidNesting):
        # nesting_order keys are full input paths ("inputs.p1"), not bare names:
        hf.Task(schema=s1, nesting_order={"inputs.p1": -1})
725
725
 
726
726
 
727
727
  # TODO: test resolution of elements and with raise MissingInputs
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hpcflow-new2
3
- Version: 0.2.0a164
3
+ Version: 0.2.0a167
4
4
  Summary: Computational workflow management
5
5
  License: MIT
6
6
  Author: aplowman
@@ -1,7 +1,7 @@
1
1
  hpcflow/__init__.py,sha256=WIETuRHeOp2SqUqHUzpjQ-lk9acbYv-6aWOhZPRdlhs,64
2
2
  hpcflow/__pyinstaller/__init__.py,sha256=YOzBlPSck6slucv6lJM9K80JtsJWxXRL00cv6tRj3oc,98
3
3
  hpcflow/__pyinstaller/hook-hpcflow.py,sha256=SeMopsPkhCyd9gqIrzwFNRj3ZlkUlUYl-74QYz61mo4,1089
4
- hpcflow/_version.py,sha256=R9jeM3QgojiIsBflXpjJhqXm0gfjNECHSLEUIKEcqB4,26
4
+ hpcflow/_version.py,sha256=h4k1gSq8cV1CPxyYRPexhoJG7Mrqixae5DzNY2gBfoY,26
5
5
  hpcflow/app.py,sha256=d-kgfnZNlqlCi2H8bK26714brD_u3ibN3FaEZgjF9aA,1332
6
6
  hpcflow/cli.py,sha256=G2J3D9v6MnMWOWMMWK6UEKLn_6wnV9lT_qygEBBxg-I,66
7
7
  hpcflow/data/demo_data_manifest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -50,15 +50,15 @@ hpcflow/sdk/core/command_files.py,sha256=GEFlgZv7g9lkFoNgwyDtmlI_90e2TWliCJuJimn
50
50
  hpcflow/sdk/core/commands.py,sha256=5SKxSBuYz8sSvfpp9p5utBwMoQV6Pd2KlGBCpXAHDxE,12741
51
51
  hpcflow/sdk/core/element.py,sha256=hTAR2kxfGSRf4vFgWwrnyuP5z5RnKYOd2X6c6Xd70zo,47048
52
52
  hpcflow/sdk/core/environment.py,sha256=DGUz1NvliKh6opP0IueGHD69rn_8wFLhDsq6kAmEgM4,4849
53
- hpcflow/sdk/core/errors.py,sha256=yZnO9xuXFSfjI5lg6cepB38-VIytGdkzjW6mNEx3sh4,8962
53
+ hpcflow/sdk/core/errors.py,sha256=jU-vA9e1CW7nFp8MUWH1XKswPYWoF35fVFhIm8N-5GE,9192
54
54
  hpcflow/sdk/core/json_like.py,sha256=LRZsUd1tn8zXC8fESeiXs7Eko-VdnB8zcXiqixKVcZM,18874
55
55
  hpcflow/sdk/core/loop.py,sha256=5Ai_HHCzM21_IOvtgobE8yi_b9dc9jWQijt2LyI3PlM,21865
56
56
  hpcflow/sdk/core/object_list.py,sha256=HASx7AMniX82bTlROIgIvrjE_DupmwDgxfkfROmI3GA,20168
57
57
  hpcflow/sdk/core/parallel.py,sha256=LI-g-qOuOR1oaEUWVT0qW0hmiP9hsJyUP8_IfSTKYYo,95
58
- hpcflow/sdk/core/parameters.py,sha256=ooJLzXjSFvKQANfDbGCTbs0IrgMyV-BiJ26d44Mswlk,64820
58
+ hpcflow/sdk/core/parameters.py,sha256=0h1M-fXqOVgruyM0Au7Fo38cUbHgDNEPd1Alb1FULxE,65588
59
59
  hpcflow/sdk/core/rule.py,sha256=3jVsSZCBv4Odxy8QbSbKo9ZcRuU-5DRJoNK8adXCEpI,4567
60
60
  hpcflow/sdk/core/run_dir_files.py,sha256=_k-hA7dlry9GZw5ZXcntFcPGxg07p03hnHSM5S-2G2Y,2197
61
- hpcflow/sdk/core/task.py,sha256=_tOVRa533YED0d0P-4cynEjfwgvfrKwYC1fQUdqr2NU,113611
61
+ hpcflow/sdk/core/task.py,sha256=rfzjTHCldnTIlLQ1yWnMC-cJjvyVehFt-WCVL3_H2kc,121281
62
62
  hpcflow/sdk/core/task_schema.py,sha256=TipXzC2guu9zilv0En-rHt6lUCTSIj5faI4lVWQdUbA,32346
63
63
  hpcflow/sdk/core/test_utils.py,sha256=UKitv3qvZpSz8vE_HRaJjPDnaOeojy6WVntSskPs03Q,9354
64
64
  hpcflow/sdk/core/utils.py,sha256=pReOwnmuxJqexPUdaA8UMjJ4o8ucllBVVssWjb_LNQc,25651
@@ -124,8 +124,8 @@ hpcflow/tests/unit/test_config.py,sha256=yW_tCjCaReCud7Lv4-CLt8mZ7XoaGLGxjozJQoZ
124
124
  hpcflow/tests/unit/test_config_file.py,sha256=eB1wJimxk7v4vYtwQ1kwPcExdjcN1q-rdsVOeYHN3dQ,4375
125
125
  hpcflow/tests/unit/test_element.py,sha256=JxqLigktPbdoZz6zV8iXiqPG7-vrdexUpzK5xdbWpzg,20526
126
126
  hpcflow/tests/unit/test_element_iteration.py,sha256=K0oxoDSSKy2JAcAhhE_l63M3u1cus1SsVc5LR2jLe0k,1292
127
- hpcflow/tests/unit/test_element_set.py,sha256=oAEU_owzp7xqpdR8K0vNdIeDiA6dOLiMM88WIKPtySI,3351
128
- hpcflow/tests/unit/test_input_source.py,sha256=QKAqIpMk-idbSvoWMhjFLroXzBtsWFL4yEG3Frkvq70,20467
127
+ hpcflow/tests/unit/test_element_set.py,sha256=oy0KTIwUKm5NNnBYVNNjLcYfipb3sZWDldjE1klSGUU,3709
128
+ hpcflow/tests/unit/test_input_source.py,sha256=pYm1V4kBsBIFstKDcrbwUiDLb82rr7ITcUEFJES2dI0,39194
129
129
  hpcflow/tests/unit/test_input_value.py,sha256=hHoe9rlCbfFvqqkAu4ylu95YVaAhinthXqTCUNEnSm8,5454
130
130
  hpcflow/tests/unit/test_json_like.py,sha256=aGCiGfT-tNiFu3yzW6d_T-oDc5QLwSUgq3pN3jFhyF0,29939
131
131
  hpcflow/tests/unit/test_loop.py,sha256=vBnCnoweiFIi2T68bu6PCQ4yH829pxK6Oe4Comg0bHo,21060
@@ -139,7 +139,7 @@ hpcflow/tests/unit/test_schema_input.py,sha256=738tJjEzBcQ8Z_YTcybVBW7Twy_hXmbPW
139
139
  hpcflow/tests/unit/test_shell.py,sha256=FDtQ9fHRhSKiVtxMJ8BRisoeSvvk8zmJndTB4LlhqGc,3442
140
140
  hpcflow/tests/unit/test_slurm.py,sha256=ewfNuXXUEEelAxcd7MBbAQ-RCvU8xBenHTAyfXYF-R0,1064
141
141
  hpcflow/tests/unit/test_submission.py,sha256=kQ3ksjGlfp47AYuwTA27RDX2XxRU3YxKlKC1ACTbXw8,16682
142
- hpcflow/tests/unit/test_task.py,sha256=KveejJ6U9fFzMf4Z8mfcOatfyXKQG4fmQ5FCpDqlLco,70195
142
+ hpcflow/tests/unit/test_task.py,sha256=rSMD5eLJxss81HgAxGU9yKTFLZHE2LYdG-XFaN5B5N4,70202
143
143
  hpcflow/tests/unit/test_task_schema.py,sha256=j5HHxoqq4Mb223jKcusgX-C6-TsmKG0PLjYQ4M01ZHo,4531
144
144
  hpcflow/tests/unit/test_utils.py,sha256=JMhSRZFqmI9ZhREJet9en_y3aRVlQlWE7OKpkdt8SVI,14172
145
145
  hpcflow/tests/unit/test_value_sequence.py,sha256=Xv121bLTnrLpdhmwoJ_yJvXoJ3L-iOWEQBTl9V_aMio,15463
@@ -148,7 +148,7 @@ hpcflow/tests/unit/test_workflow_template.py,sha256=fF7LNveMwCledgncNCRfD9Nd9dL9
148
148
  hpcflow/tests/workflows/test_jobscript.py,sha256=9sp1o0g72JZbv2QlOl5v7wCZEFjotxiIKGNUxVaFgaA,724
149
149
  hpcflow/tests/workflows/test_workflows.py,sha256=xai6FRtGqG4lStJk6KmsqPUSuvqs9FrsBOxMVALshIs,13400
150
150
  hpcflow/viz_demo.ipynb,sha256=1QdnVsk72vihv2L6hOGyk318uEa22ZSgGxQCa7hW2oo,6238
151
- hpcflow_new2-0.2.0a164.dist-info/METADATA,sha256=xONNW9D3OecXiNTVC-RYksxn5h6boxU6PZzFQ43TI0E,2473
152
- hpcflow_new2-0.2.0a164.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
153
- hpcflow_new2-0.2.0a164.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
154
- hpcflow_new2-0.2.0a164.dist-info/RECORD,,
151
+ hpcflow_new2-0.2.0a167.dist-info/METADATA,sha256=maOtUjIfwEsLP3sRORoy7XV6qHEv3Cx1VS8fwywqkeM,2473
152
+ hpcflow_new2-0.2.0a167.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
153
+ hpcflow_new2-0.2.0a167.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
154
+ hpcflow_new2-0.2.0a167.dist-info/RECORD,,