hpcflow-new2 0.2.0a169__py3-none-any.whl → 0.2.0a173__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hpcflow/_version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.2.0a169"
+ __version__ = "0.2.0a173"
hpcflow/sdk/app.py CHANGED
@@ -19,6 +19,7 @@ from typing import Any, Callable, Dict, List, Optional, Type, Union, Tuple
  import warnings
  import zipfile
  from platformdirs import user_cache_path, user_data_dir
+ import requests
  from reretry import retry
  import rich
  from rich.console import Console, Group
@@ -63,6 +64,46 @@ SDK_logger = get_SDK_logger(__name__)
  DEMO_WK_FORMATS = {".yaml": "yaml", ".yml": "yaml", ".json": "json", ".jsonc": "json"}


+ def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
+     """Call fsspec's `url_to_fs` but retry on `requests.exceptions.HTTPError`s.
+ 
+     References
+     ----------
+     [1]: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?
+          apiVersion=2022-11-28#about-secondary-rate-limits
+     """
+ 
+     auth = {}
+     if app.run_time_info.in_pytest:
+         gh_token = os.environ.get("GH_TOKEN")
+         if gh_token:
+             # using the GitHub Actions built-in token increases the number of API
+             # requests allowed per hour to 1000 [1]; fsspec requires "username" to
+             # be set if using "token":
+             auth = {"username": "", "token": gh_token}
+             logger.info(
+                 "calling fsspec's `url_to_fs` with a token from the env variable "
+                 "`GH_TOKEN`."
+             )
+ 
+     # GitHub Actions testing is potentially highly concurrent, with multiple
+     # Python versions and OSes being tested at the same time, so we might hit
+     # GitHub's secondary rate limit:
+     @retry(
+         requests.exceptions.HTTPError,
+         tries=3,
+         delay=5,
+         backoff=1.5,
+         jitter=(0, 20),
+         logger=logger,
+     )
+     def _inner(*args, **kwargs):
+         kwargs.update(auth)
+         return url_to_fs(*args, **kwargs)
+ 
+     return _inner(*args, **kwargs)
+ 
+ 
  def __getattr__(name):
      """Allow access to core classes and API functions (useful for type annotations)."""
      try:
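The new helper wraps fsspec's `url_to_fs` in `reretry`'s `retry` decorator so that transient GitHub rate-limit failures are retried with backoff and jitter. For context, a minimal standalone sketch of that decorator pattern, using the same settings as the diff above (the `fetch` function here is hypothetical, not part of the package):

import logging

import requests
from reretry import retry

logger = logging.getLogger(__name__)


@retry(
    requests.exceptions.HTTPError,  # retry only on HTTP errors (e.g. 403 rate-limit responses)
    tries=3,  # give up after three attempts
    delay=5,  # wait 5 s before the first retry
    backoff=1.5,  # multiply the delay by 1.5 after each failed attempt
    jitter=(0, 20),  # add a random 0-20 s, de-synchronising concurrent CI jobs
    logger=logger,  # log each failed attempt before retrying
)
def fetch(url: str) -> bytes:
    """Fetch a URL, raising `HTTPError` on 4xx/5xx so the decorator retries."""
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    return response.content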
@@ -2278,7 +2319,11 @@ class BaseApp(metaclass=Singleton):
                  f"`demo_data_manifest_file`: "
                  f"{self.config.demo_data_manifest_file!r}."
              )
-             fs, url_path = url_to_fs(str(self.config.demo_data_manifest_file))
+             fs, url_path = rate_limit_safe_url_to_fs(
+                 self,
+                 str(self.config.demo_data_manifest_file),
+                 logger=self.logger,
+             )
              with fs.open(url_path) as fh:
                  manifest = json.load(fh)
          else:
@@ -2324,7 +2369,11 @@ class BaseApp(metaclass=Singleton):
          """

          def _retrieve_source_path_from_config(src_fn):
-             fs, url_path = url_to_fs(self.config.demo_data_dir)
+             fs, url_path = rate_limit_safe_url_to_fs(
+                 self,
+                 self.config.demo_data_dir,
+                 logger=self.logger,
+             )
              if isinstance(fs, LocalFileSystem):
                  out = url_path
                  delete = False
hpcflow/sdk/core/task.py CHANGED
@@ -941,6 +941,14 @@ class Task(JSONLike):
                  # directly, so consider only source task element sets that
                  # provide the input locally:
                  es_idx = src_task.get_param_provided_element_sets(labelled_path)
+                 for es_i in src_task.element_sets:
+                     # add any element set that has task sources for this parameter
+                     for inp_src_i in es_i.input_sources.get(labelled_path, []):
+                         if inp_src_i.source_type is InputSourceType.TASK:
+                             if es_i.index not in es_idx:
+                                 es_idx.append(es_i.index)
+                             break
+ 
              else:
                  # outputs are always available, so consider all source task
                  # element sets:
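The loop added above widens `es_idx` so that an element set which explicitly names a task-type input source for the parameter is also treated as a provider, even if it does not define the input locally. A self-contained sketch of that selection rule, using hypothetical stub classes in place of hpcflow's real `ElementSet` and `InputSource`:

from dataclasses import dataclass, field
from enum import Enum


class InputSourceType(Enum):
    LOCAL = "local"
    TASK = "task"


@dataclass
class InputSourceStub:
    source_type: InputSourceType


@dataclass
class ElementSetStub:
    index: int
    input_sources: dict = field(default_factory=dict)


def add_task_sourced_sets(element_sets, labelled_path, es_idx):
    """Append the index of any element set whose explicit input sources include
    a task-type source for `labelled_path` (mirrors the new loop in `Task`)."""
    for es_i in element_sets:
        for inp_src_i in es_i.input_sources.get(labelled_path, []):
            if inp_src_i.source_type is InputSourceType.TASK:
                if es_i.index not in es_idx:
                    es_idx.append(es_i.index)
                break
    return es_idx


# element set 1 explicitly sources "p3" from an upstream task, so it is included:
sets = [
    ElementSetStub(index=0),
    ElementSetStub(index=1, input_sources={"p3": [InputSourceStub(InputSourceType.TASK)]}),
]
assert add_task_sourced_sets(sets, "p3", []) == [1]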
@@ -2332,6 +2340,7 @@ class WorkflowTask:
                  resources=elem_prop.element_set.resources[:],
                  repeats=elem_prop.element_set.repeats,
                  nesting_order=elem_prop.nesting_order,
+                 input_sources=elem_prop.input_sources,
                  sourceable_elem_iters=src_elem_iters,
              )

@@ -3027,6 +3036,7 @@ class ElementPropagation:

      task: app.Task
      nesting_order: Optional[Dict] = None
+     input_sources: Optional[Dict] = None

      @property
      def element_set(self):
@@ -3037,6 +3047,7 @@ class ElementPropagation:
          return self.__class__(
              task=self.task,
              nesting_order=copy.deepcopy(self.nesting_order, memo),
+             input_sources=copy.deepcopy(self.input_sources, memo),
          )

      @classmethod
hpcflow/sdk/core/test_utils.py CHANGED
@@ -96,11 +96,13 @@ def make_tasks(
      local_sequences=None,
      local_resources=None,
      nesting_orders=None,
+     input_sources=None,
  ):
      local_inputs = local_inputs or {}
      local_sequences = local_sequences or {}
      local_resources = local_resources or {}
      nesting_orders = nesting_orders or {}
+     input_sources = input_sources or {}
      schemas = make_schemas(schemas_spec, ret_list=True)
      tasks = []
      for s_idx, s in enumerate(schemas):
@@ -117,13 +119,13 @@
              for i in local_sequences.get(s_idx, [])
          ]
          res = {k: v for k, v in local_resources.get(s_idx, {}).items()}
-
          task = hf.Task(
              schema=s,
              inputs=inputs,
              sequences=seqs,
              resources=res,
              nesting_order=nesting_orders.get(s_idx, {}),
+             input_sources=input_sources.get(s_idx, None),
          )
          tasks.append(task)
      return tasks
@@ -136,6 +138,7 @@ def make_workflow(
      local_sequences=None,
      local_resources=None,
      nesting_orders=None,
+     input_sources=None,
      resources=None,
      name="w1",
      overwrite=False,
@@ -147,6 +150,7 @@
          local_sequences=local_sequences,
          local_resources=local_resources,
          nesting_orders=nesting_orders,
+         input_sources=input_sources,
      )
      wk = hf.Workflow.from_template(
          hf.WorkflowTemplate(name=name, tasks=tasks, resources=resources),
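Together with the `task.py` changes above, these helper extensions let a test pin where a downstream task sources a parameter. A condensed sketch of the intended usage, mirroring the new tests further below (the workflow layout, values, and path are illustrative only):

from hpcflow.app import app as hf
from hpcflow.sdk.core.test_utils import make_workflow

# t1 outputs p3; t2 consumes p3 (and p2) and outputs p4; t3 consumes p3 and p4:
wk = make_workflow(
    schemas_spec=[
        [{"p1": None}, ("p3",), "t1"],
        [{"p2": None, "p3": None}, ("p4",), "t2"],
        [{"p3": None, "p4": None}, (), "t3"],
    ],
    local_inputs={0: ("p1",)},
    local_sequences={1: [("inputs.p2", 2, 1)]},
    # force t3 (task index 2) to source "p3" from t1, not the default (t2's input):
    input_sources={2: {"p3": [hf.InputSource.task(0)]}},
    path="/tmp/wk",  # illustrative; the tests pass pytest's tmp_path
)

# the same override can accompany element propagation when adding elements:
wk.tasks.t1.add_elements(
    inputs=[hf.InputValue(hf.Parameter("p1"), 102)],
    propagate_to={
        "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
        "t3": {"input_sources": {"p3": [hf.InputSource.task(0)]}},
    },
)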
hpcflow/tests/unit/test_app.py CHANGED
@@ -1,4 +1,6 @@
+ import sys
  import pytest
+ import requests

  from hpcflow.app import app as hf

@@ -87,6 +89,14 @@ def test_get_demo_data_manifest(null_config):
      hf.get_demo_data_files_manifest()


+ @pytest.mark.xfail(
+     condition=sys.platform == "darwin",
+     raises=requests.exceptions.HTTPError,
+     reason=(
+         "GHA MacOS runners use the same IP address, so we get rate limited when "
+         "retrieving demo data from GitHub."
+     ),
+ )
  def test_get_demo_data_cache(null_config):
      hf.clear_demo_data_cache_dir()
      hf.cache_demo_data_file("text_file.txt")
hpcflow/tests/unit/test_input_value.py CHANGED
@@ -1,4 +1,6 @@
+ import sys
  import pytest
+ import requests

  from hpcflow.app import app as hf
  from hpcflow.sdk.core.errors import InputValueDuplicateSequenceAddress
@@ -155,6 +157,14 @@ def test_value_is_dict_check_no_raise_if_sub_parameter(null_config):
      hf.InputValue("p1c", path="a", value=101)


+ @pytest.mark.xfail(
+     condition=sys.platform == "darwin",
+     raises=requests.exceptions.HTTPError,
+     reason=(
+         "GHA MacOS runners use the same IP address, so we get rate limited when "
+         "retrieving demo data from GitHub."
+     ),
+ )
  def test_demo_data_value(null_config):
      name = "text_file.txt"
      assert hf.InputValue("p1", value=f"<<demo_data_file:{name}>>").value == str(
hpcflow/tests/unit/test_parameter.py CHANGED
@@ -1,9 +1,11 @@
  from dataclasses import dataclass
  import random
  import string
+ import sys
  from textwrap import dedent

  import pytest
+ import requests

  from hpcflow.app import app as hf
  from hpcflow.sdk.core.parameters import ParameterValue
@@ -148,6 +150,14 @@ def test_slice(new_null_config, tmp_path):
      assert p1_params[1].value == values[2]


+ @pytest.mark.xfail(
+     condition=sys.platform == "darwin",
+     raises=requests.exceptions.HTTPError,
+     reason=(
+         "GHA MacOS runners use the same IP address, so we get rate limited when "
+         "retrieving demo data from GitHub."
+     ),
+ )
  def test_demo_data_substitution_param_value_class_method(new_null_config, tmp_path):
      yaml_str = dedent(
          """\
@@ -171,6 +181,14 @@ def test_demo_data_substitution_param_value_class_method(new_null_config, tmp_pa
      }


+ @pytest.mark.xfail(
+     condition=sys.platform == "darwin",
+     raises=requests.exceptions.HTTPError,
+     reason=(
+         "GHA MacOS runners use the same IP address, so we get rate limited when "
+         "retrieving demo data from GitHub."
+     ),
+ )
  def test_demo_data_substitution_value_sequence_class_method(new_null_config, tmp_path):
      yaml_str = dedent(
          """\
hpcflow/tests/unit/test_task.py CHANGED
@@ -404,7 +404,7 @@ def test_task_get_available_task_input_sources_expected_return_two_params_one_ou
      assert available == available_exp


- def test_task_get_available_task_input_sources_input_source_excluded_if_not_local(
+ def test_task_get_available_task_input_sources_one_parameter_extravaganza(
      tmp_path,
  ):
      """Test an input source is excluded if it is not locally defined (meaning it comes
@@ -433,6 +433,12 @@
              task_source_type=hf.TaskSourceType.OUTPUT,
              element_iters=[1],
          ),
+         hf.InputSource(
+             source_type=hf.InputSourceType.TASK,
+             task_ref=1,
+             task_source_type=hf.TaskSourceType.INPUT,
+             element_iters=[1],
+         ),
          hf.InputSource(
              source_type=hf.InputSourceType.TASK,
              task_ref=0,
@@ -1194,7 +1200,7 @@ def test_task_add_elements_multi_task_dependence_expected_workflow_num_elements(
          },
      )
      num_elems_new = wk.num_elements
-     assert num_elems_new - num_elems == 5
+     assert num_elems_new - num_elems == 7


  def test_task_add_elements_multi_task_dependence_expected_task_num_elements(
@@ -1221,6 +1227,39 @@
      )
      num_elems_new = [task.num_elements for task in wk.tasks]
      num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
+     assert num_elems_diff == [1, 2, 4]
+ 
+ 
+ def test_task_add_elements_multi_task_dependence_expected_task_num_elements_custom_input_source(
+     tmp_path, param_p1
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             [{"p1": None}, ("p3",), "t1"],
+             [{"p2": None, "p3": None}, ("p4",), "t2"],
+             [{"p3": None, "p4": None}, (), "t3"],
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     num_elems = [task.num_elements for task in wk.tasks]
+     wk.tasks.t1.add_elements(
+         inputs=[hf.InputValue(param_p1, 102)],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     num_elems_new = [task.num_elements for task in wk.tasks]
+     num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
      assert num_elems_diff == [1, 2, 2]


@@ -1264,6 +1303,60 @@ def test_task_add_elements_multi_task_dependence_expected_new_data_index(
          + t3_num_elems_new
      ]

+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]]
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 2
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 4
+     )
+ 
+ 
+ def test_task_add_elements_multi_task_dependence_expected_new_data_index_custom_input_source(
+     tmp_path, param_p1
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             [{"p1": None}, ("p3",), "t1"],
+             [{"p2": None, "p3": None}, ("p4",), "t2"],
+             [{"p3": None, "p4": None}, (), "t3"],
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         inputs=[hf.InputValue(param_p1, 102)],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+     data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+ 
      assert (
          new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]]
          and new_elems_t2
@@ -1297,6 +1390,40 @@ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements(
          },
      )
      num_elems_new = wk.num_elements
+     assert num_elems_new - num_elems == 45
+ 
+ 
+ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements_custom_input_source(
+     tmp_path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             [{"p1": None}, ("p3",), "t1"],
+             [{"p2": None, "p3": None}, ("p4",), "t2"],
+             [{"p3": None, "p4": None}, (), "t3"],
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     num_elems = wk.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     num_elems_new = wk.num_elements
      assert num_elems_new - num_elems == 27


@@ -1326,6 +1453,41 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elem
      )
      num_elems_new = [task.num_elements for task in wk.tasks]
      num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
+     assert num_elems_diff == [3, 6, 36]
+ 
+ 
+ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elements_custom_input_source(
+     tmp_path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             [{"p1": None}, ("p3",), "t1"],
+             [{"p2": None, "p3": None}, ("p4",), "t2"],
+             [{"p3": None, "p4": None}, (), "t3"],
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     num_elems = [task.num_elements for task in wk.tasks]
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     num_elems_new = [task.num_elements for task in wk.tasks]
+     num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
      assert num_elems_diff == [3, 6, 18]


@@ -1359,6 +1521,62 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_inde
      t2_num_elems_new = wk.tasks.t2.num_elements
      t3_num_elems_new = wk.tasks.t3.num_elements

+     data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]] * 3
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 6
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 36
+     )
+ 
+ 
+ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_index_custom_input_source(
+     tmp_path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             [{"p1": None}, ("p3",), "t1"],
+             [{"p2": None, "p3": None}, ("p4",), "t2"],
+             [{"p3": None, "p4": None}, (), "t3"],
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+ 
      data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
      new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
      new_elems_t2 = data_index_new[
@@ -2176,3 +2394,67 @@ def test_raise_UnknownEnvironmentPresetError_sequence(null_config):
      seq = hf.ValueSequence(path="env_preset", values=["my_env_preset"])
      with pytest.raises(UnknownEnvironmentPresetError):
          hf.Task(schema=ts, sequences=[seq])
+ 
+ 
+ def test_group_values_input_and_output_source_from_upstream(null_config, tmp_path):
+     """
+     | task | inputs | outputs | group    | num_elements               |
+     | ---- | ------ | ------- | -------- | -------------------------- |
+     | t1   | p0     | p1      | -        | 3                          |
+     | t2   | p1     | p2      | my_group | 3                          |
+     | t3   | p1, p2 | -       | -        | 1 (grouped p1, grouped p2) |
+     """
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput("p0")],
+         outputs=[hf.SchemaOutput("p1")],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="echo <<parameter:p0>> + 1",
+                         stdout="<<parameter:p1>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaOutput("p2")],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="echo <<parameter:p1>> + 1",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s3 = hf.TaskSchema(
+         objective="t3",
+         inputs=[
+             hf.SchemaInput("p1", group="my_group"),
+             hf.SchemaInput("p2", group="my_group"),
+         ],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p0": 1},
+         repeats=3,
+     )
+     t2 = hf.Task(schema=s2, groups=[hf.ElementGroup("my_group")])
+     t3 = hf.Task(schema=s3)
+     wk = hf.Workflow.from_template_data(
+         template_name="test_group",
+         tasks=[t1, t2, t3],
+         path=tmp_path,
+     )
+     assert wk.tasks[0].num_elements == 3
+     assert wk.tasks[1].num_elements == 3
+     assert wk.tasks[2].num_elements == 1
+     assert [i.value for i in wk.tasks[2].inputs.p1] == [[None, None, None]]
+     assert [i.value for i in wk.tasks[2].inputs.p2] == [[None, None, None]]
hpcflow/tests/unit/test_value_sequence.py CHANGED
@@ -1,5 +1,7 @@
+ import sys
  import numpy as np
  import pytest
+ import requests

  from hpcflow.app import app as hf
  from hpcflow.sdk.core.test_utils import P1_parameter_cls as P1
@@ -407,6 +409,14 @@ def test_nesting_order_three_seqs_all_decimal(null_config, tmp_path):
      assert wk.tasks.test.elements[5].get("inputs") == {"p1": "b", "p2": "e", "p3": "k"}


+ @pytest.mark.xfail(
+     condition=sys.platform == "darwin",
+     raises=requests.exceptions.HTTPError,
+     reason=(
+         "GHA MacOS runners use the same IP address, so we get rate limited when "
+         "retrieving demo data from GitHub."
+     ),
+ )
  def test_demo_data_values(null_config):
      name = "text_file.txt"
      assert hf.ValueSequence(
hpcflow_new2-0.2.0a169.dist-info/METADATA → hpcflow_new2-0.2.0a173.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hpcflow-new2
- Version: 0.2.0a169
+ Version: 0.2.0a173
  Summary: Computational workflow management
  License: MIT
  Author: aplowman
hpcflow_new2-0.2.0a169.dist-info/RECORD → hpcflow_new2-0.2.0a173.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
  hpcflow/__init__.py,sha256=WIETuRHeOp2SqUqHUzpjQ-lk9acbYv-6aWOhZPRdlhs,64
  hpcflow/__pyinstaller/__init__.py,sha256=YOzBlPSck6slucv6lJM9K80JtsJWxXRL00cv6tRj3oc,98
  hpcflow/__pyinstaller/hook-hpcflow.py,sha256=SeMopsPkhCyd9gqIrzwFNRj3ZlkUlUYl-74QYz61mo4,1089
- hpcflow/_version.py,sha256=hxFrOWNQUJIOw8YnjryDCR0GrhCs69VkopK-elg35sw,26
+ hpcflow/_version.py,sha256=DsUOrvu-8mgrOcHrDTSSYHvMcnzqaEQ4ZiMdpfSpyJI,26
  hpcflow/app.py,sha256=d-kgfnZNlqlCi2H8bK26714brD_u3ibN3FaEZgjF9aA,1332
  hpcflow/cli.py,sha256=G2J3D9v6MnMWOWMMWK6UEKLn_6wnV9lT_qygEBBxg-I,66
  hpcflow/data/demo_data_manifest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -35,7 +35,7 @@ hpcflow/data/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
  hpcflow/data/workflows/workflow_1.yaml,sha256=lF7Re2SVc_5gQk5AwB0gXaq-n-T5ia4su3zNQ9oMRV0,220
  hpcflow/examples.ipynb,sha256=cLKp4QsxwwMXRanDnfWY9kqsV23q6G4raOpu6IZXnMw,28553
  hpcflow/sdk/__init__.py,sha256=SdzVm7dydLv2kmr2tqrH14Gf1GEAEhsEuAuqiGBLHhM,5700
- hpcflow/sdk/app.py,sha256=DDIHHvXjjjpCbZ4HQXk7ByuvzUUPsDy7WxS_940bG48,95077
+ hpcflow/sdk/app.py,sha256=lS9sl6YEbB02LZbltlgs8neokZNmxfl4aT46-2UuDTQ,96661
  hpcflow/sdk/cli.py,sha256=uIzF2YEFQFxmJNFwZM_OZWpzgJK0BnqNY3zwa9sfVzM,35028
  hpcflow/sdk/cli_common.py,sha256=kDSIe90mxD37lQqvIXDyRauLtTRRnNueSljcRPF_w0M,4738
  hpcflow/sdk/config/__init__.py,sha256=qJrrxcAN4f1u_RyTtXgz-xlTLwNafE9v0VEMP1x6-bU,70
@@ -58,9 +58,9 @@ hpcflow/sdk/core/parallel.py,sha256=LI-g-qOuOR1oaEUWVT0qW0hmiP9hsJyUP8_IfSTKYYo,
  hpcflow/sdk/core/parameters.py,sha256=0h1M-fXqOVgruyM0Au7Fo38cUbHgDNEPd1Alb1FULxE,65588
  hpcflow/sdk/core/rule.py,sha256=3jVsSZCBv4Odxy8QbSbKo9ZcRuU-5DRJoNK8adXCEpI,4567
  hpcflow/sdk/core/run_dir_files.py,sha256=_k-hA7dlry9GZw5ZXcntFcPGxg07p03hnHSM5S-2G2Y,2197
- hpcflow/sdk/core/task.py,sha256=rfzjTHCldnTIlLQ1yWnMC-cJjvyVehFt-WCVL3_H2kc,121281
+ hpcflow/sdk/core/task.py,sha256=AmPHKLjJH5EiS1YO58fF0KXk47IDqkb3uYI1KVvKymc,121858
  hpcflow/sdk/core/task_schema.py,sha256=TipXzC2guu9zilv0En-rHt6lUCTSIj5faI4lVWQdUbA,32346
- hpcflow/sdk/core/test_utils.py,sha256=UKitv3qvZpSz8vE_HRaJjPDnaOeojy6WVntSskPs03Q,9354
+ hpcflow/sdk/core/test_utils.py,sha256=tM5HRy2jkHw-JzvgqMxO3OYb2pecANGIv6Xjln8WwFI,9536
  hpcflow/sdk/core/utils.py,sha256=pReOwnmuxJqexPUdaA8UMjJ4o8ucllBVVssWjb_LNQc,25651
  hpcflow/sdk/core/validation.py,sha256=KBKiy5DdfGiGmMaB0HdKTY0V972u5dJzvkYkX0_KtCo,518
  hpcflow/sdk/core/workflow.py,sha256=9YCgqCLQmkHuMnZS3HTEth-2syDhaXOJoBCuYv5Kqkw,108627
@@ -117,7 +117,7 @@ hpcflow/tests/scripts/test_main_scripts.py,sha256=6mZvcppeFf9yL7XipYwHKxQvILKzCo
  hpcflow/tests/shells/wsl/test_wsl_submission.py,sha256=IrpvsxVfsQCUmS8KKn7w9DiVFR8z_ak_IWyAd1E0KKc,516
  hpcflow/tests/unit/test_action.py,sha256=GDyx2ak6H-gvuHAG7-oia39crFcX1bGC3h2M7j8teOs,26334
  hpcflow/tests/unit/test_action_rule.py,sha256=vX7hMo_9AO5iUGWdDF8uP8rM4jghZidusiY4ZvNcEKo,556
- hpcflow/tests/unit/test_app.py,sha256=dujHPEpUMAHVWgA-TB0zeACMfmmMgFTDAJgX0Dd3qzQ,2967
+ hpcflow/tests/unit/test_app.py,sha256=1JVEuGlPHhO3igkbHsKAQOkBrRbXD-YFsRcyKD3uF2o,3244
  hpcflow/tests/unit/test_cli.py,sha256=9oQZOlX0z5LC4e2JFLuIgGnUXgmR2RCAtbXR5XRwqJs,288
  hpcflow/tests/unit/test_command.py,sha256=qf2z0wofxCisza90HMDlb81wsNhByXRAOPMfmRv6paA,22422
  hpcflow/tests/unit/test_config.py,sha256=yW_tCjCaReCud7Lv4-CLt8mZ7XoaGLGxjozJQoZYZ2c,2507
@@ -126,11 +126,11 @@ hpcflow/tests/unit/test_element.py,sha256=JxqLigktPbdoZz6zV8iXiqPG7-vrdexUpzK5xd
  hpcflow/tests/unit/test_element_iteration.py,sha256=K0oxoDSSKy2JAcAhhE_l63M3u1cus1SsVc5LR2jLe0k,1292
  hpcflow/tests/unit/test_element_set.py,sha256=oy0KTIwUKm5NNnBYVNNjLcYfipb3sZWDldjE1klSGUU,3709
  hpcflow/tests/unit/test_input_source.py,sha256=pYm1V4kBsBIFstKDcrbwUiDLb82rr7ITcUEFJES2dI0,39194
- hpcflow/tests/unit/test_input_value.py,sha256=hHoe9rlCbfFvqqkAu4ylu95YVaAhinthXqTCUNEnSm8,5454
+ hpcflow/tests/unit/test_input_value.py,sha256=H6GX1ee7fuq5f-OsfkfiSW8eye_pWwVLUCYUSnj1-Gg,5731
  hpcflow/tests/unit/test_json_like.py,sha256=aGCiGfT-tNiFu3yzW6d_T-oDc5QLwSUgq3pN3jFhyF0,29939
  hpcflow/tests/unit/test_loop.py,sha256=vBnCnoweiFIi2T68bu6PCQ4yH829pxK6Oe4Comg0bHo,21060
  hpcflow/tests/unit/test_object_list.py,sha256=nDpbRpCu4XqoYxKMr1_QmDS1s2_6nQOpIEBRHSAXoVg,3049
- hpcflow/tests/unit/test_parameter.py,sha256=g-4pZeIYW0GQyy2rL9JkKBpOC6p1PuGOiq4cbBIwzq0,6132
+ hpcflow/tests/unit/test_parameter.py,sha256=39CVido_NJGX-Xj9NDSlazpGzWqMG4zp0GmIKwzO7lI,6659
  hpcflow/tests/unit/test_persistence.py,sha256=DPXFiuY2v8vj0zZ7299nf-KtgYT8LhHI52fq7UPoa6Y,8128
  hpcflow/tests/unit/test_resources.py,sha256=2RRFIn5brKyD1UNmkmnvyjPZowa-dokQd0EuCEeo7so,7799
  hpcflow/tests/unit/test_run.py,sha256=uvG2BbVOD0JJAJCbdh0MMRJME8tVzOm7H4PTLzPLWZY,2613
@@ -139,16 +139,16 @@ hpcflow/tests/unit/test_schema_input.py,sha256=738tJjEzBcQ8Z_YTcybVBW7Twy_hXmbPW
  hpcflow/tests/unit/test_shell.py,sha256=FDtQ9fHRhSKiVtxMJ8BRisoeSvvk8zmJndTB4LlhqGc,3442
  hpcflow/tests/unit/test_slurm.py,sha256=ewfNuXXUEEelAxcd7MBbAQ-RCvU8xBenHTAyfXYF-R0,1064
  hpcflow/tests/unit/test_submission.py,sha256=kQ3ksjGlfp47AYuwTA27RDX2XxRU3YxKlKC1ACTbXw8,16682
- hpcflow/tests/unit/test_task.py,sha256=rSMD5eLJxss81HgAxGU9yKTFLZHE2LYdG-XFaN5B5N4,70202
+ hpcflow/tests/unit/test_task.py,sha256=QJuEpJ0y0nBesprgoau5R2kFZBCW-ygNmYatLT_M5-o,80227
  hpcflow/tests/unit/test_task_schema.py,sha256=j5HHxoqq4Mb223jKcusgX-C6-TsmKG0PLjYQ4M01ZHo,4531
  hpcflow/tests/unit/test_utils.py,sha256=JMhSRZFqmI9ZhREJet9en_y3aRVlQlWE7OKpkdt8SVI,14172
- hpcflow/tests/unit/test_value_sequence.py,sha256=Xv121bLTnrLpdhmwoJ_yJvXoJ3L-iOWEQBTl9V_aMio,15463
+ hpcflow/tests/unit/test_value_sequence.py,sha256=yJh5YRxN-VYMbCWiUaLH4T_Ue5F2IfVS3e11zx6HlS0,15740
  hpcflow/tests/unit/test_workflow.py,sha256=Eyr9BhnsFisAPotEAeYrAvxXT1d2i6oshEh1_OxgnSc,22732
  hpcflow/tests/unit/test_workflow_template.py,sha256=fF7LNveMwCledgncNCRfD9Nd9dL9tSPtlAAOKV3ovAU,5396
  hpcflow/tests/workflows/test_jobscript.py,sha256=9sp1o0g72JZbv2QlOl5v7wCZEFjotxiIKGNUxVaFgaA,724
  hpcflow/tests/workflows/test_workflows.py,sha256=xai6FRtGqG4lStJk6KmsqPUSuvqs9FrsBOxMVALshIs,13400
  hpcflow/viz_demo.ipynb,sha256=1QdnVsk72vihv2L6hOGyk318uEa22ZSgGxQCa7hW2oo,6238
- hpcflow_new2-0.2.0a169.dist-info/METADATA,sha256=V_GOYel9UeZ7LNLC1pvXWmN_vji3HzwHTwhKCDhBaO8,2473
- hpcflow_new2-0.2.0a169.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
- hpcflow_new2-0.2.0a169.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
- hpcflow_new2-0.2.0a169.dist-info/RECORD,,
+ hpcflow_new2-0.2.0a173.dist-info/METADATA,sha256=gD5RJj14jIPQe718k6cfXERGcIcx_ywckgoB75S_IjM,2473
+ hpcflow_new2-0.2.0a173.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
+ hpcflow_new2-0.2.0a173.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
+ hpcflow_new2-0.2.0a173.dist-info/RECORD,,