hpcflow-new2 0.2.0a69__py3-none-any.whl → 0.2.0a71__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
 from contextlib import contextmanager
 import copy
 from dataclasses import dataclass, field
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Optional, Tuple, Union
 
 from hpcflow.sdk import app
 from hpcflow.sdk.core.parameters import Parameter
@@ -50,7 +50,7 @@ class TaskSchema(JSONLike):
     def __init__(
         self,
         objective: Union[app.TaskObjective, str],
-        actions: List[app.Action],
+        actions: List[app.Action] = None,
         method: Optional[str] = None,
         implementation: Optional[str] = None,
         inputs: Optional[List[Union[app.Parameter, app.SchemaInput]]] = None,
@@ -59,7 +59,7 @@ class TaskSchema(JSONLike):
         _hash_value: Optional[str] = None,
     ):
         self.objective = objective
-        self.actions = actions
+        self.actions = actions or []
         self.method = method
         self.implementation = implementation
         self.inputs = inputs or []
@@ -203,10 +203,11 @@ class TaskSchema(JSONLike):
             for OFP_j in act_i.output_file_parsers:
                 extra_ins = extra_ins - set(OFP_j.inputs or [])
 
-        if extra_ins:
+        if self.actions and extra_ins:
+            # allow for no actions (e.g. defining inputs for downstream tasks)
             raise ValueError(
-                f"Schema {self.name!r} inputs {tuple(extra_ins)!r} are not used by "
-                f"any actions."
+                f"Schema {self.name!r} inputs {tuple(extra_ins)!r} are not used "
+                f"by any actions."
             )
 
         missing_outs = set(self.output_types) - set(all_outs)
@@ -225,11 +226,12 @@ class TaskSchema(JSONLike):
     def make_persistent(self, workflow: app.Workflow, source: Dict) -> List[int]:
         new_refs = []
         for input_i in self.inputs:
-            if input_i.default_value is not None:
-                _, dat_ref, is_new = input_i.default_value.make_persistent(
-                    workflow, source
-                )
-                new_refs.extend(dat_ref) if is_new else None
+            for lab_info in input_i.labelled_info():
+                if "default_value" in lab_info:
+                    _, dat_ref, is_new = lab_info["default_value"].make_persistent(
+                        workflow, source
+                    )
+                    new_refs.extend(dat_ref) if is_new else None
         return new_refs
 
     @property
@@ -243,19 +245,26 @@ class TaskSchema(JSONLike):
 
     @property
     def input_types(self):
-        return tuple(i.typ for i in self.inputs)
+        return tuple(j for i in self.inputs for j in i.all_labelled_types)
 
     @property
     def output_types(self):
         return tuple(i.typ for i in self.outputs)
 
     @property
-    def provides_parameters(self):
-        return tuple(
-            i
-            for i in self.inputs + self.outputs
-            if i.propagation_mode != ParameterPropagationMode.NEVER
-        )
+    def provides_parameters(self) -> Tuple[Tuple[str, str]]:
+        out = []
+        for schema_inp in self.inputs:
+            for labelled_info in schema_inp.labelled_info():
+                prop_mode = labelled_info["propagation_mode"]
+                if prop_mode is not ParameterPropagationMode.NEVER:
+                    out.append(
+                        (schema_inp.input_or_output, labelled_info["labelled_type"])
+                    )
+        for schema_out in self.outputs:
+            if schema_out.propagation_mode is not ParameterPropagationMode.NEVER:
+                out.append((schema_out.input_or_output, schema_out.typ))
+        return tuple(out)
 
     @property
     def task_template(self):
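
The reworked `provides_parameters` returns `(input_or_output, labelled_type)` string pairs rather than parameter objects, so labelled inputs are reported with their label suffix. A minimal sketch of the new shape, assuming hypothetical parameter names, default propagation modes, and that `input_or_output` resolves to the strings "input"/"output":

    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter="p1", labels={"one": {}}, multiple=True)],
        outputs=[hf.SchemaOutput(parameter="p2")],
    )
    # each entry pairs the parameter's role with its (labelled) type:
    assert s1.provides_parameters == (("input", "p1[one]"), ("output", "p2"))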
@@ -1,5 +1,7 @@
-from typing import List, Tuple, Union
+from dataclasses import dataclass
+from typing import List, Optional, Tuple, Union
 from hpcflow.app import app as hf
+from hpcflow.sdk.core.parameters import ParameterValue
 
 
 def make_schemas(ins_outs, ret_list=False):
@@ -39,6 +41,7 @@ def make_schemas(ins_outs, ret_list=False):
         output_file_parsers=out_file_parsers,
         environments=[hf.ActionEnvironment(hf.Environment(name="env_1"))],
     )
+    print(f"{ins_i=}")
     out.append(
         hf.TaskSchema(
             objective=obj,
@@ -149,3 +152,15 @@ def make_workflow(
         store=store,
     )
     return wk
+
+
+@dataclass
+class P1_parameter_cls(ParameterValue):
+    _typ = "p1"
+
+    a: int
+    d: Optional[int] = None
+
+    @classmethod
+    def from_data(cls, b, c):
+        return cls(a=b + c)
hpcflow/sdk/core/utils.py CHANGED
@@ -1,4 +1,4 @@
-import copy
+import enum
 from functools import wraps
 import contextlib
 import hashlib
@@ -14,7 +14,7 @@ import string
 import subprocess
 from datetime import datetime, timezone
 import sys
-from typing import List, Mapping
+from typing import Type, Union, List, Mapping
 
 from ruamel.yaml import YAML
 import sentry_sdk
@@ -613,3 +613,19 @@ def open_file(filename):
     else:
         opener = "open" if sys.platform == "darwin" else "xdg-open"
         subprocess.call([opener, filename])
+
+
+def get_enum_by_name_or_val(enum_cls: Type, key: Union[str, None]) -> enum.Enum:
+    """Retrieve an enum by name or value, assuming uppercase names and integer values."""
+    err = f"Unknown enum key or value {key!r} for class {enum_cls!r}"
+    if key is None or isinstance(key, enum_cls):
+        return key
+    elif isinstance(key, (int, float)):
+        return enum_cls(int(key))  # retrieve by value
+    elif isinstance(key, str):
+        try:
+            return getattr(enum_cls, key.upper())  # retrieve by name
+        except AttributeError:
+            raise ValueError(err)
+    else:
+        raise ValueError(err)
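
A short sketch of how the new helper resolves keys, using a toy `Colour` enum (not part of the package) that follows the uppercase-name/integer-value convention the helper assumes:

    import enum

    class Colour(enum.Enum):
        RED = 1
        GREEN = 2

    assert get_enum_by_name_or_val(Colour, "red") is Colour.RED       # by name, case-insensitive
    assert get_enum_by_name_or_val(Colour, 2) is Colour.GREEN         # by (integer) value
    assert get_enum_by_name_or_val(Colour, None) is None              # None passes through
    assert get_enum_by_name_or_val(Colour, Colour.RED) is Colour.RED  # members pass through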
@@ -127,9 +127,11 @@ class WorkflowTemplate(JSONLike):
             if "element_sets" not in task_dat:
                 # add a single element set:
                 schemas = task_dat.pop("schemas")
+                out_labels = task_dat.pop("output_labels", [])
                 data["tasks"][task_idx] = {
                     "schemas": schemas,
                     "element_sets": [task_dat],
+                    "output_labels": out_labels,
                 }
 
         # extract out any template components:
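
The effect of this normalisation, sketched with hypothetical task data: a task given without explicit element sets is wrapped into a single element set, and any `output_labels` are now lifted to the task level rather than being swallowed into that element set:

    task_dat = {"schemas": ["s1"], "inputs": [...], "output_labels": ["out1"]}
    # becomes:
    {
        "schemas": ["s1"],
        "element_sets": [{"inputs": [...]}],
        "output_labels": ["out1"],
    }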
@@ -1179,15 +1181,15 @@
             for loop in self.loops:
                 loop._accept_pending_num_added_iters()
 
-            if is_workflow_creation:
-                self._store.remove_replaced_dir()
-
             # TODO: handle errors in commit pending?
             self._store._pending.commit_all()
-
             self._accept_pending()
-            self.app.persistence_logger.info("exiting batch update")
-            self._in_batch_mode = False
+
+            if is_workflow_creation:
+                self._store.remove_replaced_dir()
+
+            self.app.persistence_logger.info("exiting batch update")
+            self._in_batch_mode = False
 
     @classmethod
     def temporary_rename(cls, path: str, fs) -> List[str]:
@@ -1341,10 +1343,11 @@ class Workflow:
         return self.get_parameters([index], **kwargs)[0]
 
     def get_parameter_data(self, index: int, **kwargs: Dict) -> Any:
-        return (
-            self.get_parameter(index, **kwargs).data
-            or self.get_parameter(index, **kwargs).file
-        )
+        param = self.get_parameter(index, **kwargs)
+        if param.data is not None:
+            return param.data
+        else:
+            return param.file
 
     def get_parameter_source(self, index: int) -> Dict:
         return self.get_parameter_sources([index])[0]
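
This rewrite matters when stored data is falsy but set: the old `data or file` expression meant a stored value of `0`, `""`, or an empty list would fall through to the parameter's file. A sketch with a hypothetical parameter index:

    # suppose parameter index 5 holds data `0` and also has an associated file:
    wk.get_parameter_data(5)  # now returns 0; the old `data or file` returned the file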
@@ -1576,7 +1579,6 @@ class Workflow:
         if EAR.action.abortable and exit_code == ABORT_EXIT_CODE:
             # the point of aborting an EAR is to continue with the workflow:
             success = True
-        self._store.set_EAR_end(EAR_ID, exit_code, success)
 
         for IFG_i in EAR.action.input_file_generators:
             inp_file = IFG_i.input_file
@@ -1644,6 +1646,8 @@ class Workflow:
             )
             self._store.set_EAR_skip(EAR_dep_ID)
 
+        self._store.set_EAR_end(EAR_ID, exit_code, success)
+
     def set_EAR_skip(self, EAR_ID: int) -> None:
         """Record that an EAR is to be skipped due to an upstream failure."""
         with self._store.cached_load():
@@ -58,14 +58,7 @@ def test_shared_data_from_json_like_with_shared_data_dependency(act_1):
         "implementation": ts1.implementation,
         "version": ts1.version,
         "objective": ts1.objective.name,
-        "inputs": [
-            {
-                "group": None,
-                "where": None,
-                "parameter": f"hash:{p1_hash}",
-                "propagation_mode": "IMPLICIT",
-            }
-        ],
+        "inputs": [{"parameter": f"hash:{p1_hash}", "labels": {"": {}}}],
         "outputs": [],
         "actions": [
             {
@@ -88,3 +88,44 @@ def test_value_sequence_from_json_like_class_method_attribute_is_set():
         json_like, shared_data=hf.template_components
     )
     assert val_seq.value_class_method == cls_method
+
+
+def test_path_attributes():
+    inp = hf.InputValue(parameter="p1", value=101, path="a.b")
+    assert inp.labelled_type == "p1"
+    assert inp.normalised_path == "inputs.p1.a.b"
+    assert inp.normalised_inputs_path == "p1.a.b"
+
+
+def test_path_attributes_with_label_arg():
+    inp = hf.InputValue(parameter="p1", value=101, path="a.b", label="1")
+    assert inp.labelled_type == "p1[1]"
+    assert inp.normalised_path == "inputs.p1[1].a.b"
+    assert inp.normalised_inputs_path == "p1[1].a.b"
+
+
+def test_path_attributes_with_label_arg_cast():
+    inp = hf.InputValue(parameter="p1", value=101, path="a.b", label=1)
+    assert inp.labelled_type == "p1[1]"
+    assert inp.normalised_path == "inputs.p1[1].a.b"
+    assert inp.normalised_inputs_path == "p1[1].a.b"
+
+
+def test_from_json_like():
+    inp = hf.InputValue.from_json_like(
+        json_like={"parameter": "p1", "value": 101},
+        shared_data=hf.template_components,
+    )
+    assert inp.parameter.typ == hf.Parameter("p1").typ
+    assert inp.value == 101
+    assert inp.label == ""
+
+
+def test_from_json_like_with_label():
+    inp = hf.InputValue.from_json_like(
+        json_like={"parameter": "p1[1]", "value": 101},
+        shared_data=hf.template_components,
+    )
+    assert inp.parameter.typ == hf.Parameter("p1").typ
+    assert inp.value == 101
+    assert inp.label == "1"
@@ -0,0 +1,191 @@
+import pytest
+
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.test_utils import P1_parameter_cls as P1
+
+
+@pytest.fixture
+def null_config(tmp_path):
+    if not hf.is_config_loaded:
+        hf.load_config(config_dir=tmp_path)
+
+
+def test_null_default_value():
+    p1 = hf.Parameter("p1")
+    p1_inp = hf.SchemaInput(parameter=p1)
+    assert "default_value" not in p1_inp.labels[""]
+
+
+def test_none_default_value():
+    """A `None` default value is set with a value of `None`"""
+    p1 = hf.Parameter("p1")
+    p1_inp = hf.SchemaInput(parameter=p1, default_value=None)
+    def_val_exp = hf.InputValue(parameter=p1, label="", value=None)
+    def_val_exp._schema_input = p1_inp
+    assert p1_inp.labels[""]["default_value"] == def_val_exp
+
+
+def test_from_json_like_labels_and_default():
+    json_like = {
+        "parameter": "p1",
+        "labels": {"0": {}},
+        "default_value": None,
+    }
+    inp = hf.SchemaInput.from_json_like(
+        json_like=json_like,
+        shared_data=hf.template_components,
+    )
+    assert inp.labels["0"]["default_value"].value == None
+
+
+def test_element_get_removes_schema_param_trivial_label(null_config, tmp_path):
+    p1_val = 101
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1", inputs=[hf.SchemaInput(parameter="p1", labels={label: {}})]
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    assert f"inputs.p1[{label}]" in wk.tasks[0].elements[0].get_data_idx("inputs")
+    assert wk.tasks[0].elements[0].get("inputs") == {"p1": p1_val}
+
+
+def test_element_inputs_removes_schema_param_trivial_label(null_config, tmp_path):
+    p1_val = 101
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter="p1", labels={label: {}})],
+        actions=[
+            hf.Action(
+                environments=[hf.ActionEnvironment(environment=hf.envs.null_env)],
+                commands=[hf.Command(command=f"echo <<parameter:p1[{label}]>>")],
+            ),
+        ],
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    element = wk.tasks[0].elements[0]
+    # element inputs:
+    assert element.inputs._get_prefixed_names() == ["p1"]
+
+    # element iteration inputs:
+    assert element.iterations[0].inputs._get_prefixed_names() == ["p1"]
+
+    # run inputs:
+    assert element.iterations[0].action_runs[0].inputs._get_prefixed_names() == ["p1"]
+
+
+def test_element_get_does_not_removes_multiple_schema_param_label(null_config, tmp_path):
+    p1_val = 101
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter="p1", labels={label: {}}, multiple=True)],
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    assert f"inputs.p1[{label}]" in wk.tasks[0].elements[0].get_data_idx("inputs")
+    assert wk.tasks[0].elements[0].get("inputs") == {f"p1[{label}]": p1_val}
+
+
+def test_element_inputs_does_not_remove_multiple_schema_param_label(
+    null_config, tmp_path
+):
+    p1_val = 101
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter="p1", labels={label: {}}, multiple=True)],
+        actions=[
+            hf.Action(
+                environments=[hf.ActionEnvironment(environment=hf.envs.null_env)],
+                commands=[hf.Command(command=f"echo <<parameter:p1[{label}]>>")],
+            ),
+        ],
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    element = wk.tasks[0].elements[0]
+    # element inputs:
+    assert element.inputs._get_prefixed_names() == [f"p1[{label}]"]
+
+    # element iteration inputs:
+    assert element.iterations[0].inputs._get_prefixed_names() == [f"p1[{label}]"]
+
+    # run inputs:
+    assert element.iterations[0].action_runs[0].inputs._get_prefixed_names() == [
+        f"p1[{label}]"
+    ]
+
+
+def test_get_input_values_for_multiple_schema_input(null_config, tmp_path):
+    p1_val = 101
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(parameter="p1", labels={label: {}}, multiple=True),
+            hf.SchemaInput(parameter="p2", default_value=201),
+        ],
+        actions=[
+            hf.Action(
+                environments=[hf.ActionEnvironment(environment=hf.envs.null_env)],
+                commands=[
+                    hf.Command(command=f"echo <<parameter:p1[{label}]>> <<parameter:p2>>")
+                ],
+            ),
+        ],
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    assert run.get_input_values() == {"p2": 201, "p1": {label: 101}}
+
+
+def test_get_input_values_for_multiple_schema_input_with_object(null_config, tmp_path):
+    p1_val = P1(a=101)
+    label = "my_label"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(parameter="p1", labels={label: {}}, multiple=True),
+            hf.SchemaInput(parameter="p2", default_value=201),
+        ],
+        actions=[
+            hf.Action(
+                environments=[hf.ActionEnvironment(environment=hf.envs.null_env)],
+                commands=[
+                    hf.Command(command=f"echo <<parameter:p1[{label}]>> <<parameter:p2>>")
+                ],
+            ),
+        ],
+    )
+    t1 = hf.Task(schemas=[s1], inputs=[hf.InputValue("p1", p1_val, label=label)])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="temp",
+    )
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    assert run.get_input_values() == {"p2": 201, "p1": {label: p1_val}}