hpcflow-new2 0.2.0a162__py3-none-any.whl → 0.2.0a163__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. hpcflow/_version.py +1 -1
  2. hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
  3. hpcflow/sdk/app.py +29 -42
  4. hpcflow/sdk/cli.py +1 -1
  5. hpcflow/sdk/core/actions.py +63 -14
  6. hpcflow/sdk/core/command_files.py +6 -4
  7. hpcflow/sdk/core/commands.py +7 -0
  8. hpcflow/sdk/core/element.py +39 -8
  9. hpcflow/sdk/core/errors.py +16 -0
  10. hpcflow/sdk/core/object_list.py +26 -14
  11. hpcflow/sdk/core/parameters.py +21 -3
  12. hpcflow/sdk/core/task.py +107 -2
  13. hpcflow/sdk/core/task_schema.py +16 -1
  14. hpcflow/sdk/core/test_utils.py +5 -2
  15. hpcflow/sdk/core/workflow.py +93 -5
  16. hpcflow/sdk/data/workflow_spec_schema.yaml +14 -58
  17. hpcflow/sdk/demo/cli.py +1 -1
  18. hpcflow/sdk/submission/submission.py +21 -10
  19. hpcflow/tests/scripts/test_main_scripts.py +60 -0
  20. hpcflow/tests/unit/test_action.py +16 -0
  21. hpcflow/tests/unit/test_element.py +27 -25
  22. hpcflow/tests/unit/test_element_set.py +32 -0
  23. hpcflow/tests/unit/test_parameter.py +11 -9
  24. hpcflow/tests/unit/test_resources.py +7 -9
  25. hpcflow/tests/unit/test_schema_input.py +8 -8
  26. hpcflow/tests/unit/test_task.py +26 -27
  27. hpcflow/tests/unit/test_task_schema.py +39 -8
  28. hpcflow/tests/unit/test_value_sequence.py +5 -0
  29. hpcflow/tests/unit/test_workflow.py +4 -9
  30. hpcflow/tests/unit/test_workflow_template.py +122 -1
  31. {hpcflow_new2-0.2.0a162.dist-info → hpcflow_new2-0.2.0a163.dist-info}/METADATA +1 -1
  32. {hpcflow_new2-0.2.0a162.dist-info → hpcflow_new2-0.2.0a163.dist-info}/RECORD +34 -33
  33. {hpcflow_new2-0.2.0a162.dist-info → hpcflow_new2-0.2.0a163.dist-info}/WHEEL +0 -0
  34. {hpcflow_new2-0.2.0a162.dist-info → hpcflow_new2-0.2.0a163.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/task.py CHANGED
@@ -25,6 +25,7 @@ from .errors import (
     TaskTemplateMultipleSchemaObjectives,
     TaskTemplateUnexpectedInput,
     TaskTemplateUnexpectedSequenceInput,
+    UnknownEnvironmentPresetError,
     UnrequiredInputSources,
     UnsetParameterDataError,
 )
@@ -126,8 +127,11 @@ class ElementSet(JSONLike):
         groups: Optional[List[app.ElementGroup]] = None,
         input_sources: Optional[Dict[str, app.InputSource]] = None,
         nesting_order: Optional[List] = None,
+        env_preset: Optional[str] = None,
+        environments: Optional[Dict[str, Dict[str, Any]]] = None,
         sourceable_elem_iters: Optional[List[int]] = None,
         allow_non_coincident_task_sources: Optional[bool] = False,
+        merge_envs: Optional[bool] = True,
     ):
         """
         Parameters
@@ -140,7 +144,10 @@
             If True, if more than one parameter is sourced from the same task, then allow
             these sources to come from distinct element sub-sets. If False (default),
             only the intersection of element sub-sets for all parameters are included.
-
+        merge_envs
+            If True, merge `environments` into `resources` using the "any" scope. If
+            False, `environments` are ignored. This is required on first initialisation,
+            but not on subsequent re-initialisation from a persistent workflow.
         """

         self.inputs = inputs or []
@@ -151,8 +158,11 @@
         self.sequences = sequences or []
         self.input_sources = input_sources or {}
         self.nesting_order = nesting_order or {}
+        self.env_preset = env_preset
+        self.environments = environments
         self.sourceable_elem_iters = sourceable_elem_iters
         self.allow_non_coincident_task_sources = allow_non_coincident_task_sources
+        self.merge_envs = merge_envs

         self._validate()
         self._set_parent_refs()
@@ -161,6 +171,18 @@
         self._defined_input_types = None  # assigned on _task_template assignment
         self._element_local_idx_range = None  # assigned by WorkflowTask._add_element_set

+        # merge `environments` into element set resources (this mutates `resources`, and
+        # should only happen on creation of the element set, not re-initialisation from a
+        # persistent workflow):
+        if self.environments and self.merge_envs:
+            envs_res = self.app.ResourceList(
+                [self.app.ResourceSpec(scope="any", environments=self.environments)]
+            )
+            self.resources.merge_other(envs_res)
+            self.merge_envs = False
+
+        # note: `env_preset` is merged into resources by the Task init.
+
     def __deepcopy__(self, memo):
         dct = self.to_dict()
         orig_inp = dct.pop("original_input_sources", None)
@@ -278,6 +300,10 @@
                     f"provided for parameter {src_key!r}."
                 )

+        # disallow both `env_preset` and `environments` specifications:
+        if self.env_preset and self.environments:
+            raise ValueError("Specify at most one of `env_preset` and `environments`.")
+
     def _validate_against_template(self):
         unexpected_types = (
             set(self.input_types) - self.task_template.all_schema_input_types
@@ -330,6 +356,8 @@
         groups=None,
         input_sources=None,
         nesting_order=None,
+        env_preset=None,
+        environments=None,
         element_sets=None,
         sourceable_elem_iters=None,
     ):
@@ -342,6 +370,8 @@
             groups,
             input_sources,
             nesting_order,
+            env_preset,
+            environments,
         )
         args_not_none = [i is not None for i in args]

@@ -520,9 +550,12 @@ class Task(JSONLike):
         sequences: Optional[List[app.ValueSequence]] = None,
         input_sources: Optional[Dict[str, app.InputSource]] = None,
         nesting_order: Optional[List] = None,
+        env_preset: Optional[str] = None,
+        environments: Optional[Dict[str, Dict[str, Any]]] = None,
         element_sets: Optional[List[app.ElementSet]] = None,
         output_labels: Optional[List[app.OutputLabel]] = None,
         sourceable_elem_iters: Optional[List[int]] = None,
+        merge_envs: Optional[bool] = True,
     ):
         """
         Parameters
@@ -532,7 +565,11 @@
             schema names that uniquely identify a task schema. If strings are provided,
             the `TaskSchema` object will be fetched from the known task schemas loaded by
             the app configuration.
-
+        merge_envs
+            If True, merge environment presets (set via the element set `env_preset` key)
+            into `resources` using the "any" scope. If False, these presets are ignored.
+            This is required on first initialisation, but not on subsequent
+            re-initialisation from a persistent workflow.
         """

         # TODO: allow init via specifying objective and/or method and/or implementation
@@ -576,10 +613,13 @@
             groups=groups,
             input_sources=input_sources,
             nesting_order=nesting_order,
+            env_preset=env_preset,
+            environments=environments,
             element_sets=element_sets,
             sourceable_elem_iters=sourceable_elem_iters,
         )
         self._output_labels = output_labels or []
+        self.merge_envs = merge_envs

         # appended to when new element sets are added and reset on dump to disk:
         self._pending_element_sets = []
@@ -591,8 +631,73 @@
         self._insert_ID = None
         self._dir_name = None

+        if self.merge_envs:
+            self._merge_envs_into_resources()
+
+        # TODO: consider adding a new element_set; will need to merge new environments?
+
         self._set_parent_refs({"schema": "schemas"})

+    def _merge_envs_into_resources(self):
+        # for each element set, merge `env_preset` into `resources` (this mutates
+        # `resources`, and should only happen on creation of the task, not
+        # re-initialisation from a persistent workflow):
+        self.merge_envs = False
+
+        # TODO: required so we don't raise below; can be removed once we consider multiple
+        # schemas:
+        has_presets = False
+        for es in self.element_sets:
+            if es.env_preset:
+                has_presets = True
+                break
+            for seq in es.sequences:
+                if seq.path == "env_preset":
+                    has_presets = True
+                    break
+            if has_presets:
+                break
+
+        if not has_presets:
+            return
+        try:
+            env_presets = self.schema.environment_presets
+        except ValueError:
+            # TODO: consider multiple schemas
+            raise NotImplementedError(
+                "Cannot merge environment presets into a task with multiple schemas."
+            )
+
+        for es in self.element_sets:
+            if es.env_preset:
+                # retrieve env specifiers from presets defined in the schema:
+                try:
+                    env_specs = env_presets[es.env_preset]
+                except (TypeError, KeyError):
+                    raise UnknownEnvironmentPresetError(
+                        f"There is no environment preset named {es.env_preset!r} "
+                        f"defined in the task schema {self.schema.name}."
+                    )
+                envs_res = self.app.ResourceList(
+                    [self.app.ResourceSpec(scope="any", environments=env_specs)]
+                )
+                es.resources.merge_other(envs_res)
+
+            for seq in es.sequences:
+                if seq.path == "env_preset":
+                    # change to a resources path:
+                    seq.path = f"resources.any.environments"
+                    _values = []
+                    for i in seq.values:
+                        try:
+                            _values.append(env_presets[i])
+                        except (TypeError, KeyError):
+                            raise UnknownEnvironmentPresetError(
+                                f"There is no environment preset named {i!r} defined "
+                                f"in the task schema {self.schema.name}."
+                            )
+                    seq._values = _values
+
     def _reset_pending_element_sets(self):
         self._pending_element_sets = []
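
As context for the new element-set keys, a minimal usage sketch (not part of the diff; `s1` is assumed to be a single-schema task schema whose actions reference an environment named "my_env"):

    # Sketch only: request environment specifiers for every element of a task; on first
    # initialisation these are merged into `resources` under the "any" scope.
    t1 = hf.Task(
        schema=s1,
        inputs={"p1": 101},
        environments={"my_env": {"version": "1.2"}},
    )

Passing `env_preset` instead selects a named preset from the schema's `environment_presets` (see the task-schema changes below); specifying both on one element set raises a `ValueError`.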

hpcflow/sdk/core/task_schema.py CHANGED
@@ -2,7 +2,7 @@ from contextlib import contextmanager
 import copy
 from dataclasses import dataclass
 from importlib import import_module
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
 from html import escape

 from rich import print as rich_print
@@ -13,6 +13,7 @@ from rich.markup import escape as rich_esc
 from rich.text import Text

 from hpcflow.sdk import app
+from hpcflow.sdk.core.errors import EnvironmentPresetUnknownEnvironmentError
 from hpcflow.sdk.core.parameters import Parameter
 from .json_like import ChildObjectSpec, JSONLike
 from .parameters import NullDefault, ParameterPropagationMode, SchemaInput
@@ -89,6 +90,7 @@ class TaskSchema(JSONLike):
         version: Optional[str] = None,
         parameter_class_modules: Optional[List[str]] = None,
         web_doc: Optional[bool] = True,
+        environment_presets: Optional[Dict[str, Dict[str, Dict[str, Any]]]] = None,
         _hash_value: Optional[str] = None,
     ):
         self.objective = objective
@@ -99,6 +101,7 @@
         self.outputs = outputs or []
         self.parameter_class_modules = parameter_class_modules or []
         self.web_doc = web_doc
+        self.environment_presets = environment_presets
         self._hash_value = _hash_value

         self._set_parent_refs()
@@ -114,6 +117,18 @@

         self._update_parameter_value_classes()

+        if self.environment_presets:
+            # validate against env names in actions:
+            env_names = {act.get_environment_name() for act in self.actions}
+            preset_envs = {i for v in self.environment_presets.values() for i in v.keys()}
+            bad_envs = preset_envs - env_names
+            if bad_envs:
+                raise EnvironmentPresetUnknownEnvironmentError(
+                    f"Task schema {self.name} has environment presets that refer to one "
+                    f"or more environments that are not referenced in any of the task "
+                    f"schema's actions: {', '.join(f'{i!r}' for i in bad_envs)}."
+                )
+
         # if version is not None:  # TODO: this seems fragile
         #     self.assign_versions(
         #         version=version,
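
Given the nested type above (preset name → environment name → specifier dict), a schema-level declaration might look like the following sketch; the preset names, environment name, and specifier keys are illustrative, and every environment named in a preset must be referenced by one of the schema's actions, otherwise `EnvironmentPresetUnknownEnvironmentError` is raised:

    # Illustrative sketch of `environment_presets` on a TaskSchema.
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        actions=[
            hf.Action(
                commands=[hf.Command(command="echo <<parameter:p1>>")],
                environments=[hf.ActionEnvironment(environment="my_env")],
            )
        ],
        environment_presets={
            "": {"my_env": {"version": "1.2"}},       # default (empty-string) preset
            "newer": {"my_env": {"version": "1.3"}},
        },
    )

    # An element set can then opt in by preset name (see the Task changes above):
    t1 = hf.Task(schema=s1, inputs={"p1": 101}, env_preset="newer")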
hpcflow/sdk/core/test_utils.py CHANGED
@@ -61,8 +61,11 @@ def make_parameters(num):
     return [hf.Parameter(f"p{i + 1}") for i in range(num)]


-def make_actions(ins_outs: List[Tuple[Union[Tuple, str], str]]) -> List[hf.Action]:
-    act_env = hf.ActionEnvironment(environment="env1")
+def make_actions(
+    ins_outs: List[Tuple[Union[Tuple, str], str]],
+    env="env1",
+) -> List[hf.Action]:
+    act_env = hf.ActionEnvironment(environment=env)
     actions = []
     for ins_outs_i in ins_outs:
         if len(ins_outs_i) == 2:
hpcflow/sdk/core/workflow.py CHANGED
@@ -105,6 +105,7 @@ class WorkflowTemplate(JSONLike):
     """

     _app_attr = "app"
+    _validation_schema = "workflow_spec_schema.yaml"

     _child_objects = (
         ChildObjectSpec(
@@ -132,9 +133,12 @@
     loops: Optional[List[app.Loop]] = field(default_factory=lambda: [])
     workflow: Optional[app.Workflow] = None
     resources: Optional[Dict[str, Dict]] = None
+    environments: Optional[Dict[str, Dict[str, Any]]] = None
+    env_presets: Optional[Union[str, List[str]]] = None
     source_file: Optional[str] = field(default=None, compare=False)
     store_kwargs: Optional[Dict] = field(default_factory=lambda: {})
     merge_resources: Optional[bool] = True
+    merge_envs: Optional[bool] = True

     def __post_init__(self):
         self.resources = self.app.ResourceList.normalise(self.resources)
@@ -146,12 +150,95 @@
         if self.merge_resources:
             for task in self.tasks:
                 for element_set in task.element_sets:
-                    element_set.resources.merge_template_resources(self.resources)
+                    element_set.resources.merge_other(self.resources)
             self.merge_resources = False

+        if self.merge_envs:
+            self._merge_envs_into_task_resources()
+
         if self.doc and not isinstance(self.doc, list):
             self.doc = [self.doc]

+    def _merge_envs_into_task_resources(self):
+
+        self.merge_envs = False
+
+        # disallow both `env_presets` and `environments` specifications:
+        if self.env_presets and self.environments:
+            raise ValueError(
+                "Workflow template: specify at most one of `env_presets` and "
+                "`environments`."
+            )
+
+        if not isinstance(self.env_presets, list):
+            self.env_presets = [self.env_presets] if self.env_presets else []
+
+        for task in self.tasks:
+
+            # get applicable environments and environment preset names:
+            try:
+                schema = task.schema
+            except ValueError:
+                # TODO: consider multiple schemas
+                raise NotImplementedError(
+                    "Cannot merge environment presets into a task without multiple "
+                    "schemas."
+                )
+            schema_presets = schema.environment_presets
+            app_envs = {act.get_environment_name() for act in schema.actions}
+            for es in task.element_sets:
+                app_env_specs_i = None
+                if not es.environments and not es.env_preset:
+                    # no task level envs/presets specified, so merge template-level:
+                    if self.environments:
+                        app_env_specs_i = {
+                            k: v for k, v in self.environments.items() if k in app_envs
+                        }
+                        if app_env_specs_i:
+                            self.app.logger.info(
+                                f"(task {task.name!r}, element set {es.index}): using "
+                                f"template-level requested `environment` specifiers: "
+                                f"{app_env_specs_i!r}."
+                            )
+                            es.environments = app_env_specs_i

+                    elif self.env_presets:
+                        # take only the first applicable preset:
+                        app_presets_i = [
+                            k for k in self.env_presets if k in schema_presets
+                        ]
+                        if app_presets_i:
+                            app_env_specs_i = schema_presets[app_presets_i[0]]
+                            self.app.logger.info(
+                                f"(task {task.name!r}, element set {es.index}): using "
+                                f"template-level requested {app_presets_i[0]!r} "
+                                f"`env_preset`: {app_env_specs_i!r}."
+                            )
+                            es.env_preset = app_presets_i[0]
+
+                    else:
+                        # no env/preset applicable here (and no env/preset at task level),
+                        # so apply a default preset if available:
+                        app_env_specs_i = (schema_presets or {}).get("", None)
+                        if app_env_specs_i:
+                            self.app.logger.info(
+                                f"(task {task.name!r}, element set {es.index}): setting "
+                                f"to default (empty-string named) `env_preset`: "
+                                f"{app_env_specs_i}."
+                            )
+                            es.env_preset = ""
+
+                if app_env_specs_i:
+                    es.resources.merge_other(
+                        self.app.ResourceList(
+                            [
+                                self.app.ResourceSpec(
+                                    scope="any", environments=app_env_specs_i
+                                )
+                            ]
+                        )
+                    )
+
     @classmethod
     @TimeIt.decorator
     def _from_data(cls, data: Dict) -> app.WorkflowTemplate:
@@ -172,28 +259,29 @@
         }

         # extract out any template components:
-        params_dat = data.pop("parameters", [])
+        tcs = data.pop("template_components", {})
+        params_dat = tcs.pop("parameters", [])
         if params_dat:
             parameters = cls.app.ParametersList.from_json_like(
                 params_dat, shared_data=cls.app.template_components
             )
             cls.app.parameters.add_objects(parameters, skip_duplicates=True)

-        cmd_files_dat = data.pop("command_files", [])
+        cmd_files_dat = tcs.pop("command_files", [])
         if cmd_files_dat:
             cmd_files = cls.app.CommandFilesList.from_json_like(
                 cmd_files_dat, shared_data=cls.app.template_components
             )
             cls.app.command_files.add_objects(cmd_files, skip_duplicates=True)

-        envs_dat = data.pop("environments", [])
+        envs_dat = tcs.pop("environments", [])
         if envs_dat:
             envs = cls.app.EnvironmentsList.from_json_like(
                 envs_dat, shared_data=cls.app.template_components
             )
             cls.app.envs.add_objects(envs, skip_duplicates=True)

-        ts_dat = data.pop("task_schemas", [])
+        ts_dat = tcs.pop("task_schemas", [])
         if ts_dat:
             task_schemas = cls.app.TaskSchemasList.from_json_like(
                 ts_dat, shared_data=cls.app.template_components
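
For orientation, a hedged sketch of the new template-level keys (names illustrative; `environments` and `env_presets` are mutually exclusive, and each is only pushed down into element sets that do not already carry their own `environments`/`env_preset`):

    # Sketch: template-level environment specifiers applied across tasks t1 and t2.
    wt = hf.WorkflowTemplate(
        name="my_workflow",
        tasks=[t1, t2],
        environments={"my_env": {"version": "1.2"}},
    )

    # Or select from each schema's named presets; the first applicable name is used:
    wt = hf.WorkflowTemplate(
        name="my_workflow",
        tasks=[t1, t2],
        env_presets=["newer", "fallback"],
    )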

hpcflow/sdk/data/workflow_spec_schema.yaml CHANGED
@@ -1,64 +1,20 @@
 rules:
   - path: []
-    condition:
-      value.allowed_keys: [tasks]
-
-  - path: [tasks]
-    condition: { value.type.equal_to: list }
-
-  - path: [tasks, { type: list_value }]
     condition:
       value.allowed_keys:
-        [
-          objective,
-          method,
-          implementation,
-          resources,
-          inputs,
-          input_sources,
-          input_files,
-          perturbations,
-          sequences,
-          groups,
-          repeats,
-          nesting_order,
-        ]
+        - doc
+        - name
+        - source_file
+        - resources
+        - environments
+        - env_presets
+        - template_components
+        - tasks
+        - loops
+        - store_kwargs
+        - merge_resources
+        - merge_envs
+        - workflow

-  - path: [tasks, { type: list_value }, resources]
-    condition: { value.type.equal_to: dict }
-
-  - path: [tasks, { type: list_value }, perturbations]
-    condition: { value.type.equal_to: dict }
-
-  - path: [tasks, { type: list_value }, nesting_order]
-    condition: { value.type.equal_to: dict }
-
-  - path: [tasks, { type: list_value }, sequences]
-    condition: { value.type.equal_to: list }
-
-  - path: [tasks, { type: list_value }, inputs]
-    condition: { value.type.in: [list, dict] }
-
-  - path: [tasks, { type: list_value }, inputs, { type: list_value }]
-    condition:
-      and:
-        - value.required_keys: [parameter, value]
-        - value.allowed_keys: [parameter, value, path]
-
-  - path: [tasks, { type: list_value }, inputs, { type: list_value }, parameter]
-    condition: { value.type.equal_to: str }
-
-  - path: [tasks, { type: list_value }, inputs, { type: list_value }, path]
+  - path: [tasks]
     condition: { value.type.equal_to: list }
-
-  - path: [tasks, { type: list_value }, sequences, { type: list_value }]
-    condition:
-      and:
-        - value.required_keys: [path, nesting_order]
-        - value.keys_contain_one_of:
-            [
-              values,
-              values.from_linear_space,
-              values.from_grometric_space,
-              values_from_log_space,
-            ]
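
At the YAML level, the updated schema therefore accepts the keys listed above at the top of a workflow file, with template components nested under `template_components` (compare the `_from_data` changes earlier in this diff). A hedged Python sketch of the equivalent data layout, with placeholder values:

    # Sketch of the top-level data layout the updated schema permits; list contents are
    # placeholders.
    workflow_data = {
        "name": "my_workflow",
        "env_presets": "newer",          # a single preset name or a list of names
        "template_components": {
            "parameters": [],            # previously top-level keys, now nested here
            "command_files": [],
            "environments": [],
            "task_schemas": [],
        },
        "tasks": [],
        "loops": [],
    }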
hpcflow/sdk/demo/cli.py CHANGED
@@ -184,7 +184,7 @@ def get_demo_workflow_CLI(app):
             status=status,
         )
         if print_idx:
-            click.echo(out)
+            click.echo(out[1])

     @demo_workflow.command("copy")
     @click.argument("workflow_name")
hpcflow/sdk/submission/submission.py CHANGED
@@ -15,9 +15,11 @@ from hpcflow.sdk.core.errors import (
     MissingEnvironmentError,
     MissingEnvironmentExecutableError,
     MissingEnvironmentExecutableInstanceError,
+    MultipleEnvironmentsError,
     SubmissionFailure,
 )
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
+from hpcflow.sdk.core.object_list import ObjectListMultipleMatchError
 from hpcflow.sdk.log import TimeIt


@@ -90,31 +92,40 @@ class Submission(JSONLike):
         req_envs = defaultdict(lambda: defaultdict(set))
         for js_idx, js_i in enumerate(self.jobscripts):
             for run in js_i.all_EARs:
-                env_label = run.action.get_environment_label()
+                env_spec_h = tuple(zip(*run.env_spec.items()))  # hashable
                 for exec_label_j in run.action.get_required_executables():
-                    req_envs[env_label][exec_label_j].add(js_idx)
-                if env_label not in req_envs:
-                    req_envs[env_label] = {}
+                    req_envs[env_spec_h][exec_label_j].add(js_idx)
+                if env_spec_h not in req_envs:
+                    req_envs[env_spec_h] = {}

         # check these envs/execs exist in app data:
         envs = []
-        for env_lab, exec_js in req_envs.items():
+        for env_spec_h, exec_js in req_envs.items():
+            env_spec = dict(zip(*env_spec_h))
+            non_name_spec = {k: v for k, v in env_spec.items() if k != "name"}
+            spec_str = f" with specifiers {non_name_spec!r}" if non_name_spec else ""
+            env_ref = f"{env_spec['name']!r}{spec_str}"
             try:
-                env_i = self.app.envs.get(env_lab)
+                env_i = self.app.envs.get(**env_spec)
+            except ObjectListMultipleMatchError:
+                raise MultipleEnvironmentsError(
+                    f"Multiple environments {env_ref} are defined on this machine."
+                )
             except ValueError:
                 raise MissingEnvironmentError(
-                    f"The environment {env_lab!r} is not defined on this machine, so the "
+                    f"The environment {env_ref} is not defined on this machine, so the "
                     f"submission cannot be created."
                 ) from None
             else:
-                envs.append(env_i)
+                if env_i not in envs:
+                    envs.append(env_i)

             for exec_i_lab, js_idx_set in exec_js.items():
                 try:
                     exec_i = env_i.executables.get(exec_i_lab)
                 except ValueError:
                     raise MissingEnvironmentExecutableError(
-                        f"The environment {env_lab!r} as defined on this machine has no "
+                        f"The environment {env_ref} as defined on this machine has no "
                         f"executable labelled {exec_i_lab!r}, which is required for this "
                         f"submission, so the submission cannot be created."
                     ) from None
@@ -127,7 +138,7 @@
                 if not exec_instances:
                     raise MissingEnvironmentExecutableInstanceError(
                         f"No matching executable instances found for executable "
-                        f"{exec_i_lab!r} of environment {env_lab!r} for jobscript "
+                        f"{exec_i_lab!r} of environment {env_ref} for jobscript "
                         f"index {js_idx_j!r} with requested resources "
                         f"{filter_exec!r}."
                     )
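
The grouping above keys each required environment by a hashable form of the run's `env_spec`; a standalone illustration of that round trip in plain Python (spec values must themselves be hashable):

    # dict -> tuple-of-tuples -> dict round trip, as used for grouping above:
    env_spec = {"name": "python_env_with_specifiers", "version": "1.2"}
    env_spec_h = tuple(zip(*env_spec.items()))
    # env_spec_h == (("name", "version"), ("python_env_with_specifiers", "1.2"))
    assert dict(zip(*env_spec_h)) == env_spec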
hpcflow/tests/scripts/test_main_scripts.py CHANGED
@@ -447,3 +447,63 @@ def test_script_hdf5_out_obj(null_config, tmp_path):
     # to be later Python versions):
     time.sleep(10)
     assert wk.tasks[0].elements[0].outputs.p1c.value == P1(a=p1_val + 100)
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_script_direct_in_pass_env_spec(new_null_config, tmp_path):
+
+    vers_spec = {"version": "1.2"}
+    env = hf.Environment(
+        name="python_env_with_specifiers",
+        specifiers=vers_spec,
+        executables=[
+            hf.Executable(
+                label="python_script",
+                instances=[
+                    hf.ExecutableInstance(
+                        command="python <<script_name>> <<args>>",
+                        num_cores=1,
+                        parallel_mode=None,
+                    )
+                ],
+            )
+        ],
+    )
+    hf.envs.add_object(env, skip_duplicates=True)
+
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[
+            hf.Action(
+                script="<<script:main_script_test_direct_in_direct_out_env_spec.py>>",
+                script_data_in="direct",
+                script_data_out="direct",
+                script_exe="python_script",
+                script_pass_env_spec=True,
+                environments=[
+                    hf.ActionEnvironment(environment="python_env_with_specifiers")
+                ],
+            )
+        ],
+    )
+    t1 = hf.Task(
+        schema=s1,
+        inputs={"p1": 101},
+        environments={"python_env_with_specifiers": vers_spec},
+    )
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="main_script_test",
+        path=tmp_path,
+    )
+    wk.submit(wait=True, add_to_known=False)
+    # TODO: investigate why the value is not always populated on GHA Ubuntu runners (tends
+    # to be later Python versions):
+    time.sleep(10)
+    assert wk.tasks[0].elements[0].outputs.p2.value == {
+        "name": "python_env_with_specifiers",
+        **vers_spec,
+    }
hpcflow/tests/unit/test_action.py CHANGED
@@ -3,6 +3,7 @@ import pytest

 from hpcflow.app import app as hf
 from hpcflow.sdk.core.errors import (
+    ActionEnvironmentMissingNameError,
     UnknownScriptDataKey,
     UnknownScriptDataParameter,
     UnsupportedScriptDataFormat,
@@ -602,3 +603,18 @@ def test_process_script_data_in_fmt_dict_mixed(null_config):
         "p1": {"format": "json"},
         "p2": {"format": "hdf5"},
     }
+
+
+def test_ActionEnvironment_env_str(null_config):
+    act_env = hf.ActionEnvironment(environment="my_env")
+    assert act_env.environment == {"name": "my_env"}
+
+
+def test_ActionEnvironment_env_dict(null_config):
+    act_env = hf.ActionEnvironment(environment={"name": "my_env", "key": "value"})
+    assert act_env.environment == {"name": "my_env", "key": "value"}
+
+
+def test_ActionEnvironment_raises_on_missing_name(null_config):
+    with pytest.raises(ActionEnvironmentMissingNameError):
+        hf.ActionEnvironment(environment={"key": "value"})