hpcflow_new2-0.2.0a190-py3-none-any.whl → hpcflow_new2-0.2.0a199-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
  2. hpcflow/_version.py +1 -1
  3. hpcflow/data/scripts/bad_script.py +2 -0
  4. hpcflow/data/scripts/do_nothing.py +2 -0
  5. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  6. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  7. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  8. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  11. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  12. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  13. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  15. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  16. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  23. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  24. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  25. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  26. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  27. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  28. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  29. hpcflow/data/scripts/script_exit_test.py +5 -0
  30. hpcflow/data/template_components/environments.yaml +1 -1
  31. hpcflow/sdk/__init__.py +5 -0
  32. hpcflow/sdk/app.py +150 -89
  33. hpcflow/sdk/cli.py +263 -84
  34. hpcflow/sdk/cli_common.py +99 -5
  35. hpcflow/sdk/config/callbacks.py +38 -1
  36. hpcflow/sdk/config/config.py +102 -13
  37. hpcflow/sdk/config/errors.py +19 -5
  38. hpcflow/sdk/config/types.py +3 -0
  39. hpcflow/sdk/core/__init__.py +25 -1
  40. hpcflow/sdk/core/actions.py +914 -262
  41. hpcflow/sdk/core/cache.py +76 -34
  42. hpcflow/sdk/core/command_files.py +14 -128
  43. hpcflow/sdk/core/commands.py +35 -6
  44. hpcflow/sdk/core/element.py +122 -50
  45. hpcflow/sdk/core/errors.py +58 -2
  46. hpcflow/sdk/core/execute.py +207 -0
  47. hpcflow/sdk/core/loop.py +408 -50
  48. hpcflow/sdk/core/loop_cache.py +4 -4
  49. hpcflow/sdk/core/parameters.py +382 -37
  50. hpcflow/sdk/core/run_dir_files.py +13 -40
  51. hpcflow/sdk/core/skip_reason.py +7 -0
  52. hpcflow/sdk/core/task.py +119 -30
  53. hpcflow/sdk/core/task_schema.py +68 -0
  54. hpcflow/sdk/core/test_utils.py +66 -27
  55. hpcflow/sdk/core/types.py +54 -1
  56. hpcflow/sdk/core/utils.py +78 -7
  57. hpcflow/sdk/core/workflow.py +1538 -336
  58. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  59. hpcflow/sdk/demo/cli.py +7 -0
  60. hpcflow/sdk/helper/cli.py +1 -0
  61. hpcflow/sdk/log.py +42 -15
  62. hpcflow/sdk/persistence/base.py +405 -53
  63. hpcflow/sdk/persistence/json.py +177 -52
  64. hpcflow/sdk/persistence/pending.py +237 -69
  65. hpcflow/sdk/persistence/store_resource.py +3 -2
  66. hpcflow/sdk/persistence/types.py +15 -4
  67. hpcflow/sdk/persistence/zarr.py +928 -81
  68. hpcflow/sdk/submission/jobscript.py +1408 -489
  69. hpcflow/sdk/submission/schedulers/__init__.py +40 -5
  70. hpcflow/sdk/submission/schedulers/direct.py +33 -19
  71. hpcflow/sdk/submission/schedulers/sge.py +51 -16
  72. hpcflow/sdk/submission/schedulers/slurm.py +44 -16
  73. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  74. hpcflow/sdk/submission/shells/base.py +68 -20
  75. hpcflow/sdk/submission/shells/bash.py +222 -129
  76. hpcflow/sdk/submission/shells/powershell.py +200 -150
  77. hpcflow/sdk/submission/submission.py +852 -119
  78. hpcflow/sdk/submission/types.py +18 -21
  79. hpcflow/sdk/typing.py +24 -5
  80. hpcflow/sdk/utils/arrays.py +71 -0
  81. hpcflow/sdk/utils/deferred_file.py +55 -0
  82. hpcflow/sdk/utils/hashing.py +16 -0
  83. hpcflow/sdk/utils/patches.py +12 -0
  84. hpcflow/sdk/utils/strings.py +33 -0
  85. hpcflow/tests/api/test_api.py +32 -0
  86. hpcflow/tests/conftest.py +19 -0
  87. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  88. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  89. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  90. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  91. hpcflow/tests/scripts/test_main_scripts.py +821 -70
  92. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  93. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  94. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
  95. hpcflow/tests/unit/test_action.py +176 -0
  96. hpcflow/tests/unit/test_app.py +20 -0
  97. hpcflow/tests/unit/test_cache.py +46 -0
  98. hpcflow/tests/unit/test_cli.py +133 -0
  99. hpcflow/tests/unit/test_config.py +122 -1
  100. hpcflow/tests/unit/test_element_iteration.py +47 -0
  101. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  102. hpcflow/tests/unit/test_loop.py +1332 -27
  103. hpcflow/tests/unit/test_meta_task.py +325 -0
  104. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  105. hpcflow/tests/unit/test_parameter.py +13 -0
  106. hpcflow/tests/unit/test_persistence.py +190 -8
  107. hpcflow/tests/unit/test_run.py +109 -3
  108. hpcflow/tests/unit/test_run_directories.py +29 -0
  109. hpcflow/tests/unit/test_shell.py +20 -0
  110. hpcflow/tests/unit/test_submission.py +5 -76
  111. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  112. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  113. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  114. hpcflow/tests/unit/utils/test_patches.py +5 -0
  115. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  116. hpcflow/tests/workflows/__init__.py +0 -0
  117. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  118. hpcflow/tests/workflows/test_jobscript.py +332 -0
  119. hpcflow/tests/workflows/test_run_status.py +198 -0
  120. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  121. hpcflow/tests/workflows/test_submission.py +140 -0
  122. hpcflow/tests/workflows/test_workflows.py +142 -2
  123. hpcflow/tests/workflows/test_zip.py +18 -0
  124. hpcflow/viz_demo.ipynb +6587 -3
  125. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +7 -4
  126. hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
  127. hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
  128. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
  129. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
  130. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/cache.py CHANGED
@@ -14,41 +14,50 @@ if TYPE_CHECKING:
     from collections.abc import Sequence
     from typing_extensions import Self
     from .element import Element, ElementIteration
+    from .actions import ElementActionRun
     from .workflow import Workflow
     from ..persistence.base import StoreEAR, StoreElement, StoreElementIter
 
 
 @dataclass
-class DependencyCache:
-    """
-    Class to bulk-retrieve dependencies between elements, iterations, and runs.
-    """
+class ObjectCache:
+    """Class to bulk-retrieve and store elements, iterations, runs and their various
+    dependencies."""
+
+    #: The elements of the workflow that this cache was built from.
+    elements: list[Element] | None = None
+    #: The iterations of the workflow that this cache was built from.
+    iterations: list[ElementIteration] | None = None
+    #: The runs of the workflow that this cache was built from.
+    runs: list[ElementActionRun] | None = None
 
     #: What EARs (by ID) a given EAR depends on.
-    run_dependencies: dict[int, set[int]]
+    run_dependencies: dict[int, set[int]] | None = None
     #: What EARs (by ID) are depending on a given EAR.
-    run_dependents: dict[int, set[int]]
+    run_dependents: dict[int, set[int]] | None = None
     #: What EARs (by ID) a given iteration depends on.
-    iter_run_dependencies: dict[int, set[int]]
+    iter_run_dependencies: dict[int, set[int]] | None = None
     #: What iterations (by ID) a given iteration depends on.
-    iter_iter_dependencies: dict[int, set[int]]
+    iter_iter_dependencies: dict[int, set[int]] | None = None
    #: What iterations (by ID) a given element depends on.
-    elem_iter_dependencies: dict[int, set[int]]
+    elem_iter_dependencies: dict[int, set[int]] | None = None
     #: What elements (by ID) a given element depends on.
-    elem_elem_dependencies: dict[int, set[int]]
+    elem_elem_dependencies: dict[int, set[int]] | None = None
     #: What elements (by ID) are depending on a given element.
-    elem_elem_dependents: dict[int, set[int]]
+    elem_elem_dependents: dict[int, set[int]] | None = None
     #: Transitive closure of :py:attr:`elem_elem_dependents`.
-    elem_elem_dependents_rec: dict[int, set[int]]
-
-    #: The elements of the workflow that this cache was built from.
-    elements: list[Element]
-    #: The iterations of the workflow that this cache was built from.
-    iterations: list[ElementIteration]
+    elem_elem_dependents_rec: dict[int, set[int]] | None = None
 
     @classmethod
     @TimeIt.decorator
-    def build(cls, workflow: Workflow) -> Self:
+    def build(
+        cls,
+        workflow: Workflow,
+        dependencies: bool = False,
+        elements: bool = False,
+        iterations: bool = False,
+        runs: bool = False,
+    ):
         """
         Build a cache instance.
 
@@ -56,7 +65,51 @@ class DependencyCache:
         ----------
         workflow: ~hpcflow.app.Workflow
             The workflow to build the cache from.
+        dependencies
+            If True, calculate dependencies.
+        elements
+            If True, include elements in the cache.
+        iterations
+            If True, include iterations in the cache.
+        runs
+            If True, include runs in the cache.
+
         """
+        kwargs = {}
+        if dependencies:
+            kwargs.update(cls._get_dependencies(workflow))
+
+        if elements:
+            kwargs["elements"] = workflow.get_all_elements()
+
+        if iterations:
+            kwargs["iterations"] = workflow.get_all_element_iterations()
+
+        if runs:
+            kwargs["runs"] = workflow.get_all_EARs()
+
+        return cls(**kwargs)
+
+    @classmethod
+    @TimeIt.decorator
+    def _get_dependencies(cls, workflow: Workflow):
+        def _get_recursive_deps(elem_id: int, seen_ids: list[int] | None = None):
+            if seen_ids is None:
+                seen_ids = [elem_id]
+            elif elem_id in seen_ids:
+                # stop recursion
+                return set()
+            else:
+                seen_ids.append(elem_id)
+            return set(elem_elem_dependents[elem_id]).union(
+                [
+                    j
+                    for i in elem_elem_dependents[elem_id]
+                    for j in _get_recursive_deps(i, seen_ids)
+                    if j != elem_id
+                ]
+            )
+
         num_iters = workflow.num_element_iterations
         num_elems = workflow.num_elements
         num_runs = workflow.num_EARs
@@ -84,8 +137,8 @@ class DependencyCache:
         for idx, dict_i in enumerate(all_data_idx):
             run_i_sources = set(
                 run_k
-                for idx in chain.from_iterable(dict_i.values())
-                if (run_k := all_param_sources[idx].get("EAR_ID")) is not None
+                for dat_idx_k in chain.from_iterable(dict_i.values())
+                if (run_k := all_param_sources[dat_idx_k].get("EAR_ID")) is not None
                 and run_k != idx
             )
             run_dependencies[idx] = run_i_sources
@@ -149,14 +202,8 @@ class DependencyCache:
 
         # for each element, which elements depend on it (recursively)?
         elem_elem_dependents_rec: defaultdict[int, set[int]] = defaultdict(set)
-        for k in tuple(elem_elem_dependents):
-            # NB: code below modifies elem_elem_dependents during this loop;
-            # copy above is mandatory!
-            for i in elem_elem_dependents[k]:
-                elem_elem_dependents_rec[k].add(i)
-                elem_elem_dependents_rec[k].update(
-                    m for m in elem_elem_dependents[i] if m != k
-                )
+        for i in list(elem_elem_dependents):
+            elem_elem_dependents_rec[i] = _get_recursive_deps(i)
 
         # add missing keys and downgrade to dict:
         for elem_idx in range(num_elems):
@@ -165,10 +212,7 @@ class DependencyCache:
         elem_elem_dependents.default_factory = None
         elem_elem_dependents_rec.default_factory = None
 
-        elements = workflow.get_all_elements()
-        iterations = workflow.get_all_element_iterations()
-
-        return cls(
+        return dict(
             run_dependencies=run_dependencies,
             run_dependents=run_dependents,
             iter_run_dependencies=iter_run_dependencies,
@@ -177,6 +221,4 @@ class DependencyCache:
             elem_elem_dependencies=elem_elem_dependencies,
             elem_elem_dependents=elem_elem_dependents,
             elem_elem_dependents_rec=elem_elem_dependents_rec,
-            elements=elements,
-            iterations=iterations,
         )
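
The cache rework above renames DependencyCache to ObjectCache and makes every attribute opt-in. A minimal usage sketch, assuming only a Workflow instance and the names shown in the diff (an illustration, not code from the package):

    from hpcflow.sdk.core.cache import ObjectCache

    def run_dependency_map(workflow):
        # Request only the dependency maps and the runs; elements and iterations
        # are not requested, so those attributes stay None.
        cache = ObjectCache.build(workflow, dependencies=True, runs=True)
        assert cache.elements is None  # not requested above
        # EAR ID -> set of EAR IDs that it depends on
        return cache.run_dependencies
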
hpcflow/sdk/core/command_files.py CHANGED
@@ -10,13 +10,14 @@ from textwrap import dedent
 from typing import Protocol, cast, overload, TYPE_CHECKING
 from typing_extensions import Final, override
 
-from hpcflow.sdk.typing import hydrate, ParamSource
+from hpcflow.sdk.typing import PathLike, hydrate, ParamSource
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
 from hpcflow.sdk.core.utils import search_dir_files_by_regex
 from hpcflow.sdk.core.zarr_io import zarr_decode
 from hpcflow.sdk.core.parameters import _process_demo_data_strings
 
 if TYPE_CHECKING:
+    import os
     from collections.abc import Mapping
     from typing import Any, ClassVar
     from typing_extensions import Self
@@ -33,7 +34,7 @@ class FileNamePart(Protocol):
     A filename or piece of filename that can be expanded.
     """
 
-    def value(self, directory: str = ".") -> str | list[str]:
+    def value(self, directory: str | os.PathLike = ".") -> str | list[str]:
         """
         Get the part of the file, possibly with directory specified.
         Implementations of this may ignore the directory.
@@ -74,7 +75,7 @@ class FileSpec(JSONLike):
         self._hash_value = _hash_value
         self.__hash = hash((label, self.name))
 
-    def value(self, directory: str = ".") -> str:
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         The path to a file, optionally resolved with respect to a particular directory.
         """
@@ -178,17 +179,17 @@ class FileNameSpec(JSONLike):
         """
         return self._app.FileNameExt(self)
 
-    def value(self, directory: str = ".") -> list[str] | str:
+    def value(self, directory: str | os.PathLike = ".") -> list[str] | str:
         """
         Get the template-resolved name of the file
         (or files matched if the name is a regex pattern).
 
         Parameters
         ----------
-        directory: str
+        directory: PathLike
             Where to resolve values with respect to.
         """
-        format_args = [arg.value(directory) for arg in self.args]
+        format_args = [arg.value(Path(directory)) for arg in self.args]
         value = self.name.format(*format_args)
         if self.is_regex:
             return search_dir_files_by_regex(value, directory=directory)
@@ -207,7 +208,7 @@ class FileNameStem(JSONLike):
     #: The file specification this is derived from.
     file_name: FileNameSpec
 
-    def value(self, directory: str = ".") -> str:
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         Get the stem, possibly with directory specified.
         """
@@ -227,7 +228,7 @@ class FileNameExt(JSONLike):
     #: The file specification this is derived from.
     file_name: FileNameSpec
 
-    def value(self, directory: str = ".") -> str:
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         Get the extension.
         """
@@ -301,6 +302,8 @@ class InputFileGenerator(JSONLike):
     abortable: bool = False
     #: User-specified rules for whether to run the generator.
     rules: list[ActionRule] = field(default_factory=list)
+    #: Whether the generator requires a working directory.
+    requires_dir: bool = True
 
     def get_action_rules(self) -> list[ActionRule]:
         """
@@ -311,58 +314,6 @@ class InputFileGenerator(JSONLike):
             self._app.ActionRule.check_missing(f"input_files.{self.input_file.label}")
         ] + self.rules
 
-    def compose_source(self, snip_path: Path) -> str:
-        """Generate the file contents of this input file generator source."""
-
-        script_main_func = snip_path.stem
-        with snip_path.open("rt") as fp:
-            script_str = fp.read()
-
-        main_block = dedent(
-            """\
-            if __name__ == "__main__":
-                import sys
-                from pathlib import Path
-                import {app_module} as app
-                app.load_config(
-                    log_file_path=Path("{run_log_file}").resolve(),
-                    config_dir=r"{cfg_dir}",
-                    config_key=r"{cfg_invoc_key}",
-                )
-                wk_path, EAR_ID = sys.argv[1:]
-                EAR_ID = int(EAR_ID)
-                wk = app.Workflow(wk_path)
-                EAR = wk.get_EARs_from_IDs([EAR_ID])[0]
-                {script_main_func}(path=Path({file_path!r}), **EAR.get_IFG_input_values())
-            """
-        )
-        main_block = main_block.format(
-            run_log_file=self._app.RunDirAppFiles.get_log_file_name(),
-            app_module=self._app.module,
-            cfg_dir=self._app.config.config_directory,
-            cfg_invoc_key=self._app.config.config_key,
-            script_main_func=script_main_func,
-            file_path=self.input_file.name.value(),
-        )
-
-        out = dedent(
-            """\
-            {script_str}
-            {main_block}
-            """
-        )
-
-        return out.format(script_str=script_str, main_block=main_block)
-
-    def write_source(self, action: Action, env_spec: Mapping[str, Any]) -> None:
-        """
-        Write the script if it is specified as a snippet script, otherwise we assume
-        the script already exists in the working directory.
-        """
-        if snip_path := action.get_snippet_script_path(self.script, env_spec):
-            with Path(snip_path.name).open("wt", newline="\n") as fp:
-                fp.write(self.compose_source(snip_path))
-
 
 @dataclass
 @hydrate
@@ -449,7 +400,7 @@ class OutputFileParser(JSONLike):
     inputs: list[str] | None = None
     #: Optional multiple outputs from the upstream actions of the schema that are
     #: required to parametrise this parser.
-    #: Not to be confused with :py:attr:`output` (plural).
+    #: Not to be confused with :py:attr:`output` (singular).
     outputs: list[str] | None = None
     #: Miscellaneous options.
     options: dict[str, Any] | None = None
@@ -464,6 +415,8 @@ class OutputFileParser(JSONLike):
     clean_up: list[str] = field(default_factory=list)
     #: Rules for whether to enable this parser.
     rules: list[ActionRule] = field(default_factory=list)
+    #: Whether the parser requires a working directory.
+    requires_dir: bool = True
 
     def __post_init__(self, save_files: list[FileSpec] | bool) -> None:
         if not save_files:
@@ -501,73 +454,6 @@ class OutputFileParser(JSONLike):
             for out_f in self.output_files
         ] + self.rules
 
-    def compose_source(self, snip_path: Path) -> str:
-        """Generate the file contents of this output file parser source."""
-
-        if self.output is None:
-            # might be used just for saving files:
-            return ""
-
-        script_main_func = snip_path.stem
-        with snip_path.open("rt") as fp:
-            script_str = fp.read()
-
-        main_block = dedent(
-            """\
-            if __name__ == "__main__":
-                import sys
-                from pathlib import Path
-                import {app_module} as app
-                app.load_config(
-                    log_file_path=Path("{run_log_file}").resolve(),
-                    config_dir=r"{cfg_dir}",
-                    config_key=r"{cfg_invoc_key}",
-                )
-                wk_path, EAR_ID = sys.argv[1:]
-                EAR_ID = int(EAR_ID)
-                wk = app.Workflow(wk_path)
-                EAR = wk.get_EARs_from_IDs([EAR_ID])[0]
-                value = {script_main_func}(
-                    **EAR.get_OFP_output_files(),
-                    **EAR.get_OFP_inputs(),
-                    **EAR.get_OFP_outputs(),
-                )
-                wk.save_parameter(name="{param_name}", value=value, EAR_ID=EAR_ID)
-
-            """
-        )
-        main_block = main_block.format(
-            run_log_file=self._app.RunDirAppFiles.get_log_file_name(),
-            app_module=self._app.module,
-            cfg_dir=self._app.config.config_directory,
-            cfg_invoc_key=self._app.config.config_key,
-            script_main_func=script_main_func,
-            param_name=f"outputs.{self.output.typ}",
-        )
-
-        out = dedent(
-            """\
-            {script_str}
-            {main_block}
-            """
-        )
-
-        return out.format(script_str=script_str, main_block=main_block)
-
-    def write_source(self, action: Action, env_spec: Mapping[str, Any]) -> None:
-        """
-        Write the actual output parser to a file so it can be enacted.
-        """
-        if self.output is None:
-            # might be used just for saving files:
-            return
-
-        # write the script if it is specified as a snippet script, otherwise we assume
-        # the script already exists in the working directory:
-        if snip_path := action.get_snippet_script_path(self.script, env_spec):
-            with Path(snip_path.name).open("wt", newline="\n") as fp:
-                fp.write(self.compose_source(snip_path))
-
 
 @hydrate
 class _FileContentsSpecifier(JSONLike):
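
The command_files.py changes above drop the per-object compose_source/write_source script writers and add a requires_dir flag (defaulting to True) to both InputFileGenerator and OutputFileParser. A hypothetical helper, not part of the package, showing how such a flag could be consulted when deciding whether an action needs its own run directory (only requires_dir is taken from the diff; the other attribute names are assumptions for illustration):

    def action_needs_run_dir(action) -> bool:
        # True if any attached generator or parser declares it needs a working directory.
        return any(ifg.requires_dir for ifg in action.input_file_generators) or any(
            ofp.requires_dir for ofp in action.output_file_parsers
        )
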
hpcflow/sdk/core/commands.py CHANGED
@@ -20,7 +20,7 @@ from hpcflow.sdk.core.parameters import ParameterValue
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable, Mapping, Sequence
     from re import Pattern
-    from .actions import ActionRule
+    from .actions import ActionRule, Action
     from .element import ElementActionRun
     from .environment import Environment
     from ..submission.shells import Shell
@@ -81,6 +81,11 @@ class Command(JSONLike):
     #: Rules that state whether this command is eligible to run.
     rules: list[ActionRule] = field(default_factory=list)
 
+    action: Action | None = None  # assigned by parent Action
+
+    def __post_init__(self):
+        self._set_parent_refs()
+
     def __repr__(self) -> str:
         out = []
         if self.command:
@@ -102,6 +107,20 @@
 
         return f"{self.__class__.__name__}({', '.join(out)})"
 
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (
+            self.command == other.command
+            and self.executable == other.executable
+            and self.arguments == other.arguments
+            and self.variables == other.variables
+            and self.stdout == other.stdout
+            and self.stderr == other.stderr
+            and self.stdin == other.stdin
+            and self.rules == other.rules
+        )
+
     def __get_initial_command_line(self) -> str:
         if self.command:
             return self.command
@@ -142,6 +161,7 @@
            exec_cmd = executable.filter_instances(**filter_exec)[0].command
            return exec_cmd.replace("<<num_cores>>", str(EAR.resources.num_cores))
        elif typ == "script":
+           # TODO: is this needed? we have <<script_name>> <<script_path>> etc as command variables
            return EAR.action.get_script_name(val)
        else:
            raise ValueError("impossible match occurred")
@@ -191,7 +211,11 @@
         cmd_str = cmd_str.rstrip()
 
         # remove any left over "<<args>>" and "<<script_name>>"s:
-        cmd_str = cmd_str.replace("<<args>>", "").replace("<<script_name>>", "")
+        cmd_str = (
+            cmd_str.replace("<<args>>", "")
+            .replace("<<script_name>>", "")
+            .replace("<<script_path>>", "")
+        )
 
         # substitute input parameters in command:
         types_pattern = "|".join(parse_types)
@@ -208,7 +232,10 @@
                 cmd_inp = ".".join(cmd_inp_parts[:-1])
             else:
                 cmd_inp = cmd_inp_full
-            inp_val = EAR.get(f"inputs.{cmd_inp}")  # TODO: what if schema output?
+            inp_val = EAR.get(
+                f"inputs.{cmd_inp}",
+                raise_on_unset=True,
+            )  # TODO: what if schema output?
             pattern_i = pattern.format(
                 types_pattern=types_pattern,
                 name=re.escape(cmd_inp),
@@ -219,9 +246,11 @@
                 string=cmd_str,
             )
 
-        # substitute input files in command:
-        for cmd_file in EAR.action.get_command_input_file_labels():
-            file_path = EAR.get(f"input_files.{cmd_file}")  # TODO: what if out file?
+        # substitute input/output files in command:
+        for cmd_file in EAR.action.get_command_file_labels():
+            file_path = EAR.get(
+                f"input_files.{cmd_file}", raise_on_unset=True
+            ) or EAR.get(f"output_files.{cmd_file}", raise_on_unset=True)
             # assuming we have copied this file to the EAR directory, then we just
             # need the file name:
             file_name = Path(file_path).name
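
The commands.py changes above give Command a back-reference to its parent Action, a value-based __eq__, handling of the <<script_path>> placeholder, and stricter parameter lookups via raise_on_unset=True. A small sketch of the equality change, assuming the app-level class can be constructed directly with just a command string (as in hpcflow task-schema examples):

    import hpcflow.app as hf

    a = hf.Command(command="echo <<parameter:p1>>")
    b = hf.Command(command="echo <<parameter:p1>>")
    assert a == b        # compared field-by-field under the new __eq__
    assert a is not b    # still two distinct objects
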