hpcflow-new2 0.2.0a190__py3-none-any.whl → 0.2.0a200__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
  2. hpcflow/_version.py +1 -1
  3. hpcflow/data/scripts/bad_script.py +2 -0
  4. hpcflow/data/scripts/do_nothing.py +2 -0
  5. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  6. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  7. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  8. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  11. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  12. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  13. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  15. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  16. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  23. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  24. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  25. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  26. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  27. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  28. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  29. hpcflow/data/scripts/script_exit_test.py +5 -0
  30. hpcflow/data/template_components/environments.yaml +1 -1
  31. hpcflow/sdk/__init__.py +5 -0
  32. hpcflow/sdk/app.py +166 -92
  33. hpcflow/sdk/cli.py +263 -84
  34. hpcflow/sdk/cli_common.py +99 -5
  35. hpcflow/sdk/config/callbacks.py +38 -1
  36. hpcflow/sdk/config/config.py +102 -13
  37. hpcflow/sdk/config/errors.py +19 -5
  38. hpcflow/sdk/config/types.py +3 -0
  39. hpcflow/sdk/core/__init__.py +25 -1
  40. hpcflow/sdk/core/actions.py +914 -262
  41. hpcflow/sdk/core/cache.py +76 -34
  42. hpcflow/sdk/core/command_files.py +14 -128
  43. hpcflow/sdk/core/commands.py +35 -6
  44. hpcflow/sdk/core/element.py +122 -50
  45. hpcflow/sdk/core/errors.py +58 -2
  46. hpcflow/sdk/core/execute.py +207 -0
  47. hpcflow/sdk/core/loop.py +408 -50
  48. hpcflow/sdk/core/loop_cache.py +4 -4
  49. hpcflow/sdk/core/parameters.py +382 -37
  50. hpcflow/sdk/core/run_dir_files.py +13 -40
  51. hpcflow/sdk/core/skip_reason.py +7 -0
  52. hpcflow/sdk/core/task.py +119 -30
  53. hpcflow/sdk/core/task_schema.py +68 -0
  54. hpcflow/sdk/core/test_utils.py +66 -27
  55. hpcflow/sdk/core/types.py +54 -1
  56. hpcflow/sdk/core/utils.py +136 -19
  57. hpcflow/sdk/core/workflow.py +1587 -356
  58. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  59. hpcflow/sdk/demo/cli.py +7 -0
  60. hpcflow/sdk/helper/cli.py +1 -0
  61. hpcflow/sdk/log.py +42 -15
  62. hpcflow/sdk/persistence/base.py +405 -53
  63. hpcflow/sdk/persistence/json.py +177 -52
  64. hpcflow/sdk/persistence/pending.py +237 -69
  65. hpcflow/sdk/persistence/store_resource.py +3 -2
  66. hpcflow/sdk/persistence/types.py +15 -4
  67. hpcflow/sdk/persistence/zarr.py +928 -81
  68. hpcflow/sdk/submission/jobscript.py +1408 -489
  69. hpcflow/sdk/submission/schedulers/__init__.py +40 -5
  70. hpcflow/sdk/submission/schedulers/direct.py +33 -19
  71. hpcflow/sdk/submission/schedulers/sge.py +51 -16
  72. hpcflow/sdk/submission/schedulers/slurm.py +44 -16
  73. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  74. hpcflow/sdk/submission/shells/base.py +68 -20
  75. hpcflow/sdk/submission/shells/bash.py +222 -129
  76. hpcflow/sdk/submission/shells/powershell.py +200 -150
  77. hpcflow/sdk/submission/submission.py +852 -119
  78. hpcflow/sdk/submission/types.py +18 -21
  79. hpcflow/sdk/typing.py +24 -5
  80. hpcflow/sdk/utils/arrays.py +71 -0
  81. hpcflow/sdk/utils/deferred_file.py +55 -0
  82. hpcflow/sdk/utils/hashing.py +16 -0
  83. hpcflow/sdk/utils/patches.py +12 -0
  84. hpcflow/sdk/utils/strings.py +33 -0
  85. hpcflow/tests/api/test_api.py +32 -0
  86. hpcflow/tests/conftest.py +19 -0
  87. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  88. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  89. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  90. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  91. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  92. hpcflow/tests/scripts/test_main_scripts.py +821 -70
  93. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  94. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  95. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
  96. hpcflow/tests/unit/test_action.py +176 -0
  97. hpcflow/tests/unit/test_app.py +20 -0
  98. hpcflow/tests/unit/test_cache.py +46 -0
  99. hpcflow/tests/unit/test_cli.py +133 -0
  100. hpcflow/tests/unit/test_config.py +122 -1
  101. hpcflow/tests/unit/test_element_iteration.py +47 -0
  102. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  103. hpcflow/tests/unit/test_loop.py +1332 -27
  104. hpcflow/tests/unit/test_meta_task.py +325 -0
  105. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  106. hpcflow/tests/unit/test_parameter.py +13 -0
  107. hpcflow/tests/unit/test_persistence.py +190 -8
  108. hpcflow/tests/unit/test_run.py +109 -3
  109. hpcflow/tests/unit/test_run_directories.py +29 -0
  110. hpcflow/tests/unit/test_shell.py +20 -0
  111. hpcflow/tests/unit/test_submission.py +5 -76
  112. hpcflow/tests/unit/test_workflow_template.py +31 -0
  113. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  114. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  115. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  116. hpcflow/tests/unit/utils/test_patches.py +5 -0
  117. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  118. hpcflow/tests/workflows/__init__.py +0 -0
  119. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  120. hpcflow/tests/workflows/test_jobscript.py +332 -0
  121. hpcflow/tests/workflows/test_run_status.py +198 -0
  122. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  123. hpcflow/tests/workflows/test_submission.py +140 -0
  124. hpcflow/tests/workflows/test_workflows.py +142 -2
  125. hpcflow/tests/workflows/test_zip.py +18 -0
  126. hpcflow/viz_demo.ipynb +6587 -3
  127. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/METADATA +7 -4
  128. hpcflow_new2-0.2.0a200.dist-info/RECORD +222 -0
  129. hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
  130. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/LICENSE +0 -0
  131. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/WHEEL +0 -0
  132. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/submission/types.py CHANGED
@@ -1,6 +1,7 @@
 """
 Types for the submission subsystem.
 """
+
 from __future__ import annotations
 from typing import Any, TYPE_CHECKING
 from typing_extensions import NotRequired, TypeAlias, TypedDict
@@ -22,14 +23,15 @@ class JobScriptDescriptor(TypedDict):
     #: Elements handled by the jobscript.
     elements: dict[int, list[int]]
     #: Dependencies of the jobscript.
-    dependencies: NotRequired[dict[int, ResolvedDependencies]]
+    dependencies: NotRequired[dict[int, ResolvedJobscriptBlockDependencies]]
     #: Hash of resources.
     resource_hash: NotRequired[str]


-class ResolvedDependencies(TypedDict):
+class ResolvedJobscriptBlockDependencies(TypedDict):
     """
-    The resolution of a dependency.
+    The resolution of a jobscript block dependency. This represents the dependency of one
+    jobscript block on another.
     """

     #: Mapping of jobscript elements.
@@ -43,6 +45,9 @@ class JobScriptCreationArguments(TypedDict):
     Arguments to pass to create a :class:`Jobscript`.
     """

+    # TODO: this currently represents a mix of arguments for both jobscripts and jobscript
+    # blocks; need to separate
+
     #: The task insertion IDs.
     task_insert_IDs: list[int]
     #: The actions of the tasks.
@@ -52,11 +57,13 @@ class JobScriptCreationArguments(TypedDict):
     #: Element action run information.
     EAR_ID: NDArray
     #: Resources to use.
-    resources: ElementResources
+    resources: NotRequired[ElementResources]
     #: Description of what loops are in play.
     task_loop_idx: list[dict[str, int]]
     #: Description of dependencies.
-    dependencies: dict[int, ResolvedDependencies]
+    dependencies: dict[int | tuple[int, int], ResolvedJobscriptBlockDependencies]
+    #: Whether this is an array jobscript.
+    is_array: NotRequired[bool]
     #: When the jobscript was submitted, if known.
     submit_time: NotRequired[datetime]
     #: Where the jobscript was submitted, if known.
@@ -121,23 +128,13 @@ class JobscriptHeaderArgs(TypedDict):

     #: Application invocation. (Arguments, etc.)
     app_invoc: str | Sequence[str]
+    #: Workflow application alias.
+    workflow_app_alias: NotRequired[str]
+    #: Environment setup.
+    env_setup: NotRequired[str]
+    #: Application name in CAPS.
+    app_caps: NotRequired[str]
     #: Configuration directory.
     config_dir: NotRequired[str]
     #: Configuration key.
     config_invoc_key: NotRequired[Any]
-    #: Name of EAR file.
-    EAR_file_name: NotRequired[str]
-    #: Name of file containing run directories.
-    element_run_dirs_file_path: NotRequired[str]
-    #: Environment setup.
-    env_setup: NotRequired[str]
-    #: Jobscript index.
-    js_idx: NotRequired[int]
-    #: Log file for the run.
-    run_log_file: NotRequired[str]
-    #: Submission index.
-    sub_idx: NotRequired[int]
-    #: Workflow application alias.
-    workflow_app_alias: NotRequired[str]
-    #: Path to workflow.
-    workflow_path: NotRequired[str]
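For orientation (not part of the diff): the widened `dependencies` key accepts either a plain jobscript index or, presumably, a (jobscript index, block index) pair, matching the block-level dependency tracking introduced by `ResolvedJobscriptBlockDependencies`. The fields of that TypedDict are not shown in this hunk, so the values below are left abstract:

    deps: "dict[int | tuple[int, int], ResolvedJobscriptBlockDependencies]" = {
        0: ...,        # dependency on jobscript 0
        (1, 2): ...,   # assumed meaning: dependency on block 2 of jobscript 1
    }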
hpcflow/sdk/typing.py CHANGED
@@ -1,9 +1,10 @@
 """
 Common type aliases.
 """
+
 from __future__ import annotations
 from dataclasses import InitVar
-from typing import ClassVar, Final, TypeVar, cast, TYPE_CHECKING
+from typing import Any, ClassVar, Final, TypeVar, cast, TYPE_CHECKING
 from typing_extensions import NotRequired, TypeAlias, TypedDict
 from pathlib import Path
 import re
@@ -11,6 +12,7 @@ import re
 if TYPE_CHECKING:
     from collections.abc import Mapping
     from datetime import datetime
+    from rich.status import Status
     from .core.object_list import (
         CommandFilesList,
         EnvironmentsList,
@@ -20,6 +22,7 @@ if TYPE_CHECKING:
     from .submission.enums import JobscriptElementState
     from .submission.submission import Submission

+
 #: Type of a value that can be treated as a path.
 PathLike: TypeAlias = "str | Path | None"

@@ -102,7 +105,7 @@ class KnownSubmissionItem(TypedDict):
     #: Jobscripts in submission.
     jobscripts: list[int]
     #: Active jobscript state.
-    active_jobscripts: Mapping[int, Mapping[int, JobscriptElementState]]
+    active_jobscripts: Mapping[int, Mapping[int, Mapping[int, JobscriptElementState]]]
     #: Whether this is deleted.
     deleted: bool
     #: Whether this is unloadable.
@@ -128,6 +131,22 @@ class TemplateComponents(TypedDict):
     scripts: NotRequired[dict[str, Path]]


+class MakeWorkflowCommonArgs(TypedDict):
+    """
+    Common keys used in workflow construction in :py:meth:`BaseApp._make_workflow`.
+    """
+
+    path: str | None
+    name: str | None
+    overwrite: bool
+    store: str
+    ts_fmt: str | None
+    ts_name_fmt: str | None
+    store_kwargs: dict[str, Any] | None
+    variables: dict[str, Any] | None
+    status: Status | None
+
+
 #: Simplification of :class:`TemplateComponents` to allow some types of
 #: internal manipulations.
 BasicTemplateComponents: TypeAlias = "dict[str, list[dict]]"
@@ -153,16 +172,16 @@ def hydrate(cls: type[_T]) -> type[_T]:
     Partially hydrates the annotations on fields in a class, so that a @dataclass
     annotation can recognise that ClassVar-annotated fields are class variables.
     """
-    anns = {}
+    anns: dict[str, Any] = {}
     for f, a in cls.__annotations__.items():
         if isinstance(a, str):
             m = _CLASS_VAR_RE.match(a)
             if m:
-                anns[f] = ClassVar[m[1]]
+                anns[f] = cast(Any, ClassVar[m[1]])
                 continue
             m = _INIT_VAR_RE.match(a)
             if m:
-                anns[f] = InitVar(cast(type, m[1]))
+                anns[f] = cast(Any, InitVar(cast(type, m[1])))
                 continue
         anns[f] = a
     cls.__annotations__ = anns
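A minimal sketch of how `hydrate` is intended to be used (not part of the diff; `Counter` is an invented example class). Decorators apply bottom-up, so `hydrate` converts the string annotation into a real `ClassVar` before `@dataclass` inspects the class:

    from dataclasses import dataclass
    from hpcflow.sdk.typing import hydrate

    @dataclass
    @hydrate
    class Counter:
        instances: "ClassVar[int]" = 0  # treated as a class variable, not a field
        name: str = ""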
hpcflow/sdk/utils/arrays.py ADDED
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, overload
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
+
+
+@overload
+def get_2D_idx(idx: int, num_cols: int) -> tuple[int, int]:
+    ...
+
+
+@overload
+def get_2D_idx(idx: NDArray, num_cols: int) -> tuple[NDArray, NDArray]:
+    ...
+
+
+def get_2D_idx(idx: int | NDArray, num_cols: int) -> tuple[int | NDArray, int | NDArray]:
+    """Convert a 1D index to a 2D index, assuming items are arranged in a row-major
+    order."""
+    row_idx = idx // num_cols
+    col_idx = idx % num_cols
+    return (row_idx, col_idx)
+
+
+def get_1D_idx(
+    row_idx: int | NDArray, col_idx: int | NDArray, num_cols: int
+) -> int | NDArray:
+    """Convert a 2D (row, col) index into a 1D index, assuming items are arranged in a
+    row-major order."""
+    return row_idx * num_cols + col_idx
+
+
+def split_arr(arr: NDArray, metadata_size: int) -> list[tuple[NDArray, NDArray]]:
+    """Split a 1D integer array into a list of tuples, each containing a metadata array
+    and a data array, where the size of each (metadata + data) sub-array is specified as
+    the integer immediately before each (metadata + data) sub-array.
+
+    Parameters
+    ----------
+    arr
+        One dimensional integer array to split.
+    metadata_size
+        How many elements to include in the metadata array. This can be zero.
+
+    Returns
+    -------
+    sub_arrs
+        List of tuples of integer arrays. The integers that define the sizes of the
+        sub-arrays are excluded.
+
+    Examples
+    --------
+    >>> split_arr(np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6]), metadata_size=1)
+    [(array([0]), array([1, 2, 3])), (array([1]), array([4, 5, 6]))]
+
+    """
+    count = 0
+    block_start = 0
+    sub_arrs = []
+    while count < len(arr):
+        size = arr[block_start]
+        start = block_start + 1
+        end = start + size
+        metadata_i = arr[start : start + metadata_size]
+        sub_arr_i = arr[start + metadata_size : end]
+        sub_arrs.append((metadata_i, sub_arr_i))
+        count += size + 1
+        block_start = end
+    return sub_arrs
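As a quick orientation on the new helpers (not part of the diff; the import path is taken from the file list above):

    import numpy as np
    from hpcflow.sdk.utils.arrays import get_1D_idx, get_2D_idx, split_arr

    # round-trip a flat index through (row, col) form for a 4-column layout:
    assert get_2D_idx(9, num_cols=4) == (2, 1)
    assert get_1D_idx(2, 1, num_cols=4) == 9

    # unpack two length-prefixed blocks, each carrying one metadata element:
    packed = np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6])
    (md0, dat0), (md1, dat1) = split_arr(packed, metadata_size=1)
    assert md0.tolist() == [0] and dat0.tolist() == [1, 2, 3]
    assert md1.tolist() == [1] and dat1.tolist() == [4, 5, 6]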
hpcflow/sdk/utils/deferred_file.py ADDED
@@ -0,0 +1,55 @@
+from os import PathLike
+from typing import Literal, Union
+
+
+class DeferredFileWriter:
+    """A class that provides a context manager for deferring writing or appending to a
+    file until a write method is called.
+
+    Attributes
+    ----------
+    filename
+        The file path to open.
+    mode
+        The mode to use.
+
+    Examples
+    --------
+    >>> with DeferredFileWriter("new_file.txt", "w") as f:
+    ...     # file is not yet created
+    ...     f.write("contents")
+    ...     # file is now created, but not closed
+    # file is now closed
+
+    """
+
+    def __init__(self, filename: Union[str, PathLike], mode: Literal["w", "a"], **kwargs):
+        self.filename = filename
+        self.mode = mode
+        self.file = None
+        self.kwargs = kwargs
+        self._is_open = False
+
+    def _ensure_open(self):
+        if not self._is_open:
+            self.file = open(self.filename, self.mode, **self.kwargs)
+            self._is_open = True
+
+    def write(self, data):
+        self._ensure_open()
+        self.file.write(data)
+
+    def writelines(self, lines):
+        self._ensure_open()
+        self.file.writelines(lines)
+
+    def close(self):
+        if self._is_open:
+            self.file.close()
+            self._is_open = False
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
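A minimal usage sketch (not part of the diff): the underlying file only comes into existence on the first write, so code paths that never write do not leave empty files behind:

    import os, tempfile
    from hpcflow.sdk.utils.deferred_file import DeferredFileWriter

    path = os.path.join(tempfile.mkdtemp(), "log.txt")
    with DeferredFileWriter(path, "a") as fp:
        assert not os.path.exists(path)  # nothing opened yet
        fp.write("first line\n")         # file is created here, on first write
    assert os.path.exists(path)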
hpcflow/sdk/utils/hashing.py ADDED
@@ -0,0 +1,16 @@
+def get_hash(obj):
+    """Return a hash from an arbitrarily nested structure of dicts, lists, tuples, and
+    sets.
+
+    Note the resulting hash is not necessarily stable across sessions or machines.
+    """
+
+    if isinstance(obj, (set, tuple, list)):
+        return hash(tuple([type(obj)] + [get_hash(i) for i in obj]))
+
+    elif not isinstance(obj, dict):
+        return hash(obj)
+
+    new_obj = {k: get_hash(obj[k]) for k in obj}
+
+    return hash(frozenset(sorted(new_obj.items())))
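A short behavioural sketch (not part of the diff): dict key order is normalised away, while the container type is folded into the hash:

    from hpcflow.sdk.utils.hashing import get_hash

    assert get_hash({"a": [1, 2], "b": (3,)}) == get_hash({"b": (3,), "a": [1, 2]})
    assert get_hash([1, 2]) != get_hash((1, 2))  # same items, different container type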
hpcflow/sdk/utils/patches.py ADDED
@@ -0,0 +1,12 @@
+from pathlib import Path
+
+
+def resolve_path(path):
+    """On Windows Python 3.8, 3.9, and 3.10, `pathlib.Path.resolve` does
+    not return an absolute path for non-existent paths, when it should.
+
+    See: https://github.com/python/cpython/issues/82852
+
+    """
+    # TODO: this only seems to be used in a test; remove?
+    return Path.cwd() / Path(path).resolve()  # cwd is ignored if already absolute
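For illustration (not part of the diff): because `Path.cwd() / p` leaves an already-absolute `p` unchanged, the result is absolute whether or not the interpreter exhibits the bug:

    from hpcflow.sdk.utils.patches import resolve_path

    assert resolve_path("does_not_exist.txt").is_absolute()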
hpcflow/sdk/utils/strings.py ADDED
@@ -0,0 +1,33 @@
+from typing import Iterable
+
+
+def shorten_list_str(
+    lst: Iterable, items: int = 10, end_num: int = 1, placeholder: str = "..."
+) -> str:
+    """Format a list as a string, including only some maximum number of items.
+
+    Parameters
+    ----------
+    lst:
+        The list to format in a shortened form.
+    items:
+        The total number of items to include in the formatted list.
+    end_num:
+        The number of items to include at the end of the formatted list.
+    placeholder:
+        The placeholder to use to replace excess items in the formatted list.
+
+    Examples
+    --------
+    >>> shorten_list_str(list(range(20)), items=5)
+    '[0, 1, 2, 3, ..., 19]'
+
+    """
+    lst = list(lst)
+    if len(lst) <= items + 1:  # (don't replace only one item)
+        lst_short = lst
+    else:
+        start_num = items - end_num
+        lst_short = lst[:start_num] + [placeholder] + lst[-end_num:]
+
+    return "[" + ", ".join(f"{i}" for i in lst_short) + "]"
hpcflow/tests/api/test_api.py ADDED
@@ -0,0 +1,32 @@
+import pytest
+from hpcflow.sdk.core.utils import get_file_context
+from hpcflow.app import app as hf
+
+
+@pytest.mark.integration
+def test_api_make_and_submit_workflow(null_config, tmp_path):
+    with get_file_context("hpcflow.tests.data", "workflow_1.yaml") as file_path:
+        wk = hf.make_and_submit_workflow(
+            file_path,
+            path=tmp_path,
+            status=False,
+            add_to_known=False,
+            wait=True,
+        )
+        p2 = wk.tasks[0].elements[0].outputs.p2
+        assert isinstance(p2, hf.ElementParameter)
+        assert p2.value == "201"
+
+
+@pytest.mark.integration
+def test_api_make_and_submit_demo_workflow(null_config, tmp_path):
+    wk = hf.make_and_submit_demo_workflow(
+        "workflow_1",
+        path=tmp_path,
+        status=False,
+        add_to_known=False,
+        wait=True,
+    )
+    p2 = wk.tasks[0].elements[0].outputs.p2
+    assert isinstance(p2, hf.ElementParameter)
+    assert p2.value == "201"
hpcflow/tests/conftest.py CHANGED
@@ -29,6 +29,13 @@ def pytest_addoption(parser: pytest.Parser):
         default=False,
         help="run integration-like workflow submission tests",
     )
+    parser.addoption(
+        "--repeat",
+        action="store",
+        default=1,
+        type=int,
+        help="number of times to repeat each test",
+    )


 def pytest_configure(config: pytest.Config):
@@ -102,3 +109,15 @@ def new_null_config(tmp_path: Path):
     hf.load_config(config_dir=tmp_path, warn=False)
     hf.load_template_components(warn=False)
     hf.run_time_info.in_pytest = True
+
+
+@pytest.fixture
+def unload_config():
+    hf.unload_config()
+
+
+def pytest_generate_tests(metafunc):
+    repeats_num = int(metafunc.config.getoption("--repeat"))
+    if repeats_num > 1:
+        metafunc.fixturenames.append("tmp_ct")
+        metafunc.parametrize("tmp_ct", range(repeats_num))
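With this hook in place, running e.g. `pytest --repeat 10` parametrizes every collected test over the injected `tmp_ct` fixture so that each test body executes ten times; at the default of 1 the hook is a no-op.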
hpcflow/tests/data/benchmark_script_runner.yaml ADDED
@@ -0,0 +1,26 @@
+doc: |
+  A workflow for benchmarking the overhead introduced by hpcflow in running a Python
+  script `N` times.
+
+template_components:
+  task_schemas:
+    - objective: run_script
+      inputs:
+        - parameter: p1
+      outputs:
+        - parameter: p2
+      actions:
+        - environments:
+            - scope:
+                type: any
+              environment: python_env
+          script: <<script:main_script_test_direct_in_direct_out.py>>
+          script_exe: python_script
+          script_data_in: direct
+          script_data_out: direct
+
+tasks:
+  - schema: run_script
+    inputs:
+      p1: 101
+    repeats: <<var:N[default=1]>>
hpcflow/tests/data/multi_path_sequences.yaml ADDED
@@ -0,0 +1,29 @@
+template_components:
+  task_schemas:
+    - objective: add_p1_components
+      inputs:
+        - parameter: p1
+      outputs:
+        - parameter: p2
+      actions:
+        - rules:
+            - path: resources.os_name
+              condition: { value.equal_to: posix }
+          commands:
+            - command: echo "$((<<parameter:p1.a>> + <<parameter:p1.b>>))"
+              stdout: <<int(parameter:p2)>>
+        - rules:
+            - path: resources.os_name
+              condition: { value.equal_to: nt }
+          commands:
+            - command: Write-Output ((<<parameter:p1.a>> + <<parameter:p1.b>>))
+              stdout: <<int(parameter:p2)>>
+tasks:
+  - schema: add_p1_components
+    inputs:
+      p1: {}
+    multi_path_sequences:
+      - paths: [inputs.p1.a, inputs.p1.b]
+        values:
+          - [101, 102]
+          - [201, 202]
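Judging from this fixture's shape, each row of `values` is presumably distributed across the listed `paths` in order, so the task above would expand to two elements: one with `p1: {a: 101, b: 102}` and one with `p1: {a: 201, b: 202}`.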
hpcflow/tests/data/workflow_test_run_abort.yaml CHANGED
@@ -2,42 +2,41 @@ doc: |
   A workflow to sleep a specified number of seconds, and then set an output parameter
   `is_finished` to the string "true". This is used for testing the run-abort
   functionality.
-parameters:
-  - type: sleep_time_seconds
-  - type: is_finished
-task_schemas:
-  - objective: sleep
-    inputs:
-      - parameter: sleep_time_seconds
-    outputs:
-      - parameter: is_finished
-    actions:
-      - abortable: true
-        rules:
-          - path: resources.os_name
-            condition: { value.equal_to: posix }
-        commands:
-          - command: sleep <<parameter:sleep_time_seconds>>
-      - abortable: true
-        rules:
-          - path: resources.os_name
-            condition: { value.equal_to: nt }
-        commands:
-          - command: Start-Sleep <<parameter:sleep_time_seconds>>
-      - rules:
-          - path: resources.os_name
-            condition: { value.equal_to: posix }
-        commands:
-          - command: echo "true"
-            stdout: <<parameter:is_finished>>
-      - rules:
-          - path: resources.os_name
-            condition: { value.equal_to: nt }
-        commands:
-          - command: Write-Output "true"
-            stdout: <<parameter:is_finished>>
+
+template_components:
+  task_schemas:
+    - objective: sleep
+      inputs:
+        - parameter: sleep_time_seconds
+      outputs:
+        - parameter: is_finished
+      actions:
+        - abortable: true
+          rules:
+            - path: resources.os_name
+              condition: { value.equal_to: posix }
+          commands:
+            - command: sleep <<parameter:sleep_time_seconds>>
+        - abortable: true
+          rules:
+            - path: resources.os_name
+              condition: { value.equal_to: nt }
+          commands:
+            - command: Start-Sleep <<parameter:sleep_time_seconds>>
+        - rules:
+            - path: resources.os_name
+              condition: { value.equal_to: posix }
+          commands:
+            - command: echo "true"
+              stdout: <<parameter:is_finished>>
+        - rules:
+            - path: resources.os_name
+              condition: { value.equal_to: nt }
+          commands:
+            - command: Write-Output "true"
+              stdout: <<parameter:is_finished>>

 tasks:
   - schema: sleep
     inputs:
-      sleep_time_seconds: 3600 # 1 hour
+      sleep_time_seconds: 300 # 5 minutes
hpcflow/tests/schedulers/sge/test_sge_submission.py ADDED
@@ -0,0 +1,36 @@
+import pytest
+from hpcflow.app import app as hf
+
+
+def test_SGE_process_resources_multi_core_with_parallel_env(null_config):
+
+    scheduler_config = {
+        "parallel_environments": {
+            None: {"num_cores": [1, 1, 1]},  # [start, step, stop]
+            "my_parallel_env": {"num_cores": [2, 1, 32]},
+        }
+    }
+
+    scheduler = hf.SGEPosix()
+    resources = hf.ElementResources(num_cores=2, SGE_parallel_env="my_parallel_env")
+
+    scheduler.process_resources(resources, scheduler_config)
+
+    assert resources.num_cores == 2
+    assert resources.SGE_parallel_env == "my_parallel_env"
+
+
+def test_SGE_process_resources_raises_on_single_core_with_parallel_env(null_config):
+
+    scheduler_config = {
+        "parallel_environments": {
+            None: {"num_cores": [1, 1, 1]},  # [start, step, stop]
+            "my_parallel_env": {"num_cores": [2, 1, 32]},
+        }
+    }
+
+    scheduler = hf.SGEPosix()
+    resources = hf.ElementResources(num_cores=1, SGE_parallel_env="my_parallel_env")
+
+    with pytest.raises(ValueError):
+        scheduler.process_resources(resources, scheduler_config)