hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
  2. hpcflow/_version.py +1 -1
  3. hpcflow/app.py +1 -0
  4. hpcflow/data/scripts/bad_script.py +2 -0
  5. hpcflow/data/scripts/do_nothing.py +2 -0
  6. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  7. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  8. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  11. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  12. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  13. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  15. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  16. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  23. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
  24. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  25. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
  26. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  27. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  28. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  29. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  30. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  31. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  32. hpcflow/data/scripts/script_exit_test.py +5 -0
  33. hpcflow/data/template_components/environments.yaml +1 -1
  34. hpcflow/sdk/__init__.py +26 -15
  35. hpcflow/sdk/app.py +2192 -768
  36. hpcflow/sdk/cli.py +506 -296
  37. hpcflow/sdk/cli_common.py +105 -7
  38. hpcflow/sdk/config/__init__.py +1 -1
  39. hpcflow/sdk/config/callbacks.py +115 -43
  40. hpcflow/sdk/config/cli.py +126 -103
  41. hpcflow/sdk/config/config.py +674 -318
  42. hpcflow/sdk/config/config_file.py +131 -95
  43. hpcflow/sdk/config/errors.py +125 -84
  44. hpcflow/sdk/config/types.py +148 -0
  45. hpcflow/sdk/core/__init__.py +25 -1
  46. hpcflow/sdk/core/actions.py +1771 -1059
  47. hpcflow/sdk/core/app_aware.py +24 -0
  48. hpcflow/sdk/core/cache.py +139 -79
  49. hpcflow/sdk/core/command_files.py +263 -287
  50. hpcflow/sdk/core/commands.py +145 -112
  51. hpcflow/sdk/core/element.py +828 -535
  52. hpcflow/sdk/core/enums.py +192 -0
  53. hpcflow/sdk/core/environment.py +74 -93
  54. hpcflow/sdk/core/errors.py +455 -52
  55. hpcflow/sdk/core/execute.py +207 -0
  56. hpcflow/sdk/core/json_like.py +540 -272
  57. hpcflow/sdk/core/loop.py +751 -347
  58. hpcflow/sdk/core/loop_cache.py +164 -47
  59. hpcflow/sdk/core/object_list.py +370 -207
  60. hpcflow/sdk/core/parameters.py +1100 -627
  61. hpcflow/sdk/core/rule.py +59 -41
  62. hpcflow/sdk/core/run_dir_files.py +21 -37
  63. hpcflow/sdk/core/skip_reason.py +7 -0
  64. hpcflow/sdk/core/task.py +1649 -1339
  65. hpcflow/sdk/core/task_schema.py +308 -196
  66. hpcflow/sdk/core/test_utils.py +191 -114
  67. hpcflow/sdk/core/types.py +440 -0
  68. hpcflow/sdk/core/utils.py +485 -309
  69. hpcflow/sdk/core/validation.py +82 -9
  70. hpcflow/sdk/core/workflow.py +2544 -1178
  71. hpcflow/sdk/core/zarr_io.py +98 -137
  72. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  73. hpcflow/sdk/demo/cli.py +53 -33
  74. hpcflow/sdk/helper/cli.py +18 -15
  75. hpcflow/sdk/helper/helper.py +75 -63
  76. hpcflow/sdk/helper/watcher.py +61 -28
  77. hpcflow/sdk/log.py +122 -71
  78. hpcflow/sdk/persistence/__init__.py +8 -31
  79. hpcflow/sdk/persistence/base.py +1360 -606
  80. hpcflow/sdk/persistence/defaults.py +6 -0
  81. hpcflow/sdk/persistence/discovery.py +38 -0
  82. hpcflow/sdk/persistence/json.py +568 -188
  83. hpcflow/sdk/persistence/pending.py +382 -179
  84. hpcflow/sdk/persistence/store_resource.py +39 -23
  85. hpcflow/sdk/persistence/types.py +318 -0
  86. hpcflow/sdk/persistence/utils.py +14 -11
  87. hpcflow/sdk/persistence/zarr.py +1337 -433
  88. hpcflow/sdk/runtime.py +44 -41
  89. hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
  90. hpcflow/sdk/submission/jobscript.py +1651 -692
  91. hpcflow/sdk/submission/schedulers/__init__.py +167 -39
  92. hpcflow/sdk/submission/schedulers/direct.py +121 -81
  93. hpcflow/sdk/submission/schedulers/sge.py +170 -129
  94. hpcflow/sdk/submission/schedulers/slurm.py +291 -268
  95. hpcflow/sdk/submission/schedulers/utils.py +12 -2
  96. hpcflow/sdk/submission/shells/__init__.py +14 -15
  97. hpcflow/sdk/submission/shells/base.py +150 -29
  98. hpcflow/sdk/submission/shells/bash.py +283 -173
  99. hpcflow/sdk/submission/shells/os_version.py +31 -30
  100. hpcflow/sdk/submission/shells/powershell.py +228 -170
  101. hpcflow/sdk/submission/submission.py +1014 -335
  102. hpcflow/sdk/submission/types.py +140 -0
  103. hpcflow/sdk/typing.py +182 -12
  104. hpcflow/sdk/utils/arrays.py +71 -0
  105. hpcflow/sdk/utils/deferred_file.py +55 -0
  106. hpcflow/sdk/utils/hashing.py +16 -0
  107. hpcflow/sdk/utils/patches.py +12 -0
  108. hpcflow/sdk/utils/strings.py +33 -0
  109. hpcflow/tests/api/test_api.py +32 -0
  110. hpcflow/tests/conftest.py +27 -6
  111. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  112. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  113. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  114. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
  115. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  116. hpcflow/tests/scripts/test_main_scripts.py +866 -85
  117. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  118. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  119. hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
  120. hpcflow/tests/unit/test_action.py +262 -75
  121. hpcflow/tests/unit/test_action_rule.py +9 -4
  122. hpcflow/tests/unit/test_app.py +33 -6
  123. hpcflow/tests/unit/test_cache.py +46 -0
  124. hpcflow/tests/unit/test_cli.py +134 -1
  125. hpcflow/tests/unit/test_command.py +71 -54
  126. hpcflow/tests/unit/test_config.py +142 -16
  127. hpcflow/tests/unit/test_config_file.py +21 -18
  128. hpcflow/tests/unit/test_element.py +58 -62
  129. hpcflow/tests/unit/test_element_iteration.py +50 -1
  130. hpcflow/tests/unit/test_element_set.py +29 -19
  131. hpcflow/tests/unit/test_group.py +4 -2
  132. hpcflow/tests/unit/test_input_source.py +116 -93
  133. hpcflow/tests/unit/test_input_value.py +29 -24
  134. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  135. hpcflow/tests/unit/test_json_like.py +44 -35
  136. hpcflow/tests/unit/test_loop.py +1396 -84
  137. hpcflow/tests/unit/test_meta_task.py +325 -0
  138. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  139. hpcflow/tests/unit/test_object_list.py +17 -12
  140. hpcflow/tests/unit/test_parameter.py +29 -7
  141. hpcflow/tests/unit/test_persistence.py +237 -42
  142. hpcflow/tests/unit/test_resources.py +20 -18
  143. hpcflow/tests/unit/test_run.py +117 -6
  144. hpcflow/tests/unit/test_run_directories.py +29 -0
  145. hpcflow/tests/unit/test_runtime.py +2 -1
  146. hpcflow/tests/unit/test_schema_input.py +23 -15
  147. hpcflow/tests/unit/test_shell.py +23 -2
  148. hpcflow/tests/unit/test_slurm.py +8 -7
  149. hpcflow/tests/unit/test_submission.py +38 -89
  150. hpcflow/tests/unit/test_task.py +352 -247
  151. hpcflow/tests/unit/test_task_schema.py +33 -20
  152. hpcflow/tests/unit/test_utils.py +9 -11
  153. hpcflow/tests/unit/test_value_sequence.py +15 -12
  154. hpcflow/tests/unit/test_workflow.py +114 -83
  155. hpcflow/tests/unit/test_workflow_template.py +0 -1
  156. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  157. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  158. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  159. hpcflow/tests/unit/utils/test_patches.py +5 -0
  160. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  161. hpcflow/tests/workflows/__init__.py +0 -0
  162. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  163. hpcflow/tests/workflows/test_jobscript.py +334 -1
  164. hpcflow/tests/workflows/test_run_status.py +198 -0
  165. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  166. hpcflow/tests/workflows/test_submission.py +140 -0
  167. hpcflow/tests/workflows/test_workflows.py +160 -15
  168. hpcflow/tests/workflows/test_zip.py +18 -0
  169. hpcflow/viz_demo.ipynb +6587 -3
  170. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
  171. hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
  172. hpcflow/sdk/core/parallel.py +0 -21
  173. hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
  174. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
  175. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
  176. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/submission/types.py ADDED
@@ -0,0 +1,140 @@
+ """
+ Types for the submission subsystem.
+ """
+
+ from __future__ import annotations
+ from typing import Any, TYPE_CHECKING
+ from typing_extensions import NotRequired, TypeAlias, TypedDict
+
+ if TYPE_CHECKING:
+     from collections.abc import Sequence
+     from datetime import datetime
+     from numpy.typing import NDArray
+     from ..core.element import ElementResources
+
+
+ class JobScriptDescriptor(TypedDict):
+     """
+     Descriptor for a jobscript.
+     """
+
+     #: Resources required by the jobscript.
+     resources: Any
+     #: Elements handled by the jobscript.
+     elements: dict[int, list[int]]
+     #: Dependencies of the jobscript.
+     dependencies: NotRequired[dict[int, ResolvedJobscriptBlockDependencies]]
+     #: Hash of resources.
+     resource_hash: NotRequired[str]
+
+
+ class ResolvedJobscriptBlockDependencies(TypedDict):
+     """
+     The resolution of a jobscript block dependency. This represents the dependency of one
+     jobscript block on another.
+     """
+
+     #: Mapping of jobscript elements.
+     js_element_mapping: dict[int, list[int]]
+     #: Whether this is an array mapping.
+     is_array: NotRequired[bool]
+
+
+ class JobScriptCreationArguments(TypedDict):
+     """
+     Arguments to pass to create a :class:`Jobscript`.
+     """
+
+     # TODO: this currently represents a mix of arguments for both jobscripts and jobscript
+     # blocks; need to separate
+
+     #: The task insertion IDs.
+     task_insert_IDs: list[int]
+     #: The actions of the tasks.
+     task_actions: list[tuple[int, int, int]]
+     #: The elements of the tasks.
+     task_elements: dict[int, list[int]]
+     #: Element action run information.
+     EAR_ID: NDArray
+     #: Resources to use.
+     resources: NotRequired[ElementResources]
+     #: Description of what loops are in play.
+     task_loop_idx: list[dict[str, int]]
+     #: Description of dependencies.
+     dependencies: dict[int | tuple[int, int], ResolvedJobscriptBlockDependencies]
+     #: Whether this is an array jobscript.
+     is_array: NotRequired[bool]
+     #: When the jobscript was submitted, if known.
+     submit_time: NotRequired[datetime]
+     #: Where the jobscript was submitted, if known.
+     submit_hostname: NotRequired[str]
+     #: Description of what the jobscript was submitted to, if known.
+     submit_machine: NotRequired[str]
+     #: The command line used to do the commit, if known.
+     submit_cmdline: NotRequired[list[str]]
+     #: The job ID from the scheduler, if known.
+     scheduler_job_ID: NotRequired[str]
+     #: The process ID of the subprocess, if known.
+     process_ID: NotRequired[int]
+     #: Version info about the target system.
+     version_info: NotRequired[dict[str, str | list[str]]]
+     #: The name of the OS.
+     os_name: NotRequired[str]
+     #: The name of the shell.
+     shell_name: NotRequired[str]
+     #: The scheduler used.
+     scheduler_name: NotRequired[str]
+     #: Whether the jobscript is currently running.
+     running: NotRequired[bool]
+     #: Do not supply!
+     resource_hash: NotRequired[str]
+     #: Do not supply!
+     elements: NotRequired[dict[int, list[int]]]
+
+
+ class SchedulerRef(TypedDict):
+     """
+     Scheduler reference descriptor.
+     """
+
+     #: Jobscript references.
+     js_refs: list  # Internal type is horrible and variable
+     #: Number of jobscript elements.
+     num_js_elements: int
+
+
+ class SubmissionPart(TypedDict):
+     """
+     A part of a submission.
+     """
+
+     #: Timestamp for when this part was submitted.
+     submit_time: datetime
+     #: The jobscripts involved in this submission.
+     jobscripts: list[int]
+
+
+ # This needs PEP 728 for a better type, alas
+ #: Version data.
+ VersionInfo: TypeAlias = "dict[str, str | list[str]]"
+
+
+ # TODO: This really doesn't belong here?!
+ class JobscriptHeaderArgs(TypedDict):
+     """
+     Keyword arguments to use when creating a job script from a
+     :class:`Jobscript`.
+     """
+
+     #: Application invocation. (Arguments, etc.)
+     app_invoc: str | Sequence[str]
+     #: Workflow application alias.
+     workflow_app_alias: NotRequired[str]
+     #: Environment setup.
+     env_setup: NotRequired[str]
+     #: Application name in CAPS
+     app_caps: NotRequired[str]
+     #: Configuration directory.
+     config_dir: NotRequired[str]
+     #: Configuration key.
+     config_invoc_key: NotRequired[Any]
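
For orientation, the classes above are TypedDicts, so values of these types are built and passed around as plain dictionaries that a static type checker validates; keys declared NotRequired can simply be omitted. A minimal sketch of that usage (illustrative only, not taken from the diff):

from datetime import datetime, timezone
from hpcflow.sdk.submission.types import JobScriptDescriptor, SubmissionPart

# A SubmissionPart is just a dict with the two declared keys.
part: SubmissionPart = {
    "submit_time": datetime.now(timezone.utc),
    "jobscripts": [0, 1],
}

# NotRequired keys ("dependencies", "resource_hash") are omitted here.
desc: JobScriptDescriptor = {
    "resources": None,  # typed as Any above
    "elements": {0: [0, 1, 2]},
}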
hpcflow/sdk/typing.py CHANGED
@@ -1,18 +1,188 @@
  """
  Common type aliases.
  """
- from typing import Tuple, TypeVar
+
+ from __future__ import annotations
+ from dataclasses import InitVar
+ from typing import Any, ClassVar, Final, TypeVar, cast, TYPE_CHECKING
+ from typing_extensions import NotRequired, TypeAlias, TypedDict
  from pathlib import Path
+ import re
+
+ if TYPE_CHECKING:
+     from collections.abc import Mapping
+     from datetime import datetime
+     from rich.status import Status
+     from .core.object_list import (
+         CommandFilesList,
+         EnvironmentsList,
+         ParametersList,
+         TaskSchemasList,
+     )
+     from .submission.enums import JobscriptElementState
+     from .submission.submission import Submission
+

  #: Type of a value that can be treated as a path.
- PathLike = TypeVar("PathLike", str, Path, None)  # TODO: maybe don't need TypeVar?
-
- #: Type of an element index:
- #: (task_insert_ID, element_idx)
- E_idx_type = Tuple[int, int]
- #: Type of an element iteration index:
- #: (task_insert_ID, element_idx, iteration_idx)
- EI_idx_type = Tuple[int, int, int]
- #: Type of an element action run index:
- #: (task_insert_ID, element_idx, iteration_idx, action_idx, run_idx)
- EAR_idx_type = Tuple[int, int, int, int, int]
+ PathLike: TypeAlias = "str | Path | None"
+
+
+ class ParamSource(TypedDict):
+     """
+     A parameter source descriptor.
+     """
+
+     #: Parameter type name.
+     type: NotRequired[str]
+     #: EAR ID.
+     EAR_ID: NotRequired[int]
+     #: Task insertion ID.
+     task_insert_ID: NotRequired[int]
+     #: Action index.
+     action_idx: NotRequired[int]
+     #: Element index.
+     element_idx: NotRequired[int]
+     #: Element set index.
+     element_set_idx: NotRequired[int]
+     #: Element action run index.
+     run_idx: NotRequired[int]
+     #: Sequence index.
+     sequence_idx: NotRequired[int]
+     #: Task index.
+     task_idx: NotRequired[int]
+     #: Name of method used to create the parameter's value(s).
+     value_class_method: NotRequired[str]
+
+
+ class KnownSubmission(TypedDict):
+     """
+     Describes a known submission.
+     """
+
+     #: Local ID.
+     local_id: int
+     #: Workflow global ID.
+     workflow_id: str
+     #: Whether the submission is active.
+     is_active: bool
+     #: Submission index.
+     sub_idx: int
+     #: Submission time.
+     submit_time: str
+     #: Path to submission.
+     path: str
+     #: Start time.
+     start_time: str
+     #: Finish time.
+     end_time: str
+
+
+ class KnownSubmissionItem(TypedDict):
+     """
+     Describes a known submission.
+     """
+
+     #: Local ID.
+     local_id: int
+     #: Workflow global ID.
+     workflow_id: str
+     #: Path to the workflow.
+     workflow_path: str
+     #: Time of submission.
+     submit_time: str
+     #: Parsed time of submission.
+     submit_time_obj: NotRequired[datetime | None]
+     #: Time of start.
+     start_time: str
+     #: Parsed time of start.
+     start_time_obj: datetime | None
+     #: Time of finish.
+     end_time: str
+     #: Parsed time of finish.
+     end_time_obj: datetime | None
+     #: Submission index.
+     sub_idx: int
+     #: Jobscripts in submission.
+     jobscripts: list[int]
+     #: Active jobscript state.
+     active_jobscripts: Mapping[int, Mapping[int, Mapping[int, JobscriptElementState]]]
+     #: Whether this is deleted.
+     deleted: bool
+     #: Whether this is unloadable.
+     unloadable: bool
+     #: Expanded submission object.
+     submission: NotRequired[Submission]
+
+
+ class TemplateComponents(TypedDict):
+     """
+     Components loaded from templates.
+     """
+
+     #: Parameters loaded from templates.
+     parameters: NotRequired[ParametersList]
+     #: Command files loaded from templates.
+     command_files: NotRequired[CommandFilesList]
+     #: Execution environments loaded from templates.
+     environments: NotRequired[EnvironmentsList]
+     #: Task schemas loaded from templates.
+     task_schemas: NotRequired[TaskSchemasList]
+     #: Scripts discovered by templates.
+     scripts: NotRequired[dict[str, Path]]
+
+
+ class MakeWorkflowCommonArgs(TypedDict):
+     """
+     Common keys used in workflow construction in :py:meth:`BaseApp._make_workflow`.
+     """
+
+     path: str | None
+     name: str | None
+     overwrite: bool
+     store: str
+     ts_fmt: str | None
+     ts_name_fmt: str | None
+     store_kwargs: dict[str, Any] | None
+     variables: dict[str, Any] | None
+     status: Status | None
+
+
+ #: Simplification of :class:`TemplateComponents` to allow some types of
+ #: internal manipulations.
+ BasicTemplateComponents: TypeAlias = "dict[str, list[dict]]"
+
+
+ DataIndex: TypeAlias = "dict[str, int | list[int]]"
+ """
+ The type of indices to data. These are *normally* dictionaries of integers,
+ but can have leaves being lists of integers when dealing with element groups
+ (i.e., when a downstream element uses outputs from multiple upstream elements,
+ rather than just a single upstream element).
+ """
+
+
+ _T = TypeVar("_T")
+
+ _CLASS_VAR_RE: Final = re.compile(r"ClassVar\[(.*)\]")
+ _INIT_VAR_RE: Final = re.compile(r"InitVar\[(.*)\]")
+
+
+ def hydrate(cls: type[_T]) -> type[_T]:
+     """
+     Partially hydrates the annotations on fields in a class, so that a @dataclass
+     annotation can recognise that ClassVar-annotated fields are class variables.
+     """
+     anns: dict[str, Any] = {}
+     for f, a in cls.__annotations__.items():
+         if isinstance(a, str):
+             m = _CLASS_VAR_RE.match(a)
+             if m:
+                 anns[f] = cast(Any, ClassVar[m[1]])
+                 continue
+             m = _INIT_VAR_RE.match(a)
+             if m:
+                 anns[f] = cast(Any, InitVar(cast(type, m[1])))
+                 continue
+         anns[f] = a
+     cls.__annotations__ = anns
+     return cls
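
The new hydrate decorator converts stringified ClassVar[...] and InitVar[...] annotations (as produced under `from __future__ import annotations`) back into real typing objects before @dataclass inspects them, rather than relying on the standard library's string-matching heuristics. A minimal sketch of the intended usage (the example class is invented; hydrate sits below @dataclass so that it rewrites the annotations first):

from __future__ import annotations
from dataclasses import dataclass, fields
from typing import ClassVar

from hpcflow.sdk.typing import hydrate


@dataclass
@hydrate  # applied first: turns the string "ClassVar[int]" into a real ClassVar
class Counter:
    #: class-level counter; should not become an instance field
    total: ClassVar[int] = 0
    #: ordinary dataclass field
    value: int = 1


# Only "value" is treated as a dataclass field.
assert [f.name for f in fields(Counter)] == ["value"]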
hpcflow/sdk/utils/arrays.py ADDED
@@ -0,0 +1,71 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, overload
+
+ if TYPE_CHECKING:
+     from numpy.typing import NDArray
+
+
+ @overload
+ def get_2D_idx(idx: int, num_cols: int) -> tuple[int, int]:
+     ...
+
+
+ @overload
+ def get_2D_idx(idx: NDArray, num_cols: int) -> tuple[NDArray, NDArray]:
+     ...
+
+
+ def get_2D_idx(idx: int | NDArray, num_cols: int) -> tuple[int | NDArray, int | NDArray]:
+     """Convert a 1D index to a 2D index, assuming items are arranged in a row-major
+     order."""
+     row_idx = idx // num_cols
+     col_idx = idx % num_cols
+     return (row_idx, col_idx)
+
+
+ def get_1D_idx(
+     row_idx: int | NDArray, col_idx: int | NDArray, num_cols: int
+ ) -> int | NDArray:
+     """Convert a 2D (row, col) index into a 1D index, assuming items are arranged in a
+     row-major order."""
+     return row_idx * num_cols + col_idx
+
+
+ def split_arr(arr: NDArray, metadata_size: int) -> list[tuple[NDArray, NDArray]]:
+     """Split a 1D integer array into a list of tuples, each containing a metadata array
+     and a data array, where the size of each (metadata + data) sub-array is specified as
+     the integer immediately before each (metadata + data) sub-array.
+
+     Parameters
+     ----------
+     arr
+         One dimensional integer array to split.
+     metadata_size
+         How many elements to include in the metadata array. This can be zero.
+
+     Returns
+     -------
+     sub_arrs
+         List of tuples of integer arrays. The integers that define the sizes of the
+         sub-arrays are excluded.
+
+     Examples
+     --------
+     >>> split_arr(np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6]), metadata_size=1)
+     [(array([0]), array([1, 2, 3])), (array([1]), array([4, 5, 6]))]
+
+     """
+     count = 0
+     block_start = 0
+     sub_arrs = []
+     while count < len(arr):
+         size = arr[block_start]
+         start = block_start + 1
+         end = start + size
+         metadata_i = arr[start : start + metadata_size]
+         sub_arr_i = arr[start + metadata_size : end]
+         sub_arrs.append((metadata_i, sub_arr_i))
+         count += size + 1
+         block_start = end
+     return sub_arrs
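
A short round-trip check of the helpers above (illustrative usage; the arrays are arbitrary):

import numpy as np

from hpcflow.sdk.utils.arrays import get_1D_idx, get_2D_idx, split_arr

# Row-major round trip: flat index 7 in a 3-column layout is row 2, column 1.
assert get_2D_idx(7, num_cols=3) == (2, 1)
assert get_1D_idx(2, 1, num_cols=3) == 7

# Each block is stored as [size, *metadata, *data]; with metadata_size=0 the
# metadata arrays come back empty and only the data arrays are of interest.
blocks = split_arr(np.array([3, 10, 11, 12, 2, 20, 21]), metadata_size=0)
assert [data.tolist() for _, data in blocks] == [[10, 11, 12], [20, 21]]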
hpcflow/sdk/utils/deferred_file.py ADDED
@@ -0,0 +1,55 @@
+ from os import PathLike
+ from typing import Literal, Union
+
+
+ class DeferredFileWriter:
+     """A class that provides a context manager for deferring writing or appending to a
+     file until a write method is called.
+
+     Attributes
+     ----------
+     filename
+         The file path to open
+     mode
+         The mode to use.
+
+     Examples
+     --------
+     >>> with DeferredFileWrite("new_file.txt", "w") as f:
+     ...     # file is not yet created
+     ...     f.write("contents")
+     ...     # file is now created, but not closed
+     ... # file is now closed
+
+     """
+
+     def __init__(self, filename: Union[str, PathLike], mode: Literal["w", "a"], **kwargs):
+         self.filename = filename
+         self.mode = mode
+         self.file = None
+         self.kwargs = kwargs
+         self._is_open = False
+
+     def _ensure_open(self):
+         if not self._is_open:
+             self.file = open(self.filename, self.mode, **self.kwargs)
+             self._is_open = True
+
+     def write(self, data):
+         self._ensure_open()
+         self.file.write(data)
+
+     def writelines(self, lines):
+         self._ensure_open()
+         self.file.writelines(lines)
+
+     def close(self):
+         if self._is_open:
+             self.file.close()
+             self._is_open = False
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         self.close()
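
Note the class added here is DeferredFileWriter (the docstring example's "DeferredFileWrite" spelling appears to be a typo). A minimal usage sketch, assuming the arbitrary example file "run.log" does not already exist:

from pathlib import Path

from hpcflow.sdk.utils.deferred_file import DeferredFileWriter

messages: list[str] = []  # imagine output collected during a run
with DeferredFileWriter("run.log", mode="a") as writer:  # hypothetical path
    for msg in messages:
        writer.write(msg + "\n")

# No write() was ever issued, so the file was never created.
assert not Path("run.log").exists()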
hpcflow/sdk/utils/hashing.py ADDED
@@ -0,0 +1,16 @@
+ def get_hash(obj):
+     """Return a hash from an arbitrarily nested structure dicts, lists, tuples, and
+     sets.
+
+     Note the resulting hash is not necessarily stable across sessions or machines.
+     """
+
+     if isinstance(obj, (set, tuple, list)):
+         return hash(tuple([type(obj)] + [get_hash(i) for i in obj]))
+
+     elif not isinstance(obj, dict):
+         return hash(obj)
+
+     new_obj = {k: get_hash(obj[k]) for k in obj}
+
+     return hash(frozenset(sorted(new_obj.items())))
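
Illustrative behaviour of get_hash, following the implementation above (the hash values themselves are session-specific, so only equalities are shown):

from hpcflow.sdk.utils.hashing import get_hash

a = {"p1": [1, 2, {"nested": (3, 4)}], "p2": {5, 6}}
b = {"p2": {6, 5}, "p1": [1, 2, {"nested": (3, 4)}]}

# Dict key order does not affect the hash: items are sorted before hashing.
assert get_hash(a) == get_hash(b)

# Container type does matter: the type object is mixed into the hash.
assert get_hash([1, 2]) != get_hash((1, 2))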
hpcflow/sdk/utils/patches.py ADDED
@@ -0,0 +1,12 @@
+ from pathlib import Path
+
+
+ def resolve_path(path):
+     """On Windows Python 3.8, 3.9, and 3.10, `Pathlib.resolve` does
+     not return an absolute path for non-existant paths, when it should.
+
+     See: https://github.com/python/cpython/issues/82852
+
+     """
+     # TODO: this only seems to be used in a test; remove?
+     return Path.cwd() / Path(path).resolve()  # cwd is ignored if already absolute
hpcflow/sdk/utils/strings.py ADDED
@@ -0,0 +1,33 @@
+ from typing import Iterable
+
+
+ def shorten_list_str(
+     lst: Iterable, items: int = 10, end_num: int = 1, placeholder: str = "..."
+ ) -> str:
+     """Format a list as a string, including only some maximum number of items.
+
+     Parameters
+     ----------
+     lst:
+         The list to format in a shortened form.
+     items:
+         The total number of items to include in the formatted list.
+     end_num:
+         The number of items to include at the end of the formatted list.
+     placeholder
+         The placeholder to use to replace excess items in the formatted list.
+
+     Examples
+     --------
+     >>> shorten_list_str(list(range(20)), items=5)
+     '[0, 1, 2, 3, ..., 19]'
+
+     """
+     lst = list(lst)
+     if len(lst) <= items + 1:  # (don't replace only one item)
+         lst_short = lst
+     else:
+         start_num = items - end_num
+         lst_short = lst[:start_num] + ["..."] + lst[-end_num:]
+
+     return "[" + ", ".join(f"{i}" for i in lst_short) + "]"
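
A further illustrative call showing the end_num parameter:

from hpcflow.sdk.utils.strings import shorten_list_str

# Six items survive: the first four, then the placeholder, then the last two.
print(shorten_list_str(list(range(20)), items=6, end_num=2))
# '[0, 1, 2, 3, ..., 18, 19]'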
hpcflow/tests/api/test_api.py ADDED
@@ -0,0 +1,32 @@
+ import pytest
+ from hpcflow.sdk.core.utils import get_file_context
+ from hpcflow.app import app as hf
+
+
+ @pytest.mark.integration
+ def test_api_make_and_submit_workflow(null_config, tmp_path):
+     with get_file_context("hpcflow.tests.data", "workflow_1.yaml") as file_path:
+         wk = hf.make_and_submit_workflow(
+             file_path,
+             path=tmp_path,
+             status=False,
+             add_to_known=False,
+             wait=True,
+         )
+         p2 = wk.tasks[0].elements[0].outputs.p2
+         assert isinstance(p2, hf.ElementParameter)
+         assert p2.value == "201"
+
+
+ @pytest.mark.integration
+ def test_api_make_and_submit_demo_workflow(null_config, tmp_path):
+     wk = hf.make_and_submit_demo_workflow(
+         "workflow_1",
+         path=tmp_path,
+         status=False,
+         add_to_known=False,
+         wait=True,
+     )
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == "201"
hpcflow/tests/conftest.py CHANGED
@@ -1,8 +1,10 @@
+ from __future__ import annotations
+ from pathlib import Path
  import pytest
  from hpcflow.app import app as hf


- def pytest_addoption(parser):
+ def pytest_addoption(parser: pytest.Parser):
      parser.addoption(
          "--slurm",
          action="store_true",
@@ -27,9 +29,16 @@ def pytest_addoption(parser):
          default=False,
          help="run integration-like workflow submission tests",
      )
+     parser.addoption(
+         "--repeat",
+         action="store",
+         default=1,
+         type=int,
+         help="number of times to repeat each test",
+     )


- def pytest_configure(config):
+ def pytest_configure(config: pytest.Config):
      config.addinivalue_line("markers", "slurm: mark test as slurm to run")
      config.addinivalue_line("markers", "wsl: mark test as wsl to run")
      config.addinivalue_line(
@@ -42,7 +51,7 @@ def pytest_configure(config):
      hf.run_time_info.in_pytest = True


- def pytest_collection_modifyitems(config, items):
+ def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]):
      if config.getoption("--slurm"):
          # --slurm given in cli: only run slurm tests
          for item in items:
@@ -84,19 +93,31 @@ def pytest_collection_modifyitems(config, items):
          )


- def pytest_unconfigure(config):
+ def pytest_unconfigure(config: pytest.Config):
      hf.run_time_info.in_pytest = False


  @pytest.fixture
- def null_config(tmp_path):
+ def null_config(tmp_path: Path):
      if not hf.is_config_loaded:
          hf.load_config(config_dir=tmp_path)
      hf.run_time_info.in_pytest = True


  @pytest.fixture
- def new_null_config(tmp_path):
+ def new_null_config(tmp_path: Path):
      hf.load_config(config_dir=tmp_path, warn=False)
      hf.load_template_components(warn=False)
      hf.run_time_info.in_pytest = True
+
+
+ @pytest.fixture
+ def unload_config():
+     hf.unload_config()
+
+
+ def pytest_generate_tests(metafunc):
+     repeats_num = int(metafunc.config.getoption("--repeat"))
+     if repeats_num > 1:
+         metafunc.fixturenames.append("tmp_ct")
+         metafunc.parametrize("tmp_ct", range(repeats_num))
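
Together, the new --repeat option and the pytest_generate_tests hook parametrize every collected test over a dummy tmp_ct fixture, so an invocation such as `pytest --repeat 3 hpcflow/tests/unit/test_utils.py` (an illustrative command, not from the diff) runs each test three times; with the default of 1, collection is unchanged.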
hpcflow/tests/data/multi_path_sequences.yaml ADDED
@@ -0,0 +1,29 @@
+ template_components:
+   task_schemas:
+     - objective: add_p1_components
+       inputs:
+         - parameter: p1
+       outputs:
+         - parameter: p2
+       actions:
+         - rules:
+             - path: resources.os_name
+               condition: { value.equal_to: posix }
+           commands:
+             - command: echo "$((<<parameter:p1.a>> + <<parameter:p1.b>>))"
+               stdout: <<int(parameter:p2)>>
+         - rules:
+             - path: resources.os_name
+               condition: { value.equal_to: nt }
+           commands:
+             - command: Write-Output ((<<parameter:p1.a>> + <<parameter:p1.b>>))
+               stdout: <<int(parameter:p2)>>
+ tasks:
+   - schema: add_p1_components
+     inputs:
+       p1: {}
+     multi_path_sequences:
+       - paths: [inputs.p1.a, inputs.p1.b]
+         values:
+           - [101, 102]
+           - [201, 202]