hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
|
@@ -0,0 +1,538 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Utilities for making data to use in testing.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, ClassVar, TYPE_CHECKING
|
|
9
|
+
from hpcflow.app import app as hf
|
|
10
|
+
from hpcflow.sdk.core.parameters import ParameterValue
|
|
11
|
+
from hpcflow.sdk.core.utils import get_file_context
|
|
12
|
+
from hpcflow.sdk.submission.shells import ALL_SHELLS
|
|
13
|
+
from hpcflow.sdk.typing import hydrate
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from collections.abc import Iterable, Mapping, Sequence
|
|
18
|
+
from typing_extensions import TypeAlias, Self
|
|
19
|
+
from h5py import Group as HDF5Group # type: ignore
|
|
20
|
+
from .actions import Action
|
|
21
|
+
from .element import ElementGroup
|
|
22
|
+
from .loop import Loop
|
|
23
|
+
from .parameters import InputSource, Parameter, SchemaInput, InputValue
|
|
24
|
+
from .task import Task
|
|
25
|
+
from .task_schema import TaskSchema
|
|
26
|
+
from .types import Resources
|
|
27
|
+
from .workflow import Workflow, WorkflowTemplate
|
|
28
|
+
from ..app import BaseApp
|
|
29
|
+
from ..typing import PathLike
|
|
30
|
+
# mypy: disable-error-code="no-untyped-def"
|
|
31
|
+
|
|
32
|
+
#: A string or a tuple of strings.
|
|
33
|
+
Strs: TypeAlias = "str | tuple[str, ...]"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def make_schemas(
    *ins_outs: tuple[dict[str, Any], tuple[str, ...]]
    | tuple[dict[str, Any], tuple[str, ...], str]
    | tuple[dict[str, Any], tuple[str, ...], str, dict[str, Any]]
) -> list[TaskSchema]:
    """
    Construct a collection of schemas.

    Each spec tuple is ``(inputs, outputs)``, optionally followed by an
    objective name and then by extra keyword arguments for the action.
    """
    schemas: list[TaskSchema] = []
    for task_idx, spec in enumerate(ins_outs):
        # Unpack the variable-length spec tuple; missing pieces get defaults.
        extra_act_kwargs: dict[str, Any] = {}
        if len(spec) == 2:
            inputs_spec, outputs_spec = spec
            objective = f"t{task_idx}"
        elif len(spec) == 3:
            inputs_spec, outputs_spec, objective = spec
        else:
            inputs_spec, outputs_spec, objective, extra_act_kwargs = spec

        # Distribute outputs over multiple commands' stdout: one command per
        # output, echoing the sum of all inputs plus a per-command offset.
        commands = [
            hf.Command(
                command=(
                    "echo $(("
                    + " + ".join(
                        f"<<parameter:{name}>> + {100 + cmd_idx}"
                        for name in inputs_spec
                    )
                    + "))"
                ),
                stdout=f"<<int(parameter:{out_name})>>",
            )
            for cmd_idx, out_name in enumerate(outputs_spec)
        ]
        if not outputs_spec:
            # No outputs: a single command whose stdout is not captured.
            commands = [
                hf.Command(
                    command=(
                        "echo $(("
                        + " + ".join(
                            f"<<parameter:{name}>> + 100" for name in inputs_spec
                        )
                        + "))"
                    ),
                )
            ]

        schemas.append(
            hf.TaskSchema(
                objective=objective,
                actions=[hf.Action(commands=commands, **extra_act_kwargs)],
                inputs=[
                    hf.SchemaInput(name, default_value=default)
                    for name, default in inputs_spec.items()
                ],
                outputs=[hf.SchemaOutput(hf.Parameter(name)) for name in outputs_spec],
            )
        )
    return schemas
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def make_parameters(num: int) -> list[Parameter]:
    """
    Construct a sequence of ``num`` parameters named ``p1``, ``p2``, ...
    """
    params: list[Parameter] = []
    for n in range(1, num + 1):
        params.append(hf.Parameter(f"p{n}"))
    return params
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def make_actions(
    ins_outs: list[tuple[Strs, str] | tuple[Strs, str, str]],
    env: str = "env1",
) -> list[Action]:
    """
    Construct a collection of actions.

    Each spec is ``(inputs, stdout_param)`` or ``(inputs, stdout_param,
    stderr_param)``, where ``inputs`` may be a single name or a tuple of names.
    """
    action_env = hf.ActionEnvironment(environment=env)
    result: list[Action] = []
    for spec in ins_outs:
        if len(spec) == 2:
            inputs, out_name = spec
            err_name: str | None = None
        else:
            inputs, out_name, err_name = spec
        if not isinstance(inputs, tuple):
            inputs = (inputs,)
        # NOTE: each placeholder is preceded by a space, so there is a
        # deliberate double space after "doSomething" (matches original).
        command_str = "doSomething " + "".join(
            f" <<parameter:{name}>>" for name in inputs
        )
        result.append(
            hf.Action(
                commands=[
                    hf.Command(
                        command_str,
                        stdout=f"<<parameter:{out_name}>>",
                        stderr=f"<<parameter:{err_name}>>" if err_name else None,
                    )
                ],
                environments=[action_env],
            )
        )
    return result
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def make_tasks(
    schemas_spec: Iterable[
        tuple[dict[str, Any], tuple[str, ...]]
        | tuple[dict[str, Any], tuple[str, ...], str]
    ],
    local_inputs: dict[int, Iterable[str]] | None = None,
    local_sequences: (
        dict[int, Iterable[tuple[str, int, int | float | None]]] | None
    ) = None,
    local_resources: dict[int, dict[str, dict]] | None = None,
    nesting_orders: dict[int, dict[str, float]] | None = None,
    input_sources: dict[int, dict[str, list[InputSource]]] | None = None,
    groups: dict[int, Iterable[ElementGroup]] | None = None,
) -> list[Task]:
    """
    Construct a sequence of tasks.

    The per-task keyword arguments are dicts keyed by the task's index within
    ``schemas_spec``.
    """
    local_inputs = local_inputs or {}
    local_sequences = local_sequences or {}
    local_resources = local_resources or {}
    nesting_orders = nesting_orders or {}
    input_sources = input_sources or {}
    groups = groups or {}
    result: list[Task] = []
    for idx, schema in enumerate(make_schemas(*schemas_spec)):
        # Input values are derived from the parameter name: "p3" -> 300.
        task_inputs = [
            hf.InputValue(hf.Parameter(name), value=int(name[1:]) * 100)
            for name in local_inputs.get(idx, ())
        ]
        sequences = []
        for seq_path, num_vals, nesting in local_sequences.get(idx, ()):
            # Base value comes from the parameter name embedded in the path,
            # e.g. "inputs.p2" -> 200.
            base = int(seq_path.split(".")[1][1:]) * 100
            sequences.append(
                hf.ValueSequence(
                    path=seq_path,
                    values=[base + offset for offset in range(num_vals)],
                    nesting_order=nesting,
                )
            )
        result.append(
            hf.Task(
                schema=schema,
                inputs=task_inputs,
                sequences=sequences,
                resources=dict(local_resources.get(idx, {})),
                nesting_order=nesting_orders.get(idx, {}),
                input_sources=input_sources.get(idx, None),
                groups=list(groups.get(idx, ())),
            )
        )
    return result
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def make_workflow(
    schemas_spec: Iterable[
        tuple[dict[str, Any], tuple[str, ...]]
        | tuple[dict[str, Any], tuple[str, ...], str]
    ],
    path: PathLike | None = None,
    local_inputs: dict[int, Iterable[str]] | None = None,
    local_sequences: (
        dict[int, Iterable[tuple[str, int, int | float | None]]] | None
    ) = None,
    local_resources: dict[int, dict[str, dict]] | None = None,
    nesting_orders: dict[int, dict[str, float]] | None = None,
    input_sources: dict[int, dict[str, list[InputSource]]] | None = None,
    resources: Resources = None,
    loops: list[Loop] | None = None,
    groups: dict[int, Iterable[ElementGroup]] | None = None,
    name: str = "w1",
    overwrite: bool = False,
    store: str = "zarr",
) -> Workflow:
    """
    Construct and persist a workflow from task-schema specs.
    """
    template: dict[str, Any] = {
        "name": name,
        "tasks": make_tasks(
            schemas_spec,
            local_inputs=local_inputs,
            local_sequences=local_sequences,
            local_resources=local_resources,
            nesting_orders=nesting_orders,
            input_sources=input_sources,
            groups=groups,
        ),
        "resources": resources,
    }
    # Only include loops if any were requested (matches original behaviour of
    # omitting the key entirely for an empty/None list).
    if loops:
        template["loops"] = loops
    return hf.Workflow.from_template(
        hf.WorkflowTemplate(**template),
        path=path,
        name=name,
        overwrite=overwrite,
        store=store,
    )
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def make_test_data_YAML_workflow(
    workflow_name: str,
    path: PathLike,
    app: BaseApp | None = None,
    pkg: str = "hpcflow.tests.data",
    **kwargs,
) -> Workflow:
    """Generate a workflow whose template file is defined in the test data directory."""
    if app is None:
        app = hf
    with get_file_context(pkg, workflow_name) as yaml_file:
        return app.Workflow.from_YAML_file(YAML_path=yaml_file, path=path, **kwargs)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def make_test_data_YAML_workflow_template(
    workflow_name: str,
    app: BaseApp | None = None,
    pkg: str = "hpcflow.tests.data",
    **kwargs,
) -> WorkflowTemplate:
    """Generate a workflow template whose file is defined in the test data directory."""
    if app is None:
        app = hf
    with get_file_context(pkg, workflow_name) as template_file:
        return app.WorkflowTemplate.from_file(path=template_file, **kwargs)
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
@dataclass
@hydrate
class P1_sub_parameter_cls(ParameterValue):
    """
    Parameter value handler for the ``p1_sub`` sub-parameter.
    """

    # Type name registered with the parameter-value machinery.
    _typ: ClassVar[str] = "p1_sub"

    # The single payload value.
    e: int = 0

    def CLI_format(self) -> str:
        """Format the value for use on a command line."""
        return f"{self.e}"

    @property
    def twice_e(self):
        """Double the stored value."""
        return 2 * self.e

    def prepare_JSON_dump(self) -> dict[str, Any]:
        """Return a JSON-serialisable representation."""
        return dict(e=self.e)

    def dump_to_HDF5_group(self, group: HDF5Group):
        """Write the value to an HDF5 group as an attribute."""
        group.attrs["e"] = self.e
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
@dataclass
@hydrate
class P1_sub_parameter_cls_2(ParameterValue):
    """
    Parameter value handler for the ``p1_sub_2`` sub-parameter.
    """

    # Type name registered with the parameter-value machinery.
    _typ: ClassVar[str] = "p1_sub_2"

    # The single payload value.
    f: int = 0
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
@dataclass
@hydrate
class P1_parameter_cls(ParameterValue):
    """
    Parameter value handler: ``p1c``

    Note
    ----
    This is a composite value handler.
    """

    #: Type name registered with the parameter-value machinery.
    _typ: ClassVar[str] = "p1c"
    #: Maps attribute names to the sub-parameter types they hold.
    _sub_parameters: ClassVar[dict[str, str]] = {
        "sub_param": "p1_sub",
        "sub_param_2": "p1_sub_2",
    }

    a: int = 0
    d: int | None = None
    sub_param: P1_sub_parameter_cls | None = None

    def __post_init__(self):
        # Allow `sub_param` to be passed as a plain mapping (e.g. from JSON)
        # and coerce it to the sub-parameter class.
        if self.sub_param is not None and not isinstance(
            self.sub_param, P1_sub_parameter_cls
        ):
            self.sub_param = P1_sub_parameter_cls(**self.sub_param)

    @classmethod
    def from_data(cls, b: int, c: int):
        """Alternate constructor: ``a`` is the sum of ``b`` and ``c``."""
        return cls(a=b + c)

    @classmethod
    def from_file(cls, path: str):
        """Alternate constructor: read ``a`` from the first line of a file."""
        with Path(path).open("rt") as fh:
            lns = fh.readlines()
            a = int(lns[0])
        return cls(a=a)

    @property
    def twice_a(self) -> int:
        """Double the primary value."""
        return self.a * 2

    @property
    def sub_param_prop(self) -> P1_sub_parameter_cls:
        """A derived sub-parameter whose value is ``4 * a``."""
        return P1_sub_parameter_cls(e=4 * self.a)

    def CLI_format(self) -> str:
        """Format the value for use on a command line."""
        return str(self.a)

    @staticmethod
    def CLI_format_group(*objs) -> str:
        """Group CLI formatter; intentionally produces nothing."""
        return ""

    @staticmethod
    def sum(*objs, **kwargs) -> str:
        """Return the string form of the sum of ``a`` over all objects."""
        return str(sum(i.a for i in objs))

    def custom_CLI_format(self, add: str | None = None, sub: str | None = None) -> str:
        """Format ``a`` with optional string-encoded add/subtract offsets.

        ``add`` defaults to 4 and ``sub`` to 0 when not given.
        """
        add_i = 4 if add is None else int(add)
        sub_i = 0 if sub is None else int(sub)
        return str(self.a + add_i - sub_i)

    def custom_CLI_format_prep(self, reps: str | None = None) -> list[int]:
        """Used for testing custom object CLI formatting.

        For example, with a command like this:

        `<<join[delim=","](parameter:p1c.custom_CLI_format_prep(reps=4))>>`.

        """
        reps_int = 1 if reps is None else int(reps)
        return [self.a] * reps_int

    @classmethod
    def CLI_parse(cls, a_str: str, double: str = "", e: str | None = None):
        """Parse CLI strings into an instance; ``double="true"`` doubles ``a``."""
        a = int(a_str)
        if double.lower() == "true":
            a *= 2
        if e:
            sub_param = P1_sub_parameter_cls(e=int(e))
        else:
            sub_param = None
        return cls(a=a, sub_param=sub_param)

    def prepare_JSON_dump(self) -> dict[str, Any]:
        """Return a JSON-serialisable representation, recursing into the sub-parameter."""
        sub_param_js = self.sub_param.prepare_JSON_dump() if self.sub_param else None
        return {"a": self.a, "d": self.d, "sub_param": sub_param_js}

    def dump_to_HDF5_group(self, group: HDF5Group):
        """Write this value to an HDF5 group; the sub-parameter gets a child group."""
        group.attrs["a"] = self.a
        if self.d is not None:
            group.attrs["d"] = self.d
        if self.sub_param:
            sub_group = group.create_group("sub_param")
            self.sub_param.dump_to_HDF5_group(sub_group)

    @classmethod
    def dump_element_group_to_HDF5_group(cls, objs: list[Self], group: HDF5Group):
        """
        Write a list (from an element group) of parameter values to an HDF5 group.

        Each object is written to its own numbered child group, mirroring
        `dump_to_HDF5_group` so `save_from_HDF5_group` can read each child back.
        """
        for obj_idx, p1_obj in enumerate(objs):
            grp_i = group.create_group(f"{obj_idx}")
            grp_i.attrs["a"] = p1_obj.a
            if p1_obj.d is not None:
                # Fix: write `d` to the per-object group (it was previously
                # written to the parent `group`, clobbering earlier objects'
                # values and breaking round-tripping via `save_from_HDF5_group`,
                # which reads `d` from the same group as `a`).
                grp_i.attrs["d"] = p1_obj.d
            if p1_obj.sub_param:
                sub_group = grp_i.create_group("sub_param")
                p1_obj.sub_param.dump_to_HDF5_group(sub_group)

    @classmethod
    def save_from_JSON(cls, data: dict, param_id: int | list[int], workflow: Workflow):
        """Reconstruct an instance from JSON data and persist it to the workflow."""
        obj = cls(**data)  # TODO: pass sub-param
        workflow.set_parameter_value(param_id=param_id, value=obj, commit=True)

    @classmethod
    def save_from_HDF5_group(cls, group: HDF5Group, param_id: int, workflow: Workflow):
        """Reconstruct an instance from an HDF5 group and persist it to the workflow."""
        a = group.attrs["a"].item()
        if "d" in group.attrs:
            d = group.attrs["d"].item()
        else:
            d = None
        if "sub_param" in group:
            sub_group = group.get("sub_param")
            e = sub_group.attrs["e"].item()
            sub_param = P1_sub_parameter_cls(e=e)
        else:
            sub_param = None
        obj = cls(a=a, d=d, sub_param=sub_param)
        workflow.set_parameter_value(param_id=param_id, value=obj, commit=True)
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def make_workflow_to_run_command(
    command,
    path,
    outputs=None,
    name="w1",
    overwrite=False,
    store="zarr",
    requires_dir=False,
):
    """Generate a single-task single-action workflow that runs the specified command,
    optionally generating some outputs."""
    out_names = outputs or []
    # The main command, followed by one echo command per requested output
    # whose stdout is captured as that output parameter.
    cmds = [hf.Command(command=command)] + [
        hf.Command(command=f'echo "output_{out}"', stdout=f"<<parameter:{out}>>")
        for out in out_names
    ]
    schema = hf.TaskSchema(
        objective="run_command",
        outputs=[hf.SchemaOutput(out) for out in out_names],
        actions=[hf.Action(commands=cmds, requires_dir=requires_dir)],
    )
    return hf.Workflow.from_template(
        hf.WorkflowTemplate(name=name, tasks=[hf.Task(schema=schema)]),
        path=path,
        name=name,
        overwrite=overwrite,
        store=store,
    )
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
def command_line_test(
    cmd_str: str,
    expected: str,
    inputs: dict[str, Any] | list[InputValue],
    path: Path,
    outputs: Sequence[str] | None = None,
    cmd_stdout: str | None = None,
    shell_args: tuple[str, str] | None = None,
    schema_inputs: list[Parameter | SchemaInput] | None = None,
):
    """Utility function for testing `Command.get_command_line` in various scenarios, via
    a single-action, single-command workflow.

    Asserts that the command line generated from `cmd_str` equals `expected`.

    Parameters
    ----------
    cmd_str
        The command string to test.
    expected
        The resolved command-line string that should be generated.
    inputs
        Either a dictionary mapping string input names to values, or a list of
        `InputValue` objects.
    path
        The path to use to create the workflow during the test.
    outputs
        List of string output names.
    cmd_stdout
        The `Command` object's stdout attribute.
    shell_args
        Tuple of shell name and OS name, used to select which `Shell` to
        instantiate; defaults to ("powershell", "nt").
    schema_inputs
        List of `SchemaInput` objects to use. If not passed, simple schema
        inputs are generated from `inputs`.
    """
    if isinstance(inputs, dict):
        input_values = [hf.InputValue(name, value=val) for name, val in inputs.items()]
    else:
        input_values = inputs

    if schema_inputs:
        resolved_schema_inputs = schema_inputs
    else:
        resolved_schema_inputs = [
            hf.SchemaInput(parameter=iv.parameter) for iv in input_values
        ]

    schema = hf.TaskSchema(
        objective="t1",
        inputs=resolved_schema_inputs,
        outputs=[
            hf.SchemaOutput(parameter=hf.Parameter(out_name))
            for out_name in outputs or ()
        ],
        actions=[hf.Action(commands=[hf.Command(command=cmd_str, stdout=cmd_stdout)])],
    )
    wk = hf.Workflow.from_template_data(
        tasks=[hf.Task(schema=schema, inputs=input_values)],
        path=path,
        template_name="test_get_command_line",
        overwrite=True,
    )
    task = wk.tasks.t1
    assert isinstance(task, hf.WorkflowTask)
    run = task.elements[0].iterations[0].action_runs[0]
    shell_name, os_name = shell_args or ("powershell", "nt")
    shell = ALL_SHELLS[shell_name][os_name]()
    cmd_line, _ = run.action.commands[0].get_command_line(
        EAR=run, shell=shell, env=run.get_environment()
    )
    assert cmd_line == expected
|