hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/tests/unit/test_resources.py (new file, +243 lines):

```diff
@@ -0,0 +1,243 @@
+from __future__ import annotations
+import os
+from pathlib import Path
+import pytest
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.errors import UnsupportedSchedulerError
+
+
+def test_init_scope_equivalence_simple() -> None:
+    rs1 = hf.ResourceSpec(scope=hf.ActionScope.any(), num_cores=1)
+    rs2 = hf.ResourceSpec(scope="any", num_cores=1)
+    assert rs1 == rs2
+
+
+def test_init_scope_equivalence_with_kwargs() -> None:
+    rs1 = hf.ResourceSpec(
+        scope=hf.ActionScope.input_file_generator(file="my_file"), num_cores=1
+    )
+    rs2 = hf.ResourceSpec(scope="input_file_generator[file=my_file]", num_cores=1)
+    assert rs1 == rs2
+
+
+def test_init_no_args() -> None:
+    rs1 = hf.ResourceSpec()
+    rs2 = hf.ResourceSpec(scope="any")
+    assert rs1 == rs2
+
+
+def test_resource_list_raise_on_identical_scopes() -> None:
+    with pytest.raises(ValueError):
+        hf.ResourceList.normalise([{"scope": "any"}, {"scope": "any"}])
+
+
+def test_merge_other_same_scope() -> None:
+    res_lst_1 = hf.ResourceList.from_json_like({"any": {"num_cores": 1}})
+    res_lst_2 = hf.ResourceList.from_json_like({"any": {}})
+    res_lst_2.merge_other(res_lst_1)
+    assert res_lst_2 == hf.ResourceList.from_json_like({"any": {"num_cores": 1}})
+
+
+def test_merge_other_same_scope_no_overwrite() -> None:
+    res_lst_1 = hf.ResourceList.from_json_like({"any": {"num_cores": 1}})
+    res_lst_2 = hf.ResourceList.from_json_like({"any": {"num_cores": 2}})
+    res_lst_2.merge_other(res_lst_1)
+    assert res_lst_2 == hf.ResourceList.from_json_like({"any": {"num_cores": 2}})
+
+
+def test_merge_other_multi_scope() -> None:
+    res_lst_1 = hf.ResourceList.from_json_like({"any": {"num_cores": 1}})
+    res_lst_2 = hf.ResourceList.from_json_like({"any": {}, "main": {"num_cores": 3}})
+    res_lst_2.merge_other(res_lst_1)
+    assert res_lst_2 == hf.ResourceList.from_json_like(
+        {"any": {"num_cores": 1}, "main": {"num_cores": 3}}
+    )
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_merge_other_persistent_workflow_reload(null_config, tmp_path: Path, store: str):
+    wkt = hf.WorkflowTemplate(
+        name="test_load",
+        resources={"any": {"num_cores": 2}},
+        tasks=[
+            hf.Task(
+                schema=hf.task_schemas.test_t1_ps,
+                inputs={"p1": 101},
+            ),
+        ],
+    )
+    wk = hf.Workflow.from_template(wkt, path=tmp_path, store=store)
+    wk = hf.Workflow(wk.path)
+    assert wk.template.tasks[0].element_sets[0].resources[0].num_cores == 2
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_use_persistent_resource_spec(null_config, tmp_path: Path, store: str):
+    # create a workflow from which we can use a resource spec in a new workflow:
+    num_cores_check = 2
+    wk_base = hf.Workflow.from_template_data(
+        template_name="wk_base",
+        path=tmp_path,
+        store=store,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_ps],
+                inputs=[hf.InputValue("p1", 101)],
+                resources={"any": {"num_cores": num_cores_check}},
+            )
+        ],
+    )
+    resource_spec = wk_base.tasks[0].template.element_sets[0].resources[0]
+
+    wk = hf.Workflow.from_template_data(
+        template_name="wk",
+        path=tmp_path,
+        store=store,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_ps],
+                inputs=[hf.InputValue("p1", 101)],
+            ),
+        ],
+        resources=[resource_spec],
+    )
+
+    assert wk.tasks[0].template.element_sets[0].resources[0].num_cores == num_cores_check
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_use_persistent_resource_list(null_config, tmp_path: Path, store: str):
+    # create a workflow from which we can use the resource list in a new workflow:
+    num_cores_check = 2
+    wk_base = hf.Workflow.from_template_data(
+        template_name="wk_base",
+        path=tmp_path,
+        store=store,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_ps],
+                inputs=[hf.InputValue("p1", 101)],
+                resources={"any": {"num_cores": num_cores_check}},
+            )
+        ],
+    )
+    resource_list = wk_base.tasks[0].template.element_sets[0].resources
+
+    wk = hf.Workflow.from_template_data(
+        template_name="wk",
+        path=tmp_path,
+        store=store,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_ps],
+                inputs=[hf.InputValue("p1", 101)],
+            ),
+        ],
+        resources=resource_list[:],  # must pass a list!
+    )
+
+    assert wk.tasks[0].template.element_sets[0].resources[0].num_cores == num_cores_check
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_default_scheduler_set(new_null_config, tmp_path: Path, store: str):
+    wk = hf.Workflow.from_template_data(
+        template_name="wk",
+        path=tmp_path,
+        store=store,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_bash],
+                inputs=[hf.InputValue("p1", 101)],
+            ),
+        ],
+    )
+    wk.add_submission()
+    assert wk.submissions[0].jobscripts[0].scheduler_name == hf.config.default_scheduler
+
+
+def test_scheduler_case_insensitive(null_config) -> None:
+    rs1 = hf.ResourceSpec(scheduler="direct")
+    rs2 = hf.ResourceSpec(scheduler="dIrEcT")
+    assert rs1 == rs2
+    assert rs1.scheduler == rs2.scheduler == "direct"
+
+
+def test_scheduler_strip(null_config) -> None:
+    rs1 = hf.ResourceSpec(scheduler=" direct ")
+    rs2 = hf.ResourceSpec(scheduler="direct")
+    assert rs1 == rs2
+    assert rs1.scheduler == rs2.scheduler == "direct"
+
+
+def test_shell_case_insensitive(null_config) -> None:
+    shell_name = "bash" if os.name == "posix" else "powershell"
+    shell_name_title = shell_name
+    n = shell_name_title[0]
+    shell_name_title = shell_name_title.replace(n, n.upper())
+    assert shell_name != shell_name_title
+    rs1 = hf.ResourceSpec(shell=shell_name)
+    rs2 = hf.ResourceSpec(shell=shell_name_title)
+    assert rs1 == rs2
+    assert rs1.shell == rs2.shell == shell_name
+
+
+def test_shell_strip(null_config) -> None:
+    shell_name = "bash" if os.name == "posix" else "powershell"
+    rs1 = hf.ResourceSpec(shell=f" {shell_name} ")
+    rs2 = hf.ResourceSpec(shell=shell_name)
+    assert rs1 == rs2
+    assert rs1.shell == rs2.shell == shell_name
+
+
+def test_os_name_case_insensitive(null_config):
+    rs1 = hf.ResourceSpec(os_name="nt")
+    rs2 = hf.ResourceSpec(os_name="NT")
+    assert rs1 == rs2
+    assert rs1.os_name == rs2.os_name == "nt"
+
+
+def test_os_name_strip(null_config) -> None:
+    rs1 = hf.ResourceSpec(os_name=" nt ")
+    rs2 = hf.ResourceSpec(os_name="nt")
+    assert rs1 == rs2
+    assert rs1.os_name == rs2.os_name == "nt"
+
+
+def test_raise_on_unsupported_scheduler(new_null_config, tmp_path: Path):
+    # slurm not supported by default config file:
+    wk = hf.Workflow.from_template_data(
+        template_name="wk1",
+        path=tmp_path,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_bash],
+                inputs=[hf.InputValue("p1", 101)],
+                resources=[hf.ResourceSpec(scheduler="slurm")],
+            )
+        ],
+    )
+    with pytest.raises(UnsupportedSchedulerError):
+        wk.add_submission()
+
+
+def test_can_use_non_default_scheduler(new_null_config, tmp_path: Path):
+    # for either OS choose a compatible scheduler not set by default:
+    if os.name == "nt":
+        opt_scheduler = "direct_posix"  # i.e. for WSL
+    else:
+        opt_scheduler = "slurm"
+    hf.config.add_scheduler(opt_scheduler)
+
+    wk = hf.Workflow.from_template_data(
+        template_name="wk1",
+        path=tmp_path,
+        tasks=[
+            hf.Task(
+                schema=[hf.task_schemas.test_t1_bash],
+                inputs=[hf.InputValue("p1", 101)],
+                resources=[hf.ResourceSpec(scheduler=opt_scheduler)],
+            )
+        ],
+    )
+    wk.add_submission()
```
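The core equivalence these new `ResourceSpec` tests pin down is that string-form scopes parse to the same thing as the explicit `ActionScope` constructors. A minimal sketch of that equivalence (assuming a configured hpcflow 0.2.x installation; it uses only calls that appear in the diff above):

```python
from hpcflow.app import app as hf

# The string scope "input_file_generator[file=my_file]" parses to the same
# scope the explicit constructor builds, so the two specs compare equal:
rs_obj = hf.ResourceSpec(
    scope=hf.ActionScope.input_file_generator(file="my_file"), num_cores=1
)
rs_str = hf.ResourceSpec(scope="input_file_generator[file=my_file]", num_cores=1)
assert rs_obj == rs_str
```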
hpcflow/tests/unit/test_run.py (new file, +286 lines):

```diff
@@ -0,0 +1,286 @@
+from __future__ import annotations
+
+import os
+
+import pytest
+
+from pathlib import Path
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.actions import SkipReason
+from hpcflow.sdk.core.test_utils import make_workflow_to_run_command
+
+
+def test_compose_commands_no_shell_var(null_config, tmp_path: Path):
+    ts = hf.TaskSchema(
+        objective="test_compose_commands",
+        actions=[hf.Action(commands=[hf.Command(command="Start-Sleep 10")])],
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test_compose_commands",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts)],
+    )
+    sub = wk.add_submission()
+    assert sub is not None
+    js = sub.jobscripts[0]
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
+    assert shell_vars == {0: []}
+
+
+def test_compose_commands_single_shell_var(null_config, tmp_path: Path):
+    ts = hf.TaskSchema(
+        objective="test_compose_commands",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p1")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="Write-Output (<<parameter:p1>> + 100)",
+                        stdout="<<int(parameter:p1)>>",
+                    ),
+                ],
+            ),
+        ],
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test_compose_commands",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 101})],
+    )
+    sub = wk.add_submission()
+    assert sub is not None
+    js = sub.jobscripts[0]
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
+    assert shell_vars == {0: [("outputs.p1", "parameter_p1", "stdout")]}
+
+
+def test_compose_commands_multi_single_shell_var(null_config, tmp_path: Path):
+    ts = hf.TaskSchema(
+        objective="test_compose_commands",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p1")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(command="Start-Sleep 10"),
+                    hf.Command(
+                        command="Write-Output (<<parameter:p1>> + 100)",
+                        stdout="<<int(parameter:p1)>>",
+                    ),
+                ],
+            ),
+        ],
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test_compose_commands",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 101})],
+    )
+    sub = wk.add_submission()
+    assert sub is not None
+    js = sub.jobscripts[0]
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
+    assert shell_vars == {0: [], 1: [("outputs.p1", "parameter_p1", "stdout")]}
+
+
+@pytest.mark.integration
+def test_run_dir_diff_new_file(null_config, tmp_path):
+    if os.name == "nt":
+        command = "New-Item -Path 'new_file.txt' -ItemType File"
+    else:
+        command = "touch new_file.txt"
+    wk = make_workflow_to_run_command(
+        command=command,
+        requires_dir=True,
+        path=tmp_path,
+        name="w2",
+        overwrite=True,
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+    assert wk.get_all_EARs()[0].dir_diff.files_created == ["new_file.txt"]
+
+
+@pytest.mark.integration
+def test_run_skip_reason_upstream_failure(null_config, tmp_path):
+    ts = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaInput("p2")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p1>> + 100 ))",
+                        stdout="<<parameter:p2>>",
+                    ),
+                    hf.Command(command="exit 1"),
+                ]
+            ),
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p2>> + 100 ))",
+                        stdout="<<parameter:p2>>",
+                    ),
+                ]
+            ),  # should be skipped due to failure of action 0
+        ],
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test_skip_reason",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 100})],
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+    runs = wk.get_all_EARs()
+    assert not runs[0].success
+    assert not runs[1].success
+    assert runs[0].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[1].skip_reason is SkipReason.UPSTREAM_FAILURE
+
+
+@pytest.mark.integration
+def test_run_skip_reason_loop_termination(null_config, tmp_path):
+    ts = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaInput("p1")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p1>> + 100 ))",
+                        stdout="<<int(parameter:p1)>>",
+                    ),
+                ]
+            ),
+        ],
+    )
+    loop_term = hf.Rule(path="outputs.p1", condition={"value.equal_to": 300})
+    wk = hf.Workflow.from_template_data(
+        template_name="test_skip_reason",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 100})],
+        loops=[
+            hf.Loop(name="my_loop", tasks=[0], termination=loop_term, num_iterations=3)
+        ],
+    )
+    # loop should terminate after the second iteration
+    wk.submit(wait=True, add_to_known=False, status=False)
+    runs = wk.get_all_EARs()
+
+    assert runs[0].get("outputs.p1") == 200
+    assert runs[1].get("outputs.p1") == 300
+    assert not runs[2].get("outputs.p1")
+
+    assert runs[0].success
+    assert runs[1].success
+    assert not runs[2].success
+
+    assert runs[0].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[1].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[2].skip_reason is SkipReason.LOOP_TERMINATION
+
+
+def test_get_data_in_values_input_files(null_config, tmp_path: Path):
+    with (file_name := Path("my_file.txt")).open("wt") as fh:
+        fh.write("hello!\n")
+    file_spec = hf.FileSpec("my_file", str(file_name))
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(parameter="p1"),
+        ],
+        actions=[
+            hf.Action(
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=file_spec,
+                        inputs=[hf.Parameter("p1")],
+                    )
+                ],
+            )
+        ],
+    )
+    t1 = hf.Task(schema=[s1], input_files=[hf.InputFile(file_spec, path="my_file.txt")])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="test_get_data_in_values_input_files",
+    )
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    assert run.get_data_in_values(("input_files.my_file",)) == {
+        "my_file": wk.input_files_path.joinpath("0/my_file.txt").as_posix()
+    }
+
+
+def test_get_data_in_values_user_provided_input_file(null_config, tmp_path: Path):
+
+    # pass an input file so the IFG doesn't need to run
+    with (file_name := Path("my_file.txt")).open("wt") as fh:
+        fh.write("hello!\n")
+
+    file_spec = hf.FileSpec("my_file", str(file_name))
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(parameter="p1"),
+        ],
+        actions=[
+            hf.Action(
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=file_spec,
+                        inputs=[hf.Parameter("p1")],
+                    )
+                ],
+            )
+        ],
+    )
+    t1 = hf.Task(schema=[s1], input_files=[hf.InputFile(file_spec, path="my_file.txt")])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="test_get_data_in_values_input_files",
+    )
+    run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
+    assert run.get_data_in_values(("input_files.my_file",)) == {
+        "my_file": wk.input_files_path.joinpath("0/my_file.txt").as_posix()
+    }
+
+
+def test_get_data_in_values_input_file_to_script(null_config, tmp_path: Path):
+    # pass an input file so the IFG doesn't need to run
+    with (file_name := Path("my_file.txt")).open("wt") as fh:
+        fh.write("hello!\n")
+
+    file_spec = hf.FileSpec("my_file", str(file_name))
+    act = hf.Action(
+        input_file_generators=[
+            hf.InputFileGenerator(
+                input_file=file_spec,
+                inputs=[hf.Parameter("p1")],
+            )
+        ],
+        script_data_in={"input_files.my_file": "direct"},
+    )
+    s1 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        actions=[act],
+    )
+    t1 = hf.Task(schema=[s1], input_files=[hf.InputFile(file_spec, path="my_file.txt")])
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        path=tmp_path,
+        template_name="test_get_data_in_values_input_files",
+    )
+    act_runs = wk.tasks[0].elements[0].iterations[0].action_runs
+    assert len(act_runs) == 1
+    assert act_runs[0].get_data_in_values(("input_files.my_file",)) == {
+        "my_file": wk.input_files_path.joinpath("0/my_file.txt").as_posix()
+    }
```
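The skip-reason tests above distinguish why an element action run (EAR) did not execute: UPSTREAM_FAILURE for runs abandoned after an earlier action failed, LOOP_TERMINATION for iterations cut off by a loop's termination rule. A hypothetical helper (an illustration, not part of the diff) that tallies a submitted workflow's runs by skip reason, using only the `get_all_EARs()` and `skip_reason` attributes exercised above:

```python
from collections import Counter

def summarise_skip_reasons(wk) -> Counter:
    # `wk` is a submitted hpcflow Workflow, as constructed in the tests above;
    # values observed there: NOT_SKIPPED, UPSTREAM_FAILURE, LOOP_TERMINATION.
    return Counter(run.skip_reason for run in wk.get_all_EARs())
```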
hpcflow/tests/unit/test_run_directories.py (new file, +29 lines):

```diff
@@ -0,0 +1,29 @@
+from pathlib import Path
+import pytest
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.test_utils import make_workflow
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_run_directories(null_config, tmp_path, store):
+    wk = make_workflow(
+        schemas_spec=[
+            [{"p1": None}, ("p1",), "t1"],
+            [{"p2": None}, ("p2",), "t2", {"requires_dir": True}],
+        ],
+        local_inputs={0: ("p1",)},
+        local_sequences={1: [("inputs.p2", 2, 0)]},
+        path=tmp_path,
+        store=store,
+    )
+    lp_0 = hf.Loop(name="my_loop", tasks=[1], num_iterations=2)
+    wk.add_loop(lp_0)
+    sub = wk.add_submission()  # populates run directories
+
+    run_dirs = wk.get_run_directories()
+
+    assert run_dirs[0] is None
+    assert str(run_dirs[1]) == str(Path(wk.path).joinpath("execute/t_1/e_0/i_0"))
+    assert str(run_dirs[2]) == str(Path(wk.path).joinpath("execute/t_1/e_1/i_0"))
+    assert str(run_dirs[3]) == str(Path(wk.path).joinpath("execute/t_1/e_0/i_1"))
+    assert str(run_dirs[4]) == str(Path(wk.path).joinpath("execute/t_1/e_1/i_1"))
```
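The path assertions above encode the run-directory layout `execute/t_<task>/e_<element>/i_<iteration>` under the workflow path, with `None` for runs that do not require a directory. A hypothetical helper (again an illustration, not part of the diff) that rebuilds such a path:

```python
from pathlib import Path

def run_directory(workflow_path: str, task: int, element: int, iteration: int) -> Path:
    # Layout as asserted in the test: <workflow>/execute/t_<t>/e_<e>/i_<i>
    return Path(workflow_path) / "execute" / f"t_{task}" / f"e_{element}" / f"i_{iteration}"
```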
hpcflow/tests/unit/test_runtime.py (new file, +9 lines):

```diff
@@ -0,0 +1,9 @@
+from __future__ import annotations
+from hpcflow.app import app as hf
+
+
+def test_in_pytest_if_not_frozen() -> None:
+    """This is to check we can get the correct invocation command when running non-frozen
+    tests (when frozen the invocation command is just the executable file)."""
+    if not hf.run_time_info.is_frozen:
+        assert hf.run_time_info.in_pytest
```
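The runtime test hinges on two `run_time_info` flags that appear in the diff: `is_frozen` and `in_pytest`. A minimal sketch of branching on them (assuming hpcflow 0.2.x):

```python
from hpcflow.app import app as hf

# When frozen (e.g. a PyInstaller build) the invocation command is just the
# executable, so pytest detection only applies to non-frozen runs:
if not hf.run_time_info.is_frozen:
    print("in pytest:", hf.run_time_info.in_pytest)
```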