hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/tests/unit/test_action.py
@@ -0,0 +1,1066 @@
+from __future__ import annotations
+from collections.abc import Mapping
+from pathlib import Path
+import pytest
+from typing import Any
+
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.errors import (
+    ActionEnvironmentMissingNameError,
+    UnknownActionDataKey,
+    UnknownActionDataParameter,
+    UnsupportedActionDataFormat,
+)
+
+
+@pytest.fixture
+def dummy_action_kwargs_pre_proc():
+    act_kwargs = {
+        "commands": [hf.Command("ls")],
+        "input_file_generators": [
+            hf.InputFileGenerator(
+                input_file=hf.FileSpec("inp_file", name="file.inp"),
+                inputs=[hf.Parameter("p1")],
+            )
+        ],
+    }
+    return act_kwargs
+
+
+def test_action_equality(null_config) -> None:
+    a1 = hf.Action(commands=[hf.Command("ls")], environments=[])
+    a2 = hf.Action(commands=[hf.Command("ls")], environments=[])
+    assert a1 == a2
+
+
+def test_action_scope_to_string_any() -> None:
+    assert hf.ActionScope.any().to_string() == "any"
+
+
+def test_action_scope_to_string_main() -> None:
+    assert hf.ActionScope.main().to_string() == "main"
+
+
+def test_action_scope_to_string_processing() -> None:
+    assert hf.ActionScope.processing().to_string() == "processing"
+
+
+def test_action_scope_to_string_input_file_generator_no_kwargs() -> None:
+    assert hf.ActionScope.input_file_generator().to_string() == "input_file_generator"
+
+
+def test_action_scope_to_string_output_file_parser_no_kwargs() -> None:
+    assert hf.ActionScope.output_file_parser().to_string() == "output_file_parser"
+
+
+def test_action_scope_to_string_input_file_generator_with_kwargs() -> None:
+    assert (
+        hf.ActionScope.input_file_generator(file="file1").to_string()
+        == "input_file_generator[file=file1]"
+    )
+
+
+def test_action_scope_to_string_output_file_parser_with_kwargs() -> None:
+    assert (
+        hf.ActionScope.output_file_parser(output="out1").to_string()
+        == "output_file_parser[output=out1]"
+    )
+
+
+def test_action_scope_class_method_init_scope_any() -> None:
+    assert hf.ActionScope(typ=hf.ActionScopeType.ANY) == hf.ActionScope.any()
+
+
+def test_action_scope_class_method_init_scope_main() -> None:
+    assert hf.ActionScope(typ=hf.ActionScopeType.MAIN) == hf.ActionScope.main()
+
+
+def test_action_scope_class_method_init_scope_processing() -> None:
+    assert (
+        hf.ActionScope(typ=hf.ActionScopeType.PROCESSING) == hf.ActionScope.processing()
+    )
+
+
+def test_action_scope_class_method_init_scope_input_file_generator_no_kwargs() -> None:
+    assert (
+        hf.ActionScope(typ=hf.ActionScopeType.INPUT_FILE_GENERATOR)
+        == hf.ActionScope.input_file_generator()
+    )
+
+
+def test_action_scope_class_method_init_scope_output_file_parser_no_kwargs() -> None:
+    assert (
+        hf.ActionScope(typ=hf.ActionScopeType.OUTPUT_FILE_PARSER)
+        == hf.ActionScope.output_file_parser()
+    )
+
+
+def test_action_scope_class_method_init_scope_input_file_generator_with_kwargs() -> None:
+    assert hf.ActionScope(
+        typ=hf.ActionScopeType.INPUT_FILE_GENERATOR, file="file1"
+    ) == hf.ActionScope.input_file_generator(file="file1")
+
+
+def test_action_scope_class_method_init_scope_output_file_parser_with_kwargs() -> None:
+    assert hf.ActionScope(
+        typ=hf.ActionScopeType.OUTPUT_FILE_PARSER, output="out1"
+    ) == hf.ActionScope.output_file_parser(output="out1")
+
+
+def test_action_scope_raise_on_unknown_kwargs_type_any() -> None:
+    with pytest.raises(TypeError):
+        hf.ActionScope(typ=hf.ActionScopeType.ANY, bad="arg")
+
+
+def test_action_scope_raise_on_unknown_kwargs_type_main() -> None:
+    with pytest.raises(TypeError):
+        hf.ActionScope(typ=hf.ActionScopeType.MAIN, bad="arg")
+
+
+def test_action_scope_raise_on_unknown_kwargs_type_processing() -> None:
+    with pytest.raises(TypeError):
+        hf.ActionScope(typ=hf.ActionScopeType.PROCESSING, bad="arg")
+
+
+def test_action_scope_raise_on_unknown_kwargs_type_input_file_generator() -> None:
+    with pytest.raises(TypeError):
+        hf.ActionScope(typ=hf.ActionScopeType.INPUT_FILE_GENERATOR, bad="arg")
+
+
+def test_action_scope_raise_on_unknown_kwargs_type_output_file_parser() -> None:
+    with pytest.raises(TypeError):
+        hf.ActionScope(typ=hf.ActionScopeType.OUTPUT_FILE_PARSER, bad="arg")
+
+
+def test_action_scope_no_raise_on_good_kwargs_type_input_file_generator() -> None:
+    hf.ActionScope(typ=hf.ActionScopeType.INPUT_FILE_GENERATOR, file="file1")
+
+
+def test_action_scope_no_raise_on_good_kwargs_type_output_file_parser() -> None:
+    hf.ActionScope(typ=hf.ActionScopeType.OUTPUT_FILE_PARSER, output="out1")
+
+
+def test_action_scope_no_raise_on_no_kwargs_type_input_file_generator() -> None:
+    hf.ActionScope(typ=hf.ActionScopeType.INPUT_FILE_GENERATOR)
+
+
+def test_action_scope_no_raise_on_no_kwargs_type_output_file_parser() -> None:
+    hf.ActionScope(typ=hf.ActionScopeType.OUTPUT_FILE_PARSER)
+
+
+def test_action_scope_json_like_round_trip() -> None:
+    as1 = hf.ActionScope.input_file_generator(file="file1")
+    js, _ = as1.to_json_like()
+    assert isinstance(js, Mapping)
+    as1_rl = hf.ActionScope.from_json_like(js)
+    assert as1 == as1_rl
+
+
+def test_action_scope_from_json_like_string_and_dict_equality() -> None:
+    as1_js = "input_file_generator[file=file1]"
+    as2_js: Mapping[str, Any] = {
+        "type": "input_file_generator",
+        "kwargs": {
+            "file": "file1",
+        },
+    }
+    assert hf.ActionScope.from_json_like(as1_js) == hf.ActionScope.from_json_like(as2_js)
+
+
+def test_get_command_input_types_sub_parameters_true_no_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1",)
+
+
+def test_get_command_input_types_sub_parameters_true_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1.a>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1.a",)
+
+
+def test_get_command_input_types_sub_parameters_false_no_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1",)
+
+
+def test_get_command_input_types_sub_parameters_false_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1.a>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1",)
+
+
+def test_get_command_input_types_sum_sub_parameters_true_no_sub_param() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output <<sum(parameter:p1)>>")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1",)
+
+
+def test_get_command_input_types_sum_sub_parameters_true_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output <<sum(parameter:p1.a)>>")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1.a",)
+
+
+def test_get_command_input_types_sum_sub_parameters_false_no_sub_param() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output <<sum(parameter:p1)>>")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1",)
+
+
+def test_get_command_input_types_sum_sub_parameters_false_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output <<sum(parameter:p1.a)>>")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1",)
+
+
+def test_get_command_input_types_label_sub_parameters_true_no_sub_param() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1[one]>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1[one]",)
+
+
+def test_get_command_input_types_label_sub_parameters_true_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1[one].a>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=True) == ("p1[one].a",)
+
+
+def test_get_command_input_types_label_sub_parameters_false_no_sub_param() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1[one]>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1[one]",)
+
+
+def test_get_command_input_types_label_sub_parameters_false_with_sub_parameter() -> None:
+    act = hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1[one].a>> + 100)")])
+    assert act.get_command_input_types(sub_parameters=False) == ("p1[one]",)
+
+
+def test_get_script_name(null_config) -> None:
+    expected = {
+        "<<script:/software/hello.py>>": "hello.py",
+        "<<script:software/hello.py>>": "hello.py",
+        r"<<script:C:\long\path\to\script.py>>": "script.py",
+        "/path/to/script.py": "/path/to/script.py",
+    }
+    for k, v in expected.items():
+        assert hf.Action.get_script_name(k) == v
+
+
+def test_is_snippet_script(null_config) -> None:
+    expected = {
+        "<<script:/software/hello.py>>": True,
+        "<<script:software/hello.py>>": True,
+        r"<<script:C:\long\path\to\script.py>>": True,
+        "/path/to/script.py": False,
+    }
+    for k, v in expected.items():
+        assert hf.Action.is_snippet_script(k) == v
+
+
+def test_get_snippet_script_path(null_config) -> None:
+    expected = {
+        "<<script:/software/hello.py>>": Path("/software/hello.py"),
+        "<<script:software/hello.py>>": Path("software/hello.py"),
+        r"<<script:C:\long\path\to\script.py>>": Path(r"C:\long\path\to\script.py"),
+    }
+    for k, v in expected.items():
+        assert hf.Action.get_snippet_script_path(k) == v
+
+
+def test_get_snippet_script_path_False(null_config) -> None:
+    assert not hf.Action.get_snippet_script_path("/path/to/script.py")
+
+
+def test_process_script_data_in_str(null_config) -> None:
+    act = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    ts = hf.TaskSchema(objective="ts1", inputs=[hf.SchemaInput("p1")], actions=[act])
+    assert ts.actions[0].script_data_in == {"inputs.p1": {"format": "json"}}
+
+
+def test_process_script_data_in_str_dict_equivalence(null_config) -> None:
+    act_1 = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    act_2 = hf.Action(
+        script="<<script:path/to/some/script>>", script_data_in={"inputs.p1": "json"}
+    )
+
+    ts_1 = hf.TaskSchema(objective="ts1", inputs=[hf.SchemaInput("p1")], actions=[act_1])
+    ts_2 = hf.TaskSchema(objective="ts1", inputs=[hf.SchemaInput("p1")], actions=[act_2])
+
+    assert ts_1.actions[0].script_data_in == ts_2.actions[0].script_data_in
+
+
+def test_process_script_data_in_str_multi(null_config) -> None:
+    act = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1": {"format": "json"},
+        "inputs.p2": {"format": "json"},
+    }
+
+
+def test_process_script_data_in_str_labelled_single(null_config) -> None:
+    act = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1", labels={"one": {}})],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {"inputs.p1": {"format": "json"}}
+
+
+def test_process_script_data_in_str_labelled_multiple(null_config) -> None:
+    act = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1", labels={"one": {}}, multiple=True)],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {"inputs.p1[one]": {"format": "json"}}
+
+
+def test_process_script_data_in_dict_all_str_equivalence(null_config) -> None:
+    act_1 = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    act_2 = hf.Action(
+        script="<<script:path/to/some/script>>", script_data_in={"*": "json"}
+    )
+
+    ts_1 = hf.TaskSchema(objective="ts1", inputs=[hf.SchemaInput("p1")], actions=[act_1])
+    ts_2 = hf.TaskSchema(objective="ts1", inputs=[hf.SchemaInput("p1")], actions=[act_2])
+
+    assert ts_1.actions[0].script_data_in == ts_2.actions[0].script_data_in
+
+
+def test_process_script_data_in_dict_all_str_equivalence_multi(null_config) -> None:
+    act_1 = hf.Action(script="<<script:path/to/some/script>>", script_data_in="json")
+    act_2 = hf.Action(
+        script="<<script:path/to/some/script>>", script_data_in={"*": "json"}
+    )
+
+    ts_1 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act_1],
+    )
+    ts_2 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act_2],
+    )
+
+    assert ts_1.actions[0].script_data_in == ts_2.actions[0].script_data_in
+
+
+def test_process_script_data_in_dict_mixed(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json", "p2": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1": {"format": "json"},
+        "inputs.p2": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_mixed_all(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json", "*": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1"),
+            hf.SchemaInput("p2"),
+            hf.SchemaInput("p3"),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1": {"format": "json"},
+        "inputs.p2": {"format": "hdf5"},
+        "inputs.p3": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_labels_multiple(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1[one]": "json"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {"inputs.p1[one]": {"format": "json"}}
+
+
+def test_process_script_data_in_dict_labels_multiple_two(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1[one]": "json", "p1[two]": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_labels_multiple_two_catch_all(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1[one]": "json", "*": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_excluded(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1"),
+            hf.SchemaInput("p2"),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {"inputs.p1": {"format": "json"}}
+
+
+def test_process_script_data_in_dict_unlabelled_to_labelled(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "json"},
+    }
+
+
+def test_process_script_data_in_dict_unlabelled_to_labelled_with_mixed_label(
+    null_config,
+) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json", "p1[two]": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_labelled_mixed_catch_all(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1[one]": "json", "*": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_dict_unlabelled_to_labelled_mixed_catch_all(
+    null_config,
+) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": "json", "*": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[
+            hf.SchemaInput("p1", labels={"one": {}, "two": {}}, multiple=True),
+            hf.SchemaInput("p2"),
+        ],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1[one]": {"format": "json"},
+        "inputs.p1[two]": {"format": "json"},
+        "inputs.p2": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_str_raise_invalid_format(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>", script_data_in="some_weird_format"
+    )
+    with pytest.raises(UnsupportedActionDataFormat):
+        hf.TaskSchema(
+            objective="ts1",
+            inputs=[hf.SchemaInput("p1")],
+            actions=[act],
+        )
+
+
+def test_process_script_data_in_dict_raise_invalid_parameter(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p2": "json"},
+    )
+    with pytest.raises(UnknownActionDataParameter):
+        hf.TaskSchema(
+            objective="ts1",
+            inputs=[hf.SchemaInput("p1")],
+            actions=[act],
+        )
+
+
+def test_process_script_data_in_dict_raise_invalid_parameter_unknown_label(
+    null_config,
+) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1[two]": "json"},
+    )
+    with pytest.raises(UnknownActionDataParameter):
+        hf.TaskSchema(
+            objective="ts1",
+            inputs=[hf.SchemaInput("p1", labels={"one": {}}, multiple=True)],
+            actions=[act],
+        )
+
+
+def test_process_script_data_in_dict_raise_invalid_script_key(null_config) -> None:
+    bad_script_data: Any = {"p1": {"format": "json", "BAD_KEY": 1}}
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in=bad_script_data,
+    )
+    with pytest.raises(UnknownActionDataKey):
+        hf.TaskSchema(
+            objective="ts1",
+            inputs=[hf.SchemaInput("p1")],
+            actions=[act],
+        )
+
+
+def test_process_script_data_out_mixed(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in="json",
+        script_data_out={"p2": "json", "p3": "direct"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaInput("p2"), hf.SchemaInput("p3")],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_out == {
+        "outputs.p2": {"format": "json"},
+        "outputs.p3": {"format": "direct"},
+    }
+
+
+def test_process_script_data_in_fmt_dict_mixed(null_config) -> None:
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        script_data_in={"p1": {"format": "json"}, "p2": "hdf5"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {
+        "inputs.p1": {"format": "json"},
+        "inputs.p2": {"format": "hdf5"},
+    }
+
+
+def test_process_script_data_in_input_files(null_config) -> None:
+    act = hf.Action(
+        input_file_generators=[
+            hf.InputFileGenerator(
+                input_file=hf.FileSpec("my_file", "my_file.txt"),
+                inputs=[hf.Parameter("p1")],
+            )
+        ],
+        script_data_in={"input_files.my_file": "direct"},
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        actions=[act],
+    )
+    assert ts.actions[0].script_data_in == {"inputs.p1": {"format": "direct"}}
+    assert ts.actions[1].script_data_in == {"input_files.my_file": {"format": "direct"}}
+
+
+def test_ActionEnvironment_env_str(null_config) -> None:
+    act_env = hf.ActionEnvironment(environment="my_env")
+    assert act_env.environment == {"name": "my_env"}
+
+
+def test_ActionEnvironment_env_dict(null_config) -> None:
+    act_env = hf.ActionEnvironment(environment={"name": "my_env", "key": "value"})
+    assert act_env.environment == {"name": "my_env", "key": "value"}
+
+
+def test_ActionEnvironment_raises_on_missing_name(null_config) -> None:
+    with pytest.raises(ActionEnvironmentMissingNameError):
+        hf.ActionEnvironment(environment={"key": "value"})
+
+
+def test_rules_allow_runs_initialised(null_config, tmp_path: Path):
+    """Test rules that do not depend on execution allow for runs to be initialised."""
+    act = hf.Action(
+        script="<<script:path/to/some/script>>",
+        rules=[hf.ActionRule(path="inputs.p1", condition={"value.less_than": 2})],
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        actions=[act],
+    )
+    t1 = hf.Task(
+        schema=ts, sequences=[hf.ValueSequence(path="inputs.p1", values=[1.5, 2.5])]
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test",
+        path=tmp_path,
+        tasks=[t1],
+    )
+    assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+    assert wk.tasks[0].elements[1].iterations[0].EARs_initialised
+    assert len(wk.tasks[0].elements[0].actions) == 1
+    assert len(wk.tasks[0].elements[1].actions) == 0
+
+
+def test_rules_prevent_runs_initialised(null_config, tmp_path: Path):
+    """Test rules that depend on execution prevent initialising runs."""
+    act1 = hf.Action(script="<<script:path/to/some/script>>")
+    act2 = hf.Action(
+        script="<<script:path/to/some/script>>",
+        rules=[hf.ActionRule(path="inputs.p2", condition={"value.less_than": 2})],
+    )
+    ts1 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p2")],
+        actions=[act1],
+    )
+    ts2 = hf.TaskSchema(
+        objective="ts2",
+        inputs=[hf.SchemaInput("p2")],
+        actions=[act2],
+    )
+    t1 = hf.Task(schema=ts1, inputs={"p1": 1.2})
+    t2 = hf.Task(schema=ts2)
+    wk = hf.Workflow.from_template_data(
+        template_name="test",
+        path=tmp_path,
+        tasks=[t1, t2],
+    )
+    assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+    assert not wk.tasks[1].elements[0].iterations[0].EARs_initialised
+
+
+def test_command_rules_allow_runs_initialised(null_config, tmp_path: Path):
+    """Test command rules that do not depend on execution allow for runs to be
+    initialised."""
+    act = hf.Action(
+        commands=[
+            hf.Command(
+                command='echo "p1=<<parameter:p1>>"',
+                rules=[hf.ActionRule(path="inputs.p1", condition={"value.less_than": 2})],
+            )
+        ],
+    )
+    ts = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        actions=[act],
+    )
+    t1 = hf.Task(
+        schema=ts, sequences=[hf.ValueSequence(path="inputs.p1", values=[1.5, 2.5])]
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test",
+        path=tmp_path,
+        tasks=[t1],
+    )
+    assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+    assert wk.tasks[0].elements[1].iterations[0].EARs_initialised
+    assert len(wk.tasks[0].elements[0].actions) == 1
+    assert len(wk.tasks[0].elements[1].actions) == 1
+    assert len(wk.tasks[0].elements[0].action_runs[0].commands_idx) == 1
+    assert len(wk.tasks[0].elements[1].action_runs[0].commands_idx) == 0
+
+
+def test_command_rules_prevent_runs_initialised(null_config, tmp_path: Path):
+    """Test command rules that do depend on execution prevent runs being initialised."""
+    act1 = hf.Action(
+        commands=[
+            hf.Command(command='echo "p1=<<parameter:p1>>"', stdout="<<parameter:p2>>")
+        ]
+    )
+    act2 = hf.Action(
+        commands=[
+            hf.Command(
+                command='echo "p1=<<parameter:p2>>"',
+                rules=[hf.ActionRule(path="inputs.p2", condition={"value.less_than": 2})],
+            )
+        ],
+    )
+    ts1 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p2")],
+        actions=[act1],
+    )
+    ts2 = hf.TaskSchema(
+        objective="ts2",
+        inputs=[hf.SchemaInput("p2")],
+        actions=[act2],
+    )
+    t1 = hf.Task(schema=ts1, inputs={"p1": 0})
+    t2 = hf.Task(schema=ts2)
+    wk = hf.Workflow.from_template_data(
+        template_name="test",
+        path=tmp_path,
+        tasks=[t1, t2],
+    )
+    assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+    assert len(wk.tasks[0].elements[0].action_runs[0].commands_idx) == 1
+    assert not wk.tasks[1].elements[0].iterations[0].EARs_initialised
+
+
+def test_command_rules_prevent_runs_initialised_with_valid_action_rules(
+    null_config, tmp_path: Path
+):
+    """Test command rules that do depend on execution prevent runs being initialised, even
+    when the parent action rules can be tested and are valid."""
+    act1 = hf.Action(
+        commands=[
+            hf.Command(command='echo "p1=<<parameter:p1>>"', stdout="<<parameter:p2>>")
+        ]
+    )
+
+    # action rule is testable and valid, but command rule is not testable, so the action
+    # runs should not be initialised:
+    act2 = hf.Action(
+        commands=[
+            hf.Command(
+                command='echo "p1=<<parameter:p1>>; p2=<<parameter:p2>>"',
+                rules=[hf.ActionRule(path="inputs.p2", condition={"value.less_than": 2})],
+            )
+        ],
+        rules=[hf.ActionRule(path="inputs.p1", condition={"value.less_than": 2})],
+    )
+    ts1 = hf.TaskSchema(
+        objective="ts1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p2")],
+        actions=[act1],
+    )
+    ts2 = hf.TaskSchema(
+        objective="ts2",
+        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p2")],
+        actions=[act2],
+    )
+    t1 = hf.Task(schema=ts1, inputs={"p1": 0})
+    t2 = hf.Task(schema=ts2)
+    wk = hf.Workflow.from_template_data(
+        template_name="test",
+        path=tmp_path,
+        tasks=[t1, t2],
+    )
+    assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+    assert len(wk.tasks[0].elements[0].action_runs[0].commands_idx) == 1
+
+    assert not wk.tasks[1].elements[0].iterations[0].EARs_initialised
+
+
+def test_get_commands_file_hash_distinct_act_idx(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    data_idx = {"inputs.p1": 0}
+    h1 = act.get_commands_file_hash(data_idx=data_idx, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx=data_idx, action_idx=1)
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_distinct_data_idx_vals(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    h1 = act.get_commands_file_hash(data_idx={"inputs.p1": 0}, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx={"inputs.p1": 1}, action_idx=0)
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_distinct_data_idx_sub_vals(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    di_1 = {"inputs.p1": 0, "inputs.p1.a": 1}
+    di_2 = {"inputs.p1": 0, "inputs.p1.a": 2}
+    h1 = act.get_commands_file_hash(data_idx=di_1, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx=di_2, action_idx=0)
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_equivalent_data_idx_outputs(null_config):
+    """Different output data indices should not generate distinct hashes."""
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    di_1 = {"inputs.p1": 0, "outputs.p2": 1}
+    di_2 = {"inputs.p1": 0, "outputs.p2": 2}
+    h1 = act.get_commands_file_hash(data_idx=di_1, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx=di_2, action_idx=0)
+    assert h1 == h2
+
+
+def test_get_commands_file_hash_return_int(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    h1 = act.get_commands_file_hash(data_idx={"inputs.p1": 0}, action_idx=0)
+    assert type(h1) == int
+
+
+def test_get_commands_file_hash_distinct_schema(null_config):
+    act_1 = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    act_2 = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput("p1")], actions=[act_1])
+    hf.TaskSchema(objective="t2", inputs=[hf.SchemaInput("p1")], actions=[act_2])
+    assert act_1.task_schema
+    assert act_2.task_schema
+    h1 = act_1.get_commands_file_hash(data_idx={}, action_idx=0)
+    h2 = act_2.get_commands_file_hash(data_idx={}, action_idx=0)
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_equivalent_cmd_rule_inputs_path(null_config):
+    """Input-path rule does not affect hash, given equivalent data indices."""
+    act = hf.Action(
+        commands=[
+            hf.Command(
+                command="echo <<parameter:p1>>",
+                rules=[hf.ActionRule(path="inputs.p1", condition={"value.equal_to": 1})],
+            )
+        ],
+    )
+    h1 = act.get_commands_file_hash(data_idx={"inputs.p1": 0}, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx={"inputs.p1": 0}, action_idx=0)
+    assert h1 == h2
+
+
+def test_get_commands_file_hash_distinct_cmd_rule_resources_path(null_config):
+    """Resource-path rule affects hash given distinct resource data indices."""
+    act = hf.Action(
+        commands=[
+            hf.Command(
+                command="echo <<parameter:p1>>",
+                rules=[
+                    hf.ActionRule(
+                        path="resources.num_cores", condition={"value.equal_to": 8}
+                    )
+                ],
+            )
+        ],
+    )
+    di_1 = {"inputs.p1": 0, "resources.any.num_cores": 2}
+    di_2 = {"inputs.p1": 0, "resources.any.num_cores": 3}
+    h1 = act.get_commands_file_hash(data_idx=di_1, action_idx=0)
+    h2 = act.get_commands_file_hash(data_idx=di_2, action_idx=0)
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_distinct_env_spec(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    data_idx = {"inputs.p1": 0}
+    h1 = act.get_commands_file_hash(
+        data_idx=data_idx,
+        action_idx=0,
+        env_spec_hashable=(("version", "name"), ("3.12", "python_env")),
+    )
+    h2 = act.get_commands_file_hash(
+        data_idx=data_idx,
+        action_idx=0,
+        env_spec_hashable=(("version", "name"), ("3.13", "python_env")),
+    )
+    assert h1 != h2
+
+
+def test_get_commands_file_hash_equivalent_env_spec(null_config):
+    act = hf.Action(commands=[hf.Command("echo <<parameter:p1>>")])
+    data_idx = {"inputs.p1": 0}
+    env_spec_hashable = (("version", "name"), ("3.12", "python_env"))
+    h1 = act.get_commands_file_hash(
+        data_idx=data_idx,
+        action_idx=0,
+        env_spec_hashable=env_spec_hashable,
+    )
+    h2 = act.get_commands_file_hash(
+        data_idx=data_idx,
+        action_idx=0,
+        env_spec_hashable=env_spec_hashable,
+    )
+    assert h1 == h2
+
+
+def test_get_script_input_output_file_paths_json_in_json_out(null_config):
+    act = hf.Action(
+        script="<<script:main_script_test_json_in_json_out.py>>",
+        script_data_in="json",
+        script_data_out="json",
+        script_exe="python_script",
+        environments=[hf.ActionEnvironment(environment="python_env")],
+        requires_dir=True,
+    )
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[act],
+    )
+    assert s1.actions[0].get_input_output_file_paths("script", (0, 1, 2)) == {
+        "inputs": {"json": Path("js_0_block_1_act_2_inputs.json")},
+        "outputs": {"json": Path("js_0_block_1_act_2_outputs.json")},
+    }
+
+
+def test_get_script_input_output_file_paths_hdf5_in_direct_out(null_config):
+    act = hf.Action(
+        script="<<script:main_script_test_hdf5_in_obj_2.py>>",
+        script_data_in="hdf5",
+        script_data_out="direct",
+        script_exe="python_script",
+        environments=[hf.ActionEnvironment(environment="python_env")],
+        requires_dir=True,
+    )
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[act],
+    )
+    assert s1.actions[0].get_input_output_file_paths("script", (0, 1, 2)) == {
+        "inputs": {"hdf5": Path("js_0_block_1_act_2_inputs.h5")},
+        "outputs": {},
+    }
+
+
+def test_get_script_input_output_file_command_args_json_in_json_out(null_config):
+    act = hf.Action(
+        script="<<script:main_script_test_json_in_json_out.py>>",
+        script_data_in="json",
+        script_data_out="json",
+        script_exe="python_script",
+        environments=[hf.ActionEnvironment(environment="python_env")],
+        requires_dir=True,
+    )
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[act],
+    )
+    js_idx, blk_idx, blk_act_idx = s1.actions[0].get_block_act_idx_shell_vars()
+    assert s1.actions[0].get_input_output_file_command_args("script") == [
+        "--inputs-json",
+        f"js_{js_idx}_block_{blk_idx}_act_{blk_act_idx}_inputs.json",
+        "--outputs-json",
+        f"js_{js_idx}_block_{blk_idx}_act_{blk_act_idx}_outputs.json",
+    ]
+
+
+def test_get_script_input_output_file_command_args_hdf5_in_direct_out(null_config):
+    act = hf.Action(
+        script="<<script:main_script_test_hdf5_in_obj_2.py>>",
+        script_data_in="hdf5",
+        script_data_out="direct",
+        script_exe="python_script",
+        environments=[hf.ActionEnvironment(environment="python_env")],
+        requires_dir=True,
+    )
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[act],
+    )
+    js_idx, blk_idx, blk_act_idx = s1.actions[0].get_block_act_idx_shell_vars()
+    assert s1.actions[0].get_input_output_file_command_args("script") == [
+        "--inputs-hdf5",
+        f"js_{js_idx}_block_{blk_idx}_act_{blk_act_idx}_inputs.h5",
+    ]
+
+
+def test_from_json_like_envs_as_dict_equivalence(null_config):
+    json_like_1 = {
+        "commands": [{"command": "hello"}],
+        "environments": [
+            {"scope": "processing", "environment": "python_env"},
+            {"scope": "main", "environment": "sim_env"},
+        ],
+    }
+    json_like_2 = {
+        "commands": [{"command": "hello"}],
+        "environments": {"processing": "python_env", "main": "sim_env"},
+    }
+    assert hf.Action.from_json_like(json_like_1) == hf.Action.from_json_like(json_like_2)
+
+
+def test_get_input_types_jinja_template(null_config):
+    act = hf.Action(jinja_template="test/test_template.txt")
+    hf.TaskSchema(
+        objective="obj",
+        inputs=[hf.SchemaInput("fruits"), hf.SchemaInput("name")],
+        actions=[act],
+    )  # the action must be bound to a task schema for `get_input_types` to work
+    assert sorted(act.get_input_types()) == sorted(("fruits", "name"))
+
+
+def test_is_input_type_required_jinja_template(null_config):
+    act = hf.Action(jinja_template="test/test_template.txt")
+    hf.TaskSchema(
+        objective="obj",
+        inputs=[hf.SchemaInput("fruits"), hf.SchemaInput("name")],
+        actions=[act],
+    )  # the action must be bound to a task schema for `is_input_type_required` to work
+    assert act.is_input_type_required("name", [])
+    assert act.is_input_type_required("fruits", [])
+    assert not act.is_input_type_required("vegetables", [])