hpcflow-new2 0.2.0a145__tar.gz → 0.2.0a147__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/PKG-INFO +1 -1
- hpcflow_new2-0.2.0a147/hpcflow/_version.py +1 -0
- hpcflow_new2-0.2.0a147/hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow_new2-0.2.0a147/hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/actions.py +25 -10
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/base.py +11 -2
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/json.py +2 -1
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/pending.py +5 -5
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/zarr.py +4 -1
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/__init__.py +8 -1
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/slurm.py +5 -1
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/submission.py +13 -3
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/scripts/test_main_scripts.py +67 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_schema_input.py +3 -3
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_slurm.py +11 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_submission.py +114 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/pyproject.toml +2 -2
- hpcflow_new2-0.2.0a145/hpcflow/_version.py +0 -1
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/README.md +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/__pyinstaller/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/__pyinstaller/hook-hpcflow.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/app.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/demo_data_manifest/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/demo_data_manifest/demo_data_manifest.json +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/demo_task_1_parse_p3.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/generate_t1_file_01.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_in_json_out.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_in_obj.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/main_script_test_json_out_obj.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/scripts/parse_t1_file_01.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/template_components/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/template_components/command_files.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/template_components/environments.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/template_components/parameters.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/template_components/task_schemas.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/workflows/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/data/workflows/workflow_1.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/examples.ipynb +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/app.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/cli_common.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/callbacks.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/config.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/config_file.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/config/errors.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/command_files.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/commands.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/element.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/environment.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/errors.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/json_like.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/loop.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/object_list.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/parallel.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/parameters.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/rule.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/task.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/task_schema.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/test_utils.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/utils.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/validation.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/workflow.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/zarr_io.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/config_file_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/config_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/environments_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/files_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/parameters_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/task_schema_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/data/workflow_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/demo/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/demo/cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/helper/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/helper/cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/helper/helper.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/helper/watcher.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/log.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/store_resource.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/utils.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/runtime.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/jobscript.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/jobscript_info.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/direct.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/sge.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/utils.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/shells/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/shells/base.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/shells/bash.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/shells/os_version.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/shells/powershell.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/typing.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/conftest.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/__init__.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/workflow_1.json +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/workflow_1.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/workflow_1_slurm.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/workflow_1_wsl.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/data/workflow_test_run_abort.yaml +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/schedulers/slurm/test_slurm_submission.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/shells/wsl/test_wsl_submission.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_action.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_action_rule.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_app.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_cli.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_command.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_config.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_config_file.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_element.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_element_iteration.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_element_set.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_input_source.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_input_value.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_json_like.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_loop.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_object_list.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_parameter.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_persistence.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_resources.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_run.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_runtime.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_shell.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_task.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_task_schema.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_utils.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_value_sequence.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_workflow.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_workflow_template.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/workflows/test_jobscript.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/workflows/test_workflows.py +0 -0
- {hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/viz_demo.ipynb +0 -0
hpcflow_new2-0.2.0a147/hpcflow/_version.py ADDED
@@ -0,0 +1 @@
+__version__ = "0.2.0a147"
hpcflow_new2-0.2.0a147/hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py ADDED
@@ -0,0 +1,16 @@
+import json
+
+
+def main_script_test_json_sub_param_in_json_out_labels(_input_files, _output_files):
+    # read inputs
+    with _input_files["json"].open("rt") as fh:
+        inp_dat = json.load(fh)
+    a = int(inp_dat["a"])
+    p1_2 = int(inp_dat["p1[two]"])
+
+    # process
+    p2 = a + p1_2
+
+    # save outputs
+    with _output_files["json"].open("wt") as fh:
+        json.dump({"p2": p2}, fh)
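
The added data script expects its labelled and sub-parameter inputs to already be flattened into the JSON inputs file: the sub-parameter `p1[one].a` arrives under the key "a" and the labelled `p1[two]` keeps its bracketed name (these are the keys the script reads above). A minimal standalone sketch of that round trip, using hypothetical file names rather than hpcflow's real jobscript file-naming convention:

import json
import tempfile
from pathlib import Path

tmp = Path(tempfile.mkdtemp())
inputs_json = tmp / "js_0_act_0_inputs.json"    # hypothetical name, not hpcflow's convention
outputs_json = tmp / "js_0_act_0_outputs.json"  # hypothetical name

# keys mirror the script's reads: sub-parameter "p1[one].a" -> "a"; labelled "p1[two]" as-is:
inputs_json.write_text(json.dumps({"a": 101, "p1[two]": 201}))

_input_files = {"json": inputs_json}
_output_files = {"json": outputs_json}

# same body as the new data script:
with _input_files["json"].open("rt") as fh:
    inp_dat = json.load(fh)
p2 = int(inp_dat["a"]) + int(inp_dat["p1[two]"])
with _output_files["json"].open("wt") as fh:
    json.dump({"p2": p2}, fh)

print(json.loads(outputs_json.read_text()))  # {'p2': 302}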
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/core/actions.py RENAMED
@@ -148,6 +148,7 @@ class ElementActionRun:
         skip: bool,
         exit_code: Union[int, None],
         metadata: Dict,
+        run_hostname: Union[str, None],
     ) -> None:
         self._id = id_
         self._is_pending = is_pending
@@ -163,6 +164,7 @@ class ElementActionRun:
         self._snapshot_end = snapshot_end
         self._exit_code = exit_code
         self._metadata = metadata
+        self._run_hostname = run_hostname

         # assigned on first access of corresponding properties:
         self._inputs = None
@@ -222,6 +224,10 @@ class ElementActionRun:
     def metadata(self):
         return self._metadata

+    @property
+    def run_hostname(self):
+        return self._run_hostname
+
     @property
     def start_time(self):
         return self._start_time
@@ -478,7 +484,7 @@ class ElementActionRun:
         Parameters
         ----------
         inputs
-            If specified, a list of input parameter types to include, …
+            If specified, a list of input parameter types to include, or a dict whose keys
             are input parameter types to include. For schema inputs that have
             `multiple=True`, the input type should be labelled. If a dict is passed, and
             the key "all_iterations` is present and `True`, the return for that input
@@ -489,10 +495,11 @@ class ElementActionRun:
         keys.

        """
+        if not inputs:
+            inputs = self.get_parameter_names("inputs")
+
         out = {}
-        for inp_name in …
-            if inputs and inp_name not in inputs:
-                continue
+        for inp_name in inputs:
             path_i, label_i = split_param_label(inp_name)

             try:
@@ -512,12 +519,20 @@ class ElementActionRun:
             else:
                 val_i = self.get(f"inputs.{inp_name}")

+            key = inp_name
+            if label_dict and label_i:
+                key = path_i  # exclude label from key
+
+            if "." in key:
+                # for sub-parameters, take only the final part as the dict key:
+                key = key.split(".")[-1]
+
             if label_dict and label_i:
-                if …
-                    out[…
-                out[…
+                if key not in out:
+                    out[key] = {}
+                out[key][label_i] = val_i
             else:
-                out[…
+                out[key] = val_i

         return out

@@ -1163,8 +1178,8 @@ class Action(JSONLike):
         # validation:
         allowed_keys = ("format", "all_iterations")
         for k, v in all_params.items():
-            # validate parameter name:
-            if k not in param_names:
+            # validate parameter name (sub-parameters are allowed):
+            if k.split(".")[0] not in param_names:
                 raise UnknownScriptDataParameter(
                     f"Script data parameter {k!r} is not a known parameter of the "
                     f"action. Parameters ({prefix}) are: {param_names!r}."
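
The reworked key handling in `get_input_values` above can be exercised in isolation. A simplified standalone sketch (the `split_param_label` helper and the value lookup below are stand-ins, not hpcflow's real implementations) showing how labels are grouped into nested dicts and dotted sub-parameter paths are reduced to their final component:

import re

def split_param_label(name):
    """Stand-in: split 'p1[two]' -> ('p1', 'two'); names without a label -> (name, None)."""
    m = re.fullmatch(r"([^\[\]]+)\[([^\[\]]+)\](.*)", name)
    if not m:
        return name, None
    return m.group(1) + m.group(3), m.group(2)

def get_input_values(values, inputs, label_dict=True):
    out = {}
    for inp_name in inputs:
        path_i, label_i = split_param_label(inp_name)
        val_i = values[inp_name]                 # stand-in for self.get(f"inputs.{inp_name}")

        key = inp_name
        if label_dict and label_i:
            key = path_i                         # exclude label from key
        if "." in key:
            key = key.split(".")[-1]             # sub-parameters: final part only

        if label_dict and label_i:
            out.setdefault(key, {})[label_i] = val_i
        else:
            out[key] = val_i
    return out

vals = {"p1[one]": 101, "p1[two]": 201, "p2.a": 5}
print(get_input_values(vals, ["p1[one]", "p1[two]", "p2.a"]))
# {'p1': {'one': 101, 'two': 201}, 'a': 5}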
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/base.py RENAMED
@@ -10,6 +10,7 @@ import enum
 import os
 from pathlib import Path
 import shutil
+import socket
 import time
 from typing import Any, Dict, Iterable, List, Optional, Tuple, TypeVar, Union

@@ -306,6 +307,7 @@ class StoreEAR:
     snapshot_end: Optional[Dict] = None
     exit_code: Optional[int] = None
     metadata: Dict[str, Any] = None
+    run_hostname: Optional[str] = None

     @staticmethod
     def _encode_datetime(dt: Union[datetime, None], ts_fmt: str) -> str:
@@ -331,6 +333,7 @@ class StoreEAR:
             "snapshot_end": self.snapshot_end,
             "exit_code": self.exit_code,
             "metadata": self.metadata,
+            "run_hostname": self.run_hostname,
         }

     @classmethod
@@ -365,6 +368,7 @@ class StoreEAR:
             "snapshot_end": self.snapshot_end,
             "exit_code": self.exit_code,
             "metadata": self.metadata,
+            "run_hostname": self.run_hostname,
         }

     def update(
@@ -377,6 +381,7 @@ class StoreEAR:
         snapshot_start: Optional[Dict] = None,
         snapshot_end: Optional[Dict] = None,
         exit_code: Optional[int] = None,
+        run_hostname: Optional[str] = None,
     ) -> AnySEAR:
         """Return a shallow copy, with specified data updated."""

@@ -388,6 +393,7 @@ class StoreEAR:
         snap_s = snapshot_start if snapshot_start is not None else self.snapshot_start
         snap_e = snapshot_end if snapshot_end is not None else self.snapshot_end
         exit_code = exit_code if exit_code is not None else self.exit_code
+        run_hn = run_hostname if run_hostname is not None else self.run_hostname

         return self.__class__(
             id_=self.id_,
@@ -404,6 +410,7 @@ class StoreEAR:
             snapshot_start=snap_s,
             snapshot_end=snap_e,
             exit_code=exit_code,
+            run_hostname=run_hn,
         )

@@ -961,7 +968,8 @@ class PersistentStore(ABC):
         snapshot = JSONLikeDirSnapShot()
         snapshot.take(".")
         ss_js = snapshot.to_json_like()
-        …
+        run_hostname = socket.gethostname()
+        self._pending.set_EAR_starts[EAR_ID] = (dt, ss_js, run_hostname)
         if save:
             self.save()
         return dt
@@ -1419,7 +1427,7 @@ class PersistentStore(ABC):
         pend_end = self._pending.set_EAR_ends.get(EAR_i.id_)
         pend_skip = True if EAR_i.id_ in self._pending.set_EAR_skips else None

-        p_st, p_ss = pend_start if pend_start else (None, None)
+        p_st, p_ss, p_hn = pend_start if pend_start else (None, None, None)
         p_et, p_se, p_ex, p_sx = pend_end if pend_end else (None, None, None, None)

         updates = {
@@ -1431,6 +1439,7 @@ class PersistentStore(ABC):
             "snapshot_start": p_ss,
             "snapshot_end": p_se,
             "exit_code": p_ex,
+            "run_hostname": p_hn,
         }
         if any(i is not None for i in updates.values()):
             EAR_i = EAR_i.update(**updates)
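
The net effect of the persistence changes above is that the machine executing a run is now recorded alongside the existing start time and directory snapshot. A minimal illustration of the bookkeeping (the pending container here is a plain dict standing in for the real PersistentStore/PendingChanges machinery):

import socket
from datetime import datetime, timezone

set_EAR_starts = {}  # EAR id -> (start time, dir snapshot, run hostname)

def set_EAR_start(ear_id, snapshot_json):
    dt = datetime.now(timezone.utc)
    run_hostname = socket.gethostname()
    set_EAR_starts[ear_id] = (dt, snapshot_json, run_hostname)
    return dt

set_EAR_start(0, {"files": {}})
print(set_EAR_starts[0][2])  # whichever machine executed the run, e.g. a compute-node name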
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/json.py RENAMED
@@ -232,10 +232,11 @@ class JSONPersistentStore(PersistentStore):
         with self.using_resource("metadata", action="update") as md:
             md["runs"][EAR_id]["submission_idx"] = sub_idx

-    def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict):
+    def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict, s_hn: str):
         with self.using_resource("metadata", action="update") as md:
             md["runs"][EAR_id]["start_time"] = s_time.strftime(self.ts_fmt)
             md["runs"][EAR_id]["snapshot_start"] = s_snap
+            md["runs"][EAR_id]["run_hostname"] = s_hn

     def _update_EAR_end(
         self, EAR_id: int, e_time: datetime, e_snap: Dict, ext_code: int, success: bool
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/pending.py RENAMED
@@ -59,7 +59,7 @@ class PendingChanges:
         self.set_EARs_initialised: List[int] = None
         self.set_EAR_submission_indices: Dict[int, int] = None
         self.set_EAR_skips: List[int] = None
-        self.set_EAR_starts: Dict[int, Tuple[datetime, Dict]] = None
+        self.set_EAR_starts: Dict[int, Tuple[datetime, Dict], str] = None
         self.set_EAR_ends: Dict[int, Tuple[datetime, Dict, int, bool]] = None

         self.set_js_metadata: Dict[int, Dict[int, Any]] = None
@@ -286,12 +286,12 @@ class PendingChanges:

     def commit_EAR_starts(self) -> None:
         # TODO: could be batched up?
-        for EAR_id, (time, snap) in self.set_EAR_starts.items():
+        for EAR_id, (time, snap, hostname) in self.set_EAR_starts.items():
             self.logger.debug(
-                f"commit: adding pending start time ({time!r}) …
-                f"directory snapshot to EAR ID {EAR_id!r}."
+                f"commit: adding pending start time ({time!r}), run hostname "
+                f"({hostname!r}), and directory snapshot to EAR ID {EAR_id!r}."
             )
-            self.store._update_EAR_start(EAR_id, time, snap)
+            self.store._update_EAR_start(EAR_id, time, snap, hostname)
         self.clear_set_EAR_starts()

     def commit_EAR_ends(self) -> None:
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/persistence/zarr.py RENAMED
@@ -230,6 +230,7 @@ class ZarrStoreEAR(StoreEAR):
             self.snapshot_end,
             self.exit_code,
             self.metadata,
+            self.run_hostname,
         ]
         return EAR_enc

@@ -250,6 +251,7 @@ class ZarrStoreEAR(StoreEAR):
             "snapshot_end": EAR_dat[10],
             "exit_code": EAR_dat[11],
             "metadata": EAR_dat[12],
+            "run_hostname": EAR_dat[13],
         }
         return cls(is_pending=False, **obj_dat)

@@ -598,7 +600,7 @@ class ZarrPersistentStore(PersistentStore):
         if attrs != attrs_orig:
             arr.attrs.put(attrs)

-    def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict):
+    def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict, s_hn: str):
         arr = self._get_EARs_arr(mode="r+")
         attrs_orig = arr.attrs.asdict()
         attrs = copy.deepcopy(attrs_orig)
@@ -607,6 +609,7 @@ class ZarrPersistentStore(PersistentStore):
         EAR_i = EAR_i.update(
             start_time=s_time,
             snapshot_start=s_snap,
+            run_hostname=s_hn,
         )
         arr[EAR_id] = EAR_i.encode(attrs, self.ts_fmt)
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/__init__.py RENAMED
@@ -10,7 +10,6 @@ class NullScheduler:

     def __init__(
         self,
-        submit_cmd=None,
         shell_args=None,
         shebang_args=None,
         options=None,
@@ -19,6 +18,10 @@ class NullScheduler:
         self.shell_args = shell_args or self.DEFAULT_SHELL_ARGS
         self.options = options or []

+    @property
+    def unique_properties(self):
+        return (self.__class__.__name__,)
+
     def __eq__(self, other) -> bool:
         if type(self) != type(other):
             return False
@@ -64,6 +67,10 @@ class Scheduler(NullScheduler):
         self.array_switch = array_switch or self.DEFAULT_ARRAY_SWITCH
         self.array_item_var = array_item_var or self.DEFAULT_ARRAY_ITEM_VAR

+    @property
+    def unique_properties(self):
+        return (self.__class__.__name__, self.submit_cmd, self.show_cmd, self.del_cmd)
+
     def format_switch(self, switch):
         return f"{self.js_cmd} {switch}"
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/schedulers/slurm.py RENAMED
@@ -416,7 +416,11 @@ class SlurmPosix(Scheduler):
         _arr_idx = []
         for i_range_str in arr_idx.strip("[]").split(","):
             if "-" in i_range_str:
-                …
+                range_parts = i_range_str.split("-")
+                if "%" in range_parts[1]:
+                    # indicates max concurrent array items; not needed
+                    range_parts[1] = range_parts[1].split("%")[0]
+                i_args = [int(j) - 1 for j in range_parts]
                 _arr_idx.extend(list(range(i_args[0], i_args[1] + 1)))
             else:
                 _arr_idx.append(int(i_range_str) - 1)
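
For context, the "%N" suffix stripped above is SLURM's "maximum concurrent array tasks" marker (e.g. "[9-11%2]"). A simplified standalone re-implementation of the parsing behaviour (not the real SlurmPosix._parse_job_IDs) reproduces the values asserted by the new unit tests further down:

def parse_job_id(job_id_str):
    # split "<base>_[ranges]" into the base job ID and zero-based array indices
    base_id, _, arr_idx = job_id_str.partition("_")
    if not arr_idx:
        return base_id, None
    indices = []
    for part in arr_idx.strip("[]").split(","):
        if "-" in part:
            start, stop = part.split("-")
            if "%" in stop:
                stop = stop.split("%")[0]  # "%2" = max concurrent items; not needed here
            indices.extend(range(int(start) - 1, int(stop)))  # zero-based indices
        else:
            indices.append(int(part) - 1)
    return base_id, indices

print(parse_job_id("3397752_[9-11%2]"))      # ('3397752', [8, 9, 10])
print(parse_job_id("49203_[3-5%1,9-11%2]"))  # ('49203', [2, 3, 4, 8, 9, 10])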
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/sdk/submission/submission.py RENAMED
@@ -332,14 +332,24 @@ class Submission(JSONLike):
     def get_unique_schedulers_of_jobscripts(
         jobscripts: List[Jobscript],
     ) -> Dict[Tuple[Tuple[int, int]], Scheduler]:
-        """Get unique schedulers and which of the passed jobscripts they correspond to. …
+        """Get unique schedulers and which of the passed jobscripts they correspond to.
+
+        Uniqueness is determines only by the `Scheduler.unique_properties` tuple.
+
+        """
         js_idx = []
         schedulers = []
+
+        # list of tuples of scheduler properties we consider to determine "uniqueness",
+        # with the first string being the scheduler type (class name):
+        seen_schedulers = []
+
         for js in jobscripts:
-            if js.scheduler not in …
+            if js.scheduler.unique_properties not in seen_schedulers:
+                seen_schedulers.append(js.scheduler.unique_properties)
                 schedulers.append(js.scheduler)
                 js_idx.append([])
-            sched_idx = …
+            sched_idx = seen_schedulers.index(js.scheduler.unique_properties)
             js_idx[sched_idx].append((js.submission.index, js.index))

         sched_js_idx = dict(zip((tuple(i) for i in js_idx), schedulers))
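
The de-duplication above keys schedulers by `unique_properties` rather than by full object equality, so jobscripts whose schedulers differ only in resources (for example core counts) collapse onto a single scheduler, as the new test_unique_schedulers_* tests assert. A minimal sketch of the pattern with stand-in classes (these are not hpcflow's real Scheduler/Jobscript types):

from collections import namedtuple

class DirectScheduler:
    @property
    def unique_properties(self):
        return (self.__class__.__name__,)

class SlurmScheduler:
    submit_cmd, show_cmd, del_cmd = "sbatch", "squeue", "scancel"

    @property
    def unique_properties(self):
        return (self.__class__.__name__, self.submit_cmd, self.show_cmd, self.del_cmd)

Jobscript = namedtuple("Jobscript", ("index", "scheduler"))

def get_unique_schedulers(jobscripts):
    # group jobscript indices by the scheduler's unique_properties tuple
    seen, schedulers, js_idx = [], [], []
    for js in jobscripts:
        props = js.scheduler.unique_properties
        if props not in seen:
            seen.append(props)
            schedulers.append(js.scheduler)
            js_idx.append([])
        js_idx[seen.index(props)].append(js.index)
    return dict(zip((tuple(i) for i in js_idx), schedulers))

jobscripts = [Jobscript(0, SlurmScheduler()), Jobscript(1, SlurmScheduler()), Jobscript(2, DirectScheduler())]
print(get_unique_schedulers(jobscripts))  # two entries: one SLURM scheduler, one direct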
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/scripts/test_main_scripts.py RENAMED
@@ -33,6 +33,32 @@ def test_script_direct_in_direct_out(null_config, tmp_path):
     assert wk.tasks[0].elements[0].outputs.p2.value == p1_val + 100


+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_script_direct_sub_param_in_direct_out(null_config, tmp_path):
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[
+            hf.Action(
+                script="<<script:main_script_test_direct_sub_param_in_direct_out.py>>",
+                script_data_in={"p1.a": "direct"},
+                script_data_out="direct",
+                script_exe="python_script",
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    p1_val = {"a": 101}
+    t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1], template_name="main_script_test", path=tmp_path
+    )
+    wk.submit(wait=True, add_to_known=False)
+    assert wk.tasks[0].elements[0].outputs.p2.value == p1_val["a"] + 100
+
+
 @pytest.mark.integration
 @pytest.mark.skipif("hf.run_time_info.is_frozen")
 def test_script_direct_in_direct_out_single_label(null_config, tmp_path):
@@ -170,6 +196,47 @@ def test_script_json_in_json_out_labels(null_config, tmp_path):
     assert wk.tasks[0].elements[0].outputs.p2.value == p1_1_val + p1_2_val


+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_script_json_sub_param_in_json_out_labels(null_config, tmp_path):
+    p1_label_1 = "one"
+    p1_label_2 = "two"
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[
+            hf.SchemaInput(
+                parameter=hf.Parameter("p1"),
+                labels={p1_label_1: {}, p1_label_2: {}},
+                multiple=True,
+            )
+        ],
+        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+        actions=[
+            hf.Action(
+                script="<<script:main_script_test_json_sub_param_in_json_out_labels.py>>",
+                script_data_in={"p1[one].a": "json", "p1[two]": "json"},
+                script_data_out="json",
+                script_exe="python_script",
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    a_val = 101
+    p1_2_val = 201
+    t1 = hf.Task(
+        schema=s1,
+        inputs={
+            f"p1[{p1_label_1}]": {"a": a_val},
+            f"p1[{p1_label_2}]": p1_2_val,
+        },
+    )
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1], template_name="main_script_test", path=tmp_path
+    )
+    wk.submit(wait=True, add_to_known=False)
+    assert wk.tasks[0].elements[0].outputs.p2.value == a_val + p1_2_val
+
+
 @pytest.mark.integration
 @pytest.mark.skipif("hf.run_time_info.is_frozen")
 def test_script_json_and_direct_in_json_out(null_config, tmp_path):
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_schema_input.py RENAMED
@@ -192,7 +192,7 @@ def test_get_input_values_subset(null_config, tmp_path):
         template_name="temp",
     )
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    assert run.get_input_values(inputs=("p1")) == {"p1": 101}
+    assert run.get_input_values(inputs=("p1",)) == {"p1": 101}


 def test_get_input_values_subset_labelled_label_dict_False(null_config, tmp_path):
@@ -226,7 +226,7 @@ def test_get_input_values_subset_labelled_label_dict_False(null_config, tmp_path
         template_name="temp",
     )
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    assert run.get_input_values(inputs=("p1[one]"), label_dict=False) == {"p1[one]": 101}
+    assert run.get_input_values(inputs=("p1[one]",), label_dict=False) == {"p1[one]": 101}


 def test_get_input_values_subset_labelled_label_dict_True(null_config, tmp_path):
@@ -260,7 +260,7 @@ def test_get_input_values_subset_labelled_label_dict_True(null_config, tmp_path)
         template_name="temp",
     )
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    assert run.get_input_values(inputs=("p1[one]"), label_dict=True) == {
+    assert run.get_input_values(inputs=("p1[one]",), label_dict=True) == {
         "p1": {"one": 101}
     }

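
These test fixes matter because, with the new `for inp_name in inputs:` loop in `get_input_values`, a bare parenthesised string would be iterated character by character; the trailing comma makes it a one-element tuple:

# ("p1") is just the string "p1"; ("p1",) is a one-element tuple
print(type(("p1")), list(("p1")))    # <class 'str'> ['p', '1']
print(type(("p1",)), list(("p1",)))  # <class 'tuple'> ['p1']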
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_slurm.py RENAMED
@@ -25,3 +25,14 @@ def test_parse_job_ID_array_simple_mixed_range():
         "30627658",
         [4, 7, 8, 9],
     )
+
+
+def test_parse_job_ID_array_simple_range_with_max_concurrent():
+    assert SlurmPosix._parse_job_IDs("3397752_[9-11%2]") == ("3397752", [8, 9, 10])
+
+
+def test_parse_job_ID_array_simple_multiple_range_max_concurrent():
+    assert SlurmPosix._parse_job_IDs("49203_[3-5%1,9-11%2]") == (
+        "49203",
+        [2, 3, 4, 8, 9, 10],
+    )
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/hpcflow/tests/unit/test_submission.py RENAMED
@@ -402,3 +402,117 @@ def test_abort_EARs_file_update_with_existing_abort(null_config, tmp_path):

     lines_exp = ["0", "1", "1"]
     assert lines == "\n".join(lines_exp) + "\n"
+
+
+def test_unique_schedulers_one_direct(new_null_config, tmp_path):
+    t1 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+    )
+    t2 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+    )
+    wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
+    wk = hf.Workflow.from_template(
+        template=wkt,
+        path=tmp_path,
+    )
+    sub = wk.add_submission()
+    scheds = sub.get_unique_schedulers()
+
+    assert len(scheds) == 1
+
+
+def test_unique_schedulers_one_direct_distinct_resources(new_null_config, tmp_path):
+    t1 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"num_cores": 1}},
+    )
+    t2 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"num_cores": 2}},
+    )
+    wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
+    wk = hf.Workflow.from_template(
+        template=wkt,
+        path=tmp_path,
+    )
+    sub = wk.add_submission()
+    scheds = sub.get_unique_schedulers()
+
+    assert len(scheds) == 1
+
+
+@pytest.mark.slurm
+def test_unique_schedulers_one_SLURM(new_null_config, tmp_path):
+    hf.config.add_scheduler("slurm")
+    t1 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "slurm"}},
+    )
+    t2 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "slurm"}},
+    )
+    wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
+    wk = hf.Workflow.from_template(
+        template=wkt,
+        path=tmp_path,
+    )
+    sub = wk.add_submission()
+    scheds = sub.get_unique_schedulers()
+
+    assert len(scheds) == 1
+
+
+@pytest.mark.slurm
+def test_unique_schedulers_one_SLURM_distinct_resources(new_null_config, tmp_path):
+    hf.config.add_scheduler("slurm")
+    t1 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "slurm", "num_cores": 1}},
+    )
+    t2 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "slurm", "num_cores": 2}},
+    )
+    wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
+    wk = hf.Workflow.from_template(
+        template=wkt,
+        path=tmp_path,
+    )
+    sub = wk.add_submission()
+    scheds = sub.get_unique_schedulers()
+
+    assert len(scheds) == 1
+
+
+@pytest.mark.slurm
+def test_unique_schedulers_two_direct_and_SLURM(new_null_config, tmp_path):
+    hf.config.add_scheduler("slurm")
+    t1 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "direct"}},
+    )
+    t2 = hf.Task(
+        schema=hf.task_schemas.test_t1_conditional_OS,
+        inputs={"p1": 1},
+        resources={"any": {"scheduler": "slurm"}},
+    )
+    wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
+    wk = hf.Workflow.from_template(
+        template=wkt,
+        path=tmp_path,
+    )
+    sub = wk.add_submission()
+    scheds = sub.get_unique_schedulers()
+
+    assert len(scheds) == 2
{hpcflow_new2-0.2.0a145 → hpcflow_new2-0.2.0a147}/pyproject.toml RENAMED
@@ -1,7 +1,7 @@

 [tool.poetry]
 name = "hpcflow-new2"
-version = "0.2.…
+version = "0.2.0a147"

 description = "Computational workflow management"
 authors = ["aplowman <adam.plowman@manchester.ac.uk>"]
@@ -70,7 +70,7 @@ hook-dirs = "hpcflow.__pyinstaller:get_hook_dirs"

 [tool.commitizen]
 name = "cz_conventional_commits"
-version = "0.2.…
+version = "0.2.0a147"
 tag_format = "v$version"
 version_files = [
     "pyproject.toml:version",
hpcflow_new2-0.2.0a145/hpcflow/_version.py
@@ -1 +0,0 @@
-__version__ = "0.2.0a145"