hpcflow-new2 0.2.0a161__tar.gz → 0.2.0a163__tar.gz
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/PKG-INFO +1 -1
- hpcflow_new2-0.2.0a163/hpcflow/_version.py +1 -0
- hpcflow_new2-0.2.0a163/hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/app.py +29 -42
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/cli.py +1 -1
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/actions.py +63 -14
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/command_files.py +6 -4
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/commands.py +7 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/element.py +39 -6
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/errors.py +16 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/object_list.py +26 -14
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/parameters.py +21 -3
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/task.py +107 -2
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/task_schema.py +16 -1
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/test_utils.py +5 -2
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/workflow.py +93 -5
- hpcflow_new2-0.2.0a163/hpcflow/sdk/data/workflow_spec_schema.yaml +20 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/demo/cli.py +1 -1
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/submission.py +21 -10
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/scripts/test_main_scripts.py +60 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_action.py +16 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_element.py +90 -25
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_element_set.py +32 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_parameter.py +11 -9
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_resources.py +7 -9
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_schema_input.py +8 -8
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_task.py +26 -27
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_task_schema.py +39 -8
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_value_sequence.py +5 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_workflow.py +4 -9
- hpcflow_new2-0.2.0a163/hpcflow/tests/unit/test_workflow_template.py +156 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/pyproject.toml +2 -2
- hpcflow_new2-0.2.0a161/hpcflow/_version.py +0 -1
- hpcflow_new2-0.2.0a161/hpcflow/sdk/data/workflow_spec_schema.yaml +0 -64
- hpcflow_new2-0.2.0a161/hpcflow/tests/unit/test_workflow_template.py +0 -35
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/README.md +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/__pyinstaller/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/__pyinstaller/hook-hpcflow.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/app.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/cli.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/demo_data_manifest/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/demo_data_manifest/demo_data_manifest.json +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/demo_task_1_parse_p3.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/generate_t1_file_01.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_in_json_out.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_in_obj.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_out_obj.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/scripts/parse_t1_file_01.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/template_components/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/template_components/command_files.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/template_components/environments.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/template_components/parameters.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/template_components/task_schemas.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/workflows/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/data/workflows/workflow_1.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/examples.ipynb +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/cli_common.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/callbacks.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/cli.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/config.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/config_file.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/config/errors.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/environment.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/json_like.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/loop.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/parallel.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/rule.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/run_dir_files.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/utils.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/validation.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/zarr_io.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/config_file_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/config_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/environments_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/files_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/parameters_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/data/task_schema_spec_schema.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/demo/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/helper/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/helper/cli.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/helper/helper.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/helper/watcher.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/log.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/base.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/json.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/pending.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/store_resource.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/utils.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/persistence/zarr.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/runtime.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/jobscript.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/jobscript_info.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/schedulers/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/schedulers/direct.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/schedulers/sge.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/schedulers/slurm.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/schedulers/utils.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/shells/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/shells/base.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/shells/bash.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/shells/os_version.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/submission/shells/powershell.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/typing.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/conftest.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/__init__.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/benchmark_N_elements.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/workflow_1.json +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/workflow_1.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/workflow_1_slurm.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/workflow_1_wsl.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/data/workflow_test_run_abort.yaml +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/schedulers/slurm/test_slurm_submission.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/shells/wsl/test_wsl_submission.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_action_rule.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_app.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_cli.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_command.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_config.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_config_file.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_element_iteration.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_input_source.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_input_value.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_json_like.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_loop.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_object_list.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_persistence.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_run.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_runtime.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_shell.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_slurm.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_submission.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/unit/test_utils.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/workflows/test_jobscript.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/tests/workflows/test_workflows.py +0 -0
- {hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/viz_demo.ipynb +0 -0
hpcflow_new2-0.2.0a163/hpcflow/_version.py

@@ -0,0 +1 @@
+__version__ = "0.2.0a163"
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/app.py

@@ -729,49 +729,28 @@ class BaseApp(metaclass=Singleton):
 
     @TimeIt.decorator
     def _load_scripts(self):
-        from setuptools import find_packages
 
        # TODO: load custom directories / custom functions (via decorator)
+        scripts_package = f"{self.package_name}.{self.scripts_dir}"
 
-
-
-
-
-
-        )
-        packages = [root_scripts_dir] + [root_scripts_dir + "." + i for i in packages]
-        packages = [self.package_name + "." + i for i in packages]
-        num_root_dirs = len(root_scripts_dir.split(".")) + 1
+        try:
+            ctx = resources.as_file(resources.files(scripts_package))
+        except AttributeError:
+            # < python 3.9; `resource.path` deprecated since 3.11
+            ctx = resources.path(scripts_package, "")
 
         scripts = {}
-
-
-
-
-
-
-
-
-
-
-
-        contents = resources.contents(pkg)
-        _is_rsrc = lambda pkg, name: resources.is_resource(pkg, name)
-
-        script_names = (
-            name for name in contents if name != "__init__.py" and _is_rsrc(pkg, name)
-        )
-
-        for i in script_names:
-            script_key = "/".join(pkg.split(".")[num_root_dirs:] + [i])
-            try:
-                script_ctx = resources.as_file(resources.files(pkg).joinpath(i))
-            except AttributeError:
-                # < python 3.9; `resource.path` deprecated since 3.11
-                script_ctx = resources.path(pkg, i)
-
-            with script_ctx as script:
-                scripts[script_key] = script
+        with ctx as path:
+            for dirpath, _, filenames in os.walk(path):
+                dirpath = Path(dirpath)
+                if dirpath.name == "__pycache__":
+                    continue
+                for filename in filenames:
+                    if filename == "__init__.py":
+                        continue
+                    val = dirpath.joinpath(filename)
+                    key = str(val.relative_to(path).as_posix())
+                    scripts[key] = Path(val)
 
         return scripts
 

@@ -1380,15 +1359,19 @@ class BaseApp(metaclass=Singleton):
            variables=variables,
            status=status,
        )
-
+        submitted_js = wk.submit(
            JS_parallelism=JS_parallelism,
            wait=wait,
            add_to_known=add_to_known,
-            return_idx=
+            return_idx=True,
            tasks=tasks,
            cancel=cancel,
            status=status,
        )
+        if return_idx:
+            return (wk, submitted_js)
+        else:
+            return wk
 
     def _make_demo_workflow(
         self,

@@ -1553,15 +1536,19 @@ class BaseApp(metaclass=Singleton):
            store_kwargs=store_kwargs,
            variables=variables,
        )
-
+        submitted_js = wk.submit(
            JS_parallelism=JS_parallelism,
            wait=wait,
            add_to_known=add_to_known,
-            return_idx=
+            return_idx=True,
            tasks=tasks,
            cancel=cancel,
            status=status,
        )
+        if return_idx:
+            return (wk, submitted_js)
+        else:
+            return wk
 
     def _submit_workflow(
         self,
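The rewritten `_load_scripts` above drops the `setuptools.find_packages` scan and instead walks the packaged scripts directory once, keying each script by its path relative to that directory. A minimal standalone sketch of the same idea, assuming a packaged scripts directory such as the `hpcflow/data/scripts` package listed above (the function name `collect_scripts` is illustrative, not part of the hpcflow API):

import os
from importlib import resources
from pathlib import Path

def collect_scripts(scripts_package: str) -> dict:
    """Map each packaged script to its path, keyed by its POSIX-style path
    relative to the scripts directory; skip __init__.py and __pycache__."""
    try:
        ctx = resources.as_file(resources.files(scripts_package))
    except AttributeError:
        # Python < 3.9 fallback (`resources.path` is deprecated since 3.11)
        ctx = resources.path(scripts_package, "")
    scripts = {}
    with ctx as root:
        for dirpath, _, filenames in os.walk(root):
            dirpath = Path(dirpath)
            if dirpath.name == "__pycache__":
                continue
            for filename in filenames:
                if filename == "__init__.py":
                    continue
                val = dirpath.joinpath(filename)
                scripts[str(val.relative_to(root).as_posix())] = val
    return scripts

# e.g. collect_scripts("hpcflow.data.scripts") would key entries such as
# "main_script_test_direct_in_direct_out.py" by their relative path.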
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/cli.py

@@ -173,7 +173,7 @@ def _make_API_CLI(app):
            status=status,
        )
        if print_idx:
-            click.echo(out)
+            click.echo(out[1])
 
    @click.command(context_settings={"ignore_unknown_options": True})
    @click.argument("py_test_args", nargs=-1, type=click.UNPROCESSED)
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/actions.py

@@ -16,6 +16,7 @@ from watchdog.utils.dirsnapshot import DirectorySnapshotDiff
 from hpcflow.sdk import app
 from hpcflow.sdk.core import ABORT_EXIT_CODE
 from hpcflow.sdk.core.errors import (
+    ActionEnvironmentMissingNameError,
     MissingCompatibleActionEnvironment,
     OutputFileParserNoOutputError,
     UnknownScriptDataKey,

@@ -458,14 +459,18 @@ class ElementActionRun:
         self._output_files = self.app.ElementOutputFiles(element_action_run=self)
         return self._output_files
 
+    @property
+    def env_spec(self) -> Dict[str, Any]:
+        return self.resources.environments[self.action.get_environment_name()]
+
     @TimeIt.decorator
     def get_resources(self):
         """Resolve specific resources for this EAR, considering all applicable scopes and
         template-level resources."""
         return self.element_iteration.get_resources(self.action)
 
-    def
-        return self.action.
+    def get_environment_spec(self) -> str:
+        return self.action.get_environment_spec()
 
     def get_environment(self) -> app.Environment:
         return self.action.get_environment()

@@ -542,6 +547,9 @@ class ElementActionRun:
            else:
                out[key] = val_i
 
+        if self.action.script_pass_env_spec:
+            out["env_spec"] = self.env_spec
+
         return out
 
     def get_input_values_direct(self, label_dict: bool = True):

@@ -562,6 +570,10 @@ class ElementActionRun:
            typ = i.path[len("inputs.") :]
            if typ in input_types:
                inputs[typ] = i.value
+
+        if self.action.script_pass_env_spec:
+            inputs["env_spec"] = self.env_spec
+
         return inputs
 
     def get_OFP_output_files(self) -> Dict[str, Union[str, List[str]]]:

@@ -585,6 +597,10 @@ class ElementActionRun:
         inputs = {}
         for inp_typ in self.action.output_file_parsers[0].inputs or []:
             inputs[inp_typ] = self.get(f"inputs.{inp_typ}")
+
+        if self.action.script_pass_env_spec:
+            inputs["env_spec"] = self.env_spec
+
         return inputs
 
     def get_OFP_outputs(self) -> Dict[str, Union[str, List[str]]]:

@@ -626,7 +642,7 @@ class ElementActionRun:
 
         # write the script if it is specified as a app data script, otherwise we assume
         # the script already exists in the working directory:
-        snip_path = self.action.get_snippet_script_path(self.action.script)
+        snip_path = self.action.get_snippet_script_path(self.action.script, self.env_spec)
         if snip_path:
             script_name = snip_path.name
             source_str = self.action.compose_source(snip_path)

@@ -677,22 +693,23 @@ class ElementActionRun:
         "stdout"/"stderr").
         """
         self.app.persistence_logger.debug("EAR.compose_commands")
+        env_spec = self.env_spec
+
         for ifg in self.action.input_file_generators:
             # TODO: there should only be one at this stage if expanded?
-            ifg.write_source(self.action)
+            ifg.write_source(self.action, env_spec)
 
         for ofp in self.action.output_file_parsers:
             # TODO: there should only be one at this stage if expanded?
             if ofp.output is None:
                 raise OutputFileParserNoOutputError()
-            ofp.write_source(self.action)
+            ofp.write_source(self.action, env_spec)
 
         if self.action.script:
             self.write_source(js_idx=jobscript.index, js_act_idx=JS_action_idx)
 
         command_lns = []
-
-        env = jobscript.submission.environments.get(env_label)
+        env = jobscript.submission.environments.get(**env_spec)
         if env.setup:
             command_lns += list(env.setup)
 

@@ -953,13 +970,24 @@ class ActionEnvironment(JSONLike):
        ),
    )
 
-    environment: str
+    environment: Union[str, Dict[str, Any]]
    scope: Optional[app.ActionScope] = None
 
    def __post_init__(self):
        if self.scope is None:
            self.scope = self.app.ActionScope.any()
 
+        orig_env = copy.deepcopy(self.environment)
+        if isinstance(self.environment, str):
+            self.environment = {"name": self.environment}
+
+        if "name" not in self.environment:
+            raise ActionEnvironmentMissingNameError(
+                f"The action-environment environment specification must include a string "
+                f"`name` key, or be specified as string that is that name. Provided "
+                f"environment key was {orig_env!r}."
+            )
+
 
class ActionRule(JSONLike):
    """Class to represent a rule/condition that must be True if an action is to be

@@ -1089,6 +1117,7 @@ class Action(JSONLike):
        script_data_out: Optional[str] = None,
        script_data_files_use_opt: Optional[bool] = False,
        script_exe: Optional[str] = None,
+        script_pass_env_spec: Optional[bool] = False,
        abortable: Optional[bool] = False,
        input_file_generators: Optional[List[app.InputFileGenerator]] = None,
        output_file_parsers: Optional[List[app.OutputFileParser]] = None,

@@ -1117,6 +1146,7 @@ class Action(JSONLike):
            script_data_files_use_opt if not self.script_is_python else True
        )
        self.script_exe = script_exe.lower() if script_exe else None
+        self.script_pass_env_spec = script_pass_env_spec
        self.environments = environments or [
            self.app.ActionEnvironment(environment="null_env")
        ]

@@ -1413,7 +1443,10 @@ class Action(JSONLike):
            commands=self.commands,
        )
 
-    def
+    def get_environment_name(self) -> str:
+        return self.get_environment_spec()["name"]
+
+    def get_environment_spec(self) -> Dict[str, Any]:
        if not self._from_expand:
            raise RuntimeError(
                f"Cannot choose a single environment from this action because it is not "

@@ -1422,7 +1455,7 @@ class Action(JSONLike):
        return self.environments[0].environment
 
    def get_environment(self) -> app.Environment:
-        return self.app.envs.get(self.
+        return self.app.envs.get(**self.get_environment_spec())
 
    @staticmethod
    def is_snippet_script(script: str) -> bool:

@@ -1442,7 +1475,9 @@ class Action(JSONLike):
        return script
 
    @classmethod
-    def get_snippet_script_str(
+    def get_snippet_script_str(
+        cls, script, env_spec: Optional[Dict[str, Any]] = None
+    ) -> str:
        if not cls.is_snippet_script(script):
            raise ValueError(
                f"Must be an app-data script name (e.g. "

@@ -1450,14 +1485,24 @@ class Action(JSONLike):
            )
        pattern = r"\<\<script:(.*:?)\>\>"
        match_obj = re.match(pattern, script)
-
+        out = match_obj.group(1)
+
+        if env_spec:
+            out = re.sub(
+                pattern=r"\<\<env:(.*?)\>\>",
+                repl=lambda match_obj: env_spec[match_obj.group(1)],
+                string=out,
+            )
+        return out
 
    @classmethod
-    def get_snippet_script_path(
+    def get_snippet_script_path(
+        cls, script_path, env_spec: Optional[Dict[str, Any]] = None
+    ) -> Path:
        if not cls.is_snippet_script(script_path):
            return False
 
-        path = cls.get_snippet_script_str(script_path)
+        path = cls.get_snippet_script_str(script_path, env_spec)
        if path in cls.app.scripts:
            path = cls.app.scripts.get(path)
 

@@ -1527,7 +1572,9 @@ class Action(JSONLike):
                input_file_generators=[ifg],
                environments=[self.get_input_file_generator_action_env(ifg)],
                rules=main_rules + ifg.get_action_rules(),
+                script_pass_env_spec=ifg.script_pass_env_spec,
                abortable=ifg.abortable,
+                # TODO: add script_data_in etc? and to OFP?
            )
            act_i._task_schema = self.task_schema
            if ifg.input_file not in inp_files:

@@ -1558,6 +1605,7 @@ class Action(JSONLike):
                output_file_parsers=[ofp],
                environments=[self.get_output_file_parser_action_env(ofp)],
                rules=list(self.rules) + ofp.get_action_rules(),
+                script_pass_env_spec=ofp.script_pass_env_spec,
                abortable=ofp.abortable,
            )
            act_i._task_schema = self.task_schema

@@ -1616,6 +1664,7 @@ class Action(JSONLike):
                script_data_in=self.script_data_in,
                script_data_out=self.script_data_out,
                script_exe=self.script_exe,
+                script_pass_env_spec=self.script_pass_env_spec,
                environments=[self.get_commands_action_env()],
                abortable=self.abortable,
                rules=main_rules,
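The `ActionEnvironment` change above means an action's environment may now be given either as a bare name or as a specifier dict that must carry a `name` key; `Action.get_environment_name()` reads that key and `get_environment_spec()` returns the whole dict. A minimal sketch of the normalisation rule, written as a standalone function rather than the hpcflow class (the error type and example values below are illustrative only):

from typing import Any, Dict, Union

def normalise_env_spec(environment: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
    """Return an environment specifier dict; a bare string is treated as the name."""
    if isinstance(environment, str):
        environment = {"name": environment}
    if "name" not in environment:
        # the diff above raises ActionEnvironmentMissingNameError at this point
        raise ValueError(f"Specifier must include a `name` key: {environment!r}")
    return environment

assert normalise_env_spec("python_env") == {"name": "python_env"}
assert normalise_env_spec({"name": "python_env", "version": "v2"})["name"] == "python_env"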
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/command_files.py

@@ -131,6 +131,7 @@ class InputFileGenerator(JSONLike):
    inputs: List[app.Parameter]
    script: str = None
    environment: app.Environment = None
+    script_pass_env_spec: Optional[bool] = False
    abortable: Optional[bool] = False
    rules: Optional[List[app.ActionRule]] = None
 

@@ -188,11 +189,11 @@ class InputFileGenerator(JSONLike):
        out = out.format(script_str=script_str, main_block=main_block)
        return out
 
-    def write_source(self, action):
+    def write_source(self, action, env_spec: Dict[str, Any]):
 
        # write the script if it is specified as a snippet script, otherwise we assume
        # the script already exists in the working directory:
-        snip_path = action.get_snippet_script_path(self.script)
+        snip_path = action.get_snippet_script_path(self.script, env_spec)
        if snip_path:
            source_str = self.compose_source(snip_path)
            with Path(snip_path.name).open("wt", newline="\n") as fp:

@@ -255,6 +256,7 @@ class OutputFileParser(JSONLike):
    inputs: List[str] = None
    outputs: List[str] = None
    options: Dict = None
+    script_pass_env_spec: Optional[bool] = False
    abortable: Optional[bool] = False
    save_files: Union[List[str], bool] = True
    clean_up: Optional[List[str]] = None

@@ -342,14 +344,14 @@ class OutputFileParser(JSONLike):
        out = out.format(script_str=script_str, main_block=main_block)
        return out
 
-    def write_source(self, action):
+    def write_source(self, action, env_spec: Dict[str, Any]):
        if self.output is None:
            # might be used just for saving files:
            return
 
        # write the script if it is specified as a snippet script, otherwise we assume
        # the script already exists in the working directory:
-        snip_path = action.get_snippet_script_path(self.script)
+        snip_path = action.get_snippet_script_path(self.script, env_spec)
        if snip_path:
            source_str = self.compose_source(snip_path)
            with Path(snip_path.name).open("wt", newline="\n") as fp:
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/commands.py

@@ -107,6 +107,7 @@ class Command(JSONLike):
 
        file_regex = r"(\<\<file:{}\>\>?)"
        exe_script_regex = r"\<\<(executable|script):(.*?)\>\>"
+        env_specs_regex = r"\<\<env:(.*?)\>\>"
 
        # substitute executables:
        cmd_str = re.sub(

@@ -118,6 +119,12 @@ class Command(JSONLike):
        # executable command might itself contain variables defined in `variables`, and/or
        # an `<<args>>` variable::
        for var_key, var_val in (self.variables or {}).items():
+            # substitute any `<<env:>>` specifiers
+            var_val = re.sub(
+                pattern=env_specs_regex,
+                repl=lambda match_obj: EAR.env_spec[match_obj.group(1)],
+                string=var_val,
+            )
            cmd_str = cmd_str.replace(f"<<{var_key}>>", var_val)
            if "<<args>>" in cmd_str:
                args_str = " ".join(self.arguments or [])
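The new `env_specs_regex` above lets command variables reference keys of the resolved environment specifier through `<<env:key>>` placeholders, substituted with `re.sub` and a lambda replacement. A self-contained sketch of that substitution, using a hypothetical specifier and variable value:

import re

env_specs_regex = r"\<\<env:(.*?)\>\>"
env_spec = {"name": "python_env", "version": "v2"}  # hypothetical specifier

var_val = "run_<<env:name>> --tag <<env:version>>"
var_val = re.sub(
    pattern=env_specs_regex,
    repl=lambda match_obj: env_spec[match_obj.group(1)],
    string=var_val,
)
print(var_val)  # run_python_env --tag v2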
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/element.py

@@ -199,6 +199,7 @@ class ElementResources(JSONLike):
    scheduler_args: Optional[Dict] = None
    shell_args: Optional[Dict] = None
    os_name: Optional[str] = None
+    environments: Optional[Dict] = None
 
    # SGE scheduler specific:
    SGE_parallel_env: str = None

@@ -241,14 +242,24 @@ class ElementResources(JSONLike):
            return hash(tuple((keys, vals)))
 
        exclude = ("time_limit",)
-        sub_dicts = ("scheduler_args", "shell_args")
        dct = {k: copy.deepcopy(v) for k, v in self.__dict__.items() if k not in exclude}
-        if "options" in dct.get("scheduler_args", []):
-            dct["scheduler_args"]["options"] = tuple(dct["scheduler_args"]["options"])
 
-
-
-
+        scheduler_args = dct["scheduler_args"]
+        shell_args = dct["shell_args"]
+        envs = dct["environments"]
+
+        if isinstance(scheduler_args, dict):
+            if "options" in scheduler_args:
+                dct["scheduler_args"]["options"] = _hash_dict(scheduler_args["options"])
+            dct["scheduler_args"] = _hash_dict(dct["scheduler_args"])
+
+        if isinstance(shell_args, dict):
+            dct["shell_args"] = _hash_dict(shell_args)
+
+        if isinstance(envs, dict):
+            for k, v in envs.items():
+                dct["environments"][k] = _hash_dict(v)
+            dct["environments"] = _hash_dict(dct["environments"])
 
        return _hash_dict(dct)
 

@@ -876,6 +887,28 @@ class ElementIteration:
        # an EAR?" which would then allow us to test a resources-based action rule.
 
        resource_specs = copy.deepcopy(self.get("resources"))
+
+        env_spec = action.get_environment_spec()
+        env_name = env_spec["name"]
+
+        # set default env specifiers, if none set:
+        if "any" not in resource_specs:
+            resource_specs["any"] = {}
+        if "environments" not in resource_specs["any"]:
+            resource_specs["any"]["environments"] = {env_name: copy.deepcopy(env_spec)}
+
+        for scope, dat in resource_specs.items():
+            if "environments" in dat:
+                # keep only relevant user-provided environment specifiers:
+                resource_specs[scope]["environments"] = {
+                    k: v for k, v in dat["environments"].items() if k == env_name
+                }
+                # merge user-provided specifiers into action specifiers:
+                resource_specs[scope]["environments"][env_name] = {
+                    **resource_specs[scope]["environments"].get(env_name, {}),
+                    **copy.deepcopy(env_spec),
+                }
+
        resources = {}
        for scope in action.get_possible_scopes()[::-1]:
            # loop in reverse so higher-specificity scopes take precedence:
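In `ElementIteration.get_resources` above, the action's environment specifier is injected into the element's `resources` data: only the environment named by the action is kept, and the action's specifier keys are merged over any user-provided keys for that environment. A dict-based sketch of that merge, using a standalone function and hypothetical values rather than the hpcflow API:

import copy

def merge_env_specs(resource_specs: dict, env_spec: dict) -> dict:
    """Keep only the environment named by the action and merge the action's
    specifier keys over any user-provided keys for that environment."""
    env_name = env_spec["name"]
    resource_specs.setdefault("any", {})
    resource_specs["any"].setdefault("environments", {env_name: copy.deepcopy(env_spec)})
    for scope, dat in resource_specs.items():
        if "environments" in dat:
            dat["environments"] = {
                k: v for k, v in dat["environments"].items() if k == env_name
            }
            dat["environments"][env_name] = {
                **dat["environments"].get(env_name, {}),
                **copy.deepcopy(env_spec),
            }
    return resource_specs

specs = {"any": {"num_cores": 2, "environments": {"my_env": {"version": "v1"}}}}
print(merge_env_specs(specs, {"name": "my_env"}))
# {'any': {'num_cores': 2, 'environments': {'my_env': {'version': 'v1', 'name': 'my_env'}}}}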
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/errors.py

@@ -95,6 +95,10 @@ class MissingActionEnvironment(Exception):
    pass
 
 
+class ActionEnvironmentMissingNameError(Exception):
+    pass
+
+
 class FromSpecMissingObjectError(Exception):
    pass
 

@@ -391,3 +395,15 @@ class UnknownScriptDataKey(ValueError):
 
 class MissingVariableSubstitutionError(KeyError):
    pass
+
+
+class EnvironmentPresetUnknownEnvironmentError(ValueError):
+    pass
+
+
+class UnknownEnvironmentPresetError(ValueError):
+    pass
+
+
+class MultipleEnvironmentsError(ValueError):
+    pass
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/object_list.py

@@ -4,6 +4,10 @@ from types import SimpleNamespace
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
 
 
+class ObjectListMultipleMatchError(ValueError):
+    pass
+
+
 class ObjectList(JSONLike):
    """A list-like class that provides item access via a `get` method according to
    attributes or dict-keys.

@@ -116,14 +120,22 @@ class ObjectList(JSONLike):
        if not result:
            available = []
            for obj in self._objects:
-
+                attr_vals = {}
+                for k in kwargs:
+                    try:
+                        attr_vals[k] = self._get_obj_attr(obj, k)
+                    except (AttributeError, KeyError):
+                        continue
+                available.append(attr_vals)
            raise ValueError(
                f"No {self._descriptor} objects with attributes: {kwargs}. Available "
                f"objects have attributes: {tuple(available)!r}."
            )
 
        elif len(result) > 1:
-            raise
+            raise ObjectListMultipleMatchError(
+                f"Multiple objects with attributes: {kwargs}."
+            )
 
        return result[0]
 

@@ -571,23 +583,23 @@ class ResourceList(ObjectList):
    def get_scopes(self):
        return tuple(i.scope for i in self._objects)
 
-    def
-        """Merge lower-precedence
-        for scope_i in
+    def merge_other(self, other):
+        """Merge lower-precedence other resource list into this resource list."""
+        for scope_i in other.get_scopes():
            try:
-
+                self_scoped = self.get(scope=scope_i)
            except ValueError:
-
+                in_self = False
            else:
-
+                in_self = True
 
-
-            if
-                for k, v in
-                    if getattr(
-                        setattr(
+            other_scoped = other.get(scope=scope_i)
+            if in_self:
+                for k, v in other_scoped._get_members().items():
+                    if getattr(self_scoped, k) is None:
+                        setattr(self_scoped, f"_{k}", copy.deepcopy(v))
            else:
-                self.add_object(copy.deepcopy(
+                self.add_object(copy.deepcopy(other_scoped))
 
 
 def index(obj_lst, obj):
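`ResourceList.merge_other` above folds a lower-precedence resource list into the current one: scopes missing from `self` are copied across whole, while for scopes present in both, only attributes still unset (`None`) on `self` are filled from `other`. A plain-dict sketch of that precedence rule, assuming simple member dicts rather than the real `ResourceSpec` objects and their private members:

import copy

def merge_other(self_scopes: dict, other_scopes: dict) -> dict:
    """Merge lower-precedence `other_scopes` into `self_scopes` in place."""
    for scope, other_members in other_scopes.items():
        if scope not in self_scopes:
            # scope only defined in the lower-precedence list: copy it wholesale
            self_scopes[scope] = copy.deepcopy(other_members)
            continue
        for k, v in other_members.items():
            # fill only attributes that are still unset on the higher-precedence side
            if self_scopes[scope].get(k) is None:
                self_scopes[scope][k] = copy.deepcopy(v)
    return self_scopes

template = {"any": {"num_cores": None, "scheduler": "slurm"}}
task = {"any": {"num_cores": 4}, "main": {"num_cores": 8}}
print(merge_other(task, template))
# {'any': {'num_cores': 4, 'scheduler': 'slurm'}, 'main': {'num_cores': 8}}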
{hpcflow_new2-0.2.0a161 → hpcflow_new2-0.2.0a163}/hpcflow/sdk/core/parameters.py

@@ -678,9 +678,11 @@ class ValueSequence(JSONLike):
            )
        path_l = path.lower()
        path_split = path_l.split(".")
-
+        allowed_path_start = ("inputs", "resources", "environments", "env_preset")
+        if not path_split[0] in allowed_path_start:
            raise MalformedParameterPathError(
-                f
+                f"`path` must start with one of: "
+                f'{", ".join(f"{i!r}" for i in allowed_path_start)}, but given path '
                f"is: {path!r}."
            )
 

@@ -703,7 +705,7 @@ class ValueSequence(JSONLike):
        elif label_from_path:
            label = label_from_path
 
-
+        elif path_split[0] == "resources":
            if label_from_path or label_arg:
                raise ValueError(
                    f"{self.__class__.__name__} `label` argument ({label_arg!r}) and/or "

@@ -728,6 +730,14 @@ class ValueSequence(JSONLike):
                    f"resource item names are: {allowed_keys_str}."
                )
 
+        elif path_split[0] == "environments":
+            # rewrite as a resources path:
+            path = f"resources.any.{path}"
+
+        # note: `env_preset` paths also need to be transformed into `resources` paths, but
+        # we cannot do that until the sequence is part of a task, since the available
+        # environment presets are defined in the task schema.
+
        return path, label
 
    def to_dict(self):

@@ -1330,6 +1340,7 @@ class ResourceSpec(JSONLike):
        "scheduler_args",
        "shell_args",
        "os_name",
+        "environments",
        "SGE_parallel_env",
        "SLURM_partition",
        "SLURM_num_tasks",

@@ -1364,6 +1375,7 @@ class ResourceSpec(JSONLike):
        scheduler_args: Optional[Dict] = None,
        shell_args: Optional[Dict] = None,
        os_name: Optional[str] = None,
+        environments: Optional[Dict] = None,
        SGE_parallel_env: Optional[str] = None,
        SLURM_partition: Optional[str] = None,
        SLURM_num_tasks: Optional[str] = None,

@@ -1392,6 +1404,7 @@ class ResourceSpec(JSONLike):
        self._scheduler = self._process_string(scheduler)
        self._shell = self._process_string(shell)
        self._os_name = self._process_string(os_name)
+        self._environments = environments
        self._use_job_array = use_job_array
        self._max_array_items = max_array_items
        self._time_limit = time_limit

@@ -1526,6 +1539,7 @@ class ResourceSpec(JSONLike):
        self._scheduler_args = None
        self._shell_args = None
        self._os_name = None
+        self._environments = None
 
        return (self.normalised_path, [data_ref], is_new)
 

@@ -1625,6 +1639,10 @@ class ResourceSpec(JSONLike):
    def os_name(self):
        return self._get_value("os_name")
 
+    @property
+    def environments(self):
+        return self._get_value("environments")
+
    @property
    def SGE_parallel_env(self):
        return self._get_value("SGE_parallel_env")
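The `ValueSequence` changes above extend the allowed sequence path roots to `environments` and `env_preset`, rewriting `environments.*` paths as `resources.any.*` paths (per the diff's comment, the `env_preset` rewrite is deferred until the sequence belongs to a task). A small sketch of the rewriting rule as a standalone function with hypothetical paths:

def normalise_sequence_path(path: str) -> str:
    """Validate the path root and rewrite environment paths as resource paths."""
    allowed_path_start = ("inputs", "resources", "environments", "env_preset")
    root = path.lower().split(".")[0]
    if root not in allowed_path_start:
        raise ValueError(f"`path` must start with one of {allowed_path_start}, given: {path!r}")
    if root == "environments":
        # rewrite as a resources path, as in the diff above
        return f"resources.any.{path}"
    return path

print(normalise_sequence_path("environments.python_env.version"))
# resources.any.environments.python_env.version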