hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -462
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -458
- hpcflow/archive/archive.py +0 -308
- hpcflow/archive/cloud/cloud.py +0 -47
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -432
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -232
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2549
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -323
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -167
- hpcflow/variables.py +0 -544
- hpcflow-0.1.9.dist-info/METADATA +0 -168
- hpcflow-0.1.9.dist-info/RECORD +0 -45
- hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.9.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/sdk/submission/types.py ADDED
@@ -0,0 +1,140 @@
+"""
+Types for the submission subsystem.
+"""
+
+from __future__ import annotations
+from typing import Any, TYPE_CHECKING
+from typing_extensions import NotRequired, TypeAlias, TypedDict
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+    from datetime import datetime
+    from numpy.typing import NDArray
+    from ..core.element import ElementResources
+
+
+class JobScriptDescriptor(TypedDict):
+    """
+    Descriptor for a jobscript.
+    """
+
+    #: Resources required by the jobscript.
+    resources: Any
+    #: Elements handled by the jobscript.
+    elements: dict[int, list[int]]
+    #: Dependencies of the jobscript.
+    dependencies: NotRequired[dict[int, ResolvedJobscriptBlockDependencies]]
+    #: Hash of resources.
+    resource_hash: NotRequired[str]
+
+
+class ResolvedJobscriptBlockDependencies(TypedDict):
+    """
+    The resolution of a jobscript block dependency. This represents the dependency of one
+    jobscript block on another.
+    """
+
+    #: Mapping of jobscript elements.
+    js_element_mapping: dict[int, list[int]]
+    #: Whether this is an array mapping.
+    is_array: NotRequired[bool]
+
+
+class JobScriptCreationArguments(TypedDict):
+    """
+    Arguments to pass to create a :class:`Jobscript`.
+    """
+
+    # TODO: this currently represents a mix of arguments for both jobscripts and jobscript
+    # blocks; need to separate
+
+    #: The task insertion IDs.
+    task_insert_IDs: list[int]
+    #: The actions of the tasks.
+    task_actions: list[tuple[int, int, int]]
+    #: The elements of the tasks.
+    task_elements: dict[int, list[int]]
+    #: Element action run information.
+    EAR_ID: NDArray
+    #: Resources to use.
+    resources: NotRequired[ElementResources]
+    #: Description of what loops are in play.
+    task_loop_idx: list[dict[str, int]]
+    #: Description of dependencies.
+    dependencies: dict[int | tuple[int, int], ResolvedJobscriptBlockDependencies]
+    #: Whether this is an array jobscript.
+    is_array: NotRequired[bool]
+    #: When the jobscript was submitted, if known.
+    submit_time: NotRequired[datetime]
+    #: Where the jobscript was submitted, if known.
+    submit_hostname: NotRequired[str]
+    #: Description of what the jobscript was submitted to, if known.
+    submit_machine: NotRequired[str]
+    #: The command line used to do the commit, if known.
+    submit_cmdline: NotRequired[list[str]]
+    #: The job ID from the scheduler, if known.
+    scheduler_job_ID: NotRequired[str]
+    #: The process ID of the subprocess, if known.
+    process_ID: NotRequired[int]
+    #: Version info about the target system.
+    version_info: NotRequired[dict[str, str | list[str]]]
+    #: The name of the OS.
+    os_name: NotRequired[str]
+    #: The name of the shell.
+    shell_name: NotRequired[str]
+    #: The scheduler used.
+    scheduler_name: NotRequired[str]
+    #: Whether the jobscript is currently running.
+    running: NotRequired[bool]
+    #: Do not supply!
+    resource_hash: NotRequired[str]
+    #: Do not supply!
+    elements: NotRequired[dict[int, list[int]]]
+
+
+class SchedulerRef(TypedDict):
+    """
+    Scheduler reference descriptor.
+    """
+
+    #: Jobscript references.
+    js_refs: list  # Internal type is horrible and variable
+    #: Number of jobscript elements.
+    num_js_elements: int
+
+
+class SubmissionPart(TypedDict):
+    """
+    A part of a submission.
+    """
+
+    #: Timestamp for when this part was submitted.
+    submit_time: datetime
+    #: The jobscripts involved in this submission.
+    jobscripts: list[int]
+
+
+# This needs PEP 728 for a better type, alas
+#: Version data.
+VersionInfo: TypeAlias = "dict[str, str | list[str]]"
+
+
+# TODO: This really doesn't belong here?!
+class JobscriptHeaderArgs(TypedDict):
+    """
+    Keyword arguments to use when creating a job script from a
+    :class:`Jobscript`.
+    """
+
+    #: Application invocation. (Arguments, etc.)
+    app_invoc: str | Sequence[str]
+    #: Workflow application alias.
+    workflow_app_alias: NotRequired[str]
+    #: Environment setup.
+    env_setup: NotRequired[str]
+    #: Application name in CAPS
+    app_caps: NotRequired[str]
+    #: Configuration directory.
+    config_dir: NotRequired[str]
+    #: Configuration key.
+    config_invoc_key: NotRequired[Any]
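These TypedDicts are plain dictionaries at runtime, so they mainly document the expected key sets for static type checkers. A minimal usage sketch (the import path is assumed from the file location above; the keys are taken from the definitions in this diff):

from datetime import datetime, timezone

from hpcflow.sdk.submission.types import SchedulerRef, SubmissionPart  # assumed import path

# Required keys must be present; NotRequired keys may be omitted.
part: SubmissionPart = {
    "submit_time": datetime.now(timezone.utc),  # when this part was submitted
    "jobscripts": [0, 1],                       # jobscript indices in this part
}
ref: SchedulerRef = {"js_refs": ["1234"], "num_js_elements": 8}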
hpcflow/sdk/typing.py ADDED
@@ -0,0 +1,194 @@
+"""
+Common type aliases.
+"""
+
+from __future__ import annotations
+from dataclasses import InitVar
+from typing import Any, ClassVar, Final, TypeVar, cast, TYPE_CHECKING
+from typing_extensions import NotRequired, TypeAlias, TypedDict
+from pathlib import Path
+import re
+
+if TYPE_CHECKING:
+    from collections.abc import Mapping
+    from datetime import datetime
+    from rich.status import Status
+    from .core.object_list import (
+        CommandFilesList,
+        EnvironmentsList,
+        ParametersList,
+        TaskSchemasList,
+    )
+    from .submission.enums import JobscriptElementState
+    from .submission.submission import Submission
+
+
+#: Type of a value that can be treated as a path.
+PathLike: TypeAlias = "str | Path | None"
+
+
+class ParamSource(TypedDict):
+    """
+    A parameter source descriptor.
+    """
+
+    #: Parameter type name.
+    type: NotRequired[str]
+    #: EAR ID.
+    EAR_ID: NotRequired[int]
+    #: Task insertion ID.
+    task_insert_ID: NotRequired[int]
+    #: Action index.
+    action_idx: NotRequired[int]
+    #: Element index.
+    element_idx: NotRequired[int]
+    #: Element set index.
+    element_set_idx: NotRequired[int]
+    #: Element action run index.
+    run_idx: NotRequired[int]
+    #: Sequence index.
+    sequence_idx: NotRequired[int]
+    #: Task index.
+    task_idx: NotRequired[int]
+    #: Name of method used to create the parameter's value(s).
+    value_class_method: NotRequired[str]
+
+
+class KnownSubmission(TypedDict):
+    """
+    Describes a known submission.
+    """
+
+    #: Local ID.
+    local_id: int
+    #: Workflow global ID.
+    workflow_id: str
+    #: Whether the submission is active.
+    is_active: bool
+    #: Submission index.
+    sub_idx: int
+    #: Submission time.
+    submit_time: str
+    #: Path to submission.
+    path: str
+    #: Start time.
+    start_time: str
+    #: Finish time.
+    end_time: str
+
+
+class KnownSubmissionItem(TypedDict):
+    """
+    Describes a known submission.
+    """
+
+    #: Local ID.
+    local_id: int
+    #: Workflow global ID.
+    workflow_id: str
+    #: Path to the workflow.
+    workflow_path: str
+    #: Time of submission.
+    submit_time: str
+    #: Parsed time of submission.
+    submit_time_obj: NotRequired[datetime | None]
+    #: Time of start.
+    start_time: str
+    #: Parsed time of start.
+    start_time_obj: datetime | None
+    #: Time of finish.
+    end_time: str
+    #: Parsed time of finish.
+    end_time_obj: datetime | None
+    #: Submission index.
+    sub_idx: int
+    #: Jobscripts in submission.
+    jobscripts: list[int]
+    #: Active jobscript state.
+    active_jobscripts: Mapping[int, Mapping[int, Mapping[int, JobscriptElementState]]]
+    #: Whether this is deleted.
+    deleted: bool
+    #: Whether this is unloadable.
+    unloadable: bool
+    #: Expanded submission object.
+    submission: NotRequired[Submission]
+
+
+class TemplateComponents(TypedDict):
+    """
+    Components loaded from templates.
+    """
+
+    #: Parameters loaded from templates.
+    parameters: NotRequired[ParametersList]
+    #: Command files loaded from templates.
+    command_files: NotRequired[CommandFilesList]
+    #: Execution environments loaded from templates.
+    environments: NotRequired[EnvironmentsList]
+    #: Task schemas loaded from templates.
+    task_schemas: NotRequired[TaskSchemasList]
+    #: Scripts discovered by templates.
+    scripts: NotRequired[dict[str, Path]]
+    #: Programs discovered by templates.
+    programs: NotRequired[dict[str, Path]]
+    #: Jinja templates discovered by templates.
+    jinja_templates: NotRequired[dict[str, Path]]
+
+
+class MakeWorkflowCommonArgs(TypedDict):
+    """
+    Common keys used in workflow construction in :py:meth:`BaseApp._make_workflow`.
+    """
+
+    path: str | None
+    name: str | None
+    name_add_timestamp: bool | None
+    name_use_dir: bool | None
+    overwrite: bool
+    store: str
+    ts_fmt: str | None
+    ts_name_fmt: str | None
+    store_kwargs: dict[str, Any] | None
+    variables: dict[str, Any] | None
+    status: Status | None
+
+
+#: Simplification of :class:`TemplateComponents` to allow some types of
+#: internal manipulations.
+BasicTemplateComponents: TypeAlias = "dict[str, list[dict]]"
+
+
+DataIndex: TypeAlias = "dict[str, int | list[int]]"
+"""
+The type of indices to data. These are *normally* dictionaries of integers,
+but can have leaves being lists of integers when dealing with element groups
+(i.e., when a downstream element uses outputs from multiple upstream elements,
+rather than just a single upstream element).
+"""
+
+
+_T = TypeVar("_T")
+
+_CLASS_VAR_RE: Final = re.compile(r"ClassVar\[(.*)\]")
+_INIT_VAR_RE: Final = re.compile(r"InitVar\[(.*)\]")
+
+
+def hydrate(cls: type[_T]) -> type[_T]:
+    """
+    Partially hydrates the annotations on fields in a class, so that a @dataclass
+    annotation can recognise that ClassVar-annotated fields are class variables.
+    """
+    anns: dict[str, Any] = {}
+    for f, a in cls.__annotations__.items():
+        if isinstance(a, str):
+            m = _CLASS_VAR_RE.match(a)
+            if m:
+                anns[f] = cast(Any, ClassVar[m[1]])
+                continue
+            m = _INIT_VAR_RE.match(a)
+            if m:
+                anns[f] = cast(Any, InitVar(cast(type, m[1])))
+                continue
+        anns[f] = a
+    cls.__annotations__ = anns
+    return cls
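The `hydrate` decorator rewrites string annotations such as "ClassVar[...]" into real `ClassVar`/`InitVar` objects before `@dataclass` inspects them. A minimal sketch of the intended decorator ordering (the import path is assumed from the file location above; the class is hypothetical):

from dataclasses import dataclass

from hpcflow.sdk.typing import hydrate  # assumed import path

@dataclass
@hydrate  # applied first (innermost), so @dataclass sees a real ClassVar annotation
class Example:
    kind: "ClassVar[str]" = "demo"  # treated as a class variable, not an __init__ field
    value: int = 0

assert Example(3).value == 3   # only 'value' is an __init__ parameter
assert Example.kind == "demo"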
hpcflow/sdk/utils/arrays.py ADDED
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, overload
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray
+
+
+@overload
+def get_2D_idx(idx: int, num_cols: int) -> tuple[int, int]: ...
+
+
+@overload
+def get_2D_idx(idx: NDArray, num_cols: int) -> tuple[NDArray, NDArray]: ...
+
+
+def get_2D_idx(idx: int | NDArray, num_cols: int) -> tuple[int | NDArray, int | NDArray]:
+    """Convert a 1D index to a 2D index, assuming items are arranged in a row-major
+    order."""
+    row_idx = idx // num_cols
+    col_idx = idx % num_cols
+    return (row_idx, col_idx)
+
+
+def get_1D_idx(
+    row_idx: int | NDArray, col_idx: int | NDArray, num_cols: int
+) -> int | NDArray:
+    """Convert a 2D (row, col) index into a 1D index, assuming items are arranged in a
+    row-major order."""
+    return row_idx * num_cols + col_idx
+
+
+def split_arr(arr: NDArray, metadata_size: int) -> list[tuple[NDArray, NDArray]]:
+    """Split a 1D integer array into a list of tuples, each containing a metadata array
+    and a data array, where the size of each (metadata + data) sub-array is specified as
+    the integer immediately before each (metadata + data) sub-array.
+
+    Parameters
+    ----------
+    arr
+        One dimensional integer array to split.
+    metadata_size
+        How many elements to include in the metadata array. This can be zero.
+
+    Returns
+    -------
+    sub_arrs
+        List of tuples of integer arrays. The integers that define the sizes of the
+        sub-arrays are excluded.
+
+    Examples
+    --------
+    >>> split_arr(np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6]), metadata_size=1)
+    [(array([0]), array([1, 2, 3])), (array([1]), array([4, 5, 6]))]
+
+    """
+    count = 0
+    block_start = 0
+    sub_arrs = []
+    while count < len(arr):
+        size = arr[block_start]
+        start = block_start + 1
+        end = start + size
+        metadata_i = arr[start : start + metadata_size]
+        sub_arr_i = arr[start + metadata_size : end]
+        sub_arrs.append((metadata_i, sub_arr_i))
+        count += size + 1
+        block_start = end
+    return sub_arrs
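For illustration, a small round-trip through the two index helpers (a sketch; the import path is assumed from the file location above):

import numpy as np

from hpcflow.sdk.utils.arrays import get_1D_idx, get_2D_idx  # assumed import path

# Map flat (row-major) indices into (row, col) form for a 4-column layout, and back.
rows, cols = get_2D_idx(np.arange(8), num_cols=4)
assert rows.tolist() == [0, 0, 0, 0, 1, 1, 1, 1]
assert cols.tolist() == [0, 1, 2, 3, 0, 1, 2, 3]
assert get_1D_idx(rows, cols, num_cols=4).tolist() == list(range(8))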
hpcflow/sdk/utils/deferred_file.py ADDED
@@ -0,0 +1,55 @@
+from os import PathLike
+from typing import Literal, Union
+
+
+class DeferredFileWriter:
+    """A class that provides a context manager for deferring writing or appending to a
+    file until a write method is called.
+
+    Attributes
+    ----------
+    filename
+        The file path to open
+    mode
+        The mode to use.
+
+    Examples
+    --------
+    >>> with DeferredFileWrite("new_file.txt", "w") as f:
+    ...     # file is not yet created
+    ...     f.write("contents")
+    ...     # file is now created, but not closed
+    ... # file is now closed
+
+    """
+
+    def __init__(self, filename: Union[str, PathLike], mode: Literal["w", "a"], **kwargs):
+        self.filename = filename
+        self.mode = mode
+        self.file = None
+        self.kwargs = kwargs
+        self._is_open = False
+
+    def _ensure_open(self):
+        if not self._is_open:
+            self.file = open(self.filename, self.mode, **self.kwargs)
+            self._is_open = True
+
+    def write(self, data):
+        self._ensure_open()
+        self.file.write(data)
+
+    def writelines(self, lines):
+        self._ensure_open()
+        self.file.writelines(lines)
+
+    def close(self):
+        if self._is_open:
+            self.file.close()
+            self._is_open = False
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
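A small sketch of the deferred-open behaviour (import path assumed from the file location above): the file only appears on disk once the first write happens.

import os
import tempfile

from hpcflow.sdk.utils.deferred_file import DeferredFileWriter  # assumed import path

path = os.path.join(tempfile.mkdtemp(), "log.txt")
with DeferredFileWriter(path, "w") as fp:
    assert not os.path.exists(path)  # nothing has been opened yet
    fp.write("first line\n")         # the first write opens (and creates) the file
    assert os.path.exists(path)
# leaving the context closes the file if it was opened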
hpcflow/sdk/utils/hashing.py ADDED
@@ -0,0 +1,16 @@
+def get_hash(obj):
+    """Return a hash from an arbitrarily nested structure dicts, lists, tuples, and
+    sets.
+
+    Note the resulting hash is not necessarily stable across sessions or machines.
+    """
+
+    if isinstance(obj, (set, tuple, list)):
+        return hash(tuple([type(obj)] + [get_hash(i) for i in obj]))
+
+    elif not isinstance(obj, dict):
+        return hash(obj)
+
+    new_obj = {k: get_hash(obj[k]) for k in obj}
+
+    return hash(frozenset(sorted(new_obj.items())))
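An illustrative property of `get_hash` (a sketch; import path assumed from the file location above): dictionary key order does not affect the hash, but container type does.

from hpcflow.sdk.utils.hashing import get_hash  # assumed import path

a = {"nums": [1, 2, 3], "meta": {"tag": ("x", "y")}}
b = {"meta": {"tag": ("x", "y")}, "nums": [1, 2, 3]}  # same content, different key order
assert get_hash(a) == get_hash(b)             # insertion order is irrelevant
assert get_hash([1, 2]) != get_hash((1, 2))   # list vs. tuple hash differently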
hpcflow/sdk/utils/patches.py ADDED
@@ -0,0 +1,31 @@
+from contextlib import contextmanager
+from pathlib import Path
+import sys
+from typing import Any
+
+
+def resolve_path(path):
+    """On Windows Python 3.8, 3.9, and 3.10, `Pathlib.resolve` does
+    not return an absolute path for non-existant paths, when it should.
+
+    See: https://github.com/python/cpython/issues/82852
+
+    """
+    # TODO: this only seems to be used in a test; remove?
+    return Path.cwd() / Path(path).resolve()  # cwd is ignored if already absolute
+
+
+@contextmanager
+def override_module_attrs(module_name: str, overrides: dict[str, Any]):
+    """Context manager to temporarily override module-level attributes. The module must be
+    imported (i.e. within `sys.modules`)."""
+
+    module = sys.modules[module_name]
+    original_values = {k: getattr(module, k) for k in overrides}
+    try:
+        for k, v in overrides.items():
+            setattr(module, k, v)
+        yield
+    finally:
+        for k, v in original_values.items():
+            setattr(module, k, v)
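A minimal sketch of `override_module_attrs`, using a throwaway module registered in `sys.modules` so no real module is touched (import path assumed from the file location above):

import sys
import types

from hpcflow.sdk.utils.patches import override_module_attrs  # assumed import path

demo = types.ModuleType("_demo_settings")  # hypothetical module for illustration
demo.TIMEOUT = 30
sys.modules["_demo_settings"] = demo

with override_module_attrs("_demo_settings", {"TIMEOUT": 1}):
    assert demo.TIMEOUT == 1   # patched inside the context
assert demo.TIMEOUT == 30      # restored on exit, even if an exception had been raised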
hpcflow/sdk/utils/strings.py ADDED
@@ -0,0 +1,69 @@
+from typing import Iterable
+import re
+
+
+def shorten_list_str(
+    lst: Iterable, items: int = 10, end_num: int = 1, placeholder: str = "..."
+) -> str:
+    """Format a list as a string, including only some maximum number of items.
+
+    Parameters
+    ----------
+    lst:
+        The list to format in a shortened form.
+    items:
+        The total number of items to include in the formatted list.
+    end_num:
+        The number of items to include at the end of the formatted list.
+    placeholder
+        The placeholder to use to replace excess items in the formatted list.
+
+    Examples
+    --------
+    >>> shorten_list_str(list(range(20)), items=5)
+    '[0, 1, 2, 3, ..., 19]'
+
+    """
+    lst = list(lst)
+    if len(lst) <= items + 1:  # (don't replace only one item)
+        lst_short = lst
+    else:
+        start_num = items - end_num
+        lst_short = lst[:start_num] + ["..."] + lst[-end_num:]
+
+    return "[" + ", ".join(f"{i}" for i in lst_short) + "]"
+
+
+def extract_py_from_future_imports(py_str: str) -> tuple[str, set[str]]:
+    """
+    Remove any `from __future__ import <feature>` lines from a string of Python code, and
+    return the modified string, and a list of `<feature>`s that were imported.
+
+    Notes
+    -----
+    This is required when generated a combined-scripts jobscript that concatenates
+    multiple Python scripts into one script. If `__future__` statements are included in
+    these individual scripts, they must be moved to the top of the file [1].
+
+    References
+    ----------
+    [1] https://docs.python.org/3/reference/simple_stmts.html#future-statements
+
+    """
+
+    pattern = r"^from __future__ import (.*)\n"
+    if future_imports := (set(re.findall(pattern, py_str, flags=re.MULTILINE) or ())):
+        future_imports = {
+            j.strip() for i in future_imports for j in i.split(",") if j.strip()
+        }
+        py_str = re.sub(pattern, "", py_str, flags=re.MULTILINE)
+
+    return (py_str, future_imports)
+
+
+def capitalise_first_letter(chars: str) -> str:
+    """
+    Convert the first character of a string to upper case (if that makes sense).
+    The rest of the string is unchanged.
+    """
+    return chars[0].upper() + chars[1:]
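Two quick illustrations of the string helpers (a sketch; import path assumed from the file location above):

from hpcflow.sdk.utils.strings import (  # assumed import path
    extract_py_from_future_imports,
    shorten_list_str,
)

src = "from __future__ import annotations, division\nx: int = 1\n"
body, features = extract_py_from_future_imports(src)
assert features == {"annotations", "division"}  # features are split and stripped
assert body == "x: int = 1\n"                   # the __future__ line is removed

assert shorten_list_str(range(100), items=4) == "[0, 1, 2, ..., 99]"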
hpcflow/tests/api/test_api.py ADDED
@@ -0,0 +1,32 @@
+import pytest
+from hpcflow.sdk.core.utils import get_file_context
+from hpcflow.app import app as hf
+
+
+@pytest.mark.integration
+def test_api_make_and_submit_workflow(null_config, tmp_path):
+    with get_file_context("hpcflow.tests.data", "workflow_1.yaml") as file_path:
+        wk = hf.make_and_submit_workflow(
+            file_path,
+            path=tmp_path,
+            status=False,
+            add_to_known=False,
+            wait=True,
+        )
+        p2 = wk.tasks[0].elements[0].outputs.p2
+        assert isinstance(p2, hf.ElementParameter)
+        assert p2.value == "201"
+
+
+@pytest.mark.integration
+def test_api_make_and_submit_demo_workflow(null_config, tmp_path):
+    wk = hf.make_and_submit_demo_workflow(
+        "workflow_1",
+        path=tmp_path,
+        status=False,
+        add_to_known=False,
+        wait=True,
+    )
+    p2 = wk.tasks[0].elements[0].outputs.p2
+    assert isinstance(p2, hf.ElementParameter)
+    assert p2.value == "201"