hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -462
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -458
- hpcflow/archive/archive.py +0 -308
- hpcflow/archive/cloud/cloud.py +0 -47
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -432
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -232
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2549
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -323
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -167
- hpcflow/variables.py +0 -544
- hpcflow-0.1.9.dist-info/METADATA +0 -168
- hpcflow-0.1.9.dist-info/RECORD +0 -45
- hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.9.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
|
@@ -0,0 +1,385 @@
|
|
|
1
|
+
"""Click CLI options that are used as decorators in multiple modules."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
import click
|
|
5
|
+
|
|
6
|
+
from hpcflow.sdk.core import ALL_TEMPLATE_FORMATS
|
|
7
|
+
from hpcflow.sdk.persistence.defaults import DEFAULT_STORE_FORMAT
|
|
8
|
+
from hpcflow.sdk.persistence.discovery import ALL_STORE_FORMATS
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class BoolOrString(click.ParamType):
    """Click parameter type that accepts either a boolean or one of a fixed
    set of allowed strings."""

    name = "bool-or-string"

    def __init__(self, allowed_strings, true_strings=None, false_strings=None):
        #: Strings accepted verbatim (after lower-casing) as-is.
        self.allowed_strings = allowed_strings
        #: Strings coerced to ``True``/``False`` respectively.
        self.true_strings = true_strings or ["true", "yes", "on"]
        self.false_strings = false_strings or ["false", "no", "off"]

    def convert(self, value, param, ctx):
        # Genuine booleans pass straight through unchanged.
        if isinstance(value, bool):
            return value

        # Everything else is compared case-insensitively as a string.
        value = str(value).lower()

        if value in self.true_strings:
            return True
        if value in self.false_strings:
            return False
        if value in self.allowed_strings:
            return value

        # Not a boolean spelling and not an allowed string: report failure.
        allowed_fmt = ", ".join(f"{i!r}" for i in self.allowed_strings)
        self.fail(
            message=f"{value} is not a valid boolean or one of {allowed_fmt}.",
            param=param,
            ctx=ctx,
        )
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def sub_tasks_callback(ctx, param, value: str | None) -> list[int] | None:
    """
    Parse subtasks.
    """
    # An empty or missing value means "no task filter".
    if not value:
        return None
    return [int(part) for part in value.split(",")]
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
#: Standard option: ``--format`` — force parsing the template from a particular format.
format_option = click.option(
    "--format",
    type=click.Choice(ALL_TEMPLATE_FORMATS),
    default=None,
    help=(
        'If specified, one of "json" or "yaml". This forces parsing from a '
        "particular format."
    ),
)
#: Standard option: ``--path`` — directory in which to generate the workflow.
path_option = click.option(
    "--path",
    type=click.Path(exists=True),
    help=(
        "The directory in which the workflow will be generated. If not specified, the "
        "config item `default_workflow_path` will be used; if that is not set, the "
        "current directory is used."
    ),
)
#: Standard option: ``--name`` — name of the workflow directory.
name_option = click.option(
    "--name",
    help=(
        "The name of the workflow. If specified, the workflow directory will be "
        "`path` joined with `name`. If not specified the workflow template name "
        "will be used, in combination with a date-timestamp."
    ),
)
#: Standard option: ``--name-timestamp/--name-no-timestamp`` — toggle a
#: date-timestamp suffix on the workflow name.
name_timestamp_option = click.option(
    "--name-timestamp/--name-no-timestamp",
    "name_add_timestamp",
    is_flag=True,
    default=None,
    help=(
        "If True, suffix the workflow name with a date-timestamp. A default value can be "
        " set with the config item `workflow_name_add_timestamp`; otherwise set to "
        "`True`."
    ),
)
#: Standard option: ``--name-dir/--name-no-dir`` — place the timestamped directory
#: inside a parent directory named after the workflow.
name_dir_option = click.option(
    "--name-dir/--name-no-dir",
    "name_use_dir",
    is_flag=True,
    default=None,
    help=(
        "If True, and `--name-timestamp` is also True, the workflow directory name "
        "will be just the date-timestamp, and will be contained within a parent "
        "directory corresponding to the workflow name. A default value can be set with "
        "the config item `workflow_name_use_dir`; otherwise set to `False`."
    ),
)
#: Standard option: ``--overwrite`` — overwrite an existing workflow directory.
overwrite_option = click.option(
    "--overwrite",
    is_flag=True,
    default=False,
    help=(
        "If True and the workflow directory (`path` + `name`) already exists, "
        "the existing directory will be overwritten."
    ),
)
#: Standard option: ``--store`` — persistent store format for the workflow.
store_option = click.option(
    "--store",
    type=click.Choice(ALL_STORE_FORMATS),
    help="The persistent store type to use.",
    default=DEFAULT_STORE_FORMAT,
)
|
|
130
|
+
|
|
131
|
+
#: Standard option: ``--ts-fmt`` — datetime format for stored datetimes (UTC).
ts_fmt_option = click.option(
    "--ts-fmt",
    help=(
        "The datetime format to use for storing datetimes. Datetimes are always "
        "stored in UTC (because Numpy does not store time zone info), so this "
        "should not include a time zone name."
    ),
)
#: Standard option: ``--ts-name-fmt`` — datetime format used in generated workflow names.
ts_name_fmt_option = click.option(
    "--ts-name-fmt",
    help=(
        "The datetime format to use when generating the workflow name, where it "
        "includes a timestamp."
    ),
)

#: Standard option: ``-v``/``--var`` — repeatable (name, value) template variable pairs.
variables_option = click.option(
    "-v",
    "--var",
    "variables",
    type=(str, str),
    multiple=True,
    help=(
        "Workflow template variable value to be substituted in to the template file or "
        "string. Multiple variable values can be specified."
    ),
)
#: Standard option: ``--js-parallelism`` — bool or 'direct'/'scheduled' (see
#: :class:`BoolOrString`).
js_parallelism_option = click.option(
    "--js-parallelism",
    help=(
        "If True, allow multiple jobscripts to execute simultaneously. If "
        "'scheduled'/'direct', only allow simultaneous execution of scheduled/direct "
        "jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store type "
        "does not support the `jobscript_parallelism` feature. If not set, jobscript "
        "parallelism will be used if the store type supports it, for scheduled "
        "jobscripts only."
    ),
    type=BoolOrString(["direct", "scheduled"]),
)
#: Standard option: ``--wait`` — block until workflow execution completes.
wait_option = click.option(
    "--wait",
    help=("If True, this command will block until the workflow execution is complete."),
    is_flag=True,
    default=False,
)
|
|
181
|
+
#: Standard option: ``--add-to-known/--no-add-to-known`` — record the submission in
#: the known-submissions file.
add_to_known_opt = click.option(
    "--add-to-known/--no-add-to-known",
    default=True,
    help="If True, add this submission to the known-submissions file.",
)
#: Standard option: ``--print-idx`` — print submitted jobscript indices.
print_idx_opt = click.option(
    "--print-idx",
    help="If True, print the submitted jobscript indices for each submission index.",
    is_flag=True,
    default=False,
)
#: Standard option: ``--tasks`` — comma-separated task indices, parsed by
#: :func:`sub_tasks_callback`.
tasks_opt = click.option(
    "--tasks",
    help=(
        "List of comma-separated task indices to include in this submission. By default "
        "all tasks are included."
    ),
    callback=sub_tasks_callback,
)
#: Standard option: ``--cancel`` — cancel the submission immediately.
cancel_opt = click.option(
    "--cancel",
    help="Immediately cancel the submission. Useful for testing and benchmarking.",
    is_flag=True,
    default=False,
)
#: Standard option: ``--status/--no-status`` — live status during submission.
submit_status_opt = click.option(
    "--status/--no-status",
    help="If True, display a live status to track submission progress.",
    default=True,
)
#: Standard option: ``--force-array`` — force job arrays (testing only).
force_arr_opt = click.option(
    "--force-array",
    help=(
        "Used to force the use of job arrays, even if the scheduler does not support it. "
        "This is provided for testing purposes only."
    ),
    is_flag=True,
    default=False,
)

#: Standard option: ``--status/--no-status`` — live status during workflow creation.
make_status_opt = click.option(
    "--status/--no-status",
    help="If True, display a live status to track workflow creation progress.",
    default=True,
)

#: Standard option: ``--add-submission`` — add a submission without submitting.
add_sub_opt = click.option(
    "--add-submission",
    help=("If True, add a submission to the workflow (but do not submit)."),
    is_flag=True,
    default=False,
)
|
|
241
|
+
|
|
242
|
+
#: Standard option: ``--path`` — destination of the new zipped workflow.
zip_path_opt = click.option(
    "--path",
    default=".",
    help=(
        "Path at which to create the new zipped workflow. If this is an existing "
        "directory, the zip file will be created within this directory. Otherwise, this "
        "path is assumed to be the full file path to the new zip file."
    ),
)
#: Standard option: ``--overwrite`` — overwrite an existing zip file.
zip_overwrite_opt = click.option(
    "--overwrite",
    is_flag=True,
    default=False,
    help="If set, any existing file will be overwritten.",
)
#: Standard option: ``--log`` — log file used while zipping.
zip_log_opt = click.option("--log", help="Path to a log file to use during zipping.")
#: Standard option: ``--include-execute`` — include the execute directory in the zip.
zip_include_execute_opt = click.option("--include-execute", is_flag=True)
#: Standard option: ``--include-rechunk-backups`` — include rechunk backups in the zip.
zip_include_rechunk_backups_opt = click.option("--include-rechunk-backups", is_flag=True)

#: Standard option: ``--path`` — destination of the new unzipped workflow.
unzip_path_opt = click.option(
    "--path",
    default=".",
    help=(
        "Path at which to create the new unzipped workflow. If this is an existing "
        "directory, the new workflow directory will be created within this directory. "
        "Otherwise, this path will represent the new workflow directory path."
    ),
)
#: Standard option: ``--log`` — log file used while unzipping.
unzip_log_opt = click.option("--log", help="Path to a log file to use during unzipping.")

#: Standard option: ``--backup/--no-backup`` — back up arrays before rechunking.
rechunk_backup_opt = click.option(
    "--backup/--no-backup",
    default=True,
    help=("First copy a backup of the array to a directory ending in `.bak`."),
)
#: Standard option: ``--chunk-size`` — target array items per chunk when rechunking.
rechunk_chunk_size_opt = click.option(
    "--chunk-size",
    type=click.INT,
    default=None,
    help=(
        "New chunk size (array items per chunk). If unset (as by default), the array "
        "will be rechunked to a single chunk array (i.e with a chunk size equal to the "
        "array's shape)."
    ),
)
#: Standard option: ``--status/--no-status`` — live status during rechunking.
rechunk_status_opt = click.option(
    "--status/--no-status",
    default=True,
    help="If True, display a live status to track rechunking progress.",
)
#: Standard option: ``--status/--no-status`` — live status during cancellation.
cancel_status_opt = click.option(
    "--status/--no-status",
    default=True,
    help="If True, display a live status to track cancel progress.",
)

#: Standard option: ``--max-js`` — upper bound on jobscripts displayed.
list_js_max_js_opt = click.option(
    "--max-js", type=click.INT, help="Display up to this jobscript only."
)
#: Standard option: ``--jobscripts`` — comma-separated jobscript indices to show.
list_js_jobscripts_opt = click.option(
    "--jobscripts", help="Comma-separated list of jobscript indices to show."
)
#: Standard option: ``--max-js`` — upper bound on jobscripts included.
list_task_js_max_js_opt = click.option(
    "--max-js", type=click.INT, help="Include jobscripts up to this jobscript only."
)
#: Standard option: ``--task-names`` — task-name sub-strings to show.
list_task_js_task_names_opt = click.option(
    "--task-names", help="Comma-separated list of task name sub-strings to show."
)
#: Standard option: ``--width`` — table width in characters.
list_js_width_opt = click.option(
    "--width", type=click.INT, help="Width in characters of the table to print."
)
#: Standard option: ``--array-idx`` — job array index whose standard stream to print.
jobscript_std_array_idx_opt = click.option(
    "--array-idx",
    type=click.INT,
    help=(
        "For array jobs only, the job array index whose standard stream is to be printed."
    ),
)
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def _add_doc_from_help(*args):
|
|
333
|
+
"""
|
|
334
|
+
Attach the ``help`` field of each of its arguments as its ``__doc__``.
|
|
335
|
+
Only necessary because the wrappers in Click don't do this for us.
|
|
336
|
+
|
|
337
|
+
:meta private:
|
|
338
|
+
"""
|
|
339
|
+
# Yes, this is ugly!
|
|
340
|
+
from types import SimpleNamespace
|
|
341
|
+
|
|
342
|
+
for opt in args:
|
|
343
|
+
ns = SimpleNamespace()
|
|
344
|
+
params = getattr(opt(ns), "__click_params__", [])
|
|
345
|
+
if params:
|
|
346
|
+
help = getattr(params[0], "help", "")
|
|
347
|
+
if help:
|
|
348
|
+
opt.__doc__ = f"Click option decorator: {help}"
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
# Give every standard option decorator a docstring derived from its help text,
# so the generated API documentation describes each one.
_add_doc_from_help(
    format_option,
    path_option,
    name_option,
    overwrite_option,
    store_option,
    ts_fmt_option,
    ts_name_fmt_option,
    variables_option,
    js_parallelism_option,
    wait_option,
    add_to_known_opt,
    print_idx_opt,
    tasks_opt,
    cancel_opt,
    submit_status_opt,
    make_status_opt,
    zip_path_opt,
    zip_overwrite_opt,
    zip_log_opt,
    zip_include_execute_opt,
    zip_include_rechunk_backups_opt,
    unzip_path_opt,
    unzip_log_opt,
    rechunk_backup_opt,
    rechunk_chunk_size_opt,
    rechunk_status_opt,
    cancel_status_opt,
    list_js_max_js_opt,
    list_js_jobscripts_opt,
    list_task_js_max_js_opt,
    list_task_js_task_names_opt,
    list_js_width_opt,
    jobscript_std_array_idx_opt,
)
|
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
"""Module that defines built-in callback functions for configuration item values."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
import os
|
|
5
|
+
import re
|
|
6
|
+
import fsspec # type: ignore
|
|
7
|
+
from typing import overload, TYPE_CHECKING
|
|
8
|
+
from hpcflow.sdk.core.errors import UnsupportedSchedulerError, UnsupportedShellError
|
|
9
|
+
from hpcflow.sdk.submission.shells import get_supported_shells
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from typing import Any, TypeVar
|
|
13
|
+
from .config import Config
|
|
14
|
+
from ..typing import PathLike
|
|
15
|
+
|
|
16
|
+
T = TypeVar("T")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def callback_vars(config: Config, value) -> str:
    """
    Callback that substitutes configuration variables.
    """
    # Match `<<name>>` for any known variable name.
    names_alternation = "|".join(config._variables)
    vars_regex = rf"\<\<({names_alternation})\>\>"

    def vars_repl(match_obj: re.Match[str]) -> str:
        return config._variables[match_obj[1]]

    return re.sub(pattern=vars_regex, repl=vars_repl, string=str(value))
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@overload
def callback_file_paths(config: Config, file_path: PathLike) -> PathLike: ...


@overload
def callback_file_paths(config: Config, file_path: list[PathLike]) -> list[PathLike]: ...


def callback_file_paths(config: Config, file_path: PathLike | list[PathLike]):
    """
    Callback that resolves file paths.
    """
    # A single path resolves directly; a list resolves element-wise.
    if not isinstance(file_path, list):
        return config._resolve_path(file_path)
    return [config._resolve_path(entry) for entry in file_path]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def callback_bool(config: Config, value: str | bool) -> bool:
    """
    Callback that coerces values to boolean.
    """
    # Genuine booleans are returned unchanged.
    if isinstance(value, bool):
        return value
    # Otherwise interpret "true"/"false" case-insensitively.
    lowered = value.lower()
    if lowered == "true":
        return True
    if lowered == "false":
        return False
    raise TypeError(f"Cannot cast {value!r} to a bool type.")
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@overload
def callback_lowercase(config: Config, value: list[str]) -> list[str]: ...


@overload
def callback_lowercase(config: Config, value: dict[str, T]) -> dict[str, T]: ...


@overload
def callback_lowercase(config: Config, value: str) -> str: ...


def callback_lowercase(
    config: Config, value: list[str] | dict[str, T] | str
) -> list[str] | dict[str, T] | str:
    """
    Callback that forces a string to lower case.
    """
    # Lists lower each element; dicts lower each key; strings lower directly.
    if isinstance(value, dict):
        return {key.lower(): val for key, val in value.items()}
    if isinstance(value, list):
        return [entry.lower() for entry in value]
    return value.lower()
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def exists_in_schedulers(config: Config, value: T) -> T:
    """
    Callback that tests that a value is a supported scheduler name.

    Returns the value unchanged if valid; raises `ValueError` otherwise.
    """
    if value in config.schedulers:
        return value
    raise ValueError(
        f"Cannot set default scheduler; {value!r} is not a supported scheduler "
        f"according to the config file, which lists these schedulers as available "
        f"on this machine: {config.schedulers!r}."
    )
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def callback_supported_schedulers(
    config: Config, schedulers: dict[str, Any]
) -> dict[str, Any]:
    """
    Callback that tests that all values are names of supported schedulers.
    """
    # validate against supported schedulers according to the OS - this won't validate
    # that a particular scheduler actually exists on this system:
    available = set(config._app.get_OS_supported_schedulers())
    for name in schedulers:
        if name not in available:
            raise UnsupportedSchedulerError(scheduler=name, available=available)
    return schedulers
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _hostname_in_invocation(config: Config) -> bool:
|
|
121
|
+
return "hostname" in config._file.get_invocation(config._config_key)["match"]
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def set_scheduler_invocation_match(config: Config, scheduler: str) -> None:
    """Invoked on set of `default_scheduler`.

    For clusters with "proper" schedulers (SGE, SLURM, etc.), login nodes are typically
    named using the word "login". So we can use this knowledge to set a default for the
    "hostname" invocation match key, if it is not manually set. However, it is preferable
    that on clusters the hostname match is explicitly set.

    """
    # NOTE(review): assumes `config.get(...)` returns a dict here — verify it cannot
    # return None for an unset scheduler key.
    scheduler_defaults = config.get(f"schedulers.{scheduler}").get("defaults", {})
    sched = config._app.get_scheduler(
        scheduler_name=scheduler,
        os_name=os.name,
        scheduler_args=scheduler_defaults,
    )
    # Only queued (batch) schedulers get a default hostname match, and only when the
    # user has not already provided one explicitly.
    if isinstance(sched, config._app.QueuedScheduler) and not _hostname_in_invocation(
        config
    ):
        config._file.update_invocation(
            config_key=config._config_key,
            match={"hostname": sched.DEFAULT_LOGIN_NODE_MATCH},
        )
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def callback_scheduler_set_up(
    config: Config, schedulers: dict[str, Any]
) -> dict[str, Any]:
    """Invoked on set of `schedulers`.

    Runs scheduler-specific config initialisation.
    """
    for name, info in schedulers.items():
        sched = config._app.get_scheduler(
            scheduler_name=name,
            os_name=os.name,
            scheduler_args=info.get("defaults", {}),
        )
        if not isinstance(sched, config._app.SGEPosix):
            continue

        # some `QueuedScheduler` classes have a `get_login_nodes` method which can be
        # used to populate the names of login nodes explicitly, if not already set:
        if not _hostname_in_invocation(config):
            config._file.update_invocation(
                config_key=config._config_key,
                match={"hostname": sched.get_login_nodes()},
            )
    return schedulers
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def callback_supported_shells(config: Config, shell_name: str) -> str:
    """
    Callback that checks whether a shell name is supported on this OS.

    Returns the shell name unchanged if valid; raises `UnsupportedShellError`
    otherwise.
    """
    valid = get_supported_shells(os.name)
    if shell_name in valid:
        return shell_name
    raise UnsupportedShellError(shell=shell_name, supported=valid)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def set_callback_file_paths(config: Config, value: PathLike | list[PathLike]) -> None:
    """Check the file(s) is/are accessible. This is only done on `config.set` (and not on
    `config.get` or `config._validate`) because it could be expensive in the case of remote
    files."""
    value = callback_file_paths(config, value)

    paths = [value] if not isinstance(value, list) else value
    for file_path in paths:
        if file_path is None:
            continue
        # Opening (and immediately closing) the file verifies it is reachable.
        with fsspec.open(file_path, mode="rt"):
            pass
        # TODO: also check something in it?
        print(f"Checked access to: {file_path}")
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def check_load_data_files(config: Config, value: Any) -> None:
    """Check data files (e.g., task schema files) can be loaded successfully. This is only
    done on `config.set` (and not on `config.get` or `config._validate`) because it could
    be expensive in the case of remote files."""
    # `value` is not used directly; presumably reloading the template components
    # forces the configured data files to be re-read (and thus validated) —
    # TODO confirm against `reload_template_components`.
    config._app.reload_template_components(warn=False)
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def callback_log_file_path(config: Config, value: str) -> PathLike | str:
    """
    Callback that resolves the log file path, if one is given.

    A whitespace-only value is normalised to the empty string (meaning: no
    log file); any other value is resolved via the config's path resolution.
    """
    # Added annotations and docstring for consistency with the other callbacks
    # in this module; behaviour is unchanged.
    value = value.strip()
    if value:
        return config._resolve_path(value)
    return value
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def callback_update_log_console_level(config: Config, value: str) -> None:
    """
    Callback that sets the console logging level to the given value.
    """
    app_log = config._app.log
    app_log.update_console_level(new_level=value)
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def callback_unset_log_console_level(config: Config) -> None:
    """Reset the console log handler to its default level."""
    app_log = config._app.log
    app_log.update_console_level()
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def callback_update_log_file_level(config: Config, value: str) -> None:
    """Callback that sets the log file handler's level to the given value."""
    app_log = config._app.log
    app_log.update_file_level(new_level=value)
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def callback_update_log_file_path(config: Config, value: str) -> None:
    """
    Callback to update the log file path, or remove the file handler if no path
    is specified.
    """
    app_log = config._app.log
    # Always drop the existing handler; re-add only when a new path is given.
    app_log.remove_file_handler()
    if value:
        app_log.add_file_logger(path=value, level=config.get("log_file_level"))
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def callback_unset_log_file_level(config: Config) -> None:
    """Reset the log file handler to its default level."""
    app_log = config._app.log
    app_log.update_file_level()
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def callback_unset_log_file_path(config: Config) -> None:
    """Remove the log file handler entirely."""
    app_log = config._app.log
    app_log.remove_file_handler()
|