hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -462
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -458
- hpcflow/archive/archive.py +0 -308
- hpcflow/archive/cloud/cloud.py +0 -47
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -432
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -232
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2549
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -323
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -167
- hpcflow/variables.py +0 -544
- hpcflow-0.1.9.dist-info/METADATA +0 -168
- hpcflow-0.1.9.dist-info/RECORD +0 -45
- hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.9.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/__init__.py
CHANGED
hpcflow/__pyinstaller/hook-hpcflow.py
ADDED
@@ -0,0 +1,40 @@
+from PyInstaller.utils.hooks import collect_data_files
+
+from hpcflow.sdk import sdk_classes
+
+
+# most of the modules in `sdk_classes` are imported on-demand via the app object:
+hiddenimports = [
+    *sdk_classes.values(),
+    "hpcflow.sdk.data",
+    "hpcflow.data.demo_data_manifest",
+    "hpcflow.data.scripts",
+    "hpcflow.data.jinja_templates",
+    "hpcflow.data.template_components",
+    "hpcflow.data.workflows",
+    "hpcflow.tests.data",
+    "hpcflow.sdk.core.test_utils",
+    "hpcflow.sdk.utils.patches",
+    "click.testing",
+    "requests",  # for GitHub fsspec file system
+    "fsspec.implementations.github",  # for GitHub fsspec file system
+]
+
+datas = (
+    collect_data_files("hpcflow.sdk.data")
+    + collect_data_files("hpcflow.data.demo_data_manifest")
+    + collect_data_files(
+        "hpcflow.data.scripts", include_py_files=True, excludes=("**/__pycache__",)
+    )
+    + collect_data_files(
+        "hpcflow.data.jinja_templates",
+        include_py_files=True,
+        excludes=("**/__pycache__",),
+    )
+    + collect_data_files("hpcflow.data.template_components")
+    + collect_data_files("hpcflow.data.workflows")
+    + collect_data_files(
+        "hpcflow.tests", include_py_files=True, excludes=("**/__pycache__",)
+    )
+    + collect_data_files("hpcflow.tests.data")
+)
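For PyInstaller to pick up the hook above when hpcflow is installed as a dependency, the accompanying five-line hpcflow/__pyinstaller/__init__.py (listed in the file summary but not shown in this diff) conventionally returns the hook directory through PyInstaller's "pyinstaller40" / "hook-dirs" entry point. A minimal sketch of that convention, as an assumption rather than the file's verbatim contents:

# hpcflow/__pyinstaller/__init__.py (hypothetical sketch)
# PyInstaller >= 4.0 calls the function registered under the "pyinstaller40"
# entry-point group's "hook-dirs" key and scans the returned directories for
# hook-*.py files such as hook-hpcflow.py.
import os


def get_hook_dirs():
    return [os.path.dirname(__file__)]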
hpcflow/_version.py
CHANGED
@@ -1 +1 @@
-__version__ =
+__version__ = "0.2.0a271"
hpcflow/app.py
ADDED
@@ -0,0 +1,43 @@
+from __future__ import annotations
+from hpcflow import __version__, _app_name
+from hpcflow.sdk import app as sdk_app
+from hpcflow.sdk.config import ConfigOptions
+
+
+# provide access to app attributes:
+__getattr__ = sdk_app.get_app_attribute
+
+# ensure docs/help can see dynamically loaded attributes:
+__all__ = sdk_app.get_app_module_all()
+__dir__ = sdk_app.get_app_module_dir()
+
+# set app-level config options:
+config_options = ConfigOptions(
+    directory_env_var="HPCFLOW_CONFIG_DIR",
+    default_directory="~/.hpcflow",
+)
+
+# load built in template components (in this case, for demonstration purposes):
+template_components = sdk_app.BaseApp.load_builtin_template_component_data(
+    "hpcflow.data.template_components"
+)
+
+# initialise the App object:
+app: sdk_app.BaseApp = sdk_app.BaseApp(
+    name=_app_name,
+    version=__version__,
+    module=__name__,
+    docs_import_conv="hf",
+    description="Computational workflow management",
+    gh_org="hpcflow",
+    gh_repo="hpcflow",
+    config_options=config_options,
+    template_components=template_components,
+    scripts_dir="data.scripts",  # relative to root package
+    jinja_templates_dir="data.jinja_templates",  # relative to root package
+    programs_dir="data.programs",  # relative to root package
+    workflows_dir="data.workflows",  # relative to root package
+    demo_data_dir="hpcflow.data.demo_data",
+    demo_data_manifest_dir="hpcflow.data.demo_data_manifest",
+    docs_url="https://hpcflow.github.io/docs/stable",
+)  #: |app|
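Because the module-level __getattr__ delegates to sdk_app.get_app_attribute, attributes created on the BaseApp instance can be imported directly from hpcflow.app; the rewritten hpcflow/cli.py below relies on exactly this to obtain the click entry point. A minimal sketch of that usage, assuming only what the diff below shows (that a `cli` attribute resolves on the app object):

# lazy attribute access through hpcflow.app's module-level __getattr__
from hpcflow.app import cli  # resolved via sdk_app.get_app_attribute at import time

if __name__ == "__main__":
    cli()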
hpcflow/cli.py
CHANGED
@@ -1,464 +1,4 @@
-
+from hpcflow.app import cli

-
-
-"""
-import socket
-import os
-from pathlib import Path
-from pprint import pprint
-from datetime import datetime
-from random import randint
-
-import click
-
-from hpcflow import __version__
-from hpcflow import api
-
-
-def get_process_stamp():
-    return '{} {} {}'.format(
-        datetime.now(),
-        socket.gethostname(),
-        os.getpid(),
-    )
-
-
-def validate_cloud_provider(ctx, param, value):
-
-    good_providers = ['dropbox']
-    if value not in good_providers:
-        msg = ('`provider` must be one of: {}'.format(good_providers))
-        raise click.BadParameter(msg)
-
-    return value
-
-
-def validate_task_ranges(ctx, param, value):
-    """Validate the task range.
-
-    Parameters
-    ----------
-    ctx
-    param
-    value : str
-        Stringified comma-separated list, where each element indicates the
-        tasks to submit for that channel of the Workflow. List elements can be
-        one of:
-            all
-                submit all tasks in the given channel.
-            n[-m[:s]]
-                submit a range of tasks from task `n` to task `m`
-                (inclusively), with an optional step size of `s`.
-            <empty>
-                submit no tasks from the given channel.
-
-    Returns
-    -------
-    task_ranges : list of tuple
-        (start, stop, step)
-
-    """
-
-    if value is None:
-        return
-
-    if ',' in value:
-        value = value.split(',')
-    else:
-        value = [value]
-
-    task_ranges = []
-    for i in value:
-
-        if i.strip() == 'all':
-            task_ranges.append([1, -1, 1])
-            continue
-
-        elif i.strip() == '':
-            task_ranges.append([])
-            continue
-
-        task_step = 1
-
-        msg = ('Could not understand task range. It should be specified in '
-               'the format: `n[-m[:s]]` where `n` is the starting task ID, `m` is '
-               ' the ending task ID, and `s` is the task step size.')
-
-        if '-' in i:
-            # Task range
-            task_start, task_stop = i.split('-')
-
-            if ':' in task_stop:
-                # With step size:
-                task_stop, task_step = task_stop.split(':')
-
-                try:
-                    task_step = int(task_step)
-                except ValueError:
-                    raise click.BadParameter(msg)
-
-            try:
-                task_start = int(task_start)
-                task_stop = int(task_stop)
-            except ValueError:
-                raise click.BadParameter(msg)
-
-        else:
-            # Single task
-            try:
-                task = int(i)
-                task_start = task
-                task_stop = task
-            except ValueError:
-                raise click.BadParameter(msg)
-
-        if task_start > task_stop:
-            msg = ('Task starting ID must be smaller than or equal to '
-                   'task ending ID.')
-            raise click.BadParameter(msg)
-
-        task_range = [task_start, task_stop, task_step]
-        task_ranges.append(task_range)
-
-    task_ranges = task_ranges[0]  # For now - no channels.
-    return task_ranges
-
-
-@click.group()
-@click.version_option(version=__version__)
-def cli():
-    pass
-
-
-@cli.command()
-@click.option('--yes', '-y', is_flag=True)
-@click.option('--config-dir', type=click.Path(exists=True))
-def clean(directory=None, yes=True, config_dir=None):
-    """Clean the directory of all content generated by `hpcflow`."""
-    msg = ('Do you want to remove all `hpc-flow`-generated files '
-           'from {}?')
-    if directory:
-        msg = msg.format(directory)
-    else:
-        msg = msg.format('the current directory')
-    if yes or click.confirm(msg):
-        api.clean(dir_path=directory, config_dir=config_dir)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--json-file')
-@click.option('--json')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('profiles', nargs=-1, type=click.Path(exists=True))
-def make(directory=None, profiles=None, json_file=None, json=None, config_dir=None):
-    """Generate a new Workflow."""
-    print('hpcflow.cli.make', flush=True)
-
-    workflow_id = api.make_workflow(
-        dir_path=directory,
-        profile_list=profiles,
-        json_file=json_file,
-        json_str=json,
-        config_dir=config_dir,
-        clean=False,
-    )
-    print('Generated new Workflow with ID {}'.format(workflow_id))
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('cmd_group_sub_id', type=click.INT)
-@click.argument('task_idx', type=click.INT)
-@click.argument('iter_idx', type=click.INT)
-def write_runtime_files(cmd_group_sub_id, task_idx, iter_idx, directory=None,
-                        config_dir=None):
-    print('hpcflow.cli.write_runtime_files', flush=True)
-    api.write_runtime_files(
-        cmd_group_sub_id,
-        task_idx,
-        iter_idx,
-        dir_path=directory,
-        config_dir=config_dir,
-    )
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('cmd_group_sub_id', type=click.INT)
-@click.argument('task_idx', type=click.INT)
-@click.argument('iter_idx', type=click.INT)
-def set_task_start(cmd_group_sub_id, task_idx, iter_idx, directory=None, config_dir=None):
-    print('hpcflow.cli.set_task_start', flush=True)
-    api.set_task_start(cmd_group_sub_id, task_idx, iter_idx, directory, config_dir)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('cmd_group_sub_id', type=click.INT)
-@click.argument('task_idx', type=click.INT)
-@click.argument('iter_idx', type=click.INT)
-def set_task_end(cmd_group_sub_id, task_idx, iter_idx, directory=None, config_dir=None):
-    print('hpcflow.cli.set_task_end', flush=True)
-    api.set_task_end(cmd_group_sub_id, task_idx, iter_idx, directory, config_dir)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('cmd_group_sub_id', type=click.INT)
-@click.argument('task_idx', type=click.INT)
-@click.argument('iter_idx', type=click.INT)
-def archive(cmd_group_sub_id, task_idx, iter_idx, directory=None, config_dir=None):
-    print('hpcflow.cli.archive', flush=True)
-    api.archive(
-        cmd_group_sub_id,
-        task_idx,
-        iter_idx,
-        dir_path=directory,
-        config_dir=config_dir,
-    )
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('cmd_group_sub_id', type=click.INT)
-@click.argument('task_idx', type=click.INT)
-@click.argument('iter_idx', type=click.INT)
-def get_scheduler_stats(cmd_group_sub_id, task_idx, iter_idx, directory=None,
-                        config_dir=None):
-    print('hpcflow.cli.get_scheduler_stats', flush=True)
-    api.get_scheduler_stats(
-        cmd_group_sub_id,
-        task_idx,
-        iter_idx,
-        dir_path=directory,
-        config_dir=config_dir,
-    )
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--workflow-id', '-w', type=click.INT)
-@click.option('--config-dir', type=click.Path(exists=True))
-def root_archive(workflow_id, directory=None, config_dir=None):
-    print('hpcflow.cli.root_archive', flush=True)
-    api.root_archive(
-        workflow_id,
-        dir_path=directory,
-        config_dir=config_dir,
-    )
-
-
-@cli.command()
-def stat():
-    """Show the status of running tasks and the number completed tasks."""
-    print('hpcflow.cli.stat')
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--workflow-id', '-w', type=click.INT)
-@click.option('--config-dir', type=click.Path(exists=True))
-def show_stats(directory=None, workflow_id=None, config_dir=None):
-    'Show task statistics, formatted as a table.'
-    stats_fmt = api.get_formatted_stats(directory, workflow_id, config_dir=config_dir)
-    print(stats_fmt)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--workflow-id', '-w', type=click.INT)
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.argument('save_path', type=click.Path(exists=False, dir_okay=False))
-def save_stats(save_path, directory=None, workflow_id=None, config_dir=None):
-    'Save task statistics as a JSON file.'
-    api.save_stats(save_path, directory, workflow_id, config_dir=config_dir)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--workflow-id', '-w', type=click.INT)
-@click.option('--config-dir', type=click.Path(exists=True))
-def kill(directory=None, workflow_id=None, config_dir=None):
-    api.kill(directory, workflow_id, config_dir=config_dir)
-
-
-@cli.command()
-@click.option('--directory', '-d')
-@click.option('--workflow-id', '-w', type=click.INT)
-@click.option('--config-dir', type=click.Path(exists=True))
-@click.option('--json-file')
-@click.option('--json')
-@click.option('--task-ranges', '-t',
-              help=('Task ranges are specified as a comma-separated list whose'
-                    ' elements are one of: "n[-m[:s]]", "all" or "" (empty)'),
-              callback=validate_task_ranges)
-@click.argument('profiles', nargs=-1, type=click.Path(exists=True))
-def submit(directory=None, workflow_id=None, task_ranges=None, profiles=None,
-           json_file=None, json=None, config_dir=None):
-    """Submit (and optionally generate) a Workflow."""
-
-    print('hpcflow.cli.submit', flush=True)
-
-    existing_ids = api.get_workflow_ids(directory, config_dir)
-    submit_args = {
-        'dir_path': directory,
-        'task_range': task_ranges,
-        'config_dir': config_dir,
-    }
-
-    if workflow_id:
-        # Submit an existing Workflow.
-
-        if not existing_ids:
-            msg = 'There are no existing Workflows in the directory {}'
-            raise ValueError(msg.format(directory))
-
-        submit_args['workflow_id'] = workflow_id
-
-        if workflow_id not in existing_ids:
-            msg = ('The Workflow ID "{}" does not match an existing Workflow '
-                   'in the directory {}. Existing Workflow IDs are {}')
-            raise ValueError(msg.format(workflow_id, directory, existing_ids))
-
-        submission_id = api.submit_workflow(**submit_args)
-
-    else:
-        # First generate a Workflow, and then submit it.
-
-        make_workflow = True
-        if existing_ids:
-            # Check user did not want to submit existing Workflow:
-            msg = 'Previous workflows exist with IDs: {}. Add new workflow?'
-            make_workflow = click.confirm(msg.format(existing_ids))
-
-            # TODO: if `make_workflow=False`, show existing IDs and offer to
-            # submit one?
-
-        if make_workflow:
-            workflow_id = api.make_workflow(
-                dir_path=directory,
-                profile_list=profiles,
-                json_file=json_file,
-                json_str=json,
-                config_dir=config_dir,
-            )
-            print('Generated new Workflow with ID {}'.format(workflow_id))
-
-            submit_args['workflow_id'] = workflow_id
-            submission_id = api.submit_workflow(**submit_args)
-
-        else:
-            print('Exiting.')
-            return
-
-    print('Submitted Workflow (ID {}) with submission '
-          'ID {}'.format(workflow_id, submission_id))
-
-
-@cli.command()
-@click.option('--name', '-n', required=True)
-@click.option('--value', '-v', required=True)
-@click.option('--config-dir', type=click.Path(exists=True))
-def update_config(name, value, config_dir=None):
-    api.update_config(name, value, config_dir=config_dir)
-
-
-@cli.command()
-@click.option('--provider', '-p', required=True)
-@click.option('--config-dir', type=click.Path(exists=True))
-def cloud_connect(provider, config_dir=None):
-    api.cloud_connect(provider, config_dir=config_dir)
-
-
-@cli.group()
-def dummy():
-    'Dummy commands for testing/documentation.'
-
-
-@dummy.command('makeSomething')
-@click.option('--name', default='infile')
-@click.option('--num', type=click.INT, default=2)
-@click.argument('extension', nargs=-1, default=None)
-def dummy_make_something(name, num, extension):
-
-    if not extension:
-        extension = ['.txt' for i in range(num)]
-    elif len(extension) != num:
-        raise click.BadParameter(
-            'Number of specified extensions argument must match `num` parameter.')
-
-    for i in range(num):
-        out_path = Path('{}_{}{}'.format(name, i + 1, extension[i]))
-        with out_path.open('w') as handle:
-            handle.write('{}\n'.format(randint(0, 1e6)))
-            handle.write('{} Generated by `makeSomething --name {} --num {} {}`.'.format(
-                get_process_stamp(),
-                name,
-                num,
-                ' '.join(['"{}"'.format(j) for j in extension])
-            ))
-
-
-@dummy.command('doSomething')
-@click.option('--infile1', '-i1', type=click.Path(exists=True), required=True)
-@click.option('--infile2', '-i2', type=click.Path(exists=True), required=True)
-@click.option('--value', '-v')
-@click.option('--out', '-o')
-def dummy_do_something(infile1, infile2, value=None, out=None):
-
-    with Path(infile1).open('r') as handle:
-        file_id_1 = int(handle.readline().strip())
-    with Path(infile2).open('r') as handle:
-        file_id_2 = int(handle.readline().strip())
-
-    if out is None:
-        out = 'outfile.txt'
-    out_path = Path(out)
-    with out_path.open('a') as handle:
-        handle.write('{}\n'.format(randint(0, 1e6)))
-        handle.write('{} Generated by `doSomething --infile1 {} --infile2 {}`.\n'.format(
-            get_process_stamp(), infile1, infile2))
-        if value:
-            handle.write('{} Value: {}\n'.format(get_process_stamp(), value))
-        handle.write('{} Original file ID: {}: {}\n'.format(
-            get_process_stamp(), infile1, file_id_1))
-        handle.write('{} Original file ID: {}: {}\n'.format(
-            get_process_stamp(), infile2, file_id_2))
-
-
-@dummy.command('splitSomething')
-@click.argument('infile', type=click.Path(exists=True))
-@click.option('--num', '-n', type=click.INT, default=2)
-def dummy_split_something(infile, num):
-
-    with Path(infile).open('r') as handle:
-        file_id = int(handle.readline().strip())
-    for i in range(num):
-        out_path = Path('outfile_{}.txt'.format(i + 1))
-        with out_path.open('w') as handle:
-            handle.write('{}\n'.format(randint(0, 1e6)))
-            handle.write('{} Generated by `splitSomething {}`.\n'.format(
-                get_process_stamp(), infile))
-            handle.write('{} Original file ID: {}: {}\n'.format(
-                get_process_stamp(), infile, file_id))
-
-
-@dummy.command('processSomething')
-@click.argument('infile', type=click.Path(exists=True))
-def dummy_process_something(infile):
-
-    with Path(infile).open('a') as handle:
-        handle.write('\n{} Modified by `processSomething {}`.\n'.format(
-            get_process_stamp(), infile
-        ))
-
-
-if __name__ == '__main__':
+if __name__ == "__main__":
     cli()
hpcflow/data/programs/hello_world/README.md
ADDED
@@ -0,0 +1 @@
+This directory contains binary programs that are used to test the `Action.program` attribute. These binary programs are generated by the `generate_test_programs.yml` GitHub actions workflow (from `hello_world.c`), and are organised into sub-directories named by the `resources.platform` (e.g. "win", "linux", "macos") attribute.
hpcflow/data/programs/hello_world/hello_world.c
ADDED
@@ -0,0 +1,87 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "cJSON.h"
+
+void hello_world()
+{
+    printf("hello, world\n");
+}
+
+void hello_world_ins_outs(const char *inputs_path, const char *outputs_path)
+{
+    printf("hello, world\n");
+
+    // Read input JSON
+    FILE *fp = fopen(inputs_path, "r");
+    if (!fp)
+    {
+        perror("Failed to open input file");
+        exit(1);
+    }
+
+    fseek(fp, 0, SEEK_END);
+    long len = ftell(fp);
+    fseek(fp, 0, SEEK_SET);
+
+    char *data = malloc(len + 1);
+    fread(data, 1, len, fp);
+    data[len] = '\0';
+    fclose(fp);
+
+    cJSON *json = cJSON_Parse(data);
+    if (!json)
+    {
+        fprintf(stderr, "Error parsing JSON input\n");
+        free(data);
+        exit(1);
+    }
+
+    double p1 = cJSON_GetObjectItem(json, "p1")->valuedouble;
+    double p2 = cJSON_GetObjectItem(json, "p2")->valuedouble;
+    double p3 = cJSON_GetObjectItem(json, "p3")->valuedouble;
+    double p4 = p1 + p2 + p3;
+
+    cJSON_Delete(json);
+    free(data);
+
+    // Create output JSON
+    cJSON *output_json = cJSON_CreateObject();
+    cJSON_AddNumberToObject(output_json, "p4", p4);
+
+    char *out_string = cJSON_Print(output_json);
+
+    fp = fopen(outputs_path, "w");
+    if (!fp)
+    {
+        perror("Failed to open output file");
+        cJSON_Delete(output_json);
+        free(out_string);
+        exit(1);
+    }
+
+    fprintf(fp, "%s\n", out_string);
+    fclose(fp);
+
+    cJSON_Delete(output_json);
+    free(out_string);
+}
+
+int main(int argc, char *argv[])
+{
+    if (argc == 1)
+    {
+        hello_world();
+    }
+    else if (argc == 3)
+    {
+        hello_world_ins_outs(argv[1], argv[2]);
+    }
+    else
+    {
+        fprintf(stderr, "Usage: %s [input.json output.json]\n", argv[0]);
+        return 1;
+    }
+
+    return 0;
+}

hpcflow/data/programs/hello_world/linux/hello_world
ADDED
Binary file

hpcflow/data/programs/hello_world/macos/hello_world
ADDED
Binary file

hpcflow/data/programs/hello_world/win/hello_world.exe
ADDED
Binary file
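The three "Binary file" entries above are the per-platform builds of hello_world.c. A hypothetical Python driver (the binary path and working directory are assumptions) that exercises the JSON contract shown in the C source, where inputs p1, p2 and p3 are summed into output p4:

# run the compiled test program against a small JSON input and read back p4
import json
import subprocess
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    in_path = Path(tmp) / "inputs.json"
    out_path = Path(tmp) / "outputs.json"
    in_path.write_text(json.dumps({"p1": 1.0, "p2": 2.0, "p3": 3.0}))
    # assumed path to one of the binaries shipped in this wheel:
    exe = "hpcflow/data/programs/hello_world/linux/hello_world"
    subprocess.run([exe, str(in_path), str(out_path)], check=True)
    print(json.loads(out_path.read_text())["p4"])  # -> 6.0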