wrfrun 0.1.7__py3-none-any.whl → 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wrfrun/cli.py +128 -0
- wrfrun/core/__init__.py +33 -0
- wrfrun/core/base.py +246 -75
- wrfrun/core/config.py +286 -236
- wrfrun/core/error.py +47 -17
- wrfrun/core/replay.py +65 -32
- wrfrun/core/server.py +139 -79
- wrfrun/data.py +10 -5
- wrfrun/extension/__init__.py +28 -0
- wrfrun/extension/goos_sst/__init__.py +67 -0
- wrfrun/extension/goos_sst/core.py +111 -0
- wrfrun/extension/goos_sst/res/Vtable.ERA_GOOS_SST +7 -0
- wrfrun/extension/goos_sst/res/__init__.py +26 -0
- wrfrun/extension/goos_sst/utils.py +97 -0
- wrfrun/extension/littler/__init__.py +57 -1
- wrfrun/extension/littler/{utils.py → core.py} +326 -40
- wrfrun/extension/utils.py +22 -21
- wrfrun/model/__init__.py +24 -1
- wrfrun/model/plot.py +253 -35
- wrfrun/model/utils.py +17 -8
- wrfrun/model/wrf/__init__.py +41 -0
- wrfrun/model/wrf/core.py +218 -102
- wrfrun/model/wrf/exec_wrap.py +49 -35
- wrfrun/model/wrf/namelist.py +82 -11
- wrfrun/model/wrf/scheme.py +85 -1
- wrfrun/model/wrf/{_metgrid.py → utils.py} +36 -2
- wrfrun/model/wrf/vtable.py +2 -1
- wrfrun/plot/wps.py +66 -58
- wrfrun/res/__init__.py +8 -5
- wrfrun/res/config/config.template.toml +50 -0
- wrfrun/res/{config.toml.template → config/wrf.template.toml} +10 -47
- wrfrun/res/run.template.sh +10 -0
- wrfrun/res/scheduler/lsf.template +5 -0
- wrfrun/res/{job_scheduler → scheduler}/pbs.template +1 -1
- wrfrun/res/{job_scheduler → scheduler}/slurm.template +2 -1
- wrfrun/run.py +19 -23
- wrfrun/scheduler/__init__.py +35 -0
- wrfrun/scheduler/env.py +44 -0
- wrfrun/scheduler/lsf.py +47 -0
- wrfrun/scheduler/pbs.py +48 -0
- wrfrun/scheduler/script.py +70 -0
- wrfrun/scheduler/slurm.py +48 -0
- wrfrun/scheduler/utils.py +14 -0
- wrfrun/utils.py +8 -3
- wrfrun/workspace/__init__.py +38 -0
- wrfrun/workspace/core.py +92 -0
- wrfrun/workspace/wrf.py +121 -0
- {wrfrun-0.1.7.dist-info → wrfrun-0.1.9.dist-info}/METADATA +4 -3
- wrfrun-0.1.9.dist-info/RECORD +62 -0
- wrfrun-0.1.9.dist-info/entry_points.txt +3 -0
- wrfrun/model/wrf/_ndown.py +0 -39
- wrfrun/pbs.py +0 -86
- wrfrun/res/run.sh.template +0 -16
- wrfrun/workspace.py +0 -88
- wrfrun-0.1.7.dist-info/RECORD +0 -46
- {wrfrun-0.1.7.dist-info → wrfrun-0.1.9.dist-info}/WHEEL +0 -0
|
@@ -1,46 +1,5 @@
|
|
|
1
|
-
# Config
|
|
2
|
-
#
|
|
3
|
-
input_data_path = ""
|
|
4
|
-
|
|
5
|
-
# Path of the directory to store all outputs.
|
|
6
|
-
output_path = "./outputs"
|
|
7
|
-
log_path = "./logs"
|
|
8
|
-
|
|
9
|
-
# wrfrun can launch a socket server during NWP execution to report simulation progress.
|
|
10
|
-
# To enable the server, you need to configure the IP address and port on which it will listen to.
|
|
11
|
-
server_host = "localhost"
|
|
12
|
-
server_port = 54321
|
|
13
|
-
|
|
14
|
-
# How many cores you will use.
|
|
15
|
-
# Note that if you use a job scheduler (like PBS), this value means the number of cores each node you use.
|
|
16
|
-
core_num = 36
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
[job_scheduler]
|
|
20
|
-
# Job scheduler settings.
|
|
21
|
-
# How many nodes you will use.
|
|
22
|
-
node_num = 1
|
|
23
|
-
|
|
24
|
-
# Custom environment settings
|
|
25
|
-
env_settings = {}
|
|
26
|
-
|
|
27
|
-
# Path of the python interpreter that will be used to run wrfrun.
|
|
28
|
-
# You can also give its name only.
|
|
29
|
-
python_interpreter = "/usr/bin/python3" # or just "python3"
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
[model]
|
|
33
|
-
# Model debug level
|
|
34
|
-
debug_level = 100
|
|
35
|
-
|
|
36
|
-
# ################################################### Only settings above is necessary ###########################################
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
# ####################################### You can give more settings about the NWP you will use ##################################
|
|
40
|
-
|
|
41
|
-
[model.wrf]
|
|
42
|
-
# Config for WRF
|
|
43
|
-
# WRF model path
|
|
1
|
+
# Config for WRF model.
|
|
2
|
+
# WRF model path.
|
|
44
3
|
wps_path = '/path/to/your/WPS/folder'
|
|
45
4
|
wrf_path = '/path/to/your/WRF/folder'
|
|
46
5
|
# WRFDA is optional.
|
|
@@ -59,8 +18,10 @@ user_wrfda_namelist = ''
|
|
|
59
18
|
# Whether to make a restart run.
|
|
60
19
|
restart_mode = false
|
|
61
20
|
|
|
21
|
+
# debug level for WRF model
|
|
22
|
+
debug_level = 100
|
|
62
23
|
|
|
63
|
-
[
|
|
24
|
+
[time]
|
|
64
25
|
# Advance time config for WRF
|
|
65
26
|
# Set the start and end date. It will be used for all domains.
|
|
66
27
|
# You can also provide all the dates as a list, with each date for the corresponding domain.
|
|
@@ -87,7 +48,7 @@ parent_time_step_ratio = [1, 3, 4]
|
|
|
87
48
|
restart_interval = -1
|
|
88
49
|
|
|
89
50
|
|
|
90
|
-
[
|
|
51
|
+
[domain]
|
|
91
52
|
# Advance domain config for WRF.
|
|
92
53
|
# Set domain number.
|
|
93
54
|
domain_num = 3
|
|
@@ -109,7 +70,9 @@ dx = 9000
|
|
|
109
70
|
dy = 9000
|
|
110
71
|
|
|
111
72
|
# Projection.
|
|
112
|
-
map_proj =
|
|
73
|
+
map_proj = 'lambert'
|
|
74
|
+
truelat1 = 34.0
|
|
75
|
+
truelat2 = 40.0
|
|
113
76
|
|
|
114
77
|
# Central point of the first area.
|
|
115
78
|
ref_lat = 37.0
|
|
@@ -117,7 +80,7 @@ ref_lon = 120.5
|
|
|
117
80
|
stand_lon = 120.5
|
|
118
81
|
|
|
119
82
|
|
|
120
|
-
[
|
|
83
|
+
[scheme]
|
|
121
84
|
# Advance physics scheme config for WRF.
|
|
122
85
|
# To look up the nickname for all physics schemes, please see: https://wrfrun.syize.cn.
|
|
123
86
|
# Option contains many other settings related to the scheme.
|
|
@@ -3,4 +3,5 @@
|
|
|
3
3
|
#SBATCH -n{CORE_NUM} %core num
|
|
4
4
|
#SBATCH --time=9999:00:00 %maximum run time
|
|
5
5
|
#SBATCH --output={STDOUT_LOG_PATH} %stdout output log path
|
|
6
|
-
#SBATCH --error={STDERR_LOG_PATH} %error output log path
|
|
6
|
+
#SBATCH --error={STDERR_LOG_PATH} %error output log path
|
|
7
|
+
#SBATCH -p {QUEUE_NAME}
|
wrfrun/run.py
CHANGED
|
@@ -4,15 +4,15 @@
|
|
|
4
4
|
|
|
5
5
|
import sys
|
|
6
6
|
import threading
|
|
7
|
-
from os.path import abspath, dirname
|
|
7
|
+
from os.path import abspath, dirname
|
|
8
8
|
from typing import Optional, Tuple, Union
|
|
9
9
|
|
|
10
10
|
from .core import ExecConfigRecorder, WRFRUNConfig, WRFRunBasicError, WRFRunServer, WRFRunServerHandler, replay_config_generator, stop_server
|
|
11
11
|
from .data import prepare_wps_input_data
|
|
12
|
-
from .model import clear_model_logs,
|
|
13
|
-
from .
|
|
12
|
+
from .model import clear_model_logs, generate_domain_area
|
|
13
|
+
from .scheduler import in_job_scheduler, prepare_scheduler_script
|
|
14
14
|
from .utils import call_subprocess, logger, logger_add_file_handler
|
|
15
|
-
from .workspace import prepare_workspace
|
|
15
|
+
from .workspace import prepare_workspace, check_workspace
|
|
16
16
|
|
|
17
17
|
|
|
18
18
|
def confirm_model_area():
|
|
@@ -20,9 +20,9 @@ def confirm_model_area():
|
|
|
20
20
|
Ask user to check domain area.
|
|
21
21
|
|
|
22
22
|
"""
|
|
23
|
-
|
|
23
|
+
generate_domain_area()
|
|
24
24
|
|
|
25
|
-
if not
|
|
25
|
+
if not in_job_scheduler():
|
|
26
26
|
# ask user
|
|
27
27
|
logger.warning(f"Check the domain image, is it right?")
|
|
28
28
|
answer = input("Is it right? [y/N]: ")
|
|
@@ -41,14 +41,14 @@ class WRFRun:
|
|
|
41
41
|
_instance = None
|
|
42
42
|
_initialized = False
|
|
43
43
|
|
|
44
|
-
def __init__(self, config_file: str, init_workspace=True, start_server=False,
|
|
44
|
+
def __init__(self, config_file: str, init_workspace=True, start_server=False, submit_job=False, prepare_wps_data=False, wps_data_area: Optional[Tuple[int, int, int, int]] = None):
|
|
45
45
|
"""
|
|
46
46
|
WRFRun, a context class to achieve some goals before and after running WRF, like save a copy of config file, start and close WRFRunServer.
|
|
47
47
|
|
|
48
48
|
:param config_file: ``wrfrun`` config file's path.
|
|
49
49
|
:param init_workspace: If True, clean old files in workspace and re-create it.
|
|
50
50
|
:param start_server: Whether to start WRFRunServer, defaults to False.
|
|
51
|
-
:param
|
|
51
|
+
:param submit_job: Whether to submit this task to the job scheduler, defaults to False.
|
|
52
52
|
:param prepare_wps_data: If True, download input data for WPS first.
|
|
53
53
|
:param wps_data_area: If ``prepare_wps_data==True``, you need to give the area range of input data so download function can download data from ERA5.
|
|
54
54
|
:return:
|
|
@@ -63,7 +63,7 @@ class WRFRun:
|
|
|
63
63
|
self._ip = ""
|
|
64
64
|
self._port = -1
|
|
65
65
|
|
|
66
|
-
self.
|
|
66
|
+
self._submit_job = submit_job
|
|
67
67
|
self._init_workspace = init_workspace
|
|
68
68
|
self._prepare_wps_data = prepare_wps_data
|
|
69
69
|
self._wps_data_area = wps_data_area
|
|
@@ -89,31 +89,27 @@ class WRFRun:
|
|
|
89
89
|
|
|
90
90
|
def __enter__(self):
|
|
91
91
|
# check workspace
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
if not exists(_path) and not self._init_workspace:
|
|
96
|
-
logger.info(f"Force re-create workspace because it is broken.")
|
|
97
|
-
self._init_workspace = True
|
|
98
|
-
break
|
|
92
|
+
if not check_workspace():
|
|
93
|
+
logger.info(f"Force re-create workspace because it is broken.")
|
|
94
|
+
self._init_workspace = True
|
|
99
95
|
|
|
100
96
|
# here is the condition we need to initialize workspace:
|
|
101
|
-
# 1.
|
|
102
|
-
# 2.
|
|
103
|
-
if self.
|
|
97
|
+
# 1. submit_job = True and init_workspace = True, do prepare_workspace before submitting the task to job scheduler.
|
|
98
|
+
# 2. submit_job = False and init_workspace = True, do prepare_workspace.
|
|
99
|
+
if self._submit_job and not in_job_scheduler():
|
|
104
100
|
if self._init_workspace:
|
|
105
101
|
prepare_workspace()
|
|
106
102
|
|
|
107
103
|
# ask user before commit the task
|
|
108
104
|
confirm_model_area()
|
|
109
105
|
|
|
110
|
-
|
|
106
|
+
prepare_scheduler_script(self._entry_file_path)
|
|
111
107
|
|
|
112
108
|
call_subprocess(["qsub", f"{self._entry_file_dir_path}/run.sh"])
|
|
113
109
|
logger.info(f"Work has been submit to PBS system")
|
|
114
110
|
exit(0)
|
|
115
111
|
|
|
116
|
-
elif not self.
|
|
112
|
+
elif not self._submit_job:
|
|
117
113
|
if self._init_workspace:
|
|
118
114
|
prepare_workspace()
|
|
119
115
|
|
|
@@ -139,7 +135,7 @@ class WRFRun:
|
|
|
139
135
|
else:
|
|
140
136
|
prepare_wps_input_data(self._wps_data_area)
|
|
141
137
|
|
|
142
|
-
logger.
|
|
138
|
+
logger.debug(r"Enter wrfrun context")
|
|
143
139
|
|
|
144
140
|
return self
|
|
145
141
|
|
|
@@ -157,7 +153,7 @@ class WRFRun:
|
|
|
157
153
|
|
|
158
154
|
clear_model_logs()
|
|
159
155
|
|
|
160
|
-
logger.
|
|
156
|
+
logger.debug(r"Exit wrfrun context")
|
|
161
157
|
|
|
162
158
|
def _start_wrfrun_server(self):
|
|
163
159
|
"""
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.scheduler
|
|
3
|
+
################
|
|
4
|
+
|
|
5
|
+
``wrfrun`` provides functions to help users take care of the job scheduler.
|
|
6
|
+
|
|
7
|
+
Submodules
|
|
8
|
+
**********
|
|
9
|
+
|
|
10
|
+
======================================= ===========================================================
|
|
11
|
+
:doc:`env </api/scheduler.env>` Functions to manage environment variables in job scheduler.
|
|
12
|
+
:doc:`lsf </api/scheduler.lsf>` Scheduler interface for LSF job scheduler.
|
|
13
|
+
:doc:`pbs </api/scheduler.pbs>` Scheduler interface for PBS job scheduler.
|
|
14
|
+
:doc:`script </api/scheduler.script>` Generate shell scripts for job scheduler.
|
|
15
|
+
:doc:`slurm </api/scheduler.slurm>` Scheduler interface for Slurm job scheduler.
|
|
16
|
+
:doc:`utils </api/scheduler.utils>` Utility functions.
|
|
17
|
+
======================================= ===========================================================
|
|
18
|
+
|
|
19
|
+
.. toctree::
|
|
20
|
+
:maxdepth: 1
|
|
21
|
+
:hidden:
|
|
22
|
+
|
|
23
|
+
env <scheduler.env>
|
|
24
|
+
lsf <scheduler.lsf>
|
|
25
|
+
pbs <scheduler.pbs>
|
|
26
|
+
slurm <scheduler.slurm>
|
|
27
|
+
utils <scheduler.utils>
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
from .env import *
|
|
31
|
+
from .lsf import *
|
|
32
|
+
from .pbs import *
|
|
33
|
+
from .script import *
|
|
34
|
+
from .slurm import *
|
|
35
|
+
from .utils import *
|
wrfrun/scheduler/env.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.scheduler.env
|
|
3
|
+
####################
|
|
4
|
+
|
|
5
|
+
Functions to manage environment variables in job scheduler.
|
|
6
|
+
|
|
7
|
+
.. autosummary::
|
|
8
|
+
:toctree: generated/
|
|
9
|
+
|
|
10
|
+
in_job_scheduler
|
|
11
|
+
|
|
12
|
+
How Does wrfrun Check If It Is In A Job Scheduler?
|
|
13
|
+
**************************************************
|
|
14
|
+
|
|
15
|
+
If you submit your task through ``wrfrun``, that is,
|
|
16
|
+
set ``submit_job = True`` in :class:`WRFRun <wrfrun.run.WRFRun>`,
|
|
17
|
+
an environment variable called ``WRFRUN_ENV_JOB_SCHEDULER`` will be set.
|
|
18
|
+
``wrfrun`` will determine if it is in a job scheduler by checking if ``WRFRUN_ENV_JOB_SCHEDULER`` appears in environment.
|
|
19
|
+
|
|
20
|
+
If you submit your task by your own,
|
|
21
|
+
it is recommended that add ``WRFRUN_ENV_JOB_SCHEDULER`` to the environment,
|
|
22
|
+
which can ensure ``wrfrun`` works properly in the job scheduler.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
from os import environ
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def in_job_scheduler() -> bool:
    """
    Check if ``wrfrun`` runs in a job scheduler task.

    This function checks the environment variable ``WRFRUN_ENV_JOB_SCHEDULER``
    to determine if ``wrfrun`` is running in a job scheduler task.
    The variable is exported by the generated scheduler script, so it is
    present whenever the task was submitted through ``wrfrun``.

    :return: ``True`` if in a job scheduler task, else ``False``.
    :rtype: bool
    """
    # The membership test already yields the required bool;
    # an `if`/`else` returning literal True/False is redundant.
    return "WRFRUN_ENV_JOB_SCHEDULER" in environ
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
__all__ = ["in_job_scheduler"]
|
wrfrun/scheduler/lsf.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.scheduler.lsf
|
|
3
|
+
####################
|
|
4
|
+
|
|
5
|
+
Scheduler interface for LSF system.
|
|
6
|
+
|
|
7
|
+
.. autosummary::
|
|
8
|
+
:toctree: generated/
|
|
9
|
+
|
|
10
|
+
lsf_generate_settings
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from wrfrun.core import WRFRUNConfig
|
|
14
|
+
from wrfrun.res import SCHEDULER_LSF_TEMPLATE
|
|
15
|
+
from .utils import get_core_num
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def lsf_generate_settings(scheduler_config: dict) -> str:
    """
    Generate bash settings for the LSF job scheduler.

    :param scheduler_config: Job scheduler configuration; must provide
        ``node_num`` and ``queue_name``.
    :type scheduler_config: dict
    :return: Generated settings.
    :rtype: str
    """
    # LSF job logs are placed in the wrfrun log directory.
    wrfrun_log_dir = WRFRUNConfig.get_log_path()

    # LSF expects the total core count, so multiply per-node cores
    # by the configured node number.
    total_core_num = get_core_num() * scheduler_config["node_num"]

    # Read the LSF directive template shipped with wrfrun.
    with open(WRFRUNConfig.parse_resource_uri(SCHEDULER_LSF_TEMPLATE), "r") as template_file:
        content = template_file.read()

    return content.format(
        STDOUT_LOG_PATH=f"{wrfrun_log_dir}/lsf.log",
        STDERR_LOG_PATH=f"{wrfrun_log_dir}/lsf.err",
        CORE_NUM=total_core_num,
        QUEUE_NAME=scheduler_config["queue_name"],
    )
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
__all__ = ["lsf_generate_settings"]
|
wrfrun/scheduler/pbs.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.scheduler.pbs
|
|
3
|
+
####################
|
|
4
|
+
|
|
5
|
+
Scheduler interface for PBS system.
|
|
6
|
+
|
|
7
|
+
.. autosummary::
|
|
8
|
+
:toctree: generated/
|
|
9
|
+
|
|
10
|
+
pbs_generate_settings
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from wrfrun.core import WRFRUNConfig
|
|
14
|
+
from wrfrun.res import SCHEDULER_PBS_TEMPLATE
|
|
15
|
+
from .utils import get_core_num
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def pbs_generate_settings(scheduler_config: dict) -> str:
    """
    Generate bash settings for the PBS job scheduler.

    :param scheduler_config: Job scheduler configuration; must provide
        ``node_num`` and ``queue_name``.
    :type scheduler_config: dict
    :return: Generated settings.
    :rtype: str
    """
    # PBS job logs are placed in the wrfrun log directory.
    wrfrun_log_dir = WRFRUNConfig.get_log_path()

    # Read the PBS directive template shipped with wrfrun.
    with open(WRFRUNConfig.parse_resource_uri(SCHEDULER_PBS_TEMPLATE), "r") as template_file:
        content = template_file.read()

    # PBS takes node count and per-node core count as separate fields.
    return content.format(
        STDOUT_LOG_PATH=f"{wrfrun_log_dir}/pbs.log",
        STDERR_LOG_PATH=f"{wrfrun_log_dir}/pbs.err",
        NODE_NUM=scheduler_config["node_num"],
        CORE_NUM=get_core_num(),
        QUEUE_NAME=scheduler_config["queue_name"],
    )
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
__all__ = ["pbs_generate_settings"]
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
from os.path import exists, abspath, dirname
|
|
2
|
+
|
|
3
|
+
from wrfrun import WRFRUNConfig
|
|
4
|
+
from wrfrun.res import RUN_SH_TEMPLATE
|
|
5
|
+
from wrfrun.utils import logger
|
|
6
|
+
from .lsf import lsf_generate_settings
|
|
7
|
+
from .pbs import pbs_generate_settings
|
|
8
|
+
from .slurm import slurm_generate_settings
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def prepare_scheduler_script(main_file_path: str):
|
|
12
|
+
"""
|
|
13
|
+
Prepare the bash script to be submitted to job scheduler.
|
|
14
|
+
|
|
15
|
+
:param main_file_path: Path of the main entry file.
|
|
16
|
+
:type main_file_path: str
|
|
17
|
+
"""
|
|
18
|
+
# check main file path
|
|
19
|
+
if not exists(main_file_path):
|
|
20
|
+
logger.error(f"Wrong path of main entry file: {main_file_path}")
|
|
21
|
+
raise FileNotFoundError(f"Wrong path of main entry file: {main_file_path}")
|
|
22
|
+
|
|
23
|
+
# get absolute path of main entry file's parent directory
|
|
24
|
+
dir_path = abspath(dirname(main_file_path))
|
|
25
|
+
|
|
26
|
+
scheduler_configs = WRFRUNConfig.get_job_scheduler_config()
|
|
27
|
+
|
|
28
|
+
# generate scheduler settings
|
|
29
|
+
match scheduler_configs["job_scheduler"]:
|
|
30
|
+
case "lsf":
|
|
31
|
+
scheduler_settings = lsf_generate_settings(scheduler_configs)
|
|
32
|
+
|
|
33
|
+
case "pbs":
|
|
34
|
+
scheduler_settings = pbs_generate_settings(scheduler_configs)
|
|
35
|
+
|
|
36
|
+
case "slurm":
|
|
37
|
+
scheduler_settings = slurm_generate_settings(scheduler_configs)
|
|
38
|
+
|
|
39
|
+
case _:
|
|
40
|
+
logger.error(f"Unknown scheduler name: {scheduler_configs['job_scheduler']}")
|
|
41
|
+
raise ValueError(f"Unknown scheduler name: {scheduler_configs['job_scheduler']}")
|
|
42
|
+
|
|
43
|
+
# generate environment settings
|
|
44
|
+
env_settings = 'export WRFRUN_ENV_JOB_SCHEDULER=1\n'
|
|
45
|
+
if len(scheduler_configs["env_settings"]) > 0:
|
|
46
|
+
for key in scheduler_configs["env_settings"]:
|
|
47
|
+
env_settings += f"export {key}={scheduler_configs['env_settings'][key]}\n"
|
|
48
|
+
|
|
49
|
+
# generate command
|
|
50
|
+
exec_cmd = f"{scheduler_configs['python_interpreter']} {main_file_path}"
|
|
51
|
+
|
|
52
|
+
# generate shell script
|
|
53
|
+
shell_template_path = WRFRUNConfig.parse_resource_uri(RUN_SH_TEMPLATE)
|
|
54
|
+
with open(f"{dir_path}/run.sh", "w") as f:
|
|
55
|
+
|
|
56
|
+
with open(shell_template_path, "r") as f_template:
|
|
57
|
+
template = f_template.read()
|
|
58
|
+
|
|
59
|
+
template = template.format(
|
|
60
|
+
SCHEDULER_SETTINGS=scheduler_settings,
|
|
61
|
+
ENV_SETTINGS=env_settings,
|
|
62
|
+
WORK_COMMAND=exec_cmd,
|
|
63
|
+
)
|
|
64
|
+
|
|
65
|
+
f.write(template)
|
|
66
|
+
|
|
67
|
+
logger.info(f"Job scheduler script written to {dir_path}/run.sh")
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
__all__ = ["prepare_scheduler_script"]
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.scheduler.slurm
|
|
3
|
+
######################
|
|
4
|
+
|
|
5
|
+
Scheduler interface for Slurm system.
|
|
6
|
+
|
|
7
|
+
.. autosummary::
|
|
8
|
+
:toctree: generated/
|
|
9
|
+
|
|
10
|
+
slurm_generate_settings
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from wrfrun.core import WRFRUNConfig
|
|
14
|
+
from wrfrun.res import SCHEDULER_SLURM_TEMPLATE
|
|
15
|
+
from .utils import get_core_num
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def slurm_generate_settings(scheduler_config: dict) -> str:
    """
    Generate bash settings for the Slurm job scheduler.

    :param scheduler_config: Job scheduler configuration; must provide
        ``node_num`` and ``queue_name``.
    :type scheduler_config: dict
    :return: Generated settings.
    :rtype: str
    """
    # Slurm job logs are placed in the wrfrun log directory.
    wrfrun_log_dir = WRFRUNConfig.get_log_path()

    # Read the Slurm directive template shipped with wrfrun.
    with open(WRFRUNConfig.parse_resource_uri(SCHEDULER_SLURM_TEMPLATE), "r") as template_file:
        content = template_file.read()

    # Slurm takes per-node core count and node count as separate fields.
    return content.format(
        STDOUT_LOG_PATH=f"{wrfrun_log_dir}/slurm.log",
        STDERR_LOG_PATH=f"{wrfrun_log_dir}/slurm.err",
        CORE_NUM=get_core_num(),
        NODE_NUM=scheduler_config["node_num"],
        QUEUE_NAME=scheduler_config["queue_name"],
    )
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
__all__ = ["slurm_generate_settings"]
|
wrfrun/utils.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import logging
|
|
2
2
|
import subprocess
|
|
3
3
|
from datetime import datetime
|
|
4
|
-
from os import chdir, getcwd, makedirs
|
|
4
|
+
from os import chdir, getcwd, makedirs, environ
|
|
5
5
|
from os.path import exists
|
|
6
6
|
from shutil import rmtree
|
|
7
7
|
from time import time
|
|
@@ -45,7 +45,12 @@ def set_logger(logger_list: List[str], logger_level: Optional[Dict] = None):
|
|
|
45
45
|
|
|
46
46
|
# init wrfrun logger
|
|
47
47
|
logger = logging.getLogger("wrfrun")
|
|
48
|
-
|
|
48
|
+
# check environment variables and set logger level
|
|
49
|
+
if "WRFRUN_DEBUG_MODE" in environ and environ["WRFRUN_DEBUG_MODE"]:
|
|
50
|
+
_logger_level = logging.DEBUG
|
|
51
|
+
else:
|
|
52
|
+
_logger_level = logging.INFO
|
|
53
|
+
set_logger(["wrfrun", ], {"wrfrun": _logger_level})
|
|
49
54
|
|
|
50
55
|
|
|
51
56
|
def unify_logger_format():
|
|
@@ -218,7 +223,7 @@ def check_subprocess_status(status: subprocess.CompletedProcess):
|
|
|
218
223
|
logger.error(f"====== ====== ======")
|
|
219
224
|
|
|
220
225
|
# raise error
|
|
221
|
-
raise RuntimeError
|
|
226
|
+
raise RuntimeError(f"Failed to exec command: '{command}'. Please check the log above.")
|
|
222
227
|
|
|
223
228
|
|
|
224
229
|
def call_subprocess(command: list[str], work_path: Optional[str] = None, print_output=False):
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""
|
|
2
|
+
wrfrun.workspace
|
|
3
|
+
################
|
|
4
|
+
|
|
5
|
+
Prepare ``workspace`` for ``wrfrun`` and numerical models.
|
|
6
|
+
|
|
7
|
+
Submodules
|
|
8
|
+
**********
|
|
9
|
+
|
|
10
|
+
================================= ===========================================================
|
|
11
|
+
:doc:`core </api/workspace.core>` Core functions of this submodule.
|
|
12
|
+
:doc:`wrf </api/workspace.wrf>` Functions to prepare workspace for WPS/WRF model.
|
|
13
|
+
================================= ===========================================================
|
|
14
|
+
|
|
15
|
+
Workspace
|
|
16
|
+
*********
|
|
17
|
+
|
|
18
|
+
``workspace`` is a collection of several directories where ``wrfrun``, extensions and numerical model works.
|
|
19
|
+
These directories and their purpose are listed below.
|
|
20
|
+
|
|
21
|
+
=================================== ===========================================================
|
|
22
|
+
Directory Path                      Purpose
|
|
23
|
+
=================================== ===========================================================
|
|
24
|
+
``/tmp/wrfrun`` Store temporary files.
|
|
25
|
+
``$HOME/.config/wrfrun`` Main work directory.
|
|
26
|
+
``$HOME/.config/wrfrun/replay`` Work directory for :doc:`replay <wrfrun.core.replay>`.
|
|
27
|
+
``$HOME/.config/wrfrun/model`` Work directory for numerical models.
|
|
28
|
+
=================================== ===========================================================
|
|
29
|
+
|
|
30
|
+
.. toctree::
|
|
31
|
+
:maxdepth: 1
|
|
32
|
+
:hidden:
|
|
33
|
+
|
|
34
|
+
core <workspace.core>
|
|
35
|
+
wrf <workspace.wrf>
|
|
36
|
+
"""
|
|
37
|
+
|
|
38
|
+
from .core import *
|