wrfrun 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff compares the contents of the two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- wrfrun/__init__.py +8 -3
- wrfrun/cli.py +69 -29
- wrfrun/core/__init__.py +27 -10
- wrfrun/core/_config.py +308 -0
- wrfrun/core/_constant.py +236 -0
- wrfrun/core/_exec_db.py +105 -0
- wrfrun/core/_namelist.py +287 -0
- wrfrun/core/_record.py +178 -0
- wrfrun/core/_resource.py +172 -0
- wrfrun/core/base.py +132 -406
- wrfrun/core/core.py +196 -0
- wrfrun/core/error.py +28 -2
- wrfrun/core/replay.py +10 -96
- wrfrun/core/server.py +52 -27
- wrfrun/core/type.py +171 -0
- wrfrun/data.py +304 -139
- wrfrun/extension/goos_sst/__init__.py +2 -2
- wrfrun/extension/goos_sst/core.py +9 -14
- wrfrun/extension/goos_sst/res/__init__.py +0 -1
- wrfrun/extension/goos_sst/utils.py +50 -44
- wrfrun/extension/littler/core.py +105 -88
- wrfrun/extension/utils.py +4 -3
- wrfrun/log.py +117 -0
- wrfrun/model/__init__.py +11 -7
- wrfrun/model/constants.py +52 -0
- wrfrun/model/palm/__init__.py +30 -0
- wrfrun/model/palm/core.py +145 -0
- wrfrun/model/palm/namelist.py +33 -0
- wrfrun/model/plot.py +99 -119
- wrfrun/model/type.py +116 -0
- wrfrun/model/utils.py +9 -20
- wrfrun/model/wrf/__init__.py +4 -9
- wrfrun/model/wrf/core.py +246 -161
- wrfrun/model/wrf/exec_wrap.py +13 -12
- wrfrun/model/wrf/geodata.py +116 -100
- wrfrun/model/wrf/log.py +103 -0
- wrfrun/model/wrf/namelist.py +90 -73
- wrfrun/model/wrf/plot.py +102 -0
- wrfrun/model/wrf/scheme.py +108 -52
- wrfrun/model/wrf/utils.py +39 -25
- wrfrun/model/wrf/vtable.py +35 -3
- wrfrun/plot/__init__.py +20 -0
- wrfrun/plot/wps.py +90 -73
- wrfrun/res/__init__.py +103 -5
- wrfrun/res/config/config.template.toml +8 -0
- wrfrun/res/config/palm.template.toml +23 -0
- wrfrun/run.py +105 -77
- wrfrun/scheduler/__init__.py +1 -0
- wrfrun/scheduler/lsf.py +3 -2
- wrfrun/scheduler/pbs.py +3 -2
- wrfrun/scheduler/script.py +17 -5
- wrfrun/scheduler/slurm.py +3 -2
- wrfrun/scheduler/utils.py +14 -2
- wrfrun/utils.py +88 -199
- wrfrun/workspace/__init__.py +8 -5
- wrfrun/workspace/core.py +20 -12
- wrfrun/workspace/palm.py +137 -0
- wrfrun/workspace/wrf.py +16 -15
- wrfrun-0.3.0.dist-info/METADATA +240 -0
- wrfrun-0.3.0.dist-info/RECORD +78 -0
- wrfrun/core/config.py +0 -923
- wrfrun/model/base.py +0 -14
- wrfrun-0.2.0.dist-info/METADATA +0 -68
- wrfrun-0.2.0.dist-info/RECORD +0 -62
- {wrfrun-0.2.0.dist-info → wrfrun-0.3.0.dist-info}/WHEEL +0 -0
- {wrfrun-0.2.0.dist-info → wrfrun-0.3.0.dist-info}/entry_points.txt +0 -0
wrfrun/model/wrf/exec_wrap.py
CHANGED
@@ -2,7 +2,7 @@
 wrfrun.model.wrf.exec_wrap
 ##########################
 
-Function wrapper of WPS / WRF :doc:`Executables </api/model.wrf.core>`.
+Function wrapper of WPS/WRF :doc:`Executables </api/model.wrf.core>`.
 
 .. autosummary::
     :toctree: generated/
@@ -18,8 +18,9 @@ Function wrapper of WPS / WRF :doc:`Executables </api/model.wrf.core>`.
 
 from typing import Optional, Union
 
-from wrfrun import
+from wrfrun.core import WRFRUN
+
+from .core import DFI, WRF, GeoGrid, MetGrid, NDown, Real, UnGrib
 
 
 def geogrid(geogrid_tbl_file: Union[str, None] = None):
@@ -28,7 +29,7 @@ def geogrid(geogrid_tbl_file: Union[str, None] = None):
 
     :param geogrid_tbl_file: Custom GEOGRID.TBL file path. Defaults to None.
     """
-    GeoGrid(geogrid_tbl_file,
+    GeoGrid(geogrid_tbl_file, WRFRUN.config.get_core_num())()
 
 
 def ungrib(vtable_file: Union[str, None] = None, input_data_path: Optional[str] = None, prefix="FILE"):
@@ -47,7 +48,9 @@ def ungrib(vtable_file: Union[str, None] = None, input_data_path: Optional[str]
     UnGrib(vtable_file, input_data_path).set_ungrib_output_prefix(prefix)()
 
 
-def metgrid(
+def metgrid(
+    geogrid_data_path: Optional[str] = None, ungrib_data_path: Optional[str] = None, fg_names: Union[str, list[str]] = "FILE"
+):
     """
     Function interface for :class:`MetGrid <wrfrun.model.wrf.core.MetGrid>`.
 
@@ -60,9 +63,7 @@ def metgrid(geogrid_data_path: Optional[str] = None, ungrib_data_path: Optional[
     :param fg_names: ``fg_name`` of metgrid, a single prefix string or a string list.
     :type fg_names: str | list
     """
-    MetGrid(
-        geogrid_data_path, ungrib_data_path, WRFRUNConfig.get_core_num()
-    ).set_metgrid_fg_names(fg_names)()
+    MetGrid(geogrid_data_path, ungrib_data_path, WRFRUN.config.get_core_num()).set_metgrid_fg_names(fg_names)()
 
 
 def real(metgrid_data_path: Union[str, None] = None):
@@ -72,7 +73,7 @@ def real(metgrid_data_path: Union[str, None] = None):
     :param metgrid_data_path: Directory path of :class:`MetGrid <wrfrun.model.wrf.core.MetGrid>` outputs.
         If is ``None``, try to use the workspace path or output path in the config file.
     """
-    Real(metgrid_data_path,
+    Real(metgrid_data_path, WRFRUN.config.get_core_num())()
 
 
 def wrf(input_file_dir_path: Union[str, None] = None, restart_file_dir_path: Optional[str] = None, save_restarts=False):
@@ -83,7 +84,7 @@ def wrf(input_file_dir_path: Union[str, None] = None, restart_file_dir_path: Opt
     :param restart_file_dir_path: Directory path of restart files.
     :param save_restarts: If saving restart files. Defaults to False.
     """
-    WRF(input_file_dir_path, restart_file_dir_path, save_restarts,
+    WRF(input_file_dir_path, restart_file_dir_path, save_restarts, WRFRUN.config.get_core_num())()
 
 
 def dfi(input_file_dir_path: Optional[str] = None, update_real_output=True):
@@ -95,7 +96,7 @@ def dfi(input_file_dir_path: Optional[str] = None, update_real_output=True):
     :param update_real_output: If update corresponding files in :class:`Real <wrfrun.model.wrf.core.Real>` outputs.
     :type update_real_output: bool
     """
-    DFI(input_file_dir_path, update_real_output,
+    DFI(input_file_dir_path, update_real_output, WRFRUN.config.get_core_num())()
 
 
 def ndown(wrfout_file_path: str, real_output_dir_path: Optional[str] = None, update_namelist=True):
@@ -109,7 +110,7 @@ def ndown(wrfout_file_path: str, real_output_dir_path: Optional[str] = None, upd
     :param update_namelist: If update namelist settings for the final integral.
     :type update_namelist: bool
     """
-    NDown(wrfout_file_path, real_output_dir_path, update_namelist,
+    NDown(wrfout_file_path, real_output_dir_path, update_namelist, WRFRUN.config.get_core_num())()
 
 
 __all__ = ["geogrid", "ungrib", "metgrid", "real", "wrf", "dfi", "ndown"]
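The diff above shows the wrappers now reading the MPI core count from the shared `WRFRUN.config` object instead of the 0.2.0 module-level `WRFRUNConfig`. Below is a minimal, hedged sketch of how these wrappers might be chained into a WPS/WRF preprocessing run; it assumes (not shown in this diff) that a wrfrun configuration/session has already been initialized elsewhere (for example by the CLI or `wrfrun.run`) before the wrappers are called.

```python
# A minimal sketch of driving WPS/WRF with the 0.3.0 function wrappers shown above.
# Assumption: a wrfrun config/session is already active; only the signatures and
# the WRFRUN.config accessor visible in this diff are used.
from wrfrun.core import WRFRUN
from wrfrun.model.wrf.exec_wrap import geogrid, metgrid, real, ungrib, wrf


def preprocess_and_run() -> None:
    # Each wrapper builds the corresponding Executable and calls it immediately,
    # pulling the MPI core count from the shared config object.
    core_num = WRFRUN.config.get_core_num()
    print(f"Running WPS/WRF with {core_num} cores")

    geogrid()                # GEOGRID.TBL taken from the config when None
    ungrib(prefix="FILE")    # Vtable and input data paths from the config when None
    metgrid(fg_names="FILE") # consumes geogrid + ungrib outputs
    real()                   # consumes metgrid outputs
    wrf(save_restarts=False) # final integration
```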
wrfrun/model/wrf/geodata.py
CHANGED
@@ -1,3 +1,19 @@
+"""
+wrfrun.model.wrf.geodata
+########################
+
+Functions to read and write WPS geographical static data.
+
+.. autosummary::
+    :toctree: generated/
+
+    _get_data_type
+    _get_clip_area
+    parse_geographical_data_index
+    parse_geographical_data_file
+    read_geographical_static_data
+"""
+
 from os import listdir
 from os.path import exists
 from typing import OrderedDict, Union
@@ -5,44 +21,46 @@ from typing import OrderedDict, Union
 import numpy as np
 from xarray import DataArray
 
-from wrfrun.core import
-from wrfrun.
+from wrfrun.core import WRFRUN
+from wrfrun.log import logger
 
 # for example: 00001-00200.00201-00400
 DATA_NAME_TEMPLATE = "{}-{}.{}-{}"
 
 
 def _get_data_type(wordsize: int) -> type:
-    """
-
-    Args:
-        wordsize (int): Wordsize in index file.
+    """
+    Get data type based on wordsize value in index file.
 
+    :param wordsize: Wordsize in index file.
+    :type wordsize: int
+    :return: NumPy value type.
+    :rtype: type
     """
     # define map dict
-    map_dict = {
-        1: np.int8,
-        2: np.int16,
-        4: np.int32
-    }
+    map_dict = {1: np.int8, 2: np.int16, 4: np.int32}
 
     return map_dict[wordsize]
 
 
-def _get_clip_area(
+def _get_clip_area(
+    index_area: tuple[int, int, int, int], row_num: int, col_num: int, tile_x: int, tile_y: int
+) -> tuple[int, int, int, int]:
+    """
+    Get clip area.
+
+    :param index_area: Full area index.
+    :type index_area: tuple[int, int, int, int]
+    :param row_num: Row number of the tile.
+    :type row_num: int
+    :param col_num: Column number of the file.
+    :type col_num: int
+    :param tile_x: X size of the tile.
+    :type tile_x: int
+    :param tile_y: Y size of the tile.
+    :type tile_y: int
+    :return: Clip area.
+    :rtype: tuple[int, int, int, int]
     """
     # calculate tile area
     tile_area = (
@@ -64,25 +82,24 @@ def _get_clip_area(index_area: tuple[int, int, int, int], row_num: int, col_num:
 
 
 def parse_geographical_data_index(index_path: str) -> OrderedDict:
-    """
-
-    Args:
-        index_path (str): Index file path.
+    """
+    Read geographical data index file.
 
+    :param index_path: Index file path.
+    :type index_path: str
+    :return: Info stored in dict.
+    :rtype: OrderedDict[Any, Any]
     """
     # since the index file is very similar to fortran namelist file,
     # we can manually add "&index" and "/" and parse it as a namelist
     # temp file store path
-    WRFRUNConfig =
+    WRFRUNConfig = WRFRUN.config
     temp_file = f"{WRFRUNConfig.WRFRUN_TEMP_PATH}/geogrid_data.index"
     temp_file = WRFRUNConfig.parse_resource_uri(temp_file)
 
     # open file and add header and tail
     with open(index_path, "r") as _index_file:
         with open(temp_file, "w") as _temp_file:
             _temp_file.write("&index\n")
             _temp_file.write(_index_file.read())
             _temp_file.write("/")
@@ -93,20 +110,31 @@
     return WRFRUNConfig.get_namelist("geog_static_data")["index"]
 
 
-def parse_geographical_data_file(
+def parse_geographical_data_file(
+    file_path: str,
+    wordsize: int,
+    endian: str,
+    tile_shape: tuple[int, ...],
+    area: Union[tuple[int, ...], None] = None,
+    miss_value: Union[int, float, None] = None,
+) -> np.ndarray:
+    """
+    Read geographical data file.
+
+    :param file_path: File path.
+    :type file_path: str
+    :param wordsize: How many bytes are used to store value in data file.
+    :type wordsize: int
+    :param endian: "big" or "little".
+    :type endian: str
+    :param tile_shape: The raw shape of the tile. Can be 2D or 3D.
+    :type tile_shape: tuple[int, ...]
+    :param area: The range (x_start, x_stop, y_start, y_stop, ...) of data you want to read. Defaults to None.
+    :type area: Union[tuple[int, ...], None]
+    :param miss_value: The value which represents NaN. Defaults to None.
+    :type miss_value: Union[int, float, None]
+    :return: NumPy array object.
+    :rtype: ndarray[Any, Any]
     """
     # get data type
     data_type = _get_data_type(wordsize)
@@ -114,7 +142,7 @@ def parse_geographical_data_file(file_path: str, wordsize: int, endian: str, til
     # read data
     data = np.fromfile(file_path, dtype=data_type)
 
-    # swap byte if
+    # swap byte if it needs
     if endian == "big":
         data = data.byteswap()
 
@@ -125,17 +153,16 @@ def parse_geographical_data_file(file_path: str, wordsize: int, endian: str, til
     if area:
         # check area
         if len(area) % 2 != 0:
-            logger.error(
-                f"The length of `area` must be even, but is {len(area)}")
+            logger.error(f"The length of `area` must be even, but is {len(area)}")
             exit(1)
 
         area_array = np.asarray(area).reshape(-1, 2)
         slice_index = tuple((slice(i[0], i[1]) for i in area_array))
 
         if len(slice_index) == 2:
-            slice_index += (slice(None),
+            slice_index += (slice(None),)
 
-        data = data[slice_index[::-1]]
+        data = data[slice_index[::-1]]  # type: ignore
 
     # fill nan
     if miss_value:
@@ -144,16 +171,20 @@ def parse_geographical_data_file(file_path: str, wordsize: int, endian: str, til
     return data
 
 
-def read_geographical_static_data(
+def read_geographical_static_data(
+    geog_data_folder_path: str, name: str, area: Union[tuple[float, float, float, float], None] = None
+) -> DataArray:
+    """
+    Read WPS geographical static data
+
+    :param geog_data_folder_path: Data folder path.
+    :type geog_data_folder_path: str
+    :param name: Name that will be used to create DataArray.
+    :type name: str
+    :param area: Longitude and latitude area (lon_start, lon_stop, lat_start, lat_stop). Defaults to None.
+    :type area: Union[tuple[float, float, float, float], None]
+    :return: DataArray object.
+    :rtype: DataArray
     """
     # check if folder exists
     if not exists(geog_data_folder_path):
@@ -167,8 +198,7 @@ def read_geographical_static_data(geog_data_folder_path: str, name: str, area: U
     # extract info to read data
     # # check essential key
     if "wordsize" not in index_data:
-        logger.error(
-            f"Can't find key `wordsize` in index file, maybe it is corrupted.")
+        logger.error("Can't find key `wordsize` in index file, maybe it is corrupted.")
         exit(1)
     # # extract info
     wordsize = index_data["wordsize"]
@@ -194,15 +224,12 @@ def read_geographical_static_data(geog_data_folder_path: str, name: str, area: U
             int((area[3] - known_lat) // dy),
         )
         # check if negative value exists
-        if
-            index_area[2] < 0
-        ):
-            logger.warning(f"Part of your area has exceeded data's area")
+        if index_area[0] < 0 or index_area[2] < 0:
+            logger.warning("Part of your area has exceeded data's area")
         # set negative value to 0
         index_area = tuple((i if i >= 0 else 0 for i in index_area))
     else:
-        logger.warning(
+        logger.warning("You want to read all data, which may be very large")
        index_area = None
 
     # find the file we need to read
@@ -218,58 +245,49 @@ def read_geographical_static_data(geog_data_folder_path: str, name: str, area: U
        filenames = []
         # # generate filenames and clip area
         for row_num in range(tile_index_num[2], tile_index_num[3] + 1):
            _names = []
             for col_num in range(tile_index_num[0], tile_index_num[1] + 1):
                _names.append(
                     [
                         DATA_NAME_TEMPLATE.format(
-                            str(col_num * tile_shape[-1] + 1).rjust(5,
-                            str((col_num + 1) * tile_shape[-1]).rjust(5,
-                            str(row_num * tile_shape[-2] + 1).rjust(5,
-                            str((row_num + 1) * tile_shape[-2]).rjust(5,
-                        ),
+                            str(col_num * tile_shape[-1] + 1).rjust(5, "0"),
+                            str((col_num + 1) * tile_shape[-1]).rjust(5, "0"),
+                            str(row_num * tile_shape[-2] + 1).rjust(5, "0"),
+                            str((row_num + 1) * tile_shape[-2]).rjust(5, "0"),
+                        ),
+                        _get_clip_area(index_area, row_num, col_num, tile_shape[-1], tile_shape[-2]),  # type: ignore
                     ]
                 )
 
            filenames.append(_names)
     else:
-        raw_filenames = [x for x in listdir(
-            geog_data_folder_path) if x != "index"]
+        raw_filenames = [x for x in listdir(geog_data_folder_path) if x != "index"]
         raw_filenames.sort()
 
         # parse the last file to get row number and column number
         _last_filename = raw_filenames[-1]
-        total_col_num = int(_last_filename.split(
-        total_row_num = int(_last_filename.split(
-            ".")[1].split("-")[1]) // tile_shape[-2]
+        total_col_num = int(_last_filename.split(".")[0].split("-")[1]) // tile_shape[-1]
+        total_row_num = int(_last_filename.split(".")[1].split("-")[1]) // tile_shape[-2]
 
         filenames = []
 
         for row_num in range(total_row_num):
             _names = []
             for col_num in range(total_col_num):
-                _names.append(
-                    [
-                        raw_filenames[row_num * total_col_num + col_num], None
-                    ]
-                )
+                _names.append([raw_filenames[row_num * total_col_num + col_num], None])
 
             filenames.append(_names)
 
     # read and concatenate
     array = []
     for _row in filenames:
         _array = []
         for _col in _row:
+            _array.append(
+                parse_geographical_data_file(
+                    f"{geog_data_folder_path}/{_col[0]}", wordsize, endian, tile_shape, _col[1], miss_value
+                )
+            )
 
         # concatenate _array
         array.append(np.concatenate(_array, axis=-1))
@@ -289,13 +307,11 @@ def read_geographical_static_data(geog_data_folder_path: str, name: str, area: U
     levels = np.arange(array.shape[-3])
 
     return DataArray(
-        name=name,
+        name=name,
+        data=array,
         dims=["levels", "latitude", "longitude"],
-        coords={
-            "latitude": latitude,
-            "levels": levels
-        }, attrs=index_data
+        coords={"longitude": longitude, "latitude": latitude, "levels": levels},
+        attrs=index_data,
     )
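The reworked `read_geographical_static_data` shown above now takes a folder path, a name, and an optional lon/lat area, and returns an `xarray.DataArray` with dims ("levels", "latitude", "longitude") and the parsed index entries attached as attrs. A minimal, hedged usage sketch follows; the folder path is a placeholder for a directory in your own WPS_GEOG tree (an "index" file plus NNNNN-NNNNN.NNNNN-NNNNN tiles), not something defined by this diff.

```python
# A minimal sketch of reading a clipped WPS static field with the new helper.
# Assumption: "/data/WPS_GEOG/topo_gmted2010_30s" is a placeholder path to a
# standard WPS geog dataset on your machine.
from wrfrun.model.wrf.geodata import read_geographical_static_data

topo = read_geographical_static_data(
    "/data/WPS_GEOG/topo_gmted2010_30s",  # placeholder dataset folder
    name="topography",
    area=(100.0, 120.0, 20.0, 40.0),      # lon_start, lon_stop, lat_start, lat_stop
)

# Dims come back as ("levels", "latitude", "longitude"); the index file's
# key/value pairs (wordsize, tile sizes, ...) are available via .attrs.
print(topo.dims, topo.shape)
print(sorted(topo.attrs))
```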
wrfrun/model/wrf/log.py
ADDED
@@ -0,0 +1,103 @@
+"""
+wrfrun.model.wrf.log
+####################
+
+Functions to parse and clear WPS/WRF model logs.
+
+.. autosummary::
+    :toctree: generated/
+
+    get_wrf_simulated_seconds
+    clear_wrf_logs
+"""
+
+import subprocess
+from datetime import datetime
+from os import listdir
+from os.path import exists
+from shutil import move
+from typing import Optional
+
+from wrfrun.core import WRFRUN
+from wrfrun.log import logger
+from wrfrun.utils import check_path
+from wrfrun.workspace.wrf import get_wrf_workspace_path
+
+
+def get_wrf_simulated_seconds(start_datetime: datetime, log_file_path: Optional[str] = None) -> int:
+    """
+    Read the latest line of WRF's log file and calculate how many seconds WRF has integrated.
+
+    :param start_datetime: WRF start datetime.
+    :type start_datetime: datetime
+    :param log_file_path: Absolute path of the log file to be parsed.
+    :type log_file_path: str
+    :return: Integrated seconds. If this method fails to calculate the time, the returned value is ``-1``.
+    :rtype: int
+    """
+    # use linux cmd to get the latest line of wrf log files
+    if log_file_path is None:
+        log_file_path = WRFRUN.config.parse_resource_uri(f"{get_wrf_workspace_path('wrf')}/rsl.out.0000")
+    res = subprocess.run(["tail", "-n", "1", log_file_path], capture_output=True)
+    log_text = res.stdout.decode()
+
+    if not (log_text.startswith("d01") or log_text.startswith("d02")):
+        return -1
+
+    time_string = log_text.split()[1]
+
+    try:
+        current_datetime = datetime.strptime(time_string, "%Y-%m-%d_%H:%M:%S")
+        # remove timezone info so we can calculate.
+        date_delta = current_datetime - start_datetime.replace(tzinfo=None)
+        seconds = date_delta.days * 24 * 60 * 60 + date_delta.seconds
+
+    except ValueError:
+        seconds = -1
+
+    return seconds
+
+
+def clear_wrf_logs() -> None:
+    """
+    Collect unsaved WPS/WRF log files and save them to the corresponding
+    output directory of the ``Executable``.
+    """
+    WRFRUNConfig = WRFRUN.config
+
+    # wps
+    work_path = WRFRUNConfig.parse_resource_uri(get_wrf_workspace_path("wps"))
+
+    if exists(work_path):
+        log_files = [x for x in listdir(work_path) if x.endswith(".log")]
+
+        if len(log_files) > 0:
+            logger.warning("Found unprocessed log files of WPS model.")
+
+            log_save_path = f"{WRFRUNConfig.parse_resource_uri(WRFRUNConfig.WRFRUN_OUTPUT_PATH)}/wps_unsaved_logs"
+            check_path(log_save_path)
+
+            for _file in log_files:
+                move(f"{work_path}/{_file}", f"{log_save_path}/{_file}")
+
+            logger.warning(f"Unprocessed log files of WPS model has been saved to {log_save_path}, check it")
+
+    # wrf
+    work_path = WRFRUNConfig.parse_resource_uri(get_wrf_workspace_path("wrf"))
+
+    if exists(work_path):
+        log_files = [x for x in listdir(work_path) if x.startswith("rsl.")]
+
+        if len(log_files) > 0:
+            logger.warning("Found unprocessed log files of WRF model.")
+
+            log_save_path = f"{WRFRUNConfig.parse_resource_uri(WRFRUNConfig.WRFRUN_OUTPUT_PATH)}/wrf_unsaved_logs"
+            check_path(log_save_path)
+
+            for _file in log_files:
+                move(f"{work_path}/{_file}", f"{log_save_path}/{_file}")
+
+            logger.warning(f"Unprocessed log files of WRF model has been saved to {log_save_path}, check it")
+
+
+__all__ = ["get_wrf_simulated_seconds", "clear_wrf_logs"]
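The new module above exposes two helpers: `get_wrf_simulated_seconds`, which parses the latest `d01`/`d02` line of an `rsl.out.0000` file and returns -1 on failure, and `clear_wrf_logs`, which sweeps leftover WPS/WRF logs into the wrfrun output directory. A hedged sketch of using them to watch integration progress follows; the log path, start time, and forecast length below are placeholders, and when `log_file_path` is omitted the helper resolves it from the active workspace.

```python
# A minimal sketch of polling WRF progress with the 0.3.0 log helpers.
# Assumptions: /path/to/run/rsl.out.0000, the start datetime, and the 24 h
# forecast length are placeholders for your own run.
import time
from datetime import datetime

from wrfrun.model.wrf.log import clear_wrf_logs, get_wrf_simulated_seconds

start = datetime(2024, 1, 1, 0, 0, 0)  # placeholder simulation start time
total_seconds = 24 * 3600              # placeholder forecast length

while True:
    done = get_wrf_simulated_seconds(start, log_file_path="/path/to/run/rsl.out.0000")
    if done < 0:
        print("no parsable d01/d02 line in rsl.out.0000 yet")
    else:
        print(f"integrated {done}/{total_seconds} s ({100 * done / total_seconds:.1f}%)")
        if done >= total_seconds:
            break
    time.sleep(60)

# After the run, move any leftover WPS/WRF logs into the wrfrun output directory.
clear_wrf_logs()
```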