vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/nwp/tools/drhook.py
CHANGED

@@ -5,7 +5,11 @@ Common interest classes to help setup the DrHook library environment.
 import footprints
 from bronx.fancies import loggers

-from vortex.algo.components import
+from vortex.algo.components import (
+    AlgoComponentDecoMixin,
+    Parallel,
+    algo_component_deco_mixin_autodoc,
+)

 #: No automatic export
 __all__ = []
@@ -28,14 +32,15 @@ class DrHookDecoMixin(AlgoComponentDecoMixin):
         footprints.Footprint(
             attr=dict(
                 drhookprof=dict(
-                    info=
+                    info="Activate the DrHook profiling.",
                     optional=True,
                     type=bool,
                     default=False,
                     doc_zorder=-50,
                 ),
             ),
-        )
+        )
+    ]

     def _drhook_varexport(self, rh, opts):  # @UnusedVariable
         """Export proper DrHook variables"""
@@ -45,8 +50,8 @@ class DrHookDecoMixin(AlgoComponentDecoMixin):
             ("DR_HOOK_OPT", "prof"),
             ("DR_HOOK_IGNORE_SIGNALS", "-1"),
         ]
-            if self.drhookprof
-            [("DR_HOOK", "0"), ("DR_HOOK_IGNORE_SIGNALS", "-1")]
+            if self.drhookprof
+            else [("DR_HOOK", "0"), ("DR_HOOK_IGNORE_SIGNALS", "-1")]
         )
         if not isinstance(self, Parallel):
             drhook_vars += [
@@ -55,8 +60,7 @@ class DrHookDecoMixin(AlgoComponentDecoMixin):
             ("DR_HOOK_ASSERT_MPI_INITIALIZED", "0"),
         ]
         for k, v in drhook_vars:
-            logger.info(
+            logger.info("Setting DRHOOK env %s = %s", k, v)
             self.env[k] = v

-
-    _MIXIN_PREPARE_HOOKS = (_drhook_varexport, )
+    _MIXIN_PREPARE_HOOKS = (_drhook_varexport,)
vortex/nwp/tools/grib.py
CHANGED

@@ -71,7 +71,9 @@ class _GenericFilter:
         elif isinstance(a_filter, str):
             self._filters.append(json.loads(a_filter))
         elif isinstance(a_filter, Context):
-            for a_request in a_filter.sequence.effective_inputs(
+            for a_request in a_filter.sequence.effective_inputs(
+                kind="filtering_request"
+            ):
                 self._filters.append(a_request.rh.contents.data)

     def __len__(self):
@@ -84,7 +86,7 @@ class _GenericFilter:
         superset_ok = True
         for k, v in subset.items():
             # Ignore the comments...
-            if k.startswith(
+            if k.startswith("comment"):
                 continue
             # Check for the key inside the full dictionary
             try:
@@ -105,12 +107,14 @@ class _GenericFilter:

     def _filter_process(self, fid, a_filter):
         """Check if the data's fid complies with the filter."""
-        includes = a_filter.get(
-        excludes = a_filter.get(
+        includes = a_filter.get("fields_include", [])
+        excludes = a_filter.get("fields_exclude", [])
         try:
-            fid = fid[a_filter[
+            fid = fid[a_filter["fid_format"]]
         except KeyboardInterrupt:
-            raise ValueError(
+            raise ValueError(
+                "Please specify a valid fid_format in the filter description"
+            )
         # First process includes
         includes_ok = True
         for include in includes:
@@ -127,13 +131,13 @@ class _GenericFilter:

     def __call__(self, inputfile, outfile_fmt):
         """Apply the various filters on *inputfile*. Should be implemented..."""
-        raise NotImplementedError(
+        raise NotImplementedError("This method have to be overwritten.")


 class GRIBFilter(_GenericFilter):
     """Class in charge of filtering GRIB files."""

-    CONCATENATE_FILTER =
+    CONCATENATE_FILTER = "concatenate"

     def __init__(self, concatenate=False):
         """
@@ -142,7 +146,7 @@ class GRIBFilter(_GenericFilter):
         """
         super().__init__()
         self.concatenate = concatenate
-        self._xgrib_support =
+        self._xgrib_support = "grib" in self._sh.loaded_addons()

     def __len__(self):
         """Returns the number of active filters (concatenate included)."""
@@ -151,16 +155,21 @@
     def _simple_cat(self, gribfile, outfile_fmt, intent):
         """Just concatenate a multipart GRIB."""
         if self._xgrib_support and self._sh.is_xgrib(gribfile):
-            self._sh.xgrib_pack(
-
-
+            self._sh.xgrib_pack(
+                gribfile,
+                outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                intent=intent,
+            )
         else:
             # Just make a copy with the appropriate name...
-            self._sh.cp(
-
-
-
-
+            self._sh.cp(
+                gribfile,
+                outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                intent=intent,
+                fmt="grib",
+            )
+
+    def __call__(self, gribfile, outfile_fmt, intent="in"):
         """Apply the various filters on *gribfile*.

         :param gribfile: The path to the input GRIB file
@@ -178,52 +187,63 @@
         if not self._filters:
             if self.concatenate:
                 self._simple_cat(gribfile, outfile_fmt, intent=intent)
-                return [
+                return [
+                    outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                ]
             else:
                 raise ValueError("Set concatenate=True or provide a filter.")

         # Open the input file using Epygram
         from ..util import usepygram
-
+
+        if not usepygram.epygram_checker.is_available(version="1.0.0"):
             raise AlgoComponentError("Epygram (v1.0.0) needs to be available")

         if self._xgrib_support and self._sh.is_xgrib(gribfile):
             idx = self._sh.xgrib_index_get(gribfile)
-            in_data = [
-
-
-
-
+            in_data = [
+                footprints.proxy.dataformat(
+                    filename=self._sh.path.realpath(a_gribfile),
+                    openmode="r",
+                    format="GRIB",
+                )
+                for a_gribfile in idx
+            ]
         else:
-            in_data = [
-
-
-
-
+            in_data = [
+                footprints.proxy.dataformat(
+                    filename=self._sh.path.realpath(gribfile),
+                    openmode="r",
+                    format="GRIB",
+                ),
+            ]

         # Open output files
         out_data = list()
         out_filelist = list()
         for a_filter in self._filters:
-            f_name = outfile_fmt.format(filtername=a_filter[
+            f_name = outfile_fmt.format(filtername=a_filter["filter_name"])
             out_filelist.append(f_name)
             # It would be a lot better to use io.open but grib_api is very annoying !
-            out_data.append(open(f_name,
+            out_data.append(open(f_name, "wb"))
         if self.concatenate:
             f_name = outfile_fmt.format(filtername=self.CONCATENATE_FILTER)
             out_filelist.append(f_name)
             # It would be a lot better to use io.open but grib_api is very annoying !
-            out_cat = open(f_name,
+            out_cat = open(f_name, "wb")

         with usepygram.epy_env_prepare(sessions.current()):
             for a_in_data in in_data:
                 msg = a_in_data.iter_messages(headers_only=False)
                 while msg is not None:
-                    for
+                    for a_out_data, a_filter in zip(out_data, self._filters):
                         thefid = msg.genfid()
                         if self._filter_process(thefid, a_filter):
-                            logger.debug(
-
+                            logger.debug(
+                                "Select succeed for filter %s: %s",
+                                a_filter["filter_name"],
+                                thefid,
+                            )
                             msg.write_to_file(a_out_data)
                     if self.concatenate:
                         msg.write_to_file(out_cat)
@@ -248,14 +268,16 @@ def grib_inplace_cat(t, rh):
     :param t: A :class:`vortex.sessions.Ticket` object
     :param rh: A :class:`vortex.data.handlers.Handler` object
     """
-    xgrib_support =
+    xgrib_support = "grib" in t.sh.loaded_addons()
     if xgrib_support:
         if t.sh.is_xgrib(rh.container.localpath()):
             # Some cleanup...
             rh.reset_contents()
             rh.container.close()
             # Move the index file prior to the concatenation
-            tmpfile =
+            tmpfile = (
+                rh.container.localpath() + "_concat" + t.sh.safe_filesuffix()
+            )
             t.sh.move(rh.container.localpath(), tmpfile)
             # Concatenate
             t.sh.xgrib_pack(tmpfile, rh.container.localpath())
@@ -263,6 +285,10 @@ def grib_inplace_cat(t, rh):
             t.sh.grib_remove(tmpfile)
             logger.info("The multipart GRIB has been concatenated.")
         else:
-            logger.info(
+            logger.info(
+                "The localpath is not a multipart GRIB: nothing to do."
+            )
     else:
-        logger.info(
+        logger.info(
+            "Multipart GRIB support is not activated: nothing can be done."
+        )
vortex/nwp/tools/gribdiff.py
CHANGED

@@ -18,7 +18,7 @@ class _GRIBDIFF_Plus_St:
         self._result = result

     def __str__(self):
-        return
+        return "{:s} | rc={:d}>".format(repr(self).rstrip(">"), self.rc)

     @property
     def result(self):
@@ -38,8 +38,9 @@ class _GRIBDIFF_Plus_Res:
         self._epydiff_res = epydiff_res

     def __str__(self):
-        return
-
+        return "{0:s} | gribapi_rc={1:d} epydiff_done={2:d}>".format(
+            repr(self).rstrip(">"), self._gapi, self._epydiff
+        )

     def differences(self):
         """Print detailed informations about the diff."""
@@ -52,17 +53,17 @@ class GRIBDIFF_Plus(GRIBAPI_Tool):
     """

     _footprint = dict(
-        info
-        attr
-            maxepydiff
-                info
-                type
-                default
-                optional
+        info="Default GRIBAPI system interface",
+        attr=dict(
+            maxepydiff=dict(
+                info="Epygram diffs are costfull, they will run only maxepydiff times",
+                type=int,
+                default=2,
+                optional=True,
             ),
         ),
-        priority
-            level
+        priority=dict(
+            level=footprints.priorities.top.TOOLBOX  # @UndefinedVariable
         ),
     )

@@ -76,10 +77,12 @@ class GRIBDIFF_Plus(GRIBAPI_Tool):
         if not rc:
             if self._epyavail is None:
                 from ..util.usepygram import epygram_checker
-
+
+                self._epyavail = epygram_checker.is_available(version="1.0.0")
             if self._epyavail:
                 if self._epycount < self.maxepydiff:
                     from ..util.diffpygram import EpyGribDiff
+
                     gdiff = EpyGribDiff(grib2, grib1)  # Ref file is first...
                     self._epycount += 1
                     res = _GRIBDIFF_Plus_Res(rc, True, str(gdiff))
@@ -88,12 +91,14 @@ class GRIBDIFF_Plus(GRIBAPI_Tool):
                         outfh.write(gdiff.format_diff(detailed=True))
                 else:
                     res = _GRIBDIFF_Plus_Res(
-                        rc,
-
+                        rc,
+                        False,
+                        "grib_compare failed (but the Epygram diffs max number is exceeded...)",
                     )
             else:
-                res = _GRIBDIFF_Plus_Res(
-
+                res = _GRIBDIFF_Plus_Res(
+                    rc, False, "grib_compare failed (Epygram unavailable)"
+                )
         else:
             res = _GRIBDIFF_Plus_Res(rc, False, "")
         return _GRIBDIFF_Plus_St(rc, res)
vortex/nwp/tools/ifstools.py
CHANGED

@@ -30,7 +30,7 @@ class _IfsOutputsTimesListDesc:
             instance._tlists_store.pop(self._attr, None)
         else:
             if not isinstance(value, list):
-                raise ValueError(
+                raise ValueError("**value** should be a list.")
             instance._tlists_store[self._attr] = [Time(t) for t in value]

     def __delete__(self, instance):
@@ -41,40 +41,41 @@ class IfsOutputsAbstractConfigurator(footprints.FootprintBase):
     """Abstract utility class to configure the IFS model regarding output data."""

     _abstract = True
-    _collector = (
+    _collector = ("ifsoutputs_configurator",)
     _footprint = [
         model,
         arpifs_cycle,
         dict(
             attr=dict(
-                fcterm_unit
-                    info
-                    values
+                fcterm_unit=dict(
+                    info="The unit used in the *fcterm* attribute.",
+                    values=["h", "t"],
                 ),
             )
-        )
+        ),
     ]

-    def __init__(self, *
-        super().__init__(*
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
         self._tlists_store = dict()

     modelstate = _IfsOutputsTimesListDesc(
-        "modelstate",
-        "The list of terms for modelstate outputs."
+        "modelstate", "The list of terms for modelstate outputs."
     )

     surf_modelstate = _IfsOutputsTimesListDesc(
         "surf_modelstate",
-        "The list of terms for surface scheme modelstate outputs."
+        "The list of terms for surface scheme modelstate outputs.",
+    )

     spectral_diag = _IfsOutputsTimesListDesc(
         "spectral_diag",
-        "The list of terms for spectral space diagnostics outputs."
+        "The list of terms for spectral space diagnostics outputs.",
+    )

     post_processing = _IfsOutputsTimesListDesc(
         "post_processing",
-        "The list of terms for inline post-processing outputs."
+        "The list of terms for inline post-processing outputs.",
     )

     def _setup_nam_obj(self, namelist_object, namelist_name):
@@ -105,59 +106,98 @@ class IfsOutputsConfigurator(IfsOutputsAbstractConfigurator):
     def _set_namvar_value(namblock, var, value, namname):
         """Set a value in a **namblock** namelist and log it."""
         namblock[var] = value
-        logger.info(
-
+        logger.info(
+            "Setup &%s %s=%s / (file: %s)",
+            namblock.name,
+            var,
+            namblock.nice(value),
+            namname,
+        )

     @staticmethod
     def _clean_namvar(namblock, var, namname):
         """Clean the **var** value from the **namblock** namelist."""
-        todo = {k for k in namblock.keys() if re.match(var + r
+        todo = {k for k in namblock.keys() if re.match(var + r"($|\(|%)", k)}
         if todo:
             for k in todo:
                 namblock.delvar(k)
-            logger.info(
-
+            logger.info(
+                "Cleaning %s variable in namelist &%s (file: %s)",
+                var,
+                namblock.name,
+                namname,
+            )

     def _generic_terms_setup(self, namct0, namct1, what, terms, namname):
         """Setup a given kind of output data (in a generic way)."""
         if terms is not None:
-            sign = -1 if self.fcterm_unit ==
+            sign = -1 if self.fcterm_unit == "h" else 1
             with_minutes = any([t.minute > 0 for t in terms])
-            self._clean_namvar(namct0,
-            self._clean_namvar(namct0,
-            self._clean_namvar(namct0,
-            self._set_namvar_value(
+            self._clean_namvar(namct0, "NFR{:s}".format(what), namname)
+            self._clean_namvar(namct0, "N{:s}TS".format(what), namname)
+            self._clean_namvar(namct0, "N{:s}TSMIN".format(what), namname)
+            self._set_namvar_value(
+                namct1, "N1{:s}".format(what), 1 if terms else 0, namname
+            )
             if terms:
-                self._set_namvar_value(
+                self._set_namvar_value(
+                    namct0,
+                    "N{:s}TS(0)".format(what),
+                    sign * len(terms),
+                    namname,
+                )
                 if with_minutes:
-                    if
-                    self.
-
-
-
+                    if (
+                        "cy46" <= self.cycle < "cy47"
+                    ):  # Temporary fix for cy46 only
+                        self._set_namvar_value(
+                            namct0,
+                            "N{:s}TSMIN(0)".format(what),
+                            len(terms),
+                            namname,
+                        )
+                    logger.info(
+                        "Setting up N%sTS and N%sTSMIN in &%s (file: %s)",
+                        what,
+                        what,
+                        namct0.name,
+                        namname,
+                    )
                 for i, t in enumerate(terms):
-                    namct0[
+                    namct0["N{:s}TS({:d})".format(what, i + 1)] = sign * t.hour
                 if with_minutes:
                     for i, t in enumerate(terms):
-                        namct0[
+                        namct0["N{:s}TSMIN({:d})".format(what, i + 1)] = (
+                            t.minute
+                        )

     def _setup_nam_obj(self, namelist_object, namelist_name):
         """Actualy tweak the IFS namelist."""
-        namoph = self._get_namblock(namelist_object,
-        namct0 = self._get_namblock(namelist_object,
-        namct1 = self._get_namblock(namelist_object,
+        namoph = self._get_namblock(namelist_object, "NAMOPH")
+        namct0 = self._get_namblock(namelist_object, "NAMCT0")
+        namct1 = self._get_namblock(namelist_object, "NAMCT1")
         # First take into account the **fcterm_unit**
-        self._set_namvar_value(
+        self._set_namvar_value(
+            namoph, "LINC", self.fcterm_unit == "h", namelist_name
+        )
         # Setup outputs
-        self._generic_terms_setup(
-
-
-        self._generic_terms_setup(
+        self._generic_terms_setup(
+            namct0, namct1, "HIS", self.modelstate, namelist_name
+        )
+        self._generic_terms_setup(
+            namct0, namct1, "SFXHIS", self.surf_modelstate, namelist_name
+        )
+        self._generic_terms_setup(
+            namct0, namct1, "SDI", self.spectral_diag, namelist_name
+        )
+        self._generic_terms_setup(
+            namct0, namct1, "POS", self.post_processing, namelist_name
+        )
         # Extra fixup for fullpos
         if self.post_processing is not None:
             if not self.post_processing:
-                self._set_namvar_value(namct0,
+                self._set_namvar_value(namct0, "NFPOS", 0, namelist_name)
             else:
                 # Do not overwrite a pre-existing positive value:
-                if
-                self._set_namvar_value(namct0,
+                if "NFPOS" not in namct0 or namct0["NFPOS"] == 0:
+                    self._set_namvar_value(namct0, "NFPOS", 1, namelist_name)