vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Abstract base class for any AlgoComponent leveraging the Arpege/IFS code.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from bronx.fancies import loggers
|
|
6
|
+
import footprints
|
|
7
|
+
|
|
8
|
+
from vortex.algo.components import Parallel, ParallelIoServerMixin, AlgoComponentError
|
|
9
|
+
from vortex.syntax.stdattrs import model
|
|
10
|
+
from vortex.tools import grib
|
|
11
|
+
|
|
12
|
+
from . import ifsnaming # @UnusedImport
|
|
13
|
+
from ..syntax.stdattrs import algo_member
|
|
14
|
+
from ..tools import satrad, drhook
|
|
15
|
+
|
|
16
|
+
#: No automatic export
|
|
17
|
+
__all__ = []
|
|
18
|
+
|
|
19
|
+
logger = loggers.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class IFSParallel(Parallel, ParallelIoServerMixin,
                  satrad.SatRadDecoMixin, drhook.DrHookDecoMixin, grib.EcGribDecoMixin):
    """Abstract IFSModel parallel algo components."""

    # Abstract footprint class: never collected directly, only through subclasses.
    _abstract = True
    # The footprint combines the generic *model* and *algo_member* attribute
    # sets with the Arpege/IFS specific attributes declared below.
    _footprint = [
        model, algo_member,
        dict(
            info = 'Abstract AlgoComponent for anything based on Arpege/IFS.',
            attr = dict(
                # What the binary is asked to do (forwarded on the command line).
                kind = dict(
                    info = 'The kind of processing we want the Arpege/IFS binary to perform.',
                    default = 'ifsrun',
                    doc_zorder = 90,
                ),
                # Restrict the generic *model* attribute to IFS-family models.
                model = dict(
                    values = ['arpege', 'arp', 'arp_court', 'aladin', 'ald',
                              'arome', 'aro', 'aearp', 'pearp', 'ifs', 'alaro', 'harmoniearome']
                ),
                # Name used by the ParallelIoServerMixin machinery.
                ioname = dict(
                    default = 'nwpioserv',
                ),
                binarysingle = dict(
                    default = 'basicnwp',
                ),
                # NCONF-like configuration number; 1 is the usual forecast value.
                conf = dict(
                    info = 'The configuration number given to Arpege/IFS.',
                    type = int,
                    optional = True,
                    default = 1,
                    doc_visibility = footprints.doc.visibility.ADVANCED,
                ),
                # Long names are remapped to their short forms ('eul'/'sli').
                timescheme = dict(
                    info = 'The timescheme that will be used by Arpege/IFS model.',
                    optional = True,
                    default = 'sli',
                    values = ['eul', 'eulerian', 'sli', 'semilag'],
                    remap = dict(
                        eulerian = 'eul',
                        semilag = 'sli'
                    ),
                    doc_visibility = footprints.doc.visibility.ADVANCED,
                ),
                # Model timestep, in seconds (presumably — confirm against callers).
                timestep = dict(
                    info = 'The timestep of the Arpege/IFS model.',
                    type = float,
                    optional = True,
                    default = 600.,
                ),
                # Forecast term, expressed in the unit given by *fcunit*.
                fcterm = dict(
                    info = 'The forecast term of the Arpege/IFS model.',
                    type = int,
                    optional = True,
                    default = 0,
                ),
                # 'h' (hours) or 't' (timesteps); long names are remapped.
                fcunit = dict(
                    info = 'The unit used in the *fcterm* attribute.',
                    optional = True,
                    default = 'h',
                    values = ['h', 'hour', 't', 'step', 'timestep'],
                    remap = dict(
                        hour = 'h',
                        step = 't',
                        timestep = 't',
                    )
                ),
                # Experiment label: only the first 4 uppercased characters are
                # actually used (see spawn_command_options).
                xpname = dict(
                    info = 'The default labelling of files used in Arpege/IFS model.',
                    optional = True,
                    default = 'XPVT',
                    doc_visibility = footprints.doc.visibility.ADVANCED,
                ),
            )
        )
    ]
|
|
97
|
+
|
|
98
|
+
def fstag(self):
|
|
99
|
+
"""Extend default tag with ``kind`` value."""
|
|
100
|
+
return super().fstag() + '.' + self.kind
|
|
101
|
+
|
|
102
|
+
def valid_executable(self, rh):
|
|
103
|
+
"""Be sure that the specifed executable is ifsmodel compatible."""
|
|
104
|
+
valid = super().valid_executable(rh)
|
|
105
|
+
try:
|
|
106
|
+
return valid and bool(rh.resource.realkind == 'ifsmodel')
|
|
107
|
+
except (ValueError, TypeError):
|
|
108
|
+
return False
|
|
109
|
+
|
|
110
|
+
def spawn_hook(self):
|
|
111
|
+
"""Usually a good habit to dump the fort.4 namelist."""
|
|
112
|
+
super().spawn_hook()
|
|
113
|
+
if self.system.path.exists('fort.4'):
|
|
114
|
+
self.system.subtitle('{:s} : dump namelist <fort.4>'.format(self.realkind))
|
|
115
|
+
self.system.cat('fort.4', output=False)
|
|
116
|
+
|
|
117
|
+
def spawn_command_options(self):
|
|
118
|
+
"""Dictionary provided for command line factory."""
|
|
119
|
+
return dict(
|
|
120
|
+
name=(self.xpname + 'xxxx')[:4].upper(),
|
|
121
|
+
conf=self.conf,
|
|
122
|
+
timescheme=self.timescheme,
|
|
123
|
+
timestep=self.timestep,
|
|
124
|
+
fcterm=self.fcterm,
|
|
125
|
+
fcunit=self.fcunit,
|
|
126
|
+
)
|
|
127
|
+
|
|
128
|
+
def naming_convention(self, kind, rh, actualfmt=None, **kwargs):
|
|
129
|
+
"""Create an appropriate :class:`IFSNamingConvention`.
|
|
130
|
+
|
|
131
|
+
:param str kind: The :class:`IFSNamingConvention` object kind.
|
|
132
|
+
:param rh: The binary's ResourceHandler.
|
|
133
|
+
:param actualfmt: The format of the target file.
|
|
134
|
+
:param dict kwargs: Any argument you may see fit.
|
|
135
|
+
"""
|
|
136
|
+
nc_args = dict(model=self.model,
|
|
137
|
+
conf=self.conf,
|
|
138
|
+
xpname=self.xpname)
|
|
139
|
+
nc_args.update(kwargs)
|
|
140
|
+
nc = footprints.proxy.ifsnamingconv(kind=kind,
|
|
141
|
+
actualfmt=actualfmt,
|
|
142
|
+
cycle=rh.resource.cycle,
|
|
143
|
+
**nc_args)
|
|
144
|
+
if nc is None:
|
|
145
|
+
raise AlgoComponentError("No IFSNamingConvention was found.")
|
|
146
|
+
return nc
|
|
147
|
+
|
|
148
|
+
def do_climfile_fixer(self, rh, convkind, actualfmt=None, geo=None, **kwargs):
|
|
149
|
+
"""Is it necessary to fix the climatology file ? (i.e link in the appropriate file).
|
|
150
|
+
|
|
151
|
+
:param rh: The binary's ResourceHandler.
|
|
152
|
+
:param str convkind: The :class:`IFSNamingConvention` object kind.
|
|
153
|
+
:param actualfmt: The format of the climatology file.
|
|
154
|
+
:param geo: The geometry of the desired geometry file.
|
|
155
|
+
:param dict kwargs: Any argument you may see fit (used to create and call
|
|
156
|
+
the IFSNamingConvention object.
|
|
157
|
+
"""
|
|
158
|
+
nc = self.naming_convention(kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs)
|
|
159
|
+
nc_args = dict()
|
|
160
|
+
if geo:
|
|
161
|
+
nc_args['area'] = geo.area
|
|
162
|
+
nc_args.update(kwargs)
|
|
163
|
+
return not self.system.path.exists(nc(** nc_args))
|
|
164
|
+
|
|
165
|
+
def climfile_fixer(self, rh, convkind,
|
|
166
|
+
month, geo=None, notgeo=None, actualfmt=None,
|
|
167
|
+
inputrole=None, inputkind=None, **kwargs):
|
|
168
|
+
"""Fix the climatology files (by choosing the appropriate month, geometry, ...)
|
|
169
|
+
|
|
170
|
+
:param rh: The binary's ResourceHandler.
|
|
171
|
+
:param str convkind: The :class:`IFSNamingConvention` object kind.
|
|
172
|
+
:param ~bronx.stdtypes.date.Month month: The climatlogy file month
|
|
173
|
+
:param geo: The climatlogy file geometry
|
|
174
|
+
:param notgeo: Exclude these geometries during the climatology file lookup
|
|
175
|
+
:param actualfmt: The format of the climatology file.
|
|
176
|
+
:param inputrole: The section's role in which Climatology files are looked for.
|
|
177
|
+
:param inputkind: The section's realkind in which Climatology files are looked for/
|
|
178
|
+
:param dict kwargs: Any argument you may see fit (used to create and call
|
|
179
|
+
the IFSNamingConvention object).
|
|
180
|
+
"""
|
|
181
|
+
if geo is not None and notgeo is not None:
|
|
182
|
+
raise ValueError('*geo* and *notgeo* cannot be provided together.')
|
|
183
|
+
|
|
184
|
+
def check_month(actualrh):
|
|
185
|
+
return bool(hasattr(actualrh.resource, 'month') and
|
|
186
|
+
actualrh.resource.month == month)
|
|
187
|
+
|
|
188
|
+
def check_month_and_geo(actualrh):
|
|
189
|
+
return (check_month(actualrh) and
|
|
190
|
+
actualrh.resource.geometry.tag == geo.tag)
|
|
191
|
+
|
|
192
|
+
def check_month_and_notgeo(actualrh):
|
|
193
|
+
return (check_month(actualrh) and
|
|
194
|
+
actualrh.resource.geometry.tag != notgeo.tag)
|
|
195
|
+
|
|
196
|
+
if geo:
|
|
197
|
+
checker = check_month_and_geo
|
|
198
|
+
elif notgeo:
|
|
199
|
+
checker = check_month_and_notgeo
|
|
200
|
+
else:
|
|
201
|
+
checker = check_month
|
|
202
|
+
|
|
203
|
+
nc = self.naming_convention(kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs)
|
|
204
|
+
nc_args = dict()
|
|
205
|
+
if geo:
|
|
206
|
+
nc_args['area'] = geo.area
|
|
207
|
+
nc_args.update(kwargs)
|
|
208
|
+
target_name = nc(** nc_args)
|
|
209
|
+
|
|
210
|
+
self.system.remove(target_name)
|
|
211
|
+
|
|
212
|
+
logger.info("Linking in the %s file (%s) for month %s.", convkind, target_name, month)
|
|
213
|
+
rc = self.setlink(initrole=inputrole, initkind=inputkind, inittest=checker,
|
|
214
|
+
initname=target_name)
|
|
215
|
+
return target_name if rc else None
|
|
216
|
+
|
|
217
|
+
def all_localclim_fixer(self, rh, month, convkind='targetclim', actualfmt=None,
|
|
218
|
+
inputrole=('LocalClim', 'TargetClim', 'BDAPClim'),
|
|
219
|
+
inputkind='clim_bdap', **kwargs):
|
|
220
|
+
"""Fix all the local/BDAP climatology files (by choosing the appropriate month)
|
|
221
|
+
|
|
222
|
+
:param rh: The binary's ResourceHandler.
|
|
223
|
+
:param ~bronx.stdtypes.date.Month month: The climatology file month
|
|
224
|
+
:param str convkind: The :class:`IFSNamingConvention` object kind.
|
|
225
|
+
:param actualfmt: The format of the climatology file.
|
|
226
|
+
:param inputrole: The section's role in which Climatology files are looked for.
|
|
227
|
+
:param inputkind: The section's realkind in which Climatology files are looked for/
|
|
228
|
+
:param dict kwargs: Any argument you may see fit (used to create and call
|
|
229
|
+
the IFSNamingConvention object.
|
|
230
|
+
:return: The list of linked files
|
|
231
|
+
"""
|
|
232
|
+
|
|
233
|
+
def check_month(actualrh):
|
|
234
|
+
return bool(hasattr(actualrh.resource, 'month') and
|
|
235
|
+
actualrh.resource.month == month)
|
|
236
|
+
|
|
237
|
+
nc = self.naming_convention(kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs)
|
|
238
|
+
dealtwith = list()
|
|
239
|
+
|
|
240
|
+
for tclimrh in [x.rh for x in self.context.sequence.effective_inputs(
|
|
241
|
+
role=inputrole, kind=inputkind,
|
|
242
|
+
) if x.rh.resource.month == month]:
|
|
243
|
+
thisclim = tclimrh.container.localpath()
|
|
244
|
+
thisname = nc(area=tclimrh.resource.geometry.area)
|
|
245
|
+
if thisclim != thisname:
|
|
246
|
+
logger.info("Linking in the %s to %s for month %s.", thisclim, thisname, month)
|
|
247
|
+
self.system.symlink(thisclim, thisname)
|
|
248
|
+
dealtwith.append(thisname)
|
|
249
|
+
|
|
250
|
+
return dealtwith
|
|
251
|
+
|
|
252
|
+
def find_namelists(self, opts=None):
|
|
253
|
+
"""Find any namelists candidates in actual context inputs."""
|
|
254
|
+
return [x.rh
|
|
255
|
+
for x in self.context.sequence.effective_inputs(kind=('namelist', 'namelistfp'))]
|
|
256
|
+
|
|
257
|
+
    def _set_nam_macro(self, namcontents, namlocal, macro, value):
        """Set a namelist macro and log it!

        :param namcontents: The namelist contents object (must expose setmacro).
        :param namlocal: The namelist's local path (used in the log message only).
        :param macro: The macro name to set.
        :param value: The value assigned to the macro.
        """
        namcontents.setmacro(macro, value)
        logger.info('Setup macro %s=%s in %s', macro, str(value), namlocal)
|
|
261
|
+
|
|
262
|
+
def prepare_namelist_delta(self, rh, namcontents, namlocal):
|
|
263
|
+
"""Apply a namelist delta depending on the cycle of the binary."""
|
|
264
|
+
# TODO: The mapping between the dict that contains the settings
|
|
265
|
+
# (i.e elf.spawn_command_options()) and actual namelist keys should
|
|
266
|
+
# be done by an extra class ... and it could be generalized to mpi
|
|
267
|
+
# setup by the way !
|
|
268
|
+
nam_updated = False
|
|
269
|
+
# For cy41 onward, replace some namelist macros with the command line
|
|
270
|
+
# arguments
|
|
271
|
+
if rh.resource.cycle >= 'cy41':
|
|
272
|
+
if 'NAMARG' in namcontents:
|
|
273
|
+
opts_arg = self.spawn_command_options()
|
|
274
|
+
self._set_nam_macro(namcontents, namlocal, 'CEXP', opts_arg['name'])
|
|
275
|
+
self._set_nam_macro(namcontents, namlocal, 'TIMESTEP', opts_arg['timestep'])
|
|
276
|
+
fcstop = '{:s}{:d}'.format(opts_arg['fcunit'], opts_arg['fcterm'])
|
|
277
|
+
self._set_nam_macro(namcontents, namlocal, 'FCSTOP', fcstop)
|
|
278
|
+
nam_updated = True
|
|
279
|
+
else:
|
|
280
|
+
logger.info('No NAMARG block in %s', namlocal)
|
|
281
|
+
|
|
282
|
+
if self.member is not None:
|
|
283
|
+
for macro_name in ('MEMBER', 'PERTURB'):
|
|
284
|
+
self._set_nam_macro(namcontents, namlocal, macro_name, self.member)
|
|
285
|
+
nam_updated = True
|
|
286
|
+
return nam_updated
|
|
287
|
+
|
|
288
|
+
def prepare_namelists(self, rh, opts=None):
|
|
289
|
+
"""Update each of the namelists."""
|
|
290
|
+
namcandidates = self.find_namelists(opts)
|
|
291
|
+
self.system.subtitle('Namelist candidates')
|
|
292
|
+
for nam in namcandidates:
|
|
293
|
+
nam.quickview()
|
|
294
|
+
for namrh in namcandidates:
|
|
295
|
+
namc = namrh.contents
|
|
296
|
+
if self.prepare_namelist_delta(rh, namc, namrh.container.actualpath()):
|
|
297
|
+
if namc.dumps_needs_update:
|
|
298
|
+
logger.info('Rewritting the %s namelists file.', namrh.container.actualpath())
|
|
299
|
+
namc.rewrite(namrh.container)
|
|
300
|
+
|
|
301
|
+
    def prepare(self, rh, opts):
        """Set some variables according to target definition.

        :param rh: The binary's ResourceHandler.
        :param opts: The run options dictionary.
        """
        # Let the parent classes (Parallel + mixins) do their setup first.
        super().prepare(rh, opts)
        # Namelist fixes
        self.prepare_namelists(rh, opts)
|
|
306
|
+
|
|
307
|
+
def execute_single(self, rh, opts):
|
|
308
|
+
"""Standard IFS-Like execution parallel execution."""
|
|
309
|
+
if rh.resource.cycle < 'cy46':
|
|
310
|
+
self.system.ls(output='dirlst')
|
|
311
|
+
super().execute_single(rh, opts)
|
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AlgoComponents dedicated to computations related to observations monitoring.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from bronx.fancies import loggers
|
|
6
|
+
|
|
7
|
+
from vortex.algo.components import Parallel
|
|
8
|
+
from vortex.syntax.stdattrs import a_date, a_model, a_cutoff
|
|
9
|
+
from ..tools import odb, drhook
|
|
10
|
+
|
|
11
|
+
#: Automatic export of Monitoring class
|
|
12
|
+
__all__ = []
|
|
13
|
+
|
|
14
|
+
logger = loggers.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class OdbMonitoring(Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin):
    """Compute monitoring statistics."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['monitoring'],
            ),
            # Number of ODB pools (defaults to a single pool).
            npool = dict(
                default = 1,
                optional = True,
            ),
            # Which observations feed the statistics ('used' flips LLFLAG on).
            obs = dict(
                values = ['all', 'used'],
            ),
            date = a_date,
            model= a_model,
            cutoff = a_cutoff,
            # Force the start of a new accumulated statistics file.
            start = dict(
                type = bool,
                default = False,
                optional = True,
            ),
            # Whether an accumulated statistics file is consumed/produced.
            cumul = dict(
                type = bool,
                default = True,
                optional = True,
            ),
            # When True, do not force *start* on the first hour of the month.
            extend = dict(
                type = bool,
                default = False,
                optional = True,
            ),
            # 'surf' (canari/surface databases) or 'atm' (screening/matchup).
            stage = dict(
                values = ['can', 'surf', 'surface', 'atm', 'atmospheric'],
                remap = dict(can='surf', surface='surf', atmospheric='atm'),
                info = 'The processing stage of the ODB base.',
            ),
        )
    )
|
|
57
|
+
|
|
58
|
+
    def _fix_nam_macro(self, rh, macro, value):
        """Set a given namelist macro and issue a log message.

        :param rh: The namelist's ResourceHandler.
        :param macro: The macro name to set.
        :param value: The value assigned to the macro.
        """
        rh.contents.setmacro(macro, value)
        logger.info('Setup %s macro to %s in %s', macro, value, rh.container.actualpath())
|
|
62
|
+
|
|
63
|
+
    def prepare(self, rh, opts):
        """Update some variables in the namelist and check the presence of the accumulated statistics file.

        :param rh: The binary's ResourceHandler.
        :param opts: The run options dictionary.
        :raises ValueError: when no suitable ODB database, no single namelist,
            or an inconsistent accumulated-statistics setup is found.
        """

        sh = self.system

        # Looking for input observations

        # Virtual upper-air observations database
        obsatm_virt = [
            x for x in self.lookupodb(fatal=False)
            if (x.rh.resource.stage.startswith('matchup') or
                x.rh.resource.stage.startswith('screening')) and x.rh.resource.part == 'virtual'
        ]

        # Single upper-air observations database
        obsatm_single = [
            x for x in self.lookupodb(fatal=False)
            if x.rh.resource.stage.startswith('matchup') or x.rh.resource.stage.startswith('screening')
        ]
        # NOTE(review): more than one "single" candidate is treated as ambiguous
        # and the list is discarded entirely — presumably deliberate; confirm.
        if len(obsatm_single) > 1:
            obsatm_single = []

        # Surface observations database
        obssurf = [
            x for x in self.lookupodb(fatal=False)
            if x.rh.resource.stage.startswith('canari') and (x.rh.resource.part == 'surf' or
                                                             x.rh.resource.part == 'ground')
        ]

        # One database at a time
        if not (obsatm_virt or obsatm_single) and self.stage == 'atm':
            raise ValueError('Could not find any ODB matchup or screening ECMA database')
        if not obssurf and self.stage == 'surf':
            raise ValueError('Could not find any ODB surface ECMA database')

        # Set actual ODB paths (preference order: virtual, single, surface)
        if obsatm_virt:
            ecma = obsatm_virt.pop(0)
        elif obsatm_single:
            ecma = obsatm_single.pop(0)
        else:
            ecma = obssurf.pop(0)
        ecma_path = sh.path.abspath(ecma.rh.container.localpath())
        self.odb.fix_db_path(ecma.rh.resource.layout, ecma_path)
        self.env.IOASSIGN = sh.path.join(ecma_path, 'IOASSIGN')
        logger.info('Setting ODB env %s = %s.', 'IOASSIGN', sh.path.join(ecma_path, 'IOASSIGN'))

        # Let ancestors handling most of the env setting
        super().prepare(rh, opts)

        # Force to start a new accumulated statistics file if first day and first hour of the month
        mnt_start = self.start

        if not mnt_start and int(self.date.day) == 1 and int(self.date.hh) == 0 and not self.extend:
            logger.info('First day and first hour of the month : force start attribute to True.')
            mnt_start = True

        # Production cutoff never produces an accumulated statistics file.
        mnt_cumul = self.cumul
        if self.cutoff == 'production':
            mnt_cumul = False
            logger.info('No output accumulated statistics file will be produced because '
                        'cutoff = production : force cumul to False')

        # Monitoring namelist (there must be exactly one)
        namrh = self.context.sequence.effective_inputs(
            role='Namelist',
            kind='namelist',)
        if len(namrh) != 1:
            logger.critical('There must be exactly one namelist for monitoring. Stop.')
            raise ValueError('There must be exactly one namelist for monitoring. Stop.')
        namrh = namrh[0].rh

        # Cumulated statistics file (at most one)
        cumulrh = self.context.sequence.effective_inputs(
            role='Cumulated monitoring statistics',
            kind='accumulated_stats',
        )

        if len(cumulrh) > 1:
            logger.critical('There must be at most one accumulated statistics file.Stop.')
            raise ValueError('There must be one accumulated statistics file or none.Stop.')
        else:
            if len(cumulrh) == 0:
                if not mnt_start:
                    if mnt_cumul:
                        # Accumulation requested but no input file and no fresh start.
                        logger.critical('There must be one input accumulated statistics file. Stop.')
                        raise ValueError('There must be one input accumulated statistics file. Stop.')
                    else:
                        logger.info('No input accumulated statistics file is necessary.')
                        logger.info('No output accumulated statistics file will be produced.')
                else:
                    if mnt_cumul:
                        logger.info('No input accumulated statistics file. It will be created by the binary.')
                    else:
                        logger.info('No output accumulated statistics file will be produced.')
            else:
                cumulrh = cumulrh[0].rh
                if not mnt_cumul:
                    # An input file is present but unused: empty its container.
                    logger.info('No input accumulated statistics file is necessary(start=False).')
                    cumulrh.container.clear()
                else:
                    if mnt_start:
                        # Fresh start: discard the existing accumulated file.
                        logger.info('No input accumulated statistics file is necessary (start=True)')
                        cumulrh.container.clear()

        # Date macros: day (YYYYMMDD) and network hour.
        self._fix_nam_macro(namrh, 'JOUR', int(self.date.ymd))
        self._fix_nam_macro(namrh, 'RES', int(self.date.hh))

        # Accumulation switches.
        self._fix_nam_macro(namrh, 'LLADMON', mnt_cumul)
        self._fix_nam_macro(namrh, 'LLADAJ', mnt_cumul and not mnt_start)

        # Restrict to used observations only when obs != 'all'.
        self._fix_nam_macro(namrh, 'LLFLAG', self.obs != 'all')

        # Model / stage dependent switches.
        self._fix_nam_macro(namrh, 'LLARO', self.model == 'arome')
        self._fix_nam_macro(namrh, 'LLVRP', self.model == 'varpack')
        self._fix_nam_macro(namrh, 'LLCAN', self.stage == 'surf')

        if namrh.contents.dumps_needs_update:
            namrh.contents.rewrite(namrh.container)
        namrh.container.cat()
|
|
183
|
+
|
|
184
|
+
def postfix(self, rh, opts):
|
|
185
|
+
"""Remove all empty files and find out if any special resources have been produced."""
|
|
186
|
+
|
|
187
|
+
sh = self.system
|
|
188
|
+
self.system.dir(output=False, fatal=False)
|
|
189
|
+
allfiles = sh.ls()
|
|
190
|
+
for f in allfiles:
|
|
191
|
+
if self.system.path.getsize(f) == 0:
|
|
192
|
+
logger.info('Remove %s because size of %s is zero.', f, f)
|
|
193
|
+
sh.remove(f)
|
|
194
|
+
|
|
195
|
+
obspoint_out = sh.ls('point.*')
|
|
196
|
+
if obspoint_out:
|
|
197
|
+
dest = 'obslocationpack'
|
|
198
|
+
logger.info('Creating an OBSLOCATION pack: %s', dest)
|
|
199
|
+
sh.mkdir(dest)
|
|
200
|
+
for fname in obspoint_out:
|
|
201
|
+
sh.mv(fname, dest)
|
|
202
|
+
self.system.dir(output=False, fatal=False)
|