vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1285 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Various Post-Processing AlgoComponents.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import collections
|
|
6
|
+
import json
|
|
7
|
+
import re
|
|
8
|
+
import time
|
|
9
|
+
|
|
10
|
+
from bronx.datagrip.namelist import NamelistBlock
|
|
11
|
+
from bronx.fancies import loggers
|
|
12
|
+
from footprints.stdtypes import FPTuple
|
|
13
|
+
import footprints
|
|
14
|
+
from taylorism import Boss
|
|
15
|
+
|
|
16
|
+
from vortex.layout.monitor import BasicInputMonitor, AutoMetaGang, MetaGang, EntrySt, GangSt
|
|
17
|
+
from vortex.algo.components import AlgoComponentDecoMixin, AlgoComponentError, algo_component_deco_mixin_autodoc
|
|
18
|
+
from vortex.algo.components import TaylorRun, BlindRun, ParaBlindRun, Parallel, Expresso
|
|
19
|
+
from vortex.syntax.stdattrs import DelayedEnvValue, FmtInt
|
|
20
|
+
from vortex.tools.grib import EcGribDecoMixin
|
|
21
|
+
from vortex.tools.parallelism import TaylorVortexWorker, VortexWorkerBlindRun, ParallelResultParser
|
|
22
|
+
from vortex.tools.systems import ExecutionError
|
|
23
|
+
|
|
24
|
+
from ..tools.grib import GRIBFilter
|
|
25
|
+
from ..tools.drhook import DrHookDecoMixin
|
|
26
|
+
|
|
27
|
+
#: No automatic export
|
|
28
|
+
__all__ = []
|
|
29
|
+
|
|
30
|
+
logger = loggers.getLogger(__name__)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class _FA2GribWorker(VortexWorkerBlindRun):
    """The taylorism worker that actually do the gribing (in parallel).

    This is called indirectly by taylorism when :class:`Fa2Grib` is used.
    """

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['fa2grib']
            ),
            # Progrid parameters
            fortnam = dict(),
            fortinput = dict(),
            compact = dict(),
            timeshift = dict(
                type = int
            ),
            timeunit = dict(
                type = int
            ),
            numod = dict(
                type = int
            ),
            sciz = dict(
                type = int
            ),
            scizoffset = dict(
                type = int,
                optional = True
            ),
            # Input/Output data
            file_in = dict(),
            file_out = dict(),
            member = dict(
                type = FmtInt,
                optional = True,
            )
        )
    )

    def vortex_task(self, **kwargs):
        """Convert one FA file (``self.file_in``) to GRIB (``self.file_out``).

        The work is done in a private sub-directory (``<file_out>.process.d``)
        that is removed afterwards.  Returns a ``dict`` whose ``rc`` entry is
        either ``True`` (success), ``False`` (missing output) or the caught
        :class:`ExecutionError`.
        """

        logger.info("Starting the Fa2Grib processing for tag=%s", self.name)

        # Fixed name the binary is expected to produce its result under
        thisoutput = 'GRIDOUTPUT'
        rdict = dict(rc=True)

        # First, check that the hooks were applied
        for thisinput in [x for x in self.context.sequence.inputs()
                          if x.rh.container.localpath() == self.file_in]:
            if thisinput.rh.delayhooks:
                thisinput.rh.apply_get_hooks()

        # Jump into a working directory
        cwd = self.system.pwd()
        tmpwd = self.system.path.join(cwd, self.file_out + '.process.d')
        self.system.mkdir(tmpwd)
        self.system.cd(tmpwd)

        # Build the local namelist block
        nb = NamelistBlock(name='NAML')
        nb.NBDOM = 1
        nb.CHOPER = self.compact
        nb.INUMOD = self.numod
        # scizoffset (when provided) takes precedence over sciz; the member
        # number (0 when absent) is added to the offset
        if self.scizoffset is not None:
            nb.ISCIZ = self.scizoffset + (self.member if self.member is not None else 0)
        else:
            if self.sciz:
                nb.ISCIZ = self.sciz
        if self.timeshift:
            nb.IHCTPI = self.timeshift
        if self.timeunit:
            nb.ITUNIT = self.timeunit
        nb['CLFSORT(1)'] = thisoutput
        nb['CDNOMF(1)'] = self.fortinput
        with open(self.fortnam, 'w') as namfd:
            namfd.write(nb.dumps())

        # Finally set the actual init file (symlink back to the parent dir)
        self.system.softlink(self.system.path.join(cwd, self.file_in),
                             self.fortinput)

        # Standard execution (the listing lands next to the final output)
        list_name = self.system.path.join(cwd, self.file_out + ".listing")
        try:
            self.local_spawn(list_name)
        except ExecutionError as e:
            # Do not raise: report the failure through the return dict
            rdict['rc'] = e

        # Freeze the current output (move it out of the temporary directory)
        if self.system.path.exists(thisoutput):
            self.system.move(thisoutput, self.system.path.join(cwd, self.file_out))
        else:
            logger.warning('Missing some grib output: %s', self.file_out)
            rdict['rc'] = False

        # Final cleaning
        self.system.cd(cwd)
        self.system.remove(tmpwd)

        if self.system.path.exists(self.file_out):
            # Deal with promised resources
            expected = [x for x in self.context.sequence.outputs()
                        if x.rh.provider.expected and x.rh.container.localpath() == self.file_out]
            for thispromise in expected:
                thispromise.put(incache=True)

        logger.info("Fa2Grib processing is done for tag=%s", self.name)

        return rdict
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
class _GribFilterWorker(TaylorVortexWorker):
    """The taylorism worker that actually filter the gribfiles.

    This is called indirectly by taylorism when :class:`Fa2Grib` is used.
    """

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['gribfilter']
            ),
            # Filter settings
            filters = dict(
                type = FPTuple,
            ),
            concatenate = dict(
                type = bool,
            ),
            # Put files if they are expected
            put_promises = dict(
                type = bool,
                optional = True,
                default = True,
            ),
            # Input/Output data
            file_in = dict(),
            file_outfmt = dict(),
            file_outintent = dict(
                optional = True,
                default = 'in',
            ),
        )
    )

    def vortex_task(self, **kwargs):
        """Filter one GRIB file (``self.file_in``) through a :class:`GRIBFilter`.

        ``self.file_outfmt`` is the output-name template handed over to the
        filter.  Returns a ``dict`` whose ``rc`` entry is ``True`` on success
        and ``False`` when the filter produced no file at all.
        """

        logger.info("Starting the GribFiltering for tag=%s", self.file_in)

        rdict = dict(rc=True)

        # Create the filtering object and add filters
        gfilter = GRIBFilter(concatenate=self.concatenate)
        if self.filters:
            # Filters travel as a tuple (footprints do not like dictionaries)
            gfilter.add_filters(* list(self.filters))

        # Process the input file
        newfiles = gfilter(self.file_in, self.file_outfmt, self.file_outintent)

        if newfiles:
            if self.put_promises:
                # Deal with promised resources
                allpromises = [x for x in self.context.sequence.outputs()
                               if x.rh.provider.expected]
                for newfile in newfiles:
                    expected = [x for x in allpromises
                                if x.rh.container.localpath() == newfile]
                    for thispromise in expected:
                        thispromise.put(incache=True)
        else:
            logger.warning('No file has been generated.')
            rdict['rc'] = False

        logger.info("GribFiltering is done for tag=%s", self.name)

        return rdict
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def parallel_grib_filter(context, inputs, outputs, intents=(),
                         cat=False, filters=FPTuple(), nthreads=8):
    """A simple method that calls the GRIBFilter class in parallel.

    Each ``inputs[i]`` is filtered into ``outputs[i]`` by a dedicated
    :class:`_GribFilterWorker` task scheduled through taylorism.

    :param vortex.layout.contexts.Context context: the current context
    :param list[str] inputs: the list of input file names
    :param list[str] outputs: the list of output file names
    :param list[str] intents: the list of intent (in|inout) for output files (in if omitted)
    :param bool cat: whether or not to concatenate the input files (False by default)
    :param tuple filters: a list of filters to apply (as a list of JSON dumps)
    :param int nthreads: the maximum number of tasks used concurently (8 by default)
    :raises AlgoComponentError: on inconsistent arguments or when any worker fails
    """
    # Sanity checks on the arguments
    if not cat and len(filters) == 0:
        raise AlgoComponentError("cat must be true or filters must be provided")
    if len(inputs) != len(outputs):
        raise AlgoComponentError("inputs and outputs must have the same length")
    # When intents are missing/mismatched, default every output to 'in'
    if len(intents) != len(outputs):
        intents = FPTuple(['in', ] * len(outputs))

    # Thread-based scheduler capped at *nthreads* concurrent workers
    task_boss = Boss(scheduler=footprints.proxy.scheduler(limit='threads',
                                                          max_threads=nthreads))
    shared_instructions = dict(kind='gribfilter', filters=filters,
                               concatenate=cat, put_promises=False)
    # One worker instruction per (input, output, intent) triplet
    for src, dest, mode in zip(inputs, outputs, intents):
        logger.info("%s -> %s (intent: %s) added to the GRIBfilter task's list",
                    src, dest, mode)
        per_task = dict(name=[src, ],
                        file_in=[src, ],
                        file_outfmt=[dest, ],
                        file_outintent=[mode, ])
        task_boss.set_instructions(shared_instructions, per_task)

    # Launch everything and block until completion
    task_boss.make_them_work()
    task_boss.wait_till_finished()
    logger.info("All files are processed.")

    # Re-raise any worker failure as an AlgoComponentError
    parser = ParallelResultParser(context)
    for worker_result in task_boss.get_report()['workers_report']:
        if isinstance(parser(worker_result), Exception):
            raise AlgoComponentError("An error occurred in GRIBfilter.")
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
class Fa2Grib(ParaBlindRun):
    """Standard FA conversion, e.g. with PROGRID as a binary resource."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['fa2grib'],
            ),
            # Seconds to wait for late inputs before giving up
            timeout = dict(
                type = int,
                optional = True,
                default = 300,
            ),
            # Polling frequency (seconds) of the input monitor
            refreshtime = dict(
                type = int,
                optional = True,
                default = 20,
            ),
            # When True, missing inputs are recorded as delayed exceptions
            fatal = dict(
                type = bool,
                optional = True,
                default = True,
            ),
            fortnam = dict(
                optional = True,
                default = 'fort.4',
            ),
            fortinput = dict(
                optional = True,
                default = 'fort.11',
            ),
            compact = dict(
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_COMPACT', 'L'),
            ),
            timeshift = dict(
                type = int,
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_SHIFT', 0),
            ),
            timeunit = dict(
                type = int,
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_TUNIT', 1),
            ),
            numod = dict(
                type = int,
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_NUMOD', 221),
            ),
            sciz = dict(
                type = int,
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_SCIZ', 0),
            ),
            scizoffset = dict(
                type = int,
                optional = True,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)
        self.system.remove(self.fortinput)
        self.env.DR_HOOK_NOT_MPI = 1
        self.system.subtitle('{:s} : directory listing (pre-run)'.format(self.realkind))
        self.system.dir(output=False, fatal=False)

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided.

        Inputs are watched by a :class:`BasicInputMonitor`; each file that
        becomes available is queued as a :class:`_FA2GribWorker` instruction.
        Raises :class:`OSError` when the waiting loop times out.
        """

        self._default_pre_execute(rh, opts)

        common_i = self._default_common_instructions(rh, opts)
        # Update the common instructions
        common_i.update(dict(fortnam=self.fortnam, fortinput=self.fortinput,
                             compact=self.compact, numod=self.numod,
                             sciz=self.sciz, scizoffset=self.scizoffset,
                             timeshift=self.timeshift, timeunit=self.timeunit))
        tmout = False

        # Monitor for the input files
        bm = BasicInputMonitor(self.context, caching_freq=self.refreshtime,
                               role='Gridpoint', kind='gridpoint')
        with bm:
            # Loop until every input was processed (or the monitor gave up)
            while not bm.all_done or len(bm.available) > 0:

                while bm.available:
                    s = bm.pop_available().section
                    file_in = s.rh.container.localpath()
                    # Find the name of the output file
                    if s.rh.provider.member is not None:
                        file_out = 'GRIB{:s}_{!s}+{:s}'.format(s.rh.resource.geometry.area,
                                                               s.rh.provider.member,
                                                               s.rh.resource.term.fmthm)
                    else:
                        file_out = 'GRIB{:s}+{:s}'.format(s.rh.resource.geometry.area,
                                                          s.rh.resource.term.fmthm)
                    logger.info("Adding input file %s to the job list", file_in)
                    self._add_instructions(common_i,
                                           dict(name=[file_in, ],
                                                file_in=[file_in, ], file_out=[file_out, ],
                                                member=[s.rh.provider.member, ]))

                if not (bm.all_done or len(bm.available) > 0):
                    # Timeout ?
                    tmout = bm.is_timedout(self.timeout)
                    if tmout:
                        break
                    # Wait a little bit :-)
                    time.sleep(1)
                    bm.health_check(interval=30)

        self._default_post_execute(rh, opts)

        # Report inputs that never showed up
        for failed_file in [e.section.rh.container.localpath() for e in bm.failed.values()]:
            logger.error("We were unable to fetch the following file: %s", failed_file)
            if self.fatal:
                self.delayed_exception_add(IOError("Unable to fetch {:s}".format(failed_file)),
                                           traceback=False)

        if tmout:
            raise OSError("The waiting loop timed out")
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
class StandaloneGRIBFilter(TaylorRun):
    """Filter (and/or concatenate) GRIB files in parallel, without any binary.

    Filtering requests are read from ``GRIBFilteringRequest`` inputs; each
    available ``Gridpoint`` input spawns a :class:`_GribFilterWorker` task.
    """

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['gribfilter'],
            ),
            # Seconds to wait for late inputs before giving up
            timeout = dict(
                type = int,
                optional = True,
                default = 300,
            ),
            # Polling frequency (seconds) of the input monitor
            refreshtime = dict(
                type = int,
                optional = True,
                default = 20,
            ),
            concatenate = dict(
                type = bool,
                default = False,
                optional = True,
            ),
            # When True, missing inputs are recorded as delayed exceptions
            fatal = dict(
                type = bool,
                optional = True,
                default = True,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)
        self.system.subtitle('{:s} : directory listing (pre-run)'.format(self.realkind))
        self.system.dir(output=False, fatal=False)

    def execute(self, rh, opts):
        """Queue one filtering task per available gridpoint input.

        Raises :class:`OSError` when the waiting loop times out.
        """

        # We re-serialise data because footprints don't like dictionaries
        filters = [json.dumps(x.rh.contents.data)
                   for x in self.context.sequence.effective_inputs(role='GRIBFilteringRequest',
                                                                   kind='filtering_request')]
        filters = FPTuple(filters)

        self._default_pre_execute(rh, opts)

        common_i = self._default_common_instructions(rh, opts)
        # Update the common instructions
        common_i.update(dict(concatenate=self.concatenate,
                             filters=filters))
        tmout = False

        # Monitor for the input files
        bm = BasicInputMonitor(self.context, caching_freq=self.refreshtime,
                               role='Gridpoint', kind='gridpoint')
        with bm:
            # Loop until every input was processed (or the monitor gave up)
            while not bm.all_done or len(bm.available) > 0:

                while bm.available:
                    s = bm.pop_available().section
                    file_in = s.rh.container.localpath()
                    # Insert a '_{filtername:s}' placeholder before the
                    # (optional) file extension, e.g. a.grib -> a_{filtername:s}.grib
                    file_outfmt = re.sub(r'^(.*?)((:?\.[^.]*)?)$', r'\1_{filtername:s}\2', file_in)

                    logger.info("Adding input file %s to the job list", file_in)
                    self._add_instructions(common_i,
                                           dict(name=[file_in, ],
                                                file_in=[file_in, ], file_outfmt=[file_outfmt, ]))

                if not (bm.all_done or len(bm.available) > 0):
                    # Timeout ?
                    tmout = bm.is_timedout(self.timeout)
                    if tmout:
                        break
                    # Wait a little bit :-)
                    time.sleep(1)
                    bm.health_check(interval=30)

        self._default_post_execute(rh, opts)

        # Report inputs that never showed up
        for failed_file in [e.section.rh.container.localpath() for e in bm.failed.values()]:
            logger.error("We were unable to fetch the following file: %s", failed_file)
            if self.fatal:
                self.delayed_exception_add(IOError("Unable to fetch {:s}".format(failed_file)),
                                           traceback=False)

        if tmout:
            raise OSError("The waiting loop timed out")
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
class AddField(BlindRun):
    """Miscellaneous manipulation on input FA resources."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['addcst', 'addconst', 'addfield'],
                remap = dict(
                    addconst = 'addcst',
                ),
            ),
            fortnam = dict(
                optional = True,
                default = 'fort.4',
            ),
            fortinput = dict(
                optional = True,
                default = 'fort.11',
            ),
            fortoutput = dict(
                optional = True,
                default = 'fort.12',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)
        self.system.remove(self.fortinput)
        self.env.DR_HOOK_NOT_MPI = 1

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided.

        For each gridpoint source (sorted by term), the binary is run with
        the source linked as ``fortinput`` and a copy of it as ``fortoutput``
        (presumably modified in place by the executable — TODO confirm).
        """

        # Is there any namelist provided ?
        # NOTE(review): role=('Namelist') is a plain string, not a tuple
        namrh = [x.rh for x in self.context.sequence.effective_inputs(role=('Namelist'),
                                                                      kind='namelist')]
        if namrh:
            # Only the first namelist found is used
            self.system.softlink(namrh[0].container.localpath(), self.fortnam)
        else:
            logger.warning('Do not find any namelist for %s', self.kind)

        # Look for some sources files
        srcrh = [x.rh for x in self.context.sequence.effective_inputs(role=('Gridpoint', 'Sources'),
                                                                      kind='gridpoint')]
        srcrh.sort(key=lambda rh: rh.resource.term)

        for r in srcrh:
            self.system.title('Loop on domain {:s} and term {:s}'.format(r.resource.geometry.area,
                                                                         r.resource.term.fmthm))

            # Some cleaning
            self.system.remove(self.fortinput)
            self.system.remove(self.fortoutput)

            # Prepare double input
            self.system.link(r.container.localpath(), self.fortinput)
            self.system.cp(r.container.localpath(), self.fortoutput)

            # Standard execution
            opts['loop'] = r.resource.term
            super().execute(rh, opts)

            # Some cleaning
            self.system.rmall('DAPDIR', self.fortinput, self.fortoutput)

    def postfix(self, rh, opts):
        """Post add cleaning."""
        super().postfix(rh, opts)
        self.system.remove(self.fortnam)
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
class DegradedDiagPEError(AlgoComponentError):
    """Exception raised when some of the members are missing in the calculations."""

    def __init__(self, ginfo, missings):
        super().__init__()
        # *ginfo* maps at least 'geometry' (object with an ``area`` attribute)
        # and 'term'; *missings* maps a label to an iterable of member numbers.
        self._ginfo = ginfo
        self._missings = missings

    def __str__(self):
        # Header line followed by one line per (label, member) pair.
        pieces = ["Missing input data for geometry={0.area:s}, term={1!s}:\n".format(
            self._ginfo['geometry'], self._ginfo['term'])]
        pieces.extend("{:s}: member #{!s}\n".format(label, member)
                      for label, absent in self._missings.items()
                      for member in absent)
        return ''.join(pieces)
|
|
553
|
+
|
|
554
|
+
|
|
555
|
+
class DiagPE(BlindRun, DrHookDecoMixin, EcGribDecoMixin):
|
|
556
|
+
"""Execution of diagnostics on grib input (ensemble forecasts specific)."""
|
|
557
|
+
_footprint = dict(
|
|
558
|
+
attr = dict(
|
|
559
|
+
kind = dict(
|
|
560
|
+
values = ['diagpe'],
|
|
561
|
+
),
|
|
562
|
+
method = dict(
|
|
563
|
+
info = 'The method used to compute the diagnosis',
|
|
564
|
+
values = ['neighbour'],
|
|
565
|
+
),
|
|
566
|
+
numod = dict(
|
|
567
|
+
type = int,
|
|
568
|
+
info = 'The GRIB model number',
|
|
569
|
+
optional = True,
|
|
570
|
+
default = DelayedEnvValue('VORTEX_GRIB_NUMOD', 118),
|
|
571
|
+
),
|
|
572
|
+
timeout = dict(
|
|
573
|
+
type = int,
|
|
574
|
+
optional = True,
|
|
575
|
+
default = 900,
|
|
576
|
+
),
|
|
577
|
+
refreshtime = dict(
|
|
578
|
+
type = int,
|
|
579
|
+
optional = True,
|
|
580
|
+
default = 20,
|
|
581
|
+
),
|
|
582
|
+
missinglimit = dict(
|
|
583
|
+
type = int,
|
|
584
|
+
optional = True,
|
|
585
|
+
default = 0,
|
|
586
|
+
),
|
|
587
|
+
waitlimit = dict(
|
|
588
|
+
type = int,
|
|
589
|
+
optional = True,
|
|
590
|
+
default = 900,
|
|
591
|
+
),
|
|
592
|
+
fatal = dict(
|
|
593
|
+
type = bool,
|
|
594
|
+
optional = True,
|
|
595
|
+
default = True,
|
|
596
|
+
),
|
|
597
|
+
gribfilter_tasks = dict(
|
|
598
|
+
type = int,
|
|
599
|
+
optional = True,
|
|
600
|
+
default = 8,
|
|
601
|
+
),
|
|
602
|
+
),
|
|
603
|
+
)
|
|
604
|
+
|
|
605
|
+
_method2output_map = dict(neighbour='GRIB_PE_VOISIN')
|
|
606
|
+
|
|
607
|
+
def spawn_hook(self):
|
|
608
|
+
"""Usually a good habit to dump the fort.4 namelist."""
|
|
609
|
+
super().spawn_hook()
|
|
610
|
+
if self.system.path.exists('fort.4'):
|
|
611
|
+
self.system.subtitle('{:s} : dump namelist <fort.4>'.format(self.realkind))
|
|
612
|
+
self.system.cat('fort.4', output=False)
|
|
613
|
+
|
|
614
|
+
def _actual_execute(self, gmembers, ifilters, filters, basedate, finalterm, rh, opts, gang):
|
|
615
|
+
|
|
616
|
+
mygeometry = gang.info['geometry']
|
|
617
|
+
myterm = gang.info['term']
|
|
618
|
+
|
|
619
|
+
self.system.title('Start processing for geometry={:s}, term={!s}.'.
|
|
620
|
+
format(mygeometry.area, myterm))
|
|
621
|
+
|
|
622
|
+
# Find out what is the common set of members
|
|
623
|
+
members = set(gmembers) # gmembers is mutable: we need a copy of it (hence the explicit set())
|
|
624
|
+
missing_members = dict()
|
|
625
|
+
for subgang in gang.memberslist:
|
|
626
|
+
smembers = {s.section.rh.provider.member for s in subgang.memberslist
|
|
627
|
+
if s.state == EntrySt.available}
|
|
628
|
+
ufomembers = {s.section.rh.provider.member for s in subgang.memberslist
|
|
629
|
+
if s.state == EntrySt.ufo}
|
|
630
|
+
missing_members[subgang.nickname] = gmembers - smembers - ufomembers
|
|
631
|
+
members &= smembers
|
|
632
|
+
# Record an error
|
|
633
|
+
if members != gmembers:
|
|
634
|
+
newexc = DegradedDiagPEError(gang.info, missing_members)
|
|
635
|
+
logger.error("Some of the data are missing for this geometry/term")
|
|
636
|
+
if self.fatal:
|
|
637
|
+
self.delayed_exception_add(newexc, traceback=False)
|
|
638
|
+
else:
|
|
639
|
+
logger.info("Fatal is false consequently no exception is recorded. It would look like this:")
|
|
640
|
+
print(newexc)
|
|
641
|
+
members = sorted(members)
|
|
642
|
+
|
|
643
|
+
# This is hopeless :-(
|
|
644
|
+
if gang.state == GangSt.failed:
|
|
645
|
+
return
|
|
646
|
+
|
|
647
|
+
# If needed, concatenate or filter the "superset" files
|
|
648
|
+
supersets = list()
|
|
649
|
+
for subgang in gang.memberslist:
|
|
650
|
+
supersets.extend([(s.section.rh.container.localpath(),
|
|
651
|
+
re.sub(r'^[a-zA-Z]+_(.*)$', r'\1', s.section.rh.container.localpath()))
|
|
652
|
+
for s in subgang.memberslist
|
|
653
|
+
if s.section.role == 'GridpointSuperset'])
|
|
654
|
+
supersets_todo = [(s, t) for s, t in supersets
|
|
655
|
+
if not self.system.path.exists(t)]
|
|
656
|
+
if supersets_todo:
|
|
657
|
+
if len(ifilters):
|
|
658
|
+
parallel_grib_filter(self.context,
|
|
659
|
+
[s for s, t in supersets_todo],
|
|
660
|
+
[t for s, t in supersets_todo],
|
|
661
|
+
filters=ifilters, nthreads=self.gribfilter_tasks)
|
|
662
|
+
else:
|
|
663
|
+
parallel_grib_filter(self.context,
|
|
664
|
+
[s for s, t in supersets_todo],
|
|
665
|
+
[t for s, t in supersets_todo],
|
|
666
|
+
cat=True, nthreads=self.gribfilter_tasks)
|
|
667
|
+
|
|
668
|
+
# Tweak the namelist
|
|
669
|
+
namsec = self.setlink(initrole='Namelist', initkind='namelist', initname='fort.4')
|
|
670
|
+
for nam in [x.rh for x in namsec if 'NAM_PARAM' in x.rh.contents]:
|
|
671
|
+
logger.info("Substitute the date (%s) to AAAAMMJJHH namelist entry", basedate.ymdh)
|
|
672
|
+
nam.contents['NAM_PARAM']['AAAAMMJJHH'] = basedate.ymdh
|
|
673
|
+
logger.info("Substitute the number of members (%d) to NBRUN namelist entry", len(members))
|
|
674
|
+
nam.contents['NAM_PARAM']['NBRUN'] = len(members)
|
|
675
|
+
logger.info("Substitute the the number of terms to NECH(0) namelist entry")
|
|
676
|
+
nam.contents['NAM_PARAM']['NECH(0)'] = 1
|
|
677
|
+
logger.info("Substitute the ressource term to NECH(1) namelist entry")
|
|
678
|
+
# NB: term should be expressed in minutes
|
|
679
|
+
nam.contents['NAM_PARAM']['NECH(1)'] = int(myterm)
|
|
680
|
+
nam.contents['NAM_PARAM']['ECHFINALE'] = finalterm.hour
|
|
681
|
+
# Now, update the model number for the GRIB files
|
|
682
|
+
logger.info("Substitute the model number (%d) to namelist entry", self.numod)
|
|
683
|
+
nam.contents['NAM_PARAM']['NMODELE'] = self.numod
|
|
684
|
+
# Add the NAM_PARAMPE block
|
|
685
|
+
if 'NAM_NMEMBRES' in nam.contents:
|
|
686
|
+
# Cleaning is needed...
|
|
687
|
+
del nam.contents['NAM_NMEMBRES']
|
|
688
|
+
newblock = nam.contents.newblock('NAM_NMEMBRES')
|
|
689
|
+
for i, member in enumerate(members):
|
|
690
|
+
newblock['NMEMBRES({:d})'.format(i + 1)] = int(member)
|
|
691
|
+
# We are done with the namelist
|
|
692
|
+
nam.save()
|
|
693
|
+
|
|
694
|
+
# Standard execution
|
|
695
|
+
opts['loop'] = myterm
|
|
696
|
+
super().execute(rh, opts)
|
|
697
|
+
|
|
698
|
+
actualname = r'{:s}_{:s}\+{:s}'.format(self._method2output_map[self.method],
|
|
699
|
+
mygeometry.area, myterm.fmthm)
|
|
700
|
+
# Find out the output file and filter it
|
|
701
|
+
filtered_out = list()
|
|
702
|
+
if len(filters):
|
|
703
|
+
for candidate in [f for f in self.system.glob(self._method2output_map[self.method] + '*')
|
|
704
|
+
if re.match(actualname, f)]:
|
|
705
|
+
logger.info("Starting GRIB filtering on %s.", candidate)
|
|
706
|
+
filtered_out.extend(filters(candidate, candidate + '_{filtername:s}'))
|
|
707
|
+
|
|
708
|
+
# The diagnostic output may be promised
|
|
709
|
+
expected = [x for x in self.promises
|
|
710
|
+
if (re.match(actualname, x.rh.container.localpath()) or
|
|
711
|
+
x.rh.container.localpath() in filtered_out)]
|
|
712
|
+
for thispromise in expected:
|
|
713
|
+
thispromise.put(incache=True)
|
|
714
|
+
|
|
715
|
+
def execute(self, rh, opts):
    """Loop on the various grib files provided.

    Input GRIB files are watched through a :class:`BasicInputMonitor` and
    grouped into "gangs": one :class:`MetaGang` per (geometry, term) couple,
    each gang spanning up to two consecutive terms for every block.  As soon
    as a gang's members are available, :meth:`_actual_execute` is called on it.

    :param rh: the resource handler of the executable being run.
    :param opts: the dictionary of execution options (``opts['loop']`` is
        set by :meth:`_actual_execute` for each processed term).
    """

    # Initialise a GRIBFilter for output files (at least try to)
    gfilter = GRIBFilter(concatenate=False)
    # We re-serialise data because footprints don't like dictionaries
    # NOTE(review): unlike *ifilters* below, these contents are NOT passed
    # through json.dumps — confirm add_filters accepts raw dictionaries.
    ofilters = [x.rh.contents.data
                for x in self.context.sequence.effective_inputs(role='GRIBFilteringRequest',
                                                                kind='filtering_request')]
    gfilter.add_filters(ofilters)

    # Do we need to filter input files ?
    # We re-serialise data because footprints don't like dictionaries
    ifilters = [json.dumps(x.rh.contents.data)
                for x in self.context.sequence.effective_inputs(role='GRIBInputFilteringRequest')]

    # Monitor for the input files
    bm = BasicInputMonitor(self.context, caching_freq=self.refreshtime,
                           role=(re.compile(r'^Gridpoint'), 'Sources'),
                           kind='gridpoint')
    # Check that the date is consistent among inputs
    basedates = set()
    members = set()
    for rhI in [s.section.rh for s in bm.memberslist]:
        basedates.add(rhI.resource.date)
        members.add(rhI.provider.member)
    if len(basedates) > 1:
        raise AlgoComponentError('The date must be consistent among the input resources')
    # NOTE(review): raises KeyError if there are no inputs at all — presumably
    # guaranteed upstream; confirm.
    basedate = basedates.pop()
    # Setup BasicGangs: auto-group monitor entries by (term, block, geometry)
    basicmeta = AutoMetaGang()
    basicmeta.autofill(bm, ('term', 'safeblock', 'geometry'),
                       allowmissing=self.missinglimit, waitlimit=self.waitlimit)
    # Find out what are the terms, domains and blocks
    geometries = set()
    terms = collections.defaultdict(set)
    blocks = collections.defaultdict(set)
    reverse = dict()
    for m in basicmeta.memberslist:
        (geo, term, block) = (m.info['geometry'], m.info['term'], m.info['safeblock'])
        geometries.add(geo)
        terms[geo].add(term)
        blocks[geo].add(block)
        reverse[(geo, term, block)] = m
    for geometry in geometries:
        terms[geometry] = sorted(terms[geometry])
    # Setup the MetaGang that fits our needs: for each geometry, one
    # elementary gang per term, containing that term plus the next one
    # (when it exists) for every block.
    complexmeta = MetaGang()
    complexgangs = collections.defaultdict(collections.deque)
    for geometry in geometries:
        nterms = len(terms[geometry])
        for i_term, term in enumerate(terms[geometry]):
            elementary_meta = MetaGang()
            elementary_meta.info = dict(geometry=geometry, term=term)
            # Current term and, if available, the following one
            cterms = [terms[geometry][i] for i in range(i_term,
                                                        min(i_term + 2, nterms))]
            for inside_term in cterms:
                for inside_block in blocks[geometry]:
                    try:
                        elementary_meta.add_member(reverse[(geometry, inside_term, inside_block)])
                    except KeyError:
                        # A (geometry, term, block) combination is missing from the inputs
                        raise KeyError("Something is wrong in the inputs: check again !")
            complexmeta.add_member(elementary_meta)
            complexgangs[geometry].append(elementary_meta)

    # Now, starts monitoring everything
    with bm:

        # One "current" gang per geometry (None when that geometry is exhausted)
        current_gang = dict()
        for geometry in geometries:
            try:
                current_gang[geometry] = complexgangs[geometry].popleft()
            except IndexError:
                current_gang[geometry] = None

        while any([g is not None for g in current_gang.values()]):

            # Process every geometry whose current gang has left the "ufo" state
            for geometry, a_gang in [(g, current_gang[g]) for g in geometries
                                     if (current_gang[g] is not None and
                                         current_gang[g].state is not GangSt.ufo)]:

                self._actual_execute(members, ifilters, gfilter, basedate,
                                     terms[geometry][-1], rh, opts, a_gang)

                # Next one
                try:
                    current_gang[geometry] = complexgangs[geometry].popleft()
                except IndexError:
                    current_gang[geometry] = None

            # Nothing processable right now: check the timeout, then idle a bit
            if not (bm.all_done or any(gang is not None and
                                       gang.state is not GangSt.ufo
                                       for gang in current_gang.values())):
                # Timeout ?
                bm.is_timedout(self.timeout, IOError)
                # Wait a little bit :-)
                time.sleep(1)
                bm.health_check(interval=30)
|
|
813
|
+
|
|
814
|
+
|
|
815
|
+
@algo_component_deco_mixin_autodoc
class _DiagPIDecoMixin(AlgoComponentDecoMixin):
    """Class variables and methods useful for DiagPI.

    This mixin provides the extra footprint attributes, the
    prepare/postfix/spawn hooks and the ``execute`` overwrite shared by the
    :class:`DiagPI` (serial) and :class:`DiagPIMPI` (parallel) components.
    """

    _MIXIN_EXTRA_FOOTPRINTS = [footprints.Footprint(
        attr=dict(
            kind=dict(
                values=['diagpi', 'diaglabo'],
            ),
            numod=dict(
                info='The GRIB model number',
                type=int,
                optional=True,
                default=DelayedEnvValue('VORTEX_GRIB_NUMOD', 62),
            ),
            # Concatenate input GRIB files prior to the run ?
            gribcat=dict(
                type=bool,
                optional=True,
                default=False
            ),
            # Number of concurrent tasks used when filtering/concatenating GRIBs
            gribfilter_tasks=dict(
                type=int,
                optional=True,
                default=8,
            ),
        ),
    )]

    def _prepare_pihook(self, rh, opts):
        """Set some variables according to target definition."""

        # Check for input files to concatenate (only already-available ones:
        # expected sections are grabbed and concatenated later, in the
        # execute loop)
        if self.gribcat:
            srcsec = self.context.sequence.effective_inputs(role=('Gridpoint', 'Sources',
                                                                  'Preview', 'Previous'),
                                                            kind='gridpoint')
            cat_list_in = [sec for sec in srcsec if not sec.rh.is_expected()]
            outsec = self.context.sequence.effective_inputs(role='GridpointOutputPrepare')
            cat_list_out = [sec for sec in outsec if not sec.rh.is_expected()]
            self._automatic_cat(cat_list_in, cat_list_out)

        # prepare for delayed filtering (filled during execute when no
        # promises are at stake, consumed in the postfix hook)
        self._delayed_filtering = []

    def _postfix_pihook(self, rh, opts):
        """Filter outputs."""
        if self._delayed_filtering:
            self._batch_filter(self._delayed_filtering)

    def _spawn_pihook(self):
        """Usually a good habit to dump the fort.4 namelist."""
        if self.system.path.exists('fort.4'):
            self.system.subtitle('{:s} : dump namelist <fort.4>'.format(self.realkind))
            self.system.cat('fort.4', output=False)

    _MIXIN_PREPARE_HOOKS = (_prepare_pihook, )
    _MIXIN_POSTFIX_HOOKS = (_postfix_pihook, )
    _MIXIN_SPAWN_HOOKS = (_spawn_pihook, )

    def _automatic_cat(self, list_in, list_out):
        """Concatenate the *list_in* and *list_out* input files.

        Each file is first moved aside (``*.tmpcat``) then concatenated back
        in place by :func:`parallel_grib_filter`; the temporary files are
        removed afterwards.  *list_in* files are opened ``in``, *list_out*
        files ``inout``.
        """
        if self.gribcat:
            inputs = []
            outputs = []
            intents = []
            for (seclist, intent) in zip((list_in, list_out), ('in', 'inout')):
                for isec in seclist:
                    tmpin = isec.rh.container.localpath() + '.tmpcat'
                    self.system.move(isec.rh.container.localpath(), tmpin, fmt='grib')
                    inputs.append(tmpin)
                    outputs.append(isec.rh.container.localpath())
                    intents.append(intent)
            parallel_grib_filter(self.context, inputs, outputs, intents,
                                 cat=True, nthreads=self.gribfilter_tasks)
            for ifile in inputs:
                self.system.rm(ifile, fmt='grib')

    def _batch_filter(self, candidates):
        """If no promises are made, the GRIB are filtered at once at the end."""
        # We re-serialise data because footprints don't like dictionaries
        filters = [json.dumps(x.rh.contents.data)
                   for x in self.context.sequence.effective_inputs(role='GRIBFilteringRequest',
                                                                   kind='filtering_request')]
        parallel_grib_filter(self.context,
                             candidates, [f + '_{filtername:s}' for f in candidates],
                             filters=FPTuple(filters), nthreads=self.gribfilter_tasks)

    def _execute_picommons(self, rh, opts):
        """Loop on the various grib files provided."""

        # Initialise a GRIBFilter (at least try to)
        gfilter = GRIBFilter(concatenate=False)
        gfilter.add_filters(self.context)

        srcsec = self.context.sequence.effective_inputs(role=('Gridpoint', 'Sources'),
                                                        kind='gridpoint')
        srcsec.sort(key=lambda s: s.rh.resource.term)

        outsec = self.context.sequence.effective_inputs(role='GridpointOutputPrepare')
        if outsec:
            outsec.sort(key=lambda s: s.rh.resource.term)

        for sec in srcsec:
            r = sec.rh
            self.system.title('Loop on domain {:s} and term {:s}'.format(r.resource.geometry.area,
                                                                         r.resource.term.fmthm))
            # Tweak the namelist
            namsec = self.setlink(initrole='Namelist', initkind='namelist', initname='fort.4')
            for nam in [x.rh for x in namsec if 'NAM_PARAM' in x.rh.contents]:
                logger.info("Substitute the date (%s) to AAAAMMJJHH namelist entry", r.resource.date.ymdh)
                nam.contents['NAM_PARAM']['AAAAMMJJHH'] = r.resource.date.ymdh
                # Fixed log message (was: "the the number of terms")
                logger.info("Substitute the number of terms to NECH(0) namelist entry")
                nam.contents['NAM_PARAM']['NECH(0)'] = 1
                # Fixed log message (was: "ressource")
                logger.info("Substitute the resource term to NECH(1) namelist entry")
                # NB: term should be expressed in minutes
                nam.contents['NAM_PARAM']['NECH(1)'] = int(r.resource.term)
                # Add the member number in a dedicated namelist block
                if r.provider.member is not None:
                    mblock = nam.contents.newblock('NAM_PARAMPE')
                    mblock['NMEMBER'] = int(r.provider.member)
                # Now, update the model number for the GRIB files
                if 'NAM_DIAG' in nam.contents:
                    nmod = self.numod
                    logger.info("Substitute the model number (%d) to namelist entry", nmod)
                    for namk in ('CONV', 'BR', 'HIV', 'ECHOT', 'ICA', 'PSN'):
                        # Only overwrite diagnostics that are actually activated
                        if namk in nam.contents['NAM_DIAG'] and nam.contents['NAM_DIAG'][namk] != 0:
                            nam.contents['NAM_DIAG'][namk] = nmod
                # We are done with the namelist
                nam.save()

            cat_list_in = []
            cat_list_out = []

            # Expect the input grib file to be here
            if sec.rh.is_expected():
                cat_list_in.append(sec)
                self.grab(sec, comment='diagpi source')
            if outsec:
                out = outsec.pop(0)
                assert out.rh.resource.term == sec.rh.resource.term
                if out.rh.is_expected():
                    cat_list_out.append(out)
                    self.grab(out, comment='diagpi output')

            # Also link in previous grib files in order to compute some winter diagnostics
            srcpsec = [x
                       for x in self.context.sequence.effective_inputs(role=('Preview', 'Previous'),
                                                                       kind='gridpoint')
                       if x.rh.resource.term < r.resource.term]
            for pr in srcpsec:
                if pr.rh.is_expected():
                    cat_list_in.append(pr)
                    self.grab(pr, comment='diagpi additional source for winter diag')

            self._automatic_cat(cat_list_in, cat_list_out)

            # Standard execution
            opts['loop'] = r.resource.term
            super(self.mixin_execute_companion(), self).execute(rh, opts)

            actualname = r'GRIB[-_A-Z]+{:s}\+{:s}(?:_member\d+)?$'.format(r.resource.geometry.area,
                                                                          r.resource.term.fmthm)
            # Find out the output file and filter it
            filtered_out = list()
            if len(gfilter):
                for candidate in [f for f in self.system.glob('GRIB*') if re.match(actualname, f)]:
                    if len(self.promises):
                        # Promises at stake: filter right now
                        logger.info("Starting GRIB filtering on %s.", candidate)
                        filtered_out.extend(gfilter(candidate, candidate + '_{filtername:s}'))
                    else:
                        # No promises: filtering is delayed until the postfix hook
                        self._delayed_filtering.append(candidate)

            # The diagnostic output may be promised
            expected = [x for x in self.promises
                        if (re.match(actualname, x.rh.container.localpath()) or
                            x.rh.container.localpath() in filtered_out)]
            for thispromise in expected:
                thispromise.put(incache=True)

    _MIXIN_EXECUTE_OVERWRITE = _execute_picommons
|
|
995
|
+
|
|
996
|
+
|
|
997
|
+
class DiagPI(BlindRun, _DiagPIDecoMixin, EcGribDecoMixin):
    """Execution of diagnostics on grib input (deterministic forecasts specific)."""
|
|
1000
|
+
|
|
1001
|
+
|
|
1002
|
+
class DiagPIMPI(Parallel, _DiagPIDecoMixin, EcGribDecoMixin):
    """Execution of diagnostics on grib input (deterministic forecasts specific)."""
|
|
1005
|
+
|
|
1006
|
+
|
|
1007
|
+
class Fa2GaussGrib(BlindRun, DrHookDecoMixin):
    """Standard FA conversion, e.g. with GOBPTOUT as a binary resource."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['fa2gaussgrib'],
            ),
            # Local name of the FA file handed to the binary
            fortinput = dict(
                optional = True,
                default = 'PFFPOS_FIELDS',
            ),
            # GRIB model number written in the NAML namelist (INUMOD)
            numod = dict(
                type = int,
                optional = True,
                default = DelayedEnvValue('VORTEX_GRIB_NUMOD', 212),
            ),
            # Toggles the LLBAVE namelist switch
            verbose = dict(
                type = bool,
                optional = True,
                default = False,
            ),
        )
    )

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided.

        For each input FA file (roles Historic/ModelState, sorted by term),
        a minimal ``NAML`` namelist is generated as ``fort.4``, the input is
        linked as :attr:`fortinput`, the binary is run, and the resulting
        GRID file is renamed to a ``GGRID``-prefixed output.
        """

        # e.g. 'GRID_FIELDS1' for the default fortinput 'PFFPOS_FIELDS'
        # (the [7:14] slice presumably strips a 'PFFPOS_' prefix — TODO confirm)
        thisoutput = 'GRID_' + self.fortinput[7:14] + '1'

        gpsec = self.context.sequence.effective_inputs(role=('Historic', 'ModelState'))
        gpsec.sort(key=lambda s: s.rh.resource.term)

        for sec in gpsec:
            r = sec.rh

            self.system.title('Loop on files: {:s}'.format(r.container.localpath()))

            # Some preventive cleaning
            self.system.remove(thisoutput)
            self.system.remove('fort.4')

            # Build the local namelist block
            nb = NamelistBlock(name='NAML')
            nb.NBDOM = 1
            nb.INUMOD = self.numod

            nb['LLBAVE'] = self.verbose
            nb['CDNOMF(1)'] = self.fortinput
            with open('fort.4', 'w') as namfd:
                namfd.write(nb.dumps())

            self.system.header('{:s} : local namelist {:s} dump'.format(self.realkind, 'fort.4'))
            self.system.cat('fort.4', output=False)

            # Expect the input FP file source to be there...
            self.grab(sec, comment='fullpos source')

            # Finally set the actual init file
            self.system.softlink(r.container.localpath(), self.fortinput)

            # Standard execution
            super().execute(rh, opts)

            # Freeze the current output
            if self.system.path.exists(thisoutput):
                # NOTE(review): output name is derived by replacing the first 6
                # characters of the input's local name — assumes a fixed-length
                # prefix in the input naming convention; confirm against callers.
                self.system.move(thisoutput, 'GGRID' + r.container.localpath()[6:], fmt='grib')
            else:
                logger.warning('Missing some grib output for %s',
                               thisoutput)

            # Some cleaning
            self.system.rmall(self.fortinput)
|
|
1080
|
+
|
|
1081
|
+
|
|
1082
|
+
class Reverser(BlindRun, DrHookDecoMixin):
    """Compute the initial state for Ctpini."""

    _footprint = dict(
        info = "Compute initial state for Ctpini.",
        attr = dict(
            kind = dict(
                values = ['reverser'],
            ),
            param_iter = dict(
                type = int,
            ),
            condlim = dict(
                type = int,
            ),
            ano_type = dict(
                type = int,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Substitute the Ctpini settings into the parameter file, then delegate."""
        # Get info about the directives files directory
        directives = self.context.sequence.effective_inputs(
            role='Directives', kind='ctpini_directives_file')
        if not directives:
            logger.error("No directive file found. Stop")
            raise ValueError("No directive file found.")
        if len(directives) > 1:
            logger.warning("Multiple directive files found. This is strange...")

        # Substitute values in the simili namelist
        params = self.context.sequence.effective_inputs(role='Param')
        if not params:
            logger.error("No parameter file found. Stop")
            raise ValueError("No parameter file found.")
        elif len(params) > 1:
            logger.warning("Multiple files for parameter, the first %s is taken",
                           params[0].rh.container.filename)
        param = params[0].rh
        substitutions = {
            'param_iter': str(self.param_iter),
            'condlim': str(self.condlim),
            'ano_type': str(self.ano_type),
        }
        param.contents.setitems(substitutions)
        param.save()
        logger.info("Here is the parameter file (after substitution):")
        param.container.cat()

        # Call the parent's prepare
        super().prepare(rh, opts)
|
|
1131
|
+
|
|
1132
|
+
|
|
1133
|
+
class DegradedEnsembleDiagError(AlgoComponentError):
    """Exception raised when some of the members are missing."""
|
|
1136
|
+
|
|
1137
|
+
|
|
1138
|
+
class FailedEnsembleDiagError(DegradedEnsembleDiagError):
    """Exception raised when too many members are missing."""
|
|
1141
|
+
|
|
1142
|
+
|
|
1143
|
+
class PyEnsembleDiag(Expresso):
|
|
1144
|
+
"""Execution of diagnostics on grib input (ensemble forecasts specific)."""
|
|
1145
|
+
|
|
1146
|
+
_footprint = dict(
|
|
1147
|
+
attr = dict(
|
|
1148
|
+
kind = dict(
|
|
1149
|
+
values = ['py_diag_ens'],
|
|
1150
|
+
),
|
|
1151
|
+
timeout = dict(
|
|
1152
|
+
type = int,
|
|
1153
|
+
optional = True,
|
|
1154
|
+
default = 1200,
|
|
1155
|
+
),
|
|
1156
|
+
refreshtime = dict(
|
|
1157
|
+
type = int,
|
|
1158
|
+
optional = True,
|
|
1159
|
+
default = 20,
|
|
1160
|
+
),
|
|
1161
|
+
missinglimit = dict(
|
|
1162
|
+
type = int,
|
|
1163
|
+
optional = True,
|
|
1164
|
+
default = 0,
|
|
1165
|
+
),
|
|
1166
|
+
waitlimit = dict(
|
|
1167
|
+
type = int,
|
|
1168
|
+
optional = True,
|
|
1169
|
+
default = 900,
|
|
1170
|
+
),
|
|
1171
|
+
),
|
|
1172
|
+
)
|
|
1173
|
+
|
|
1174
|
+
def __init__(self, *kargs, **kwargs):
    """Initialise the component and the command-line arguments cache."""
    super().__init__(*kargs, **kwargs)
    # Filled by _actual_execute before each run; consumed by
    # spawn_command_options.
    self._cl_args = dict()
|
|
1177
|
+
|
|
1178
|
+
def spawn_command_options(self):
    """Prepare options for the resource's command line.

    Returns the live ``_cl_args`` dictionary last set by
    :meth:`_actual_execute` (flowconf/output entries).
    """
    return self._cl_args
|
|
1181
|
+
|
|
1182
|
+
def _actual_execute(self, rh, opts, input_rhs, ** infos):
    """Actually run the script for a specific bunch of input files (**input_rhs**).

    *infos* carries at least the ``term``, ``geometry`` and ``safeblock``
    keys of the gang being processed.  A ``flowconf.json`` description of
    the inputs is dumped, the script is run, and any promise matching the
    output file name is fulfilled.
    """
    output_fname = ('ensdiag_{safeblock:s}_{geometry.tag:s}_{term.fmthm}.grib'
                    .format(** infos))
    self._cl_args = dict(flowconf='flowconf.json',
                         output=output_fname)

    # Create the JSON file that will be ingested by the script
    self.system.json_dump(
        dict(date=input_rhs[0].resource.date.ymdhm,
             term=infos['term'].fmthm,
             geometry=infos['geometry'].tag,
             area=infos['geometry'].area,
             block=infos['safeblock'],
             grib_files=[r.container.localpath() for r in input_rhs],
             ),
        self._cl_args['flowconf']
    )

    # Actually run the post-processing script
    super().execute(rh, opts)

    # The diagnostic output may be promised
    for thispromise in [x for x in self.promises
                        if output_fname == x.rh.container.localpath()]:
        thispromise.put(incache=True)
|
|
1208
|
+
|
|
1209
|
+
@staticmethod
def _gang_txt_id(gang):
    """A string that identifies the input data currently being processed."""
    template = ("term={term.fmthm:s}, "
                "geometry={geometry.tag:s} "
                "and block={safeblock:s}")
    return template.format(**gang.info)
|
|
1215
|
+
|
|
1216
|
+
def _handle_gang_rescue(self, gang):
    """If some of the entries are missing, create a delayed exception.

    A gang in ``pcollectable`` state (some members missing) yields a
    :class:`DegradedEnsembleDiagError`; a ``failed`` gang (too many
    missing) yields a :class:`FailedEnsembleDiagError`.  The exception is
    recorded through :meth:`delayed_exception_add` so processing can go on.
    """
    if gang.state in (GangSt.pcollectable, GangSt.failed):
        txt_id = self._gang_txt_id(gang)
        self.system.subtitle("WARNING: Missing data for " + txt_id)
        # List the problematic resource handlers, grouped by entry status
        for st in (EntrySt.ufo, EntrySt.failed, EntrySt.expected):
            if gang.members[st]:
                print('Here is the list of Resource Handler with status < {:s} >:'
                      .format(st))
                for i, e in enumerate(gang.members[st]):
                    e.section.rh.quickview(nb=i + 1, indent=1)
        self.delayed_exception_add(
            FailedEnsembleDiagError("Too many inputs are missing for " + txt_id)
            if gang.state == GangSt.failed else
            DegradedEnsembleDiagError("Some of the inputs are missing for " + txt_id),
            traceback=False
        )
|
|
1233
|
+
|
|
1234
|
+
def execute(self, rh, opts):
    """Loop on the various grib files provided.

    Gridpoint inputs are watched by a :class:`BasicInputMonitor` and grouped
    by (term, block, geometry); each group is handed to
    :meth:`_actual_execute` as soon as it becomes collectable, possibly in a
    degraded way (see :meth:`_handle_gang_rescue`).
    """

    # Monitor for the input files
    bm = BasicInputMonitor(self.context,
                           caching_freq=self.refreshtime,
                           role='Gridpoint')

    # Check that the date is consistent among inputs
    basedates = set()
    # NOTE(review): *members* is collected but not used below — confirm
    # whether it is still needed.
    members = set()
    for rhI in [s.section.rh for s in bm.memberslist]:
        basedates.add(rhI.resource.date)
        members.add(rhI.provider.member)
    if len(basedates) > 1:
        raise AlgoComponentError('The date must be consistent among the input resources')

    # Setup BasicGangs: auto-group monitor entries by (term, block, geometry)
    basicmeta = AutoMetaGang()
    basicmeta.autofill(bm, ('term', 'safeblock', 'geometry'),
                       allowmissing=self.missinglimit, waitlimit=self.waitlimit)

    # Now, starts monitoring everything
    with bm:
        while basicmeta.has_ufo() or basicmeta.has_pcollectable():
            # NB: 'consume_pcolectable' is the project API spelling
            for thegang in basicmeta.consume_pcolectable():
                txt_id = self._gang_txt_id(thegang)
                self.system.title("Dealing with " + txt_id)

                available = thegang.members[EntrySt.available]
                # Record a delayed exception if the gang is incomplete
                self._handle_gang_rescue(thegang)

                self._actual_execute(rh, opts,
                                     [e.section.rh for e in available],
                                     **thegang.info)

                self.system.highlight("Done with " + txt_id)

            if not bm.all_done and basicmeta.has_ufo() and not basicmeta.has_pcollectable():
                # Timeout ?
                tmout = bm.is_timedout(self.timeout)
                if tmout:
                    break
                # Wait a little bit :-)
                time.sleep(1)
                bm.health_check(interval=30)

    # Warn for failed gangs
    if basicmeta.members[GangSt.failed]:
        self.system.title("One or several (term, geometry, block) group(s) could not be processed")
        for thegang in basicmeta.members[GangSt.failed]:
            self._handle_gang_rescue(thegang)