vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/nwp/algo/eps.py
ADDED
|
@@ -0,0 +1,613 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AlgoComponents dedicated to computations related to the Ensemble Prediction System.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import collections
|
|
6
|
+
import copy
|
|
7
|
+
import re
|
|
8
|
+
|
|
9
|
+
import footprints
|
|
10
|
+
from bronx.compat.itertools import pairwise
|
|
11
|
+
from bronx.fancies import loggers
|
|
12
|
+
from bronx.stdtypes.date import Time
|
|
13
|
+
from ..tools.drhook import DrHookDecoMixin
|
|
14
|
+
from vortex.algo.components import BlindRun
|
|
15
|
+
from vortex.layout.dataflow import intent
|
|
16
|
+
from vortex.tools.grib import EcGribDecoMixin
|
|
17
|
+
from vortex.util.structs import ShellEncoder
|
|
18
|
+
|
|
19
|
+
from .ifsroot import IFSParallel
|
|
20
|
+
from .stdpost import parallel_grib_filter
|
|
21
|
+
|
|
22
|
+
#: No automatic export
|
|
23
|
+
__all__ = []
|
|
24
|
+
|
|
25
|
+
logger = loggers.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class Svect(IFSParallel):
    """Computation of the singular vectors (plain IFS parallel run)."""

    _footprint = dict(
        info='Computation of the singular vectors.',
        attr=dict(
            kind=dict(
                values=['svectors', 'svector', 'sv', 'svect', 'svarpe'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(
                type=int,
                optional=True,
                default=601,
            ),
            xpname=dict(
                optional=True,
                default='SVEC',
            ),
        )
    )

    @property
    def realkind(self):
        """Kind name used when generating resource/footprint identifiers."""
        return 'svector'
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class Combi(BlindRun, DrHookDecoMixin, EcGribDecoMixin):
    """Build the initial conditions of the EPS."""

    _abstract = True

    def execute(self, rh, opts):
        """Standard Combi execution."""
        # Display the effective namelist before launching the binary.
        namelist_secs = self.setlink(initrole='Namelist', initkind='namelist')
        namelist_secs[0].rh.container.cat()
        super().execute(rh, opts)

    @property
    def nmod(self):
        # Concrete subclasses must provide the NMOD namelist value.
        raise NotImplementedError("Abstract property")

    def _addNmod(self, namrh, msg):
        # Record the run mode (NMOD) in the NAMMOD namelist block.
        namrh.contents['NAMMOD']['NMOD'] = self.nmod
        logger.info("NMOD set to %d: %s.", self.nmod, msg)

    def _analysis_cp(self, nb, msg):
        # Duplicate the analysis file toward *nb* numbered targets so that
        # every perturbation/member starts from the same basis.
        analysis_secs = self.setlink(initkind='analysis')
        src_container = analysis_secs[0].rh.container
        src_path = src_container.localpath()
        radical = re.sub(r'^(.*?)\d+$', r'\1', src_path)
        for num in footprints.util.rangex(1, nb):
            self.system.cp(src_path,
                           radical + '{:03d}'.format(num),
                           fmt=src_container.actualfmt, intent=intent.INOUT)
        logger.info("Copy the analysis for the %d %s.", nb, msg)

    def _coeff_picking(self, kind, msg):
        # Extract the coefficient computed by the binary from the updated
        # namelist(s) and dump it into a small JSON file.
        block_name = 'NAMCOEF' + kind.upper()
        key_name = 'RCOEF' + kind.upper()
        for namsec in self.context.sequence.effective_inputs(kind='namelist'):
            namsec.rh.reset_contents()
            if block_name in namsec.rh.contents:
                logger.info("Extract the " + msg + " coefficient from the updated namelist.")
                coeff = {'rcoef' + kind: float(namsec.rh.contents[block_name][key_name])}
                self.system.json_dump(coeff, 'coeff' + kind + '.out', indent=4, cls=ShellEncoder)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class CombiPert(Combi):
    """Build the initial perturbations of the EPS initial conditions."""

    _abstract = True
    _footprint = dict(
        attr=dict(
            nbpert=dict(
                type=int,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Propagate the number of perturbations to every input namelist.
        namelist_secs = self.context.sequence.effective_inputs(
            role=re.compile('Namelist'), kind='namelist')
        for namsec in namelist_secs:
            logger.info("Add the NBPERT coefficient to the NAMENS namelist entry")
            namsec.rh.contents['NAMENS']['NBPERT'] = self.nbpert
            namsec.rh.save()
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
#: Named tuple holding, for a given zone, the number of singular vectors
#: actually available and the number originally expected.
_SvInfoTuple = collections.namedtuple('SvInfoTuple', ['available', 'expected'])
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class CombiSV(CombiPert):
    """Combine the SV to create perturbations by gaussian sampling."""

    _abstract = True
    _footprint = dict(
        attr=dict(
            info_fname=dict(
                default='singular_vectors_info.json',
                optional=True,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition.

        Links the available singular-vector files under consecutive numbers,
        dumps a per-zone summary JSON and updates the namelists accordingly.
        """
        super().prepare(rh, opts)

        # Check the number of singular vectors and link them in succession
        nbVectTmp = collections.OrderedDict()
        totalVects = 0
        svec_sections = self.context.sequence.filtered_inputs(role='SingularVectors', kind='svector')
        for svecsec in svec_sections:
            c_match = re.match(r'^([^+,.]+)[+,.][^+,.]+[+,.][^+,.]+(.*)$',
                               svecsec.rh.container.localpath())
            if c_match is None:
                logger.critical("The SV name is not formated correctly: %s",
                                svecsec.rh.container.actualpath())
                # Bugfix: the previous code went on and dereferenced the None
                # match object, crashing with an obscure AttributeError.
                # Fail explicitly instead.
                raise ValueError("Badly formatted SV name: {:s}"
                                 .format(svecsec.rh.container.actualpath()))
            (radical, suffix) = c_match.groups()
            zone = svecsec.rh.resource.zone
            nbVectTmp.setdefault(zone, [0, 0])
            nbVectTmp[zone][1] += 1  # Expected
            if svecsec.stage == 'get':
                totalVects += 1
                nbVectTmp[zone][0] += 1  # Available
                self.system.softlink(svecsec.rh.container.localpath(),
                                     radical + '{:03d}'.format(totalVects) + suffix)
        # Convert the temporary dictionary to a dictionary of tuples
        nbVect = collections.OrderedDict()
        for k, v in nbVectTmp.items():
            nbVect[k] = _SvInfoTuple(*v)
        logger.info("Number of vectors :\n" +
                    '\n'.join(['- {0:8s}: {1.available:3d} ({1.expected:3d} expected).'.format(z, n)
                               for z, n in nbVect.items()]))
        # Writing the singular vectors per areas in a json file
        self.system.json_dump(nbVect, self.info_fname)

        # Tweak the namelists
        namsecs = self.context.sequence.effective_inputs(role=re.compile('Namelist'), kind='namelist')
        for namsec in namsecs:
            # Only the singular-vector branch is active for this run
            namsec.rh.contents['NAMMOD']['LVS'] = True
            namsec.rh.contents['NAMMOD']['LANAP'] = False
            namsec.rh.contents['NAMMOD']['LBRED'] = False
            logger.info("Added to NVSZONE namelist entry")
            namsec.rh.contents['NAMOPTI']['NVSZONE'] = [
                v.available for v in nbVect.values() if v.available
            ]  # Zones with 0 vectors are discarded

            nbVectNam = namsec.rh.contents['NAMENS']['NBVECT']
            if int(nbVectNam) != totalVects:
                logger.warning("%s singular vectors expected but only %d accounted for.",
                               nbVectNam, totalVects)
                logger.info("Update the total number of vectors in the NBVECT namelist entry")
                namsec.rh.contents['NAMENS']['NBVECT'] = totalVects

            actualZones = [k for k, v in nbVect.items() if v.available]  # Zones with 0 vectors are discarded
            nbzone = len(actualZones)
            namsec.rh.contents['NAMOPTI']['NBZONE'] = nbzone
            namsec.rh.contents['NAMOPTI']['CNOMZONE'] = actualZones
            # Sanity checks: the RC/RL arrays must match the number of zones
            nbrc = len(namsec.rh.contents['NAMOPTI'].RC)
            if nbrc != nbzone:
                logger.critical("%d zones but NAMOPTI/RC has length %d" % (nbzone, nbrc))
            nbrl = len(namsec.rh.contents['NAMOPTI'].RL)
            if nbrl != nbzone:
                logger.critical("%d zones but NAMOPTI/RL has length %d" % (nbzone, nbrl))

            self._addNmod(namsec.rh, "combination of the SV")
            namsec.rh.save()

        # Copy the analysis to give all the perturbations a basis
        self._analysis_cp(self.nbpert, 'perturbations')
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class CombiSVunit(CombiSV):
    """Combine the unit SV to create the raw perturbations by gaussian sampling."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['sv2unitpert', 'init', 'combi_init', ],
                remap=dict(combi_init='init',),
            ),
        )
    )

    @property
    def nmod(self):
        """NMOD namelist value for the unit-SV combination step."""
        return 1
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class CombiSVnorm(CombiSV):
    """
    Compute a norm consistent with the background error
    and combine the normed SV to create the SV perturbations.
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['sv2normedpert', 'optim', 'combi_optim', ],
                remap=dict(autoremap='first'),
            ),
        )
    )

    @property
    def nmod(self):
        """NMOD namelist value for the normed-SV combination step."""
        return 2

    def postfix(self, rh, opts):
        """Post processing cleaning: extract the SV coefficient."""
        self._coeff_picking('vs', 'SV')
        super().postfix(rh, opts)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
class CombiIC(Combi):
    """Combine the SV and AE or breeding perturbations to create the initial conditions."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['pert2ic', 'sscales', 'combi_sscales', ],
                remap = dict(autoremap='first'),
            ),
            # Number of initial conditions to build
            nbic = dict(
                alias = ('nbruns',),
                type = int,
            ),
            # Number of perturbations; 0 means "deduce it from the inputs"
            nbpert = dict(
                type = int,
                optional = True,
                default = 0,
            ),
        )
    )

    @property
    def nmod(self):
        # NMOD namelist value for the final combination step
        return 3

    def prepare(self, rh, opts):
        """Set some variables according to target definition.

        Inspects the available inputs (SV coefficients, breeding coefficients,
        assimilation-ensemble states), deduces the effective number of
        perturbations when ``nbpert`` is 0, and tweaks the namelist
        (NAMMOD flags, coefficient blocks, NBPERT, LMIRROR) accordingly.
        """
        super().prepare(rh, opts)

        # Tweak the namelist
        namsec = self.setlink(initrole='Namelist', initkind='namelist')
        nammod = namsec[0].rh.contents['NAMMOD']

        # The footprint's value is always preferred to the calculated one
        nbPert = self.nbpert

        # Dealing with singular vectors
        sv_sections = self.context.sequence.effective_inputs(role='CoeffSV')
        nammod['LVS'] = bool(sv_sections)
        if sv_sections:
            logger.info("Add the SV coefficient to the NAMCOEFVS namelist entry.")
            namcoefvs = namsec[0].rh.contents.newblock('NAMCOEFVS')
            namcoefvs['RCOEFVS'] = sv_sections[0].rh.contents['rcoefvs']
            # The mean value may be present among the SV inputs: remove it
            # (sections with a falsy resource.number are filtered out;
            #  the 'ICHR' fallback matches SV files by naming convention)
            svsecs = [sec for sec in self.context.sequence.effective_inputs(role='SVPerturbedState') or
                      [sec for sec in self.context.sequence.effective_inputs(role='PerturbedState')
                       if 'ICHR' in sec.rh.container.filename] if sec.rh.resource.number]
            nbPert = nbPert or len(svsecs)

        # Dealing with breeding method's inputs
        bd_sections = self.context.sequence.effective_inputs(role='CoeffBreeding')
        nammod['LBRED'] = bool(bd_sections)
        if bd_sections:
            logger.info("Add the breeding coefficient to the NAMCOEFBM namelist entry.")
            namcoefbm = namsec[0].rh.contents.newblock('NAMCOEFBM')
            namcoefbm['RCOEFBM'] = bd_sections[0].rh.contents['rcoefbm']
            # Count the breeding inputs ('BMHR' fallback by naming convention)
            nbBd = len(
                self.context.sequence.effective_inputs(role='BreedingPerturbedState')
                or [
                    sec
                    for sec in self.context.sequence.effective_inputs(role='PerturbedState')
                    if 'BMHR' in sec.rh.container.filename
                ]
            )
            # symmetric perturbations except if analysis: one more file
            # or zero if one control ic (hypothesis: odd nbic)
            nbPert = nbPert or (nbBd - 1 if nbBd == self.nbic + 1 or
                                (nbBd == self.nbic and self.nbic % 2 != 0)
                                else self.nbic // 2)

        # Dealing with initial conditions from the assimilation ensemble
        # the mean value may be present among the AE inputs: remove it
        aesecs = [sec for sec in self.context.sequence.effective_inputs(
            role=('AEPerturbedState', 'ModelState')) if sec.rh.resource.number]
        nammod['LANAP'] = bool(aesecs)
        nbAe = len(aesecs)
        nbPert = nbPert or nbAe
        # If less AE members (but nor too less) than ic to build
        if nbAe < nbPert <= 2 * nbAe:
            logger.info("%d AE perturbations needed, %d AE members available: the first ones are duplicated.",
                        nbPert, nbAe)
            # Recycle the first members under new numbered names
            prefix = aesecs[0].rh.container.filename.split('_')[0]
            for num in range(nbAe, nbPert):
                self.system.softlink(aesecs[num - nbAe].rh.container.filename,
                                     prefix + '_{:03d}'.format(num + 1))

        logger.info("NAMMOD namelist summary: LANAP=%s, LVS=%s, LBRED=%s.",
                    *[nammod[k] for k in ('LANAP', 'LVS', 'LBRED')])
        logger.info("Add the NBPERT=%d coefficient to the NAMENS namelist entry.", nbPert)
        namsec[0].rh.contents['NAMENS']['NBPERT'] = nbPert

        # symmectric perturbations ?
        if nbPert < self.nbic - 1:
            namsec[0].rh.contents['NAMENS']['LMIRROR'] = True
            logger.info("Add LMIRROR=.TRUE. to the NAMENS namelist entry.")
        elif nbPert != 1:  # 1 pert, 2 ic is possible without mirror adding the control
            namsec[0].rh.contents['NAMENS']['LMIRROR'] = False
            logger.info("Add LMIRROR=.FALSE. to the NAMENS namelist entry.")

        self._addNmod(namsec[0].rh, "final combination of the perturbations")
        namsec[0].rh.save()

        # Copy the analysis to give all the members a basis
        self._analysis_cp(self.nbic - 1, 'perturbed states')
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
class CombiBreeding(CombiPert):
    """
    Compute the coefficient of the bred modes and combine the forecasts
    to create the breeding perturbations.
    """
    # NOTE: the former docstring was copy-pasted from CombiSVnorm and wrongly
    # described an SV-norm computation.

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['fc2bredpert', 'breeding', 'combi_breeding', ],
                remap=dict(autoremap='first'),
            ),
        )
    )

    @property
    def nmod(self):
        """NMOD namelist value for the breeding step."""
        return 6

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Consistent naming with the Fortran execution
        hst_sections = self.context.sequence.effective_inputs(kind=('pert', 'historic'))
        for num, hst in enumerate(hst_sections):
            self.system.softlink(hst.rh.container.localpath(),
                                 re.sub(r'^(.*?)\d+$', r'\1', hst.rh.container.localpath()) +
                                 '{:03d}.grb'.format(num + 1))
        # Bugfix: the previous log statement reused the loop variable after
        # the loop, which under-counted by one and raised a NameError when
        # there was no input at all.
        logger.info("Rename the %d grib files consecutively.", len(hst_sections))

        # Tweak the namelist: only the breeding branch is active
        namsec = self.setlink(initrole='Namelist', initkind='namelist')
        namsec[0].rh.contents['NAMMOD']['LBRED'] = True
        namsec[0].rh.contents['NAMMOD']['LANAP'] = False
        namsec[0].rh.contents['NAMMOD']['LVS'] = False
        self._addNmod(namsec[0].rh, "compute the coefficient of the bred modes")
        namsec[0].rh.save()

    def postfix(self, rh, opts):
        """Post processing cleaning: extract the breeding coefficient."""
        self._coeff_picking('bm', 'breeding')
        super().postfix(rh, opts)
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
class SurfCombiIC(BlindRun):
    """
    Combine the deterministic surface with the perturbed surface
    to create the initial surface conditions.
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['surf_pert2ic', 'surf2ic', ],
                remap=dict(autoremap='first'),
            ),
            member=dict(
                type=int,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Derive a member-dependent random seed from the analysis date
        ic_secs = self.setlink(initrole=('SurfaceAnalysis', 'SurfaceInitialCondition'),
                               initkind='ic')
        anadate = ic_secs[0].rh.resource.date
        iseed = int(anadate.ymdh) + (anadate.hour + 1) * (self.member + 1)

        # Tweak the namelist
        nam_secs = self.setlink(initrole='Namelist', initkind='namelist')
        logger.info("ISEED added to NAMSFC namelist entry: %d", iseed)
        nam_secs[0].rh.contents['NAMSFC']['ISEED'] = iseed
        nam_secs[0].rh.save()
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
class Clustering(BlindRun, EcGribDecoMixin):
    """Select by clustering a sample of members among the whole set."""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['clustering', 'clust', ],
                remap = dict(autoremap='first'),
            ),
            # Name of the text file listing the grib files fed to the binary
            fileoutput = dict(
                optional = True,
                default = '_griblist',
            ),
            # Number of clusters to produce
            nbclust = dict(
                type = int,
            ),
            # Number of available members; recomputed in prepare() when a
            # population JSON is provided (hence the 'rwx' access)
            nbmembers = dict(
                type = int,
                optional = True,
                access = 'rwx',
            ),
            # Parallelism level for the grib concatenation step
            gribfilter_tasks = dict(
                type = int,
                optional = True,
                default = 8,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition.

        Builds the ordered list of grib files, works out the available
        forecast terms, concatenates the gribs, tweaks the namelist and
        writes the file list consumed by the clustering executable.
        """
        super().prepare(rh, opts)

        grib_sections = self.context.sequence.effective_inputs(role='ModelState',
                                                               kind='gridpoint')
        avail_json = self.context.sequence.effective_inputs(role='AvailableMembers',
                                                            kind='mbpopulation')

        # If no population file is here, just do a sort on the file list,
        # otherwise use the population list
        if avail_json:
            population = avail_json[0].rh.contents.data['population']
            self.nbmembers = len(population)
            file_list = list()
            terms_set = set()
            for elt in population:
                sublist_ids = list()
                for (i, grib) in enumerate(grib_sections):
                    # If the grib file matches, let's go
                    if all([grib.rh.wide_key_lookup(key, exports=True) == value
                            for (key, value) in elt.items()]):
                        sublist_ids.append(i)
                # Stack the gribs in file_list
                file_list.extend(sorted([str(grib_sections[i].rh.container.localpath())
                                         for i in sublist_ids]))
                terms_set.update([grib_sections[i].rh.resource.term for i in sublist_ids])
                # Consume matched sections (reverse order keeps indices valid)
                for i in reversed(sublist_ids):
                    del grib_sections[i]
        else:
            file_list = sorted([str(grib.rh.container.localpath())
                                for grib in grib_sections])
            terms_set = {grib.rh.resource.term for grib in grib_sections}

        # determine what terms are available to the clustering algorithm
        # (term 0 is excluded; a single common delta means even spacing)
        terms = sorted(terms_set - {Time(0)})
        delta = {last - first for first, last in pairwise(terms)}
        if len(delta) == 1:
            cluststep = delta.pop().hour
        else:
            cluststep = -999
            logger.error('Terms are not evenly spaced. What should we do ?')
            logger.error('Terms=' + str(terms) + 'delta=' + str(delta))
            logger.error('Continuing with little hope and cluststep = %d', cluststep)
        clustdeb = terms[0].hour
        clustfin = terms[-1].hour
        logger.info('clustering deb=%d fin=%d step=%d', clustdeb, clustfin, cluststep)

        # Deal with xGribs
        file_list_cat = [f + '.concatenated' for f in file_list]
        parallel_grib_filter(self.context, file_list, file_list_cat,
                             cat=True, nthreads=self.gribfilter_tasks)

        # Namelist tweaking is only needed for a real clustering run
        # (see execute(): fake outputs are generated otherwise)
        if self.nbmembers is None or self.nbmembers > self.nbclust:

            # Tweak the namelist
            namsec = self.setlink(initrole='Namelist', initkind='namelist')
            logger.info("NBRCLUST added to NAMCLUST namelist entry: %d", self.nbclust)
            namsec[0].rh.contents['NAMCLUST']['NBRCLUST'] = self.nbclust
            if self.nbmembers is not None:
                logger.info("NBRMB added to NAMCLUST namelist entry: %d", self.nbmembers)
                namsec[0].rh.contents['NAMCLUST']['NBRMB'] = self.nbmembers
            logger.info('Setting namelist macros ECHDEB=%d ECHFIN=%d ECHSTEP=%d',
                        clustdeb, clustfin, cluststep)
            namsec[0].rh.contents.setmacro('ECHDEB', clustdeb)
            namsec[0].rh.contents.setmacro('ECHFIN', clustfin)
            namsec[0].rh.contents.setmacro('ECHSTEP', cluststep)
            namsec[0].rh.save()
            namsec[0].rh.container.cat()

        # Write the list of concatenated grib files for the executable
        with open(self.fileoutput, 'w') as optFile:
            optFile.write('\n'.join(file_list_cat))

    def execute(self, rh, opts):
        """Run the clustering binary, or fake its outputs when there are
        too few members to cluster."""
        # If the number of members is big enough -> normal processing
        if self.nbmembers is None or self.nbmembers > self.nbclust:
            logger.info("Normal clustering run (%d members, %d clusters)",
                        self.nbmembers, self.nbclust)
            super().execute(rh, opts)
        # if not, generate face outputs
        else:
            logger.info("Generating fake outputs with %d members", self.nbmembers)
            # One single-member cluster per available member
            with open('ASCII_CLUST', 'w') as fdcl:
                fdcl.write("\n".join(['{0:3d} {1:3d} {0:3d}'.format(i, 1)
                                      for i in range(1, self.nbmembers + 1)]))
            with open('ASCII_RMCLUST', 'w') as fdrm:
                fdrm.write("\n".join([str(i) for i in range(1, self.nbmembers + 1)]))
            with open('ASCII_POPCLUST', 'w') as fdpop:
                fdpop.write("\n".join(['1'] * self.nbmembers))

    def postfix(self, rh, opts):
        """Create a JSON with all the clustering informations."""
        avail_json = self.context.sequence.effective_inputs(role='AvailableMembers',
                                                            kind='mbpopulation')
        # If no population file is here, does nothing
        if avail_json:
            logger.info("Creating a JSON output...")
            # Read the clustering information
            if self.system.path.exists('ASCII_CLUST'):
                # New format for clustering outputs
                with open('ASCII_CLUST') as fdcl:
                    cluster_members = list()
                    cluster_sizes = list()
                    for l in [l.split() for l in fdcl.readlines()]:
                        cluster_members.append(int(l[0]))
                        cluster_sizes.append(int(l[1]))
            else:
                # Legacy two-file format
                with open('ASCII_RMCLUST') as fdrm:
                    cluster_members = [int(m) for m in fdrm.readlines()]
                with open('ASCII_POPCLUST') as fdpop:
                    cluster_sizes = [int(s) for s in fdpop.readlines()]
            # Update the population JSON
            mycontent = copy.deepcopy(avail_json[0].rh.contents)
            mycontent.data['resource_kind'] = 'mbsample'
            mycontent.data['drawing'] = list()
            for member_no, cluster_size in zip(cluster_members, cluster_sizes):
                # member_no is 1-based, hence the -1 index
                mycontent.data['drawing'].append(copy.copy(mycontent.data['population'][member_no - 1]))
                mycontent.data['drawing'][-1]['cluster_size'] = cluster_size
            # Create a clustering output file
            new_container = footprints.proxy.container(filename='clustering_output.json',
                                                       actualfmt='json')
            mycontent.rewrite(new_container)

        super().postfix(rh, opts)
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
class Addpearp(BlindRun):
    """Add the selected PEARP perturbations to the deterministic AROME initial conditions."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=['addpearp', ],
                remap=dict(autoremap='first'),
            ),
            # Number of perturbations to add
            nbpert=dict(
                type=int,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Tweak the namelist: tell the executable how many perturbations to add
        namsec = self.setlink(initrole='Namelist', initkind='namelist')
        # Bugfix: the log message used to mention "NBE" although NBPERT is
        # the key actually written to the NAMIC block.
        logger.info("NBPERT added to NAMIC namelist entry: %d", self.nbpert)
        namsec[0].rh.contents['NAMIC']['NBPERT'] = self.nbpert
        namsec[0].rh.save()
        namsec[0].rh.container.cat()
|