vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,609 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AlgoComponents dedicated to the coupling between NWP models.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
import footprints
|
|
7
|
+
|
|
8
|
+
from bronx.compat.functools import cached_property
|
|
9
|
+
from bronx.fancies import loggers
|
|
10
|
+
from bronx.stdtypes import date
|
|
11
|
+
|
|
12
|
+
from .ifsroot import IFSParallel
|
|
13
|
+
from ..tools.drhook import DrHookDecoMixin
|
|
14
|
+
from vortex.algo.components import AlgoComponentError, BlindRun, Parallel
|
|
15
|
+
from vortex.algo.components import AlgoComponentDecoMixin, algo_component_deco_mixin_autodoc
|
|
16
|
+
from vortex.layout.dataflow import intent
|
|
17
|
+
from vortex.tools.grib import EcGribDecoMixin
|
|
18
|
+
|
|
19
|
+
from .forecasts import FullPos
|
|
20
|
+
|
|
21
|
+
#: No automatic export
|
|
22
|
+
__all__ = []
|
|
23
|
+
|
|
24
|
+
logger = loggers.getLogger(__name__)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
#: Reusable footprint fragment: adds an optional ``basedate`` attribute
#: (the run date of the process that generated the coupling data) to any
#: AlgoComponent that includes it in its footprint list.
coupling_basedate_fp = footprints.Footprint(
    attr=dict(
        basedate=dict(
            info="The run date of the coupling generating process",
            type=date.Date,
            optional=True
        )
    )
)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@algo_component_deco_mixin_autodoc
class CouplingBaseDateNamMixin(AlgoComponentDecoMixin):
    """Add a basedate attribute and make namelist substitution."""

    _MIXIN_EXTRA_FOOTPRINTS = (coupling_basedate_fp, )

    def _prepare_basedate_hook(self, rh, opts):
        """Update the namelist with date information."""

        def _apply_macro(namrh, key, val):
            # Record the macro value in the namelist and trace it in the log.
            namrh.contents.setmacro(key, val)
            logger.info('Setup macro %s=%s in %s', key, str(val),
                        namrh.container.actualpath())

        # Compute the date macros once (they are the same for every namelist).
        date_macros = None
        if self.basedate is not None:
            date_macros = (('YYYY', int(self.basedate.year)),
                           ('MM', int(self.basedate.month)),
                           ('DD', int(self.basedate.day)))
        for namsec in self.context.sequence.effective_inputs(kind=('namelist',)):
            if date_macros is not None:
                for key, val in date_macros:
                    _apply_macro(namsec.rh, key, val)
            # Persist the namelist only when its dump actually changed.
            if namsec.rh.contents.dumps_needs_update:
                namsec.rh.save()

    _MIXIN_PREPARE_HOOKS = (_prepare_basedate_hook, )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class Coupling(FullPos):
    """Coupling for IFS-like LAM Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _footprint = [
        coupling_basedate_fp,
        dict(
            info = "Create coupling files for a Limited Area Model.",
            attr = dict(
                kind = dict(
                    values = ['coupling'],
                ),
            )
        )
    ]

    @property
    def realkind(self):
        return 'coupling'

    def prepare(self, rh, opts):
        """Default pre-link for namelist file and domain change."""
        super().prepare(rh, opts)
        namsec = self.setlink(initrole='Namelist', initkind='namelist', initname='fort.4')
        # Force the fullpos output domain name to "AREA" in every namelist
        # that defines the NAMFPC block.
        for nam in [x.rh for x in namsec if 'NAMFPC' in x.rh.contents]:
            logger.info('Substitute "AREA" to CFPDOM namelist entry')
            nam.contents['NAMFPC']['CFPDOM(1)'] = 'AREA'
            nam.save()

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided."""

        sh = self.system

        cplsec = self.context.sequence.effective_inputs(
            role=('InitialCondition', 'CouplingSource'),
            kind=('historic', 'analysis')
        )
        cplsec.sort(key=lambda s: s.rh.resource.term)
        ininc = self.naming_convention('ic', rh)
        infile = ininc()
        isMany = len(cplsec) > 1
        outprefix = 'PF{:s}AREA'.format(self.xpname)

        cplguess = self.context.sequence.effective_inputs(role='Guess')
        cplguess.sort(key=lambda s: s.rh.resource.term)
        guessing = bool(cplguess)

        cplsurf = self.context.sequence.effective_inputs(role=('SurfaceInitialCondition',
                                                               'SurfaceCouplingSource'))
        cplsurf.sort(key=lambda s: s.rh.resource.term)
        surfacing = bool(cplsurf)
        inisurfnc = self.naming_convention('ic', rh, model='surfex')
        infilesurf = inisurfnc()
        if surfacing:
            # Link in the Surfex's PGD
            sclimnc = self.naming_convention(kind='targetclim', rh=rh, model='surfex')
            self.setlink(
                initrole=('ClimPGD',),
                initkind=('pgdfa', 'pgdlfi'),
                initname=sclimnc(area='AREA')
            )

        for sec in cplsec:
            r = sec.rh
            sh.subtitle('Loop on {!s}'.format(r.resource))

            # First attempt to set actual date as the one of the source model
            actualdate = r.resource.date + r.resource.term

            # Expect the coupling source to be there...
            self.grab(sec, comment='coupling source')

            # Set the actual init file
            if sh.path.exists(infile):
                if isMany:
                    logger.critical('Cannot process multiple Historic files if %s exists.', infile)
            else:
                sh.cp(r.container.localpath(), infile, fmt=r.container.actualfmt, intent=intent.IN)

            # If the surface file is needed, set the actual initsurf file
            if cplsurf:
                # Expecting the coupling surface source to be there...
                cplsurf_in = cplsurf.pop(0)
                self.grab(cplsurf_in, comment='coupling surface source')
                if sh.path.exists(infilesurf):
                    if isMany:
                        logger.critical('Cannot process multiple surface historic files if %s exists.',
                                        infilesurf)
                else:
                    sh.cp(cplsurf_in.rh.container.localpath(), infilesurf,
                          fmt=cplsurf_in.rh.container.actualfmt, intent=intent.IN)
            elif surfacing:
                logger.error('No more surface source to loop on for coupling')

            # The output could be an input as well
            if cplguess:
                cplout = cplguess.pop(0)
                cplpath = cplout.rh.container.localpath()
                if sh.path.exists(cplpath):
                    actualdateguess = cplout.rh.resource.date + cplout.rh.resource.term
                    # BUGFIX: the error must be reported when the dates DIFFER
                    # (the original test used '==', contradicting the message).
                    if actualdate != actualdateguess:
                        logger.error('The guess date, %s, is different from the source date %s, !',
                                     actualdateguess.reallynice(), actualdate.reallynice())
                    # Expect the coupling guess to be there...
                    self.grab(cplout, comment='coupling guess')
                    logger.info('Coupling with existing guess <%s>', cplpath)
                    inoutfile = outprefix + '+0000'
                    if cplpath != inoutfile:
                        sh.remove(inoutfile, fmt=cplout.rh.container.actualfmt)
                        sh.move(cplpath, inoutfile,
                                fmt=cplout.rh.container.actualfmt,
                                intent=intent.INOUT)
                else:
                    logger.warning('Missing guess input for coupling <%s>', cplpath)
            elif guessing:
                logger.error('No more guess to loop on for coupling')

            # Find out actual monthly climatological resource
            actualmonth = date.Month(actualdate)
            self.climfile_fixer(rh, convkind='modelclim', month=actualmonth,
                                inputrole=('GlobalClim', 'InitialClim'),
                                inputkind='clim_model')
            self.climfile_fixer(rh, convkind='targetclim', month=actualmonth,
                                inputrole=('LocalClim', 'TargetClim'),
                                inputkind='clim_model', area='AREA')

            # Standard execution
            super().execute(rh, opts)

            # Set a local appropriate file
            posfile = [x for x in sh.glob(outprefix + '+*')
                       if re.match(outprefix + r'\+\d+(?:\:\d+)?(?:\.sfx)?$', x)]
            if len(posfile) > 1:
                logger.critical('Many ' + outprefix + ' files, do not know how to adress that')
            # NOTE(review): an empty posfile list would raise IndexError here;
            # presumably at least one fullpos output is guaranteed by the run.
            posfile = posfile[0]
            if self.basedate is None:
                actualterm = r.resource.term
            else:
                actualterm = (actualdate - self.basedate).time()
            actualname = (re.sub(r'^.+?((?:_\d+)?)(?:\+[:\d]+)?$', r'CPLOUT\1+', r.container.localpath()) +
                          actualterm.fmthm)
            if isMany:
                sh.move(sh.path.realpath(posfile), actualname,
                        fmt=r.container.actualfmt)
                if sh.path.exists(posfile):
                    sh.rm(posfile)
            else:
                # This is here because of legacy with .sfx files
                sh.cp(sh.path.realpath(posfile), actualname,
                      fmt=r.container.actualfmt, intent=intent.IN)

            # promises management
            expected = [x for x in self.promises if x.rh.container.localpath() == actualname]
            if expected:
                for thispromise in expected:
                    thispromise.put(incache=True)

            # The only one listing
            if not self.server_run:
                sh.cat('NODE.001_01', output='NODE.all')

            # prepares the next execution
            if isMany:
                # Some cleaning
                sh.rmall('PXFPOS*', fmt=r.container.actualfmt)
                sh.remove(infile, fmt=r.container.actualfmt)
                if cplsurf:
                    sh.remove(infilesurf, fmt=r.container.actualfmt)
                if not self.server_run:
                    sh.rmall('ncf927', 'dirlst', 'NODE.[0123456789]*', 'std*')
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
class CouplingLAM(Coupling):
    """Coupling for LAM to LAM Models (useless beyond cy40).

    OBSOLETE a/c cy40.
    """

    _footprint = dict(
        info = "Create coupling files for a Limited Area Model (useless beyond cy40).",
        attr = dict(
            kind = dict(
                values = ['lamcoupling'],
            ),
        )
    )

    def spawn_command_options(self):
        """Dictionary provided for command line factory."""
        # Same options as the parent class, but force the 'aladin' model.
        cmd_opts = super().spawn_command_options()
        cmd_opts.update(model='aladin')
        return cmd_opts
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
@algo_component_deco_mixin_autodoc
class PrepMixin(AlgoComponentDecoMixin):
    """Coupling/Interpolation of Surfex files."""

    _MIXIN_EXTRA_FOOTPRINTS = (footprints.Footprint(
        info="Coupling/Interpolation of Surfex files.",
        attr=dict(
            kind=dict(
                values=['prep'],
            ),
            underlyingformat=dict(
                info="The format of input data (as expected by the PREP executable).",
                values=['fa', 'lfi', 'netcdf'],
                optional=True,
                default='fa'
            ),
            underlyingoutputformat=dict(
                info=("The format of output data (as expected by the PREP executable)." +
                      "If omited, *underlyingformat* is used."),
                values=['fa', 'lfi', 'netcdf', 'txt'],
                optional=True,
            ),
            outputformat=dict(
                info=("The format of output data (as expected by the user)." +
                      "If omited, same as input data."),
                values=['fa', 'lfi', 'netcdf', 'txt'],
                optional=True,
            ),
        )
    ), )

    @cached_property
    def _actual_u_output_format(self):
        """The PREP output format: *underlyingoutputformat*, falling back to *underlyingformat*."""
        return (self.underlyingoutputformat
                if self.underlyingoutputformat is not None else
                self.underlyingformat)

    def _actual_output_format(self, in_format):
        """The user-facing output format: *outputformat*, falling back to *in_format*."""
        return (self.outputformat if self.outputformat is not None
                else in_format)

    @staticmethod
    def _sfx_fmt_remap(fmt):
        """Map a vortex format name to the file extension PREP expects ('netcdf' -> 'nc')."""
        return dict(netcdf='nc').get(fmt, fmt)

    @cached_property
    def _has_sfx_lfi(self):
        """True when both 'sfx' and 'lfi' system addons are loaded; raise otherwise."""
        addon_checked = ('sfx' in self.system.loaded_addons() and
                         'lfi' in self.system.loaded_addons())
        if not addon_checked:
            raise RuntimeError("The sfx addon is needed... please load it.")
        return addon_checked

    def _do_input_format_change(self, section, output_name, output_fmt):
        """Make *section*'s local file available as *output_name* in *output_fmt*.

        Converts fa -> lfi through the sfx addon when needed; otherwise just
        copies (read-only intent) the local file under the expected name.
        """
        (localpath, infmt) = (section.rh.container.localpath(),
                              section.rh.container.actualfmt)
        self.system.subtitle("Processing inputs/climatologies")
        if section.rh.container.actualfmt != output_fmt:
            # Only the fa -> lfi conversion is supported (via the sfx addon).
            if infmt == 'fa' and output_fmt == 'lfi' and self._has_sfx_lfi:
                if self.system.path.exists(output_name):
                    raise OSError("The file {!r} already exists.".format(output_name))
                logger.info("Calling sfxtools' fa2lfi from %s to %s.", localpath, output_name)
                self.system.sfx_fa2lfi(localpath, output_name)
            else:
                raise RuntimeError("Format conversion from {!r} to {!r} is not possible".format(
                    infmt, output_fmt))
        else:
            # Same format: a plain copy (skipped when the target already exists).
            if not self.system.path.exists(output_name):
                logger.info("Linking %s to %s", localpath, output_name)
                self.system.cp(localpath, output_name, intent=intent.IN, fmt=infmt)

    def _process_outputs(self, binrh, section, output_clim, output_name):
        """Rename/convert the PREP output and its listing; return the final output name.

        :param binrh: the executable's resource handler (its cycle selects the listing name)
        :param section: the input section the output derives its radical from
        :param output_clim: the target climatology file (used to seed the FA headers)
        :param output_name: the raw PREP output file (e.g. ``PREP2.lfi``)
        """
        (radical, outfmt) = (self.system.path.splitext(section.rh.container.localpath())[0],
                             self._actual_output_format(section.rh.container.actualfmt))
        finaloutput = '{:s}_interpolated.{:s}'.format(radical, outfmt)
        finallisting = '{:s}_listing'.format(radical)
        self.system.subtitle("Processing outputs")
        if outfmt != self._actual_u_output_format:
            # There is a need for a format change
            if outfmt == 'fa' and self._actual_u_output_format == 'lfi' and self._has_sfx_lfi:
                logger.info("Calling lfitools' faempty from %s to %s.", output_clim, finaloutput)
                self.system.fa_empty(output_clim, finaloutput)
                logger.info("Calling sfxtools' lfi2fa from %s to %s.", output_name, finaloutput)
                self.system.sfx_lfi2fa(output_name, finaloutput)
                finallfi = '{:s}_interpolated.{:s}'.format(radical, self._actual_u_output_format)
                self.system.mv(output_name, finallfi)
            else:
                raise RuntimeError("Format conversion from {!r} to {!r} is not possible".format(
                    self._actual_u_output_format, outfmt))
        else:
            # No format change needed
            logger.info("Moving %s to %s", output_name, finaloutput)
            self.system.mv(output_name, finaloutput, fmt=outfmt)
        # Also rename the listing :-)
        if binrh.resource.cycle < 'cy48t1':
            # Pre-cy48t1 binaries may write either listing name; try both.
            try:
                self.system.mv('LISTING_PREP.txt', finallisting)
            except OSError:
                self.system.mv('LISTING_PREP0.txt', finallisting)
        else:
            self.system.mv('LISTING_PREP0.txt', finallisting)
        return finaloutput

    def _prepare_prep_hook(self, rh, opts):
        """Default pre-link for namelist file and domain change."""
        # Convert the initial clim if needed...
        iniclim = self.context.sequence.effective_inputs(role=('InitialClim',))
        if not (len(iniclim) == 1):
            raise AlgoComponentError("One Initial clim have to be provided")
        self._do_input_format_change(iniclim[0],
                                     'PGD1.' + self._sfx_fmt_remap(self.underlyingformat),
                                     self.underlyingformat)
        # Convert the target clim if needed...
        targetclim = self.context.sequence.effective_inputs(role=('TargetClim',))
        if not (len(targetclim) == 1):
            raise AlgoComponentError("One Target clim have to be provided")
        self._do_input_format_change(targetclim[0],
                                     'PGD2.' + self._sfx_fmt_remap(self._actual_u_output_format),
                                     self._actual_u_output_format)

    _MIXIN_PREPARE_HOOKS = (_prepare_prep_hook, )

    def _spawn_hook_prep_hook(self):
        """Dump the namelists."""
        for namsec in self.context.sequence.effective_inputs(kind=('namelist', )):
            self.system.subtitle("Here is the content of the {:s} namelist"
                                 .format(namsec.rh.container.actualpath()))
            namsec.rh.container.cat()

    _MIXIN_SPAWN_HOOKS = (_spawn_hook_prep_hook, )

    def _execute_prep_common(self, rh, opts):
        """Loop on the various initial conditions provided."""
        sh = self.system

        cplsec = self.context.sequence.effective_inputs(
            role=('InitialCondition', 'CouplingSource'),
            kind=('historic', 'analysis')
        )
        cplsec.sort(key=lambda s: s.rh.resource.term)
        infile = 'PREP1.{:s}'.format(self._sfx_fmt_remap(self.underlyingformat))
        outfile = 'PREP2.{:s}'.format(self._sfx_fmt_remap(self._actual_u_output_format))
        targetclim = self.context.sequence.effective_inputs(role=('TargetClim',))
        targetclim = targetclim[0].rh.container.localpath()

        for sec in cplsec:
            r = sec.rh
            sh.header('Loop on {:s}'.format(r.container.localpath()))

            # Expect the coupling source to be there...
            self.grab(sec, comment='coupling source')

            # Set the actual init file
            # NOTE(review): only logs critical when infile exists — does not abort.
            if sh.path.exists(infile):
                logger.critical('Cannot process input files if %s exists.', infile)
            self._do_input_format_change(sec, infile, self.underlyingformat)

            # Standard execution (delegated to the actual AlgoComponent class).
            super(self.mixin_execute_companion(), self).execute(rh, opts)
            sh.subtitle("Listing after PREP")
            sh.dir(output=False, fatal=False)

            # Deal with outputs
            actualname = self._process_outputs(rh, sec, targetclim, outfile)

            # promises management
            expected = [x for x in self.promises if x.rh.container.localpath() == actualname]
            if expected:
                for thispromise in expected:
                    thispromise.put(incache=True)

            # Some cleaning
            sh.rmall('*.des')
            sh.rmall('PREP1.*')

    _MIXIN_EXECUTE_OVERWRITE = _execute_prep_common
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
class Prep(BlindRun, PrepMixin, CouplingBaseDateNamMixin,
           DrHookDecoMixin, EcGribDecoMixin):
    """Coupling/Interpolation of Surfex files (non-MPI version)."""
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
class ParallelPrep(Parallel, PrepMixin, CouplingBaseDateNamMixin,
                   DrHookDecoMixin, EcGribDecoMixin):
    """Coupling/Interpolation of Surfex files (MPI version)."""
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
class C901(IFSParallel):
    """Run of C901 configuration."""

    _footprint = dict(
        info = "Run C901 configuration",
        attr = dict(
            kind = dict(
                values = ["c901", ]
            ),
            clim = dict(
                type = bool
            ),
            xpname = dict(
                default = 'a001'
            )
        )
    )

    # File-name templates for the IFS-style input/output files.
    SPECTRAL_FILE_SH = "ICMSH{prefix}INIT{suffix}"
    GRIDPOINT_FILE_UA = "ICMUA{prefix}INIT{suffix}"
    GRIDPOINT_FILE_GG = "ICMGG{prefix}INIT{suffix}"
    OUTPUT_FILE_NAME = "CN90x{}INIT"
    OUTPUT_LISTING_NAME = "NODE.001_01"
    # (role, template) pairs: per-validity inputs vs constant inputs.
    LIST_INPUT_FILES = [("SpectralFileSH", SPECTRAL_FILE_SH),
                        ("GridpointFileUA", GRIDPOINT_FILE_UA),
                        ("GridpointFileGG", GRIDPOINT_FILE_GG)]
    LIST_CST_INPUT_FILES = [("ConstantSpectralFileSH", SPECTRAL_FILE_SH),
                            ("ConstantGridpointFileUA", GRIDPOINT_FILE_UA),
                            ("ConstantGridpointFileGG", GRIDPOINT_FILE_GG)]

    @property
    def realkind(self):
        return "c901"

    def sort_files_per_prefix(self, list_types, unique=False):
        """Function used to sort the files according to their prefix in a given type.

        :param list_types: iterable of (role, filename template) pairs
        :param unique: when True, raise if more than one file shares a prefix
        :return: dict mapping role -> prefix -> sections sorted by validity date
        :raises ValueError: when a file name does not match its template, or
            when *unique* is violated
        """
        result = dict()
        for (file_role, file_template) in list_types:
            result[file_role] = dict()
            input_files = self.context.sequence.effective_inputs(
                role=file_role
            )
            # Turn the template into a regex capturing the 4-char prefix and suffix.
            template = file_template.format(prefix=r"(?P<prefix>\S{4})", suffix=r"(?P<suffix>\S*)")
            for file_s in input_files:
                file_name = file_s.rh.container.filename
                find_elements = re.search(template, file_name)
                if find_elements is None:
                    logger.error("The name of the file %s do not follow the template %s.",
                                 file_name, template)
                    raise ValueError("The name of the file do not follow the template.")
                else:
                    if find_elements.group("prefix") not in result[file_role]:
                        result[file_role][find_elements.group("prefix")] = list()
                    else:
                        if unique:
                            logger.error("Only one file should be present for each type and each suffix.")
                            raise ValueError("Only one file should be present for each suffix.")
                    result[file_role][find_elements.group("prefix")].append(file_s)
            if result[file_role]:
                # Sort each prefix group by the validity date (date + term).
                for file_prefix in result[file_role]:
                    result[file_role][file_prefix].sort(key=lambda s: s.rh.resource.date + s.rh.resource.term)
            else:
                # Drop roles that had no effective input at all.
                del result[file_role]
        return result

    def execute(self, rh, opts):
        """Loop on the various files provided"""

        sh = self.system

        # Create the template for files to be removed at each validity date and for the outputname
        deleted_spectral_file_SH = self.SPECTRAL_FILE_SH.format(prefix="*", suffix="")
        deleted_gridpoint_file_UA = self.GRIDPOINT_FILE_UA.format(prefix="*", suffix="")
        deleted_gridpoint_file_GG = self.GRIDPOINT_FILE_GG.format(prefix="*", suffix="")
        output_name = self.OUTPUT_FILE_NAME.format(self.xpname.upper())

        # Sort input files
        sorted_cst_input_files = self.sort_files_per_prefix(self.LIST_CST_INPUT_FILES, unique=True)
        sorted_input_files = self.sort_files_per_prefix(self.LIST_INPUT_FILES)

        # Determine the validity present for each non constant input files,
        # check that they are the same for all.
        # Also create the list of the filenames that should be deleted
        input_validity = list()
        for file_role in sorted_input_files:
            for file_prefix in sorted_input_files[file_role]:
                input_validity.append([s.rh.resource.date + s.rh.resource.term
                                       for s in sorted_input_files[file_role][file_prefix]])
        # All prefix groups must expose the exact same list of validities.
        test_wrong_input_validity = True
        for i in range(1, len(input_validity)):
            test_wrong_input_validity = test_wrong_input_validity and (input_validity[0] == input_validity[i])
        self.algoassert(test_wrong_input_validity,
                        "The files of each type must have the same validity dates.")

        # Modify namelist
        input_namelist = self.context.sequence.effective_inputs(
            role="Namelist",
            kind="namelist"
        )
        for namelist in input_namelist:
            namcontents = namelist.rh.contents
            self._set_nam_macro(namcontents, namelist.rh.container.actualpath(),
                                'LLCLIM', self.clim)
            if namcontents.dumps_needs_update:
                namcontents.rewrite(namelist.rh.container)

        for current_validity in input_validity[0]:
            # Deal with constant input files (gridpoint and spectral)
            for (file_role, file_template) in self.LIST_CST_INPUT_FILES:
                if file_role in sorted_cst_input_files:
                    for file_prefix in sorted_cst_input_files[file_role]:
                        file_name = file_template.format(prefix=file_prefix, suffix="")
                        # Constant inputs: re-link the single (unique) file each iteration.
                        current_file_input = sorted_cst_input_files[file_role][file_prefix][0]
                        self.algoassert(not sh.path.exists(file_name),
                                        "The file {} already exists. It should not.".format(file_name))
                        sh.cp(current_file_input.rh.container.iotarget(), file_name, intent="in")

            # Deal with other input files (gridpoint and spectral)
            for (file_role, file_template) in self.LIST_INPUT_FILES:
                if file_role in sorted_input_files:
                    for file_prefix in sorted_input_files[file_role]:
                        file_name = file_template.format(prefix=file_prefix, suffix="")
                        # NOTE(review): .pop() takes the LAST (latest-validity) entry of a
                        # list sorted in ascending date order, while the outer loop walks
                        # validities in ascending order — confirm this pairing is intended.
                        current_file_input = sorted_input_files[file_role][file_prefix].pop()
                        self.algoassert(not sh.path.exists(file_name),
                                        "The file {} already exists. It should not.".format(file_name))
                        sh.cp(current_file_input.rh.container.iotarget(), file_name, intent="in")

            if self.clim:
                # Find the right climatology file
                current_month = date.Month(current_validity)
                self.climfile_fixer(rh, convkind='modelclim', month=current_month,
                                    inputrole=('GlobalClim', 'InitialClim'),
                                    inputkind='clim_model')

            # Standard execution
            super().execute(rh, opts)
            # Move the output file
            # NOTE(review): current_file_input is the last section linked above; it would
            # be unbound if no non-constant input matched — presumably guaranteed upstream.
            current_term = current_file_input.rh.resource.term
            sh.move(output_name, output_name + "+{}".format(current_term.fmthm))
            # Cat all the listings into a single one
            sh.cat(self.OUTPUT_LISTING_NAME, output='NODE.all')
            # Remove unneeded files
            sh.rmall(deleted_spectral_file_SH, deleted_gridpoint_file_GG, deleted_gridpoint_file_UA,
                     'std*', self.OUTPUT_LISTING_NAME)
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
class DomeoForcingAtmo(BlindRun, CouplingBaseDateNamMixin):
    """Correct the Domeo forcing file."""

    _footprint = dict(
        info='Domeo Forcing Atmo',
        attr=dict(
            kind=dict(
                values=['domeo_forcing'],
            ),
            # The mixin's basedate attribute is optional by default; this
            # component makes it mandatory.
            basedate=dict(
                optional=False,
            ),
        )
    )
|