vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,745 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AlgoComponents dedicated to NWP direct forecasts.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import math
|
|
6
|
+
import re
|
|
7
|
+
from collections import defaultdict
|
|
8
|
+
|
|
9
|
+
from bronx.fancies import loggers
|
|
10
|
+
from bronx.stdtypes.date import Time, Month, Period
|
|
11
|
+
import footprints
|
|
12
|
+
|
|
13
|
+
from vortex.algo.components import AlgoComponentError, Parallel
|
|
14
|
+
from vortex.layout.dataflow import intent
|
|
15
|
+
from vortex.syntax.stdattrs import model
|
|
16
|
+
from vortex.util.structs import ShellEncoder
|
|
17
|
+
from .ifsroot import IFSParallel
|
|
18
|
+
from ..tools.drhook import DrHookDecoMixin
|
|
19
|
+
from ..syntax.stdattrs import outputid_deco
|
|
20
|
+
|
|
21
|
+
from typing import Any, Callable, Iterable
|
|
22
|
+
from vortex.data.handlers import Handler
|
|
23
|
+
from vortex.layout.dataflow import Section
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
#: No automatic export
|
|
27
|
+
__all__ = []
|
|
28
|
+
|
|
29
|
+
logger = loggers.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class Forecast(IFSParallel):
    """Forecast for IFS-like Models.

    Runs the Arpege/IFS binary in direct-forecast mode: links the initial
    condition (and possible IAU increment files) into place, fixes the
    climatology files, tunes the output namelist, then gathers gridpoint
    and DDH outputs after the run.
    """

    _footprint = [
        outputid_deco,
        dict(
            info = "Run a forecast with Arpege/IFS.",
            attr = dict(
                kind = dict(
                    values = ['forecast', 'fc'],
                    remap = dict(forecast = 'fc')
                ),
                hist_terms = dict(
                    info = "The list of terms when historical file production is requested.",
                    type = footprints.FPList,
                    optional = True,
                ),
                surfhist_terms = dict(
                    info ="The list of terms when surface file production is requested.",
                    type = footprints.FPList,
                    optional = True,
                ),
                pos_terms=dict(
                    info = "The list of terms when post-processed data is requested.",
                    type = footprints.FPList,
                    optional = True,
                ),
                s_norm_terms=dict(
                    info = "The list of terms when spectal norms should be computed.",
                    type = footprints.FPList,
                    optional = True,
                ),
                flyargs = dict(
                    # Prefixes of output files that may be polled on the fly
                    default = ('ICMSH', 'PF'),
                ),
                xpname = dict(
                    default = 'FCST'
                ),
                ddhpack = dict(
                    info = "After run, gather the DDH output file in directories.",
                    type = bool,
                    optional = True,
                    default = False,
                    doc_zorder = -5,
                ),
            )
        )
    ]

    @property
    def realkind(self) -> str:
        """The kind identifier of this algo component."""
        return 'forecast'

    def _outputs_configurator(self, bin_rh):
        """Build the helper that maps the ``*_terms`` attributes to namelist settings.

        The configurator is resolved through the footprints collector, based
        on the model name and the binary's cycle.
        """
        return footprints.proxy.ifsoutputs_configurator(
            model=self.model,
            cycle=bin_rh.resource.cycle,
            fcterm_unit=self.fcunit,
        )

    def prepare(self, rh, opts):
        """Default pre-link for the initial condition file"""
        super().prepare(rh, opts)

        # Link the initial condition under the name the binary expects
        # (given by the 'ic' naming convention).
        ininc = self.naming_convention('ic', rh)
        analysis = self.setlink(
            initrole=('InitialCondition', 'Analysis'),
            initname=ininc()
        )

        if analysis:
            analysis = analysis.pop()
            thismonth = analysis.rh.resource.date.month

            # Possibly fix the model clim
            if self.do_climfile_fixer(rh, convkind='modelclim'):
                self.climfile_fixer(rh, convkind='modelclim', month=thismonth,
                                    inputrole=('GlobalClim', 'InitialClim'),
                                    inputkind='clim_model')

            # Possibly fix post-processing clim files
            self.all_localclim_fixer(rh, thismonth)

        # File linking for IAU increments
        #
        # In the case of a forecast with IAU, the IFS executable
        # expects to find input increment files (both analysis and
        # background counterpart) names suffixed according to the
        # order by which they are to be applied. In practice
        # input files are not renamed but links with correct names
        # are created pointing to them instead. Both analysed and
        # background states are required: to inject analysis
        # increments over multiple timesteps, the IAU algorithm
        # must be able to compute a difference between analysis
        # and background states.
        #
        # TODO: Clarify where both regexp keys are coming from
        guesses = self.context.sequence.effective_inputs(
            role=re.compile(r'IAU_(Background|Guess)', flags=re.IGNORECASE)
        )
        analyses = self.context.sequence.effective_inputs(
            role=re.compile(r'IAU_(Analysis|Ic)', flags=re.IGNORECASE)
        )

        def key(s: Section):
            # Increment files are sorted according to date, then
            # effective term.
            return (
                s.rh.resource.date,
                s.rh.resource.date + s.rh.resource.term,
            )
        self._create_ordered_links(
            bin_handler=rh, sections=analyses,
            sort_key=key, nameconv_kind="iau_analysis",
        )
        self._create_ordered_links(
            bin_handler=rh, sections=guesses,
            sort_key=key, nameconv_kind="iau_background",
        )

        # Promises should be nicely managed by a co-proccess
        if self.promises:
            # Work out which output prefixes need polling given the
            # kinds of the promised resources.
            prefixes_set = set()
            for pr_res in [pr.rh.resource for pr in self.promises]:
                if pr_res.realkind == 'historic':
                    prefixes_set.add('ICMSH')
                if pr_res.realkind == 'gridpoint':
                    prefixes_set.add('{:s}PF'.format('GRIB' if pr_res.nativefmt == 'grib' else ''))
            self.io_poll_args = tuple(prefixes_set)
            # Enable on-the-fly output handling only if something is promised
            self.flyput = len(self.io_poll_args) > 0

    def _create_ordered_links(
        self,
        bin_handler: Handler,
        sections: Iterable[Section],
        sort_key: Callable[[Section], Any],
        nameconv_kind: str,
    ):
        """Create links to local files, with ordered names

        For an iterable of sections objects, this function creates
        symlinks to the corresponding local files (described by the
        associated "container" object).

        Link names are suffixed by a number string based on their
        order after sorting sections by the sort key. Example:
        ICIAUFCSTBK01,
        ICIAUFCSTBK02,
        ICIAUFCSTBK03...
        """
        for i, sec in enumerate(sorted(sections, key=sort_key)):
            nameconv = self.naming_convention(
                nameconv_kind, bin_handler,
                actualfmt=sec.rh.container.actualfmt,
            )
            # Link names are numbered starting from 1
            target = nameconv(number=(i + 1))
            link_name = sec.rh.container.localpath()
            if self.system.path.exists(target):
                # Never clobber an existing file: warn and skip
                logger.warning(
                    "%s should be linked to %s but %s already exists.",
                    link_name, target, target
                )
                continue
            logger.info("Linking %s to %s.", link_name, target)
            # Wait for the input file if it is an expected (promised) one
            self.grab(sec, comment=nameconv_kind)
            self.system.softlink(link_name, target)

    def find_namelists(self, opts=None):
        """Find any namelists candidates in actual context inputs."""
        return [x.rh
                for x in self.context.sequence.effective_inputs(role='Namelist',
                                                                kind='namelist')]

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        """Tune the main namelist ('fort.4') using the outputs configurator.

        Returns True if *namcontents* was actually modified.
        """
        nam_updated = super().prepare_namelist_delta(
            rh, namcontents, namlocal
        )
        if namlocal == 'fort.4':
            # Inject the requested output terms into the main namelist
            o_conf = self._outputs_configurator(rh)
            o_conf.modelstate = self.hist_terms
            o_conf.surf_modelstate = self.surfhist_terms
            o_conf.post_processing = self.pos_terms
            o_conf.spectral_diag = self.s_norm_terms
            nam_updated_bis = o_conf(namcontents, namlocal)
            nam_updated = nam_updated or nam_updated_bis
        return nam_updated

    def postfix(self, rh, opts):
        """Find out if any special resources have been produced."""

        sh = self.system

        # Look up for the gridpoint files
        gp_out = sh.ls('PF{}*'.format(self.xpname))
        # Map each output domain to the sorted list of available terms
        gp_map = defaultdict(list)
        if gp_out:
            re_pf = re.compile(r'^PF{}(\w+)\+(\d+(?::\d+)?)$'.format(self.xpname))
            for fname in gp_out:
                match_pf = re_pf.match(fname)
                if match_pf:
                    gp_map[match_pf.group(1).lower()].append(Time(match_pf.group(2)))
            for k, v in gp_map.items():
                v.sort()
                logger.info('Gridpoint files found: domain=%s, terms=%s',
                            k,
                            ','.join([str(t) for t in v]))
        if len(gp_map) == 0:
            logger.info('No gridpoint file was found.')
        # Dump the map so downstream tasks can discover what was produced
        sh.json_dump(gp_map, 'gridpoint_map.out', indent=4, cls=ShellEncoder)

        # Gather DDH in folders
        if self.ddhpack:
            ddhmap = dict(DL='dlimited', GL='global', ZO='zonal')
            for (prefix, ddhkind) in ddhmap.items():
                flist = sh.glob('DHF{}{}+*'.format(prefix, self.xpname))
                if flist:
                    dest = 'ddhpack_{}'.format(ddhkind)
                    logger.info('Creating a DDH pack: %s', dest)
                    sh.mkdir(dest)
                    for lfa in flist:
                        sh.mv(lfa, dest, fmt='lfa')

        super().postfix(rh, opts)
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
class LAMForecast(Forecast):
    """Forecast for IFS-like Limited Area Models.

    In addition to the plain :class:`Forecast` behaviour, it pre-links the
    lateral boundary condition files and, when some of them are still
    expected, generates per-file synchronization scripts so that the model
    can wait for coupling files while running.
    """

    _footprint = dict(
        info = "Run a forecast with an Arpege/IFS like Limited Area Model.",
        attr = dict(
            kind = dict(
                values = ['lamfc', 'lamforecast'],
                remap = dict(lamforecast = 'lamfc'),
            ),
            synctool = dict(
                info = 'The name of the script called when waiting for coupling files',
                optional = True,
                default = 'atcp.alad',
                doc_visibility = footprints.doc.visibility.ADVANCED,
            ),
            synctpl = dict(
                info = 'The template used to generate the *synctool* script',
                optional = True,
                default = '@sync-fetch.tpl',
                doc_visibility = footprints.doc.visibility.ADVANCED,
            ),
        )
    )

    def spawn_command_options(self):
        """Dictionary provided for command line factory."""
        return dict(
            name=(self.xpname + 'xxxx')[:4].upper(),
            timescheme=self.timescheme,
            timestep=self.timestep,
            fcterm=self.fcterm,
            fcunit=self.fcunit,
            model='aladin',
        )

    def prepare(self, rh, opts):
        """Default pre-link for boundary conditions files."""
        super().prepare(rh, opts)

        sh = self.system

        # Check boundaries conditions
        cplrh = [x.rh for x in self.context.sequence.effective_inputs(
            role='BoundaryConditions',
            kind='boundary'
        )]
        # Boundary files are consumed in chronological (validity date) order
        cplrh.sort(key=lambda rh: rh.resource.date + rh.resource.term)

        # Ordered pre-linking of boundaring and building ot the synchronization tools
        firstsync = None
        sh.header('Check boundaries...')
        if any([x.is_expected() for x in cplrh]):
            # Some files are promises: synchronization scripts will be needed
            logger.info('Some boundaries conditions are still expected')
            self.mksync = True
        else:
            logger.info('All boundaries conditions available')
            self.mksync = False

        for i, bound in enumerate(cplrh):
            thisbound = bound.container.localpath()
            lbcnc = self.naming_convention('lbc', rh, actualfmt=bound.container.actualfmt)
            # Link each boundary file under its numbered LBC name
            sh.softlink(thisbound, lbcnc(number=i))
            if self.mksync:
                # One fetch script per boundary file (suffix = file index)
                thistool = self.synctool + '.{:03d}'.format(i)
                bound.mkgetpr(pr_getter=thistool, tplfetch=self.synctpl)
                if firstsync is None:
                    firstsync = thistool

        # Set up the first synchronization step
        if firstsync is not None:
            sh.symlink(firstsync, self.synctool)

    def postfix(self, rh, opts):
        """Post forecast information and cleaning."""
        sh = self.system

        if self.mksync:
            # Display the synchronization log, if any
            synclog = self.synctool + '.log'
            if sh.path.exists(synclog):
                sh.subtitle(synclog)
                sh.cat(synclog, output=False)

        super().postfix(rh, opts)
|
|
341
|
+
|
|
342
|
+
|
|
343
|
+
class DFIForecast(LAMForecast):
    """OBSOLETE CODE: do not use."""

    _footprint = dict(
        info="Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
        attr=dict(
            kind=dict(values=['fcdfi']),
        ),
    )

    def prepare(self, rh, opts):
        """Pre-link boundary conditions as special DFI files."""
        super().prepare(rh, opts)
        # The initial condition is duplicated (as links) under the LBC
        # naming convention, once for each of these pseudo terms.
        ic_name = self.naming_convention('ic', rh)
        lbc_name = self.naming_convention('lbc', rh, actualfmt='fa')
        for fake_number in (999, 0, 1):
            self.system.softlink(ic_name(), lbc_name(number=fake_number))
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
class FullPos(IFSParallel):
    """Fullpos for geometries transforms in IFS-like Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    # Abstract base: only concrete subclasses are footprint-resolvable.
    _abstract = True
    _footprint = dict(
        attr=dict(
            xpname=dict(default='FPOS'),
            # On-the-fly output polling is never used with Fullpos
            flyput=dict(
                default=False,
                values=[False],
            ),
            server_run=dict(values=[True, False]),
            serversync_method=dict(default='simple_socket'),
            serversync_medium=dict(default='cnt3_wait'),
        ),
    )

    @property
    def realkind(self) -> str:
        """The kind identifier of this algo component."""
        return 'fullpos'
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
class FullPosGeo(FullPos):
    """Fullpos for geometries transforms in IFS-like Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _footprint = dict(
        info = "Run a fullpos to interpolate to a new geometry",
        attr = dict(
            kind = dict(
                values = ['l2h', 'h2l'],
            ),
        )
    )

    # Directory where intermediate outputs are stored between loop iterations
    _RUNSTORE = 'RUNOUT'

    def _compute_target_name(self, r):
        # Derive the output name from the input one: strip the ICMSH/INIT
        # markers and prepend the 'PF' prefix.
        return ('PF' + re.sub('^(?:ICMSH)(.*?)(?:INIT)(.*)$', r'\1\2',
                              r.container.localpath()).format(self.xpname))

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided."""

        sh = self.system

        initrh = [x.rh for x in self.context.sequence.effective_inputs(
            role=('Analysis', 'Guess', 'InitialCondition'),
            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
                  re.compile('pert'), ),
        )]

        # is there one (deterministic forecast) or many (ensemble forecast) fullpos to perform ?
        isMany = len(initrh) > 1
        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')
        do_fix_output_clim = self.do_climfile_fixer(rh, convkind='targetclim', area='000')
        ininc = self.naming_convention('ic', rh)
        infile = ininc()

        for num, r in enumerate(initrh):
            str_subtitle = 'Fullpos execution on {}'.format(r.container.localpath())
            sh.subtitle(str_subtitle)

            # Set the actual init file
            if sh.path.exists(infile):
                # The init file may legitimately pre-exist only in the
                # single-input case
                if isMany:
                    logger.critical('Cannot process multiple Historic files if %s exists.', infile)
            else:
                sh.cp(r.container.localpath(), infile, fmt=r.container.actualfmt, intent=intent.IN)

            # Fix links for climatology files
            actualmonth = Month(r.resource.date + r.resource.term)
            startingclim = r.resource.geometry

            if do_fix_input_clim:
                self.climfile_fixer(rh, convkind='modelclim', month=actualmonth, geo=startingclim,
                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
                                    inputkind='clim_model')

            if do_fix_output_clim:
                self.climfile_fixer(rh, convkind='targetclim', month=actualmonth, notgeo=startingclim,
                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
                                    inputkind='clim_model', area='000')

            # Standard execution
            super().execute(rh, opts)

            # Find the output filename
            output_file = [x for x in sh.glob('PF{:s}*+*'.format(self.xpname))]
            if len(output_file) != 1:
                # Exactly one output file is expected per execution
                raise AlgoComponentError("No or multiple output files found.")
            output_file = output_file[0]

            # prepares the next execution
            if isMany:
                # Set a local storage place
                sh.mkdir(self._RUNSTORE)
                # Freeze the current output
                sh.move(output_file, sh.path.join(self._RUNSTORE, 'pfout_{:d}'.format(num)),
                        fmt=r.container.actualfmt)
                sh.remove(infile, fmt=r.container.actualfmt)
                # Cleaning/Log management
                if not self.server_run:
                    # The only one listing
                    sh.cat('NODE.001_01', output='NODE.all')
                    # Some cleaning
                    sh.rmall('ncf927', 'dirlst')
            else:
                # Link the output files to new style names
                sh.cp(output_file, self._compute_target_name(r),
                      fmt=r.container.actualfmt, intent='in')
                # Link the listing to NODE.all
                sh.cp('NODE.001_01', 'NODE.all', intent='in')

    def postfix(self, rh, opts):
        """Post processing cleaning."""
        sh = self.system

        # Same input selection as in execute(): needed to restore the
        # frozen outputs under their final names.
        initrh = [x.rh for x in self.context.sequence.effective_inputs(
            role=('Analysis', 'Guess', 'InitialCondition'),
            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
                  re.compile('pert'), ),
        )]
        if len(initrh) > 1:
            for num, r in enumerate(initrh):
                sh.move('{:s}/pfout_{:d}'.format(self._RUNSTORE, num),
                        self._compute_target_name(r), fmt=r.container.actualfmt)

        super().postfix(rh, opts)
|
|
506
|
+
|
|
507
|
+
|
|
508
|
+
class FullPosBDAP(FullPos):
    """Post-processing for IFS-like Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _footprint = dict(
        info = "Run a fullpos to post-process raw model outputs",
        attr = dict(
            kind = dict(
                values = ['fullpos', 'fp'],
                remap = dict(fp= 'fullpos')
            ),
            fcterm = dict(
                values = [0, ],
            ),
            outputid = dict(
                info = "The identifier for the encoding of post-processed fields.",
                optional = True,
            ),
            server_run = dict(
                values = [False, ],
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Some additional checks."""
        # A leftover selection namelist link would silently be picked up by
        # the binary: refuse to start in that case.
        if self.system.path.exists('xxt00000000'):
            raise AlgoComponentError('There should be no file named xxt00000000 in the working directory')
        super().prepare(rh, opts)

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided."""

        sh = self.system

        # Fullpos namelists (one per term)
        namrh = [x.rh for x in self.context.sequence.effective_inputs(
            kind='namelistfp'
        )]

        # Field selection namelists (one per term)
        namxx = [x.rh for x in self.context.sequence.effective_inputs(
            role='FullPosSelection',
            kind='namselect',
        )]

        initsec = [x for x in self.context.sequence.effective_inputs(
            role=('InitialCondition', 'ModelState'),
            kind='historic',
        )]
        # Model states are processed in increasing term order
        initsec.sort(key=lambda sec: sec.rh.resource.term)

        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')

        ininc = self.naming_convention('ic', rh)
        infile = ininc()

        for sec in initsec:
            r = sec.rh
            sh.subtitle('Loop on {:s}'.format(r.resource.term.fmthm))

            # The validity date drives the choice of climatology month
            thisdate = r.resource.date + r.resource.term
            thismonth = thisdate.month
            logger.info('Fullpos <month:%s>' % thismonth)

            if do_fix_input_clim:
                self.climfile_fixer(rh, convkind='modelclim',
                                    month=thismonth, geo=r.resource.geometry,
                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
                                    inputkind='clim_model')

            thesenames = self.all_localclim_fixer(rh, thismonth)

            # Set a local storage place
            runstore = 'RUNOUT' + r.resource.term.fmtraw
            sh.mkdir(runstore)

            # Define an input namelist
            try:
                # Pick the namelist matching the current term
                namfp = [x for x in namrh if x.resource.term == r.resource.term].pop()
                namfplocal = namfp.container.localpath()
                if self.outputid is not None:
                    self._set_nam_macro(namfp.contents, namfplocal, 'OUTPUTID', self.outputid)
                namfp.contents.rewrite(namfp.container)
                sh.remove('fort.4')
                sh.symlink(namfplocal, 'fort.4')
            except Exception:
                logger.critical('Could not get a fullpos namelist for term %s', r.resource.term)
                raise

            # Define an selection namelist
            if namxx:
                namxt = [x for x in namxx if x.resource.term == r.resource.term]
                if namxt:
                    sh.remove('xxt00000000')
                    sh.symlink(namxt.pop().container.localpath(), 'xxt00000000')
                else:
                    logger.critical('Could not get a selection namelist for term %s', r.resource.term)
                    raise AlgoComponentError()
            else:
                logger.info("No selection namelist are provided.")

            # Finally set the actual init file
            sh.remove(infile)
            # Wait for the input file if it is an expected (promised) one
            self.grab(sec, comment='Fullpos source (term={:s})'.format(r.resource.term.fmthm))
            sh.softlink(r.container.localpath(), infile)

            # Standard execution
            super().execute(rh, opts)

            # Freeze the current output
            for posfile in [x for x in (sh.glob('PF{:s}*+*'.format(self.xpname)) +
                                        sh.glob('GRIBPF{:s}*+*'.format(self.xpname)))]:
                rootpos = re.sub('0+$', '', posfile)
                fmtpos = 'grib' if posfile.startswith('GRIB') else 'lfi'
                targetfile = sh.path.join(runstore, rootpos + r.resource.term.fmthm)
                targetbase = sh.path.basename(targetfile)

                # Deal with potential promises
                expected = [x for x in self.promises
                            if x.rh.container.localpath() == targetbase]
                if expected:
                    logger.info("Start dealing with promises for: %s.",
                                ", ".join([x.rh.container.localpath() for x in expected]))
                    # Promised files must be delivered under their final base name
                    if posfile != targetbase:
                        sh.move(posfile, targetbase, fmt=fmtpos)
                        posfile = targetbase
                    for thispromise in expected:
                        thispromise.put(incache=True)

                sh.move(posfile, targetfile, fmt=fmtpos)

            # Keep the listings alongside the outputs for this term
            for logfile in sh.glob('NODE.*', 'std*'):
                sh.move(logfile, sh.path.join(runstore, logfile))

            # Some cleaning
            sh.rmall('PX{:s}*'.format(self.xpname), fmt='lfi')
            sh.rmall('ncf927', 'dirlst')
            for clim in thesenames:
                sh.rm(clim)

    def postfix(self, rh, opts):
        """Post processing cleaning."""
        sh = self.system

        # Bring the frozen per-term outputs back into the working directory
        for fpfile in [x for x in (sh.glob('RUNOUT*/PF{:s}*'.format(self.xpname)) +
                                   sh.glob('RUNOUT*/GRIBPF{:s}*+*'.format(self.xpname)))
                       if sh.path.isfile(x)]:
            sh.move(fpfile, sh.path.basename(fpfile),
                    fmt='grib' if 'GRIBPF' in fpfile else 'lfi')
        # Concatenate all the per-term listings
        sh.cat('RUNOUT*/NODE.001_01', output='NODE.all')

        super().postfix(rh, opts)
|
|
661
|
+
|
|
662
|
+
|
|
663
|
+
class OfflineSurfex(Parallel, DrHookDecoMixin):
    """Run a forecast with the SURFEX's offline binary.

    Works out the run and final dates from the input initial conditions,
    then fills in the macros of the SURFEX namelist(s) accordingly
    (timesteps, final stop date, number of forcing-file reads).
    """

    _footprint = [
        model,
        dict(
            info = "Run a forecast with the SURFEX's offline binary.",
            attr = dict(
                kind = dict(
                    values = ['offline_forecast', ],
                ),
                model = dict(
                    values = ['surfex', ],
                ),
                model_tstep = dict(
                    info = "The timestep of the model",
                    type = Period,
                ),
                diag_tstep = dict(
                    info = "The timestep for writing diagnostics outputs",
                    type = Period,
                ),
                fcterm = dict(
                    info = "The forecast's term",
                    type = Period,
                ),
                forcing_read_interval = dict(
                    info = "Read the forcing file every...",
                    type = Period,
                    default = Period('PT12H'),
                    optional = True,
                )
            )
        )
    ]

    def valid_executable(self, rh):
        """Check the executable's resource: it must be a SURFEX 'offline' binary."""
        bmodel = getattr(rh.resource, 'model', None)
        rc = bmodel == 'surfex' and rh.resource.realkind == 'offline'
        if not rc:
            logger.error('Inapropriate binary provided')
        return rc and super().valid_executable(rh)

    @staticmethod
    def _fix_nam_macro(sec, macro, value):
        """Set a given namelist macro and issue a log message."""
        sec.rh.contents.setmacro(macro, value)
        logger.info('Setup %s macro to %s.', macro, str(value))

    def prepare(self, rh, opts):
        """Setup the appropriate namelist macros.

        The run date is taken from the first initial-condition input
        (shifted by its term if any); the stop date is the run date plus
        *fcterm*. Macros set in each namelist: TSTEP, TSTEP_OUTPUTS and,
        when initial conditions were found, FINAL_STOP and NB_READS.
        """
        self.system.subtitle("Offline SURFEX Settings.")
        # Find the run/final date
        ic = self.context.sequence.effective_inputs(
            role=('InitialConditions', 'ModelState', 'Analysis'))
        if ic:
            if len(ic) > 1:
                logger.warning('Multiple initial conditions, using only the first one...')
            rundate = ic[0].rh.resource.date
            if hasattr(ic[0].rh.resource, 'term'):
                rundate += ic[0].rh.resource.term
            stopdate = rundate + self.fcterm
            # BUGFIX: the number of forcing reads must be computed while the
            # stop date is still a date object. The previous code first
            # turned it into a plain list and then computed
            # ``(finaldate - rundate)``, i.e. ``list - date``, which raised
            # a TypeError at runtime.
            nbreads = int(math.ceil((stopdate - rundate).length /
                                    self.forcing_read_interval.length))
            # SURFEX expects the final date as [year, month, day, seconds-in-day]
            finaldate = [stopdate.year, stopdate.month, stopdate.day,
                         stopdate.hour * 3600 + stopdate.minute * 60 + stopdate.second]
            logger.info('The final date is : %s', str(finaldate))
        else:
            logger.warning('No initial conditions were found. Hope you know what you are doing...')
            finaldate = None
        # Ok, let's find the namelist
        namsecs = self.context.sequence.effective_inputs(role=('Namelist', 'Namelistsurf'))
        for namsec in namsecs:
            logger.info("Processing: %s", namsec.rh.container.localpath())
            self._fix_nam_macro(namsec, 'TSTEP', self.model_tstep.length)
            self._fix_nam_macro(namsec, 'TSTEP_OUTPUTS', self.diag_tstep.length)
            if finaldate:
                self._fix_nam_macro(namsec, 'FINAL_STOP', finaldate)
                self._fix_nam_macro(namsec, 'NB_READS', nbreads)
            if namsec.rh.contents.dumps_needs_update:
                namsec.rh.save()
            logger.info("Namelist dump: \n%s", namsec.rh.container.read())
|