vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,927 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AlgoComponents for the next generation of Fullpos runs (based on the 903
|
|
3
|
+
configuration).
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import collections
|
|
7
|
+
import functools
|
|
8
|
+
import math
|
|
9
|
+
import re
|
|
10
|
+
from itertools import filterfalse
|
|
11
|
+
import time
|
|
12
|
+
|
|
13
|
+
from bronx.compat.functools import cached_property
|
|
14
|
+
from bronx.datagrip.namelist import NamelistBlock
|
|
15
|
+
from bronx.stdtypes.date import Time, Date
|
|
16
|
+
from bronx.fancies import loggers
|
|
17
|
+
|
|
18
|
+
import footprints
|
|
19
|
+
|
|
20
|
+
from vortex.algo.components import AlgoComponentError
|
|
21
|
+
import vortex.layout.monitor as _lmonitor
|
|
22
|
+
|
|
23
|
+
from .ifsroot import IFSParallel
|
|
24
|
+
from ..syntax.stdattrs import outputid_deco
|
|
25
|
+
|
|
26
|
+
#: No automatic export
|
|
27
|
+
__all__ = []
|
|
28
|
+
|
|
29
|
+
logger = loggers.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
fullpos_server_flypoll_pickle = '.fullpos_server_flypoll'
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class FullPosServerFlyPollPersistantState:
    """Pickled state shared between successive Fullpos polling calls."""

    def __init__(self):
        # Output files already reported, keyed by output prefix
        self.found = collections.defaultdict(list)
        # Last processed term per output prefix (initialised far in the past
        # so that the very first poll accepts any term)
        self.cursor = collections.defaultdict(functools.partial(Time, -9999))
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def fullpos_server_flypoll(sh, outputprefix, termfile, directories=('.', ), **kwargs):  # @UnusedVariable
    """Check sub-**directories** to determine whether new output files are available or not.

    :param sh: the System-like object used for all filesystem interactions.
    :param outputprefix: prefix of the output files to look for (e.g. ``PF``).
    :param termfile: file whose first line holds the last term completed by
        the server; only outputs up to that term are reported.
    :param directories: the sub-directories to scan (created if missing).
    :return: the list of newly detected output file paths (normalised,
        relative to the starting directory).

    The polling cursor and the list of already-reported files are persisted
    in a pickle file (one per directory), so repeated calls only report each
    output once.
    """
    new = list()
    for directory in directories:
        with sh.cdcontext(directory, create=True):
            # Restore (or initialise) the persistent polling state
            if sh.path.exists(fullpos_server_flypoll_pickle):
                fpoll_st = sh.pickle_load(fullpos_server_flypoll_pickle)
            else:
                fpoll_st = FullPosServerFlyPollPersistantState()
            try:
                if sh.path.exists(termfile):
                    with open(termfile) as wfh:
                        rawcursor = wfh.readline().rstrip('\n')
                    try:
                        cursor = Time(rawcursor)
                    except TypeError:
                        # NOTE(review): assumes Time() raises TypeError on bad
                        # input — confirm it cannot raise ValueError instead.
                        logger.warning('Unable to convert "%s" to a Time object', rawcursor)
                        # Early return: the finally clause below still saves the state
                        return new
                    # Match "<prefix>...+TERM[:MM][.ext]"; assumes outputprefix
                    # contains no regex metacharacters (it is interpolated raw).
                    pre = re.compile(r'^{:s}\w*\+(\d+(?::\d\d)?)(?:\.\w+)?$'.format(outputprefix))
                    candidates = [pre.match(f) for f in sh.listdir()]
                    lnew = list()
                    for candidate in filterfalse(lambda c: c is None, candidates):
                        # Skip directory-like companions ("*.d")
                        if candidate.group(0).endswith('.d'):
                            continue
                        ctime = Time(candidate.group(1))
                        # New iff strictly after the saved cursor and not past
                        # the term the server has actually completed
                        if ctime > fpoll_st.cursor[outputprefix] and ctime <= cursor:
                            lnew.append(candidate.group(0))
                    fpoll_st.cursor[outputprefix] = cursor
                    fpoll_st.found[outputprefix].extend(lnew)
                    new.extend([sh.path.normpath(sh.path.join(directory, anew))
                                for anew in lnew])
            finally:
                # Always persist the state, even on early return or error
                sh.pickle_dump(fpoll_st, fullpos_server_flypoll_pickle)
    return new
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class FullposServerDiscoveredInputs:
    """Holds all kind of information on input files.

    Attributes (filled in by :meth:`FullPosServer.inputs`):

    * ``inidata`` -- mapping of input prefix to the initial-condition section;
    * ``tododata`` -- list of {prefix: section} dictionaries, one per input;
    * ``guessdata`` -- per-input list of pre-positioned output guesses;
    * ``termscount`` -- number of inputs per (actual) term;
    * ``anyexpected`` -- True when at least one input is an expected resource;
    * ``inputsminlen`` -- minimum suffix length dictated by the input format;
    * ``firstprefix`` -- prefix of the first discovered input section.
    """

    def __init__(self):
        self.inidata = dict()
        self.tododata = list()
        self.guessdata = list()
        self.termscount = collections.defaultdict(int)
        self.anyexpected = False
        self.inputsminlen = 0
        self.firstprefix = None

    def actual_suffixlen(self, minlen=None):
        """Find out the required suffixlen.

        :param minlen: lower bound for the result (defaults to
            :attr:`inputsminlen`).
        :return: the number of digits needed to index all entries of
            :attr:`tododata`, never less than *minlen*.
        """
        if minlen is None:
            minlen = self.inputsminlen
        # Guard: math.log10(0) raises ValueError (math domain error) when
        # no input data was discovered; the minimum length is then enough.
        if not self.tododata:
            return minlen
        return max(minlen,
                   int(math.floor(math.log10(len(self.tododata)))))
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class FullPosServer(IFSParallel):
|
|
100
|
+
"""Fullpos Server for geometry transforms & post-processing in IFS-like Models.
|
|
101
|
+
|
|
102
|
+
Input/Output files are labelled as follows:
|
|
103
|
+
|
|
104
|
+
* Let ``INPUTFILE_0`` denote an input file name (the user can choose
|
|
105
|
+
whichever name she/he wants provided that the associated input's
|
|
106
|
+
section has the "ModelState" role).
|
|
107
|
+
* For FA files: the corresponding output file will be
|
|
108
|
+
``INPUTFILE_0.domain.out`` where ``domain`` is the domain name chosen
|
|
109
|
+
by the user in the namelist. If a Surfex output file is also created
|
|
110
|
+
(it depends on the namelist) it will be named ``INPUTFILE_0.domain.sfx.out``
|
|
111
|
+
* For GRIB files: the corresponding output file will be
|
|
112
|
+
``INPUTFILE_0.domain.grib.out``.
|
|
113
|
+
* Some or all output files may be pre-positionned (data generated by the
|
|
114
|
+
c903 will be appended on them). The corresponding section's role needs
|
|
115
|
+
to be "OutputGuess". The local filename of the section, needs to match
|
|
116
|
+
the expected output filename. In our example it would be
|
|
117
|
+
``INPUTFILE_0.domain.out``.
|
|
118
|
+
|
|
119
|
+
:note: To use this algocomponent, the c903's server needs to be activated
|
|
120
|
+
in the namelist (NFPSERVER != 0).
|
|
121
|
+
|
|
122
|
+
:note: With the current IFS/Arpege code, in order for the output's polling
|
|
123
|
+
to work properly, an ``ECHFP`` witness file has to be incremented
|
|
124
|
+
by the server, in each of the output directories.
|
|
125
|
+
|
|
126
|
+
:note: Climatology files are not managed (only few sanity checks are
|
|
127
|
+
performed). The user needs to name the input climatology file
|
|
128
|
+
consistently with the c903' namelist. For role="InitialClim" sections,
|
|
129
|
+
the filename should be something like ``Const.Clim.m[month]``. For
|
|
130
|
+
role="TargetClim" sections the filename should be something like
|
|
131
|
+
``const.clim.[domain].m[month]`` where ``[domain]`` corresponds to
|
|
132
|
+
the name of the output domain (as specified in the namelist file)
|
|
133
|
+
and ``[month]`` corresponds to the month being dealt with (on 2 digits).
|
|
134
|
+
|
|
135
|
+
Interesting features:
|
|
136
|
+
|
|
137
|
+
* Input files can be expected (for on the fly processing)
|
|
138
|
+
* Input files are dealt with in arbitrary order depending on their
|
|
139
|
+
availability (useful for ensemble processing).
|
|
140
|
+
* Output files can be promised
|
|
141
|
+
|
|
142
|
+
"""
|
|
143
|
+
|
|
144
|
+
_INITIALCONDITION_ROLE = re.compile(r'InitialCondition((?:\w+)?)')
|
|
145
|
+
_INPUTDATA_ROLE_STR = 'ModelState'
|
|
146
|
+
_INPUTDATA_ROLE = re.compile(r'ModelState((?:\w+)?)')
|
|
147
|
+
_OUTPUTGUESS_ROLE = 'OutputGuess'
|
|
148
|
+
|
|
149
|
+
_MODELSIDE_INPUTPREFIX0 = 'ICM'
|
|
150
|
+
_MODELSIDE_INPUTPREFIX1 = 'SH'
|
|
151
|
+
_MODELSIDE_OUTPUTPREFIX = 'PF'
|
|
152
|
+
_MODELSIDE_OUTPUTPREFIX_GRIB = 'GRIBPF'
|
|
153
|
+
_MODELSIDE_TERMFILE = './ECHFP'
|
|
154
|
+
_MODELSIDE_OUT_SUFFIXLEN_MIN = 4
|
|
155
|
+
_MODELSIDE_IND_SUFFIXLEN_MIN = 4
|
|
156
|
+
_MODELSIDE_INE_SUFFIXLEN_MIN = dict(grib=6)
|
|
157
|
+
|
|
158
|
+
_SERVERSYNC_RAISEONEXIT = False
|
|
159
|
+
_SERVERSYNC_RUNONSTARTUP = False
|
|
160
|
+
_SERVERSYNC_STOPONEXIT = False
|
|
161
|
+
|
|
162
|
+
_footprint = [
|
|
163
|
+
outputid_deco,
|
|
164
|
+
dict(
|
|
165
|
+
attr = dict(
|
|
166
|
+
kind = dict(
|
|
167
|
+
values = ['fpserver', ],
|
|
168
|
+
),
|
|
169
|
+
outdirectories = dict(
|
|
170
|
+
info = "The list of possible output directories.",
|
|
171
|
+
type = footprints.stdtypes.FPList,
|
|
172
|
+
default = footprints.stdtypes.FPList(['.', ]),
|
|
173
|
+
optional = True,
|
|
174
|
+
),
|
|
175
|
+
append_domain = dict(
|
|
176
|
+
info = ("If defined, the output file for domain append_domain " +
|
|
177
|
+
"will be made a copy of the input file (prior to the " +
|
|
178
|
+
"server run"),
|
|
179
|
+
optional = True,
|
|
180
|
+
),
|
|
181
|
+
basedate=dict(
|
|
182
|
+
info = "The run date of the coupling generating process",
|
|
183
|
+
type = Date,
|
|
184
|
+
optional = True
|
|
185
|
+
),
|
|
186
|
+
xpname = dict(
|
|
187
|
+
default = 'FPOS'
|
|
188
|
+
),
|
|
189
|
+
conf = dict(
|
|
190
|
+
default = 903,
|
|
191
|
+
),
|
|
192
|
+
timestep=dict(
|
|
193
|
+
default = 1.,
|
|
194
|
+
),
|
|
195
|
+
timeout = dict(
|
|
196
|
+
type = int,
|
|
197
|
+
optional = True,
|
|
198
|
+
default = 300,
|
|
199
|
+
),
|
|
200
|
+
refreshtime = dict(
|
|
201
|
+
info = "How frequently are the expected input files looked for ? (seconds)",
|
|
202
|
+
type = int,
|
|
203
|
+
optional = True,
|
|
204
|
+
default = 20,
|
|
205
|
+
),
|
|
206
|
+
server_run = dict(
|
|
207
|
+
# This is a rw attribute: it will be managed internally
|
|
208
|
+
values = [True, False]
|
|
209
|
+
),
|
|
210
|
+
serversync_method = dict(
|
|
211
|
+
default = 'simple_socket',
|
|
212
|
+
),
|
|
213
|
+
serversync_medium = dict(
|
|
214
|
+
default = 'nextfile_wait',
|
|
215
|
+
),
|
|
216
|
+
maxpollingthreads = dict(
|
|
217
|
+
type = int,
|
|
218
|
+
optional = True,
|
|
219
|
+
default = 8,
|
|
220
|
+
),
|
|
221
|
+
flypoll = dict(
|
|
222
|
+
default = 'internal',
|
|
223
|
+
),
|
|
224
|
+
defaultformat = dict(
|
|
225
|
+
info = "Format for the legacy output files.",
|
|
226
|
+
default = 'fa',
|
|
227
|
+
optional = True
|
|
228
|
+
)
|
|
229
|
+
)
|
|
230
|
+
)
|
|
231
|
+
]
|
|
232
|
+
|
|
233
|
+
@property
def realkind(self):
    """The kind of processing performed by this AlgoComponent."""
    return 'fullpos'
|
|
236
|
+
|
|
237
|
+
def __init__(self, *args, **kw):
    """Initialise the component and its fly-put output-mapping cache."""
    super().__init__(*args, **kw)
    # Maps compiled output-name regexes to (target-name template, format) pairs
    self._flyput_mapping_d = {}
|
|
240
|
+
|
|
241
|
+
def flyput_outputmapping(self, item):
    """Map an output file to its final name.

    :param item: path to a raw c903 output file.
    :return: a ``(final_path, format)`` tuple when one of the cached
        mapping regexes matches **item**'s basename; ``None`` (implicitly)
        otherwise.
    """
    sh = self.system
    for out_re, data in self._flyput_mapping_d.items():
        m_re = out_re.match(sh.path.basename(item))
        if m_re:
            # data[0] is a name template taking (fpdom, suffix);
            # data[1] is the associated file format
            return (sh.path.join(sh.path.dirname(item),
                                 data[0].format(m_re.group('fpdom'),
                                                m_re.group('suffix'))),
                    data[1])
|
|
251
|
+
|
|
252
|
+
@cached_property
def inputs(self):
    """Retrieve the lists in input sections/ResourceHandlers.

    Builds and caches a :class:`FullposServerDiscoveredInputs` object by
    scanning the sequence's effective inputs:

    * ``InitialCondition*`` sections (at most one per prefix);
    * ``ModelState*`` sections, grouped by role suffix and, when every
      resource has a ``term`` attribute, sorted by actual term;
    * ``OutputGuess`` sections, paired by basename with their input file.
    """
    discovered = FullposServerDiscoveredInputs()

    # Initial conditions
    inisec = self.context.sequence.effective_inputs(role=self._INITIALCONDITION_ROLE)
    if inisec:
        for s in inisec:
            # Prefix taken from the role/alternate suffix, or the default one
            iprefix = (self._INITIALCONDITION_ROLE.match(s.alternate
                                                         if s.role is None else
                                                         s.role).group(1) or
                       self._MODELSIDE_INPUTPREFIX1)
            fprefix = self._MODELSIDE_INPUTPREFIX0 + iprefix
            if fprefix in discovered.inidata:
                raise AlgoComponentError('Only one Initial Condition is allowed.')
            else:
                discovered.inidata[fprefix] = s

    # Model states
    todosec0 = self.context.sequence.effective_inputs(role=self._INPUTDATA_ROLE)
    todosec1 = collections.defaultdict(list)
    discovered.anyexpected = any([isec.rh.is_expected() for isec in todosec0])
    hasterms = all([hasattr(isec.rh.resource, 'term') for isec in todosec0])
    # Sort things up (if possible)
    if hasterms:
        logger.info('Sorting input data based on the actual term.')
        todosec0 = sorted(todosec0, key=lambda s: self._actual_term(s.rh))
    if todosec0:
        for iseq, s in enumerate(todosec0):
            rprefix = (self._INPUTDATA_ROLE.match(s.alternate
                                                  if s.role is None else
                                                  s.role).group(1) or
                       self._MODELSIDE_INPUTPREFIX1)
            todosec1[rprefix].append(s)
            if iseq == 0:
                # Find the "default" prefix and suffix len based on the first section
                discovered.firstprefix = rprefix
                discovered.inputsminlen = self._MODELSIDE_INE_SUFFIXLEN_MIN.get(
                    s.rh.container.actualfmt, self._MODELSIDE_IND_SUFFIXLEN_MIN
                )
        iprefixes = sorted(todosec1.keys())
        if len(iprefixes) == 1:
            # Single prefix: one {prefix: section} entry per input
            for s in todosec0:
                discovered.tododata.append({self._MODELSIDE_INPUTPREFIX0 + iprefixes[0]: s})
        else:
            # Several prefixes: zip them index-wise (counts must match)
            if len({len(secs) for secs in todosec1.values()}) > 1:
                raise AlgoComponentError('Inconsistent number of input data.')
            for sections in zip(* [iter(todosec1[i]) for i in iprefixes]):
                discovered.tododata.append({self._MODELSIDE_INPUTPREFIX0 + k: v
                                            for k, v in zip(iprefixes, sections)})

    # Detect the number of terms based on the firstprefix
    if hasterms:
        for sections in discovered.tododata:
            act_term = self._actual_term(sections[self._MODELSIDE_INPUTPREFIX0 +
                                                  discovered.firstprefix].rh)
            discovered.termscount[act_term] += 1

    # Look for guesses of output files
    guesses_sec0 = collections.defaultdict(list)
    guess_entry = collections.namedtuple('guess_entry', ('sdir', 'prefix', 'domain', 'suffix', 'sec'))
    for sec in self.context.sequence.effective_inputs(role=self._OUTPUTGUESS_ROLE):
        s_lpath = sec.rh.container.localpath()
        s_match = self._o_algo_re.match(self.system.path.basename(s_lpath))
        if s_match:
            guesses_sec0[s_match.group('base')].append(guess_entry(
                self.system.path.dirname(s_lpath),
                self._o_auto_prefix('grib' if s_match.group('grib') else self.defaultformat),
                s_match.group('fpdom'),
                s_match.group('suffix'),
                sec
            ))
            discovered.anyexpected = discovered.anyexpected or sec.rh.is_expected()
        else:
            logger.warning('Improper name for the following output guess < %s >. Ignoring it.', s_lpath)
    # Pair them with input file (based on their name)
    for iinput in discovered.tododata:
        isec = iinput[self._MODELSIDE_INPUTPREFIX0 + discovered.firstprefix]
        discovered.guessdata.append(
            guesses_sec0.pop(self.system.path.basename(isec.rh.container.localpath()), ())
        )
    if guesses_sec0:
        # Guesses whose basename matched no input are dropped with a warning
        logger.warning('Some input data were left unsed: < %s >', guesses_sec0)
    logger.info('discovered guessdata are: < %s >', discovered.guessdata)

    return discovered
|
|
339
|
+
|
|
340
|
+
@cached_property
def object_namelists(self):
    """The list of object's namelists.

    Side effect: each ``namelist_fpobject`` namelist is customised in place
    (FP_CMODEL / FP_LEXTERN macros, NFPOSTS entries in &NAMFPC) and saved
    back to disk when modified.
    """
    namrhs = [isec.rh
              for isec in self.context.sequence.effective_inputs(role='ObjectNamelist')
              if isec.rh.resource.realkind == 'namelist_fpobject']
    # Update the object's content
    for namrh in namrhs:
        namsave = False
        if namrh.resource.fp_cmodel is not None:
            self._set_nam_macro(namrh.contents, namrh.container.localpath(),
                                'FP_CMODEL', namrh.resource.fp_cmodel)
            namsave = True
        if namrh.resource.fp_lextern is not None:
            self._set_nam_macro(namrh.contents, namrh.container.localpath(),
                                'FP_LEXTERN', namrh.resource.fp_lextern)
            namsave = True
        if namrh.resource.fp_terms is not None:
            # NFPOSTS generation requires a term on every input resource
            if not self.inputs.termscount:
                raise AlgoComponentError('In this use case, all input data must have a term attribute')
            active_terms = {Time(t) for t in namrh.resource.fp_terms}
            # Generate the list of NFPOSTS: for each (sorted) term, the input
            # indices belonging to it are activated when the term is selected
            global_i = 0
            nfposts = list()
            for term, n_term in sorted(self.inputs.termscount.items()):
                if term in active_terms:
                    nfposts.extend(range(global_i, global_i + n_term))
                global_i += n_term
            # Get the NAMFPC block
            try:
                nfpc = namrh.contents['NAMFPC']
            except KeyError:
                raise AlgoComponentError('NAMFPC should be defined in {:s}'
                                         .format(namrh.container.localpath()))
            # Sanity check
            for k in nfpc.keys():
                if k.startswith('NFPOSTS'):
                    raise AlgoComponentError('&NAMFPC NFPOSTS*(*) / entries should not be defined in {:s}'
                                             .format(namrh.container.localpath()))
            # Write NFPOSTS to NAMFPC (negative values — presumably the
            # c903 convention for step-based post-processing; confirm)
            nfpc['NFPOSTS(0)'] = - len(nfposts)
            for i, v in enumerate(nfposts):
                nfpc['NFPOSTS({:d})'.format(i + 1)] = - v
            logger.info("The NAMFPC namelist in %s was updated.",
                        namrh.container.localpath())
            logger.debug("The updated NAMFPC namelist in %s is:\n%s",
                         namrh.container.localpath(), nfpc)
            namsave = True
        if namsave:
            namrh.save()
    return namrhs
|
|
391
|
+
|
|
392
|
+
@cached_property
def xxtmapping(self):
    """Selection namelists indexed by directory then by term.

    :return: ``{directory: {term: resource_handler}}`` for every
        ``FullPosSelection``/``namselect`` effective input.
    """
    selections = collections.defaultdict(dict)
    candidates = self.context.sequence.effective_inputs(role='FullPosSelection',
                                                        kind='namselect')
    for isec in candidates:
        rhandler = isec.rh
        where = self.system.path.dirname(rhandler.container.localpath())
        selections[where][rhandler.resource.term] = rhandler
    # Selection namelists only make sense when terms are known
    if selections and not self.inputs.termscount:
        raise AlgoComponentError('In this use case, all input data must have a term attribute')
    return selections
|
|
403
|
+
|
|
404
|
+
@cached_property
def _i_fmt(self):
    """The input files format (as expected by the c903)."""
    # Yields e.g. '{:s}FPOS+{:04d}': prefix placeholder, xpname, padded index
    suffixlen = self.inputs.actual_suffixlen()
    return '{{:s}}{:s}+{{:0{:d}d}}'.format(self.xpname, suffixlen)
|
|
409
|
+
|
|
410
|
+
@cached_property
def _o_raw_fmt(self):
    """The output files format (as imposed by the c903)."""
    # Yields e.g. '{:s}FPOS{:s}+{:04d}{:s}': prefix, domain, index, suffix
    suffixlen = self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN)
    return '{{:s}}{:s}{{:s}}+{{:0{:d}d}}{{:s}}'.format(self.xpname, suffixlen)
|
|
415
|
+
|
|
416
|
+
@cached_property
def _o_re_fmt(self):
    """The output files regex (as imposed by the c903).

    Still a template: the prefix and the zero-padded index are filled in
    later (see :meth:`_add_output_mapping`).
    """
    suffixlen = self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN)
    return ('^{{:s}}{:s}'.format(self.xpname) +
            r'(?P<fpdom>\w+)\+' +
            '{{:0{:d}d}}'.format(suffixlen) +
            r'(?P<suffix>(?:\.sfx)?)$')
|
|
422
|
+
|
|
423
|
+
@cached_property
def _o_init_re_fmt(self):
    """The output files regex (as imposed by the c903).

    Template variant for the initial-condition output: the two ``{:s}``
    placeholders (prefix and, presumably, the 'INIT' marker) are filled
    in later — TODO confirm against the caller.
    """
    return ('^{:s}' + '{:s}'.format(self.xpname) +
            r'(?P<fpdom>\w+){:s}(?P<suffix>(?:\.sfx)?)$')
|
|
428
|
+
|
|
429
|
+
@cached_property
def _o_algo_re(self):
    """Compiled regex for any AlgoComponent-side output name.

    Matches ``base.domain[.sfx][.grib].out``.
    """
    pattern = r'(?P<base>.+)\.(?P<fpdom>\w+)(?P<suffix>(?:\.sfx)?)(?P<grib>(?:\.grib)?)\.out$'
    return re.compile(pattern)
|
|
433
|
+
|
|
434
|
+
@cached_property
def _o_suffix(self):
    """Suffix template for default-format outputs (takes domain and suffix)."""
    template = '.{:s}{:s}.out'
    return template
|
|
438
|
+
|
|
439
|
+
@cached_property
def _o_grb_suffix(self):
    """Suffix template for GRIB outputs (takes domain and suffix)."""
    template = '.{:s}{:s}.grib.out'
    return template
|
|
443
|
+
|
|
444
|
+
def _o_auto_prefix(self, fmt):
    """Return the appropriate output files prefix (as imposed by the c903).

    :param fmt: the file format ('grib' gets its dedicated prefix; anything
        else falls back to the default one).
    """
    if fmt == 'grib':
        return self._MODELSIDE_OUTPUTPREFIX_GRIB
    return self._MODELSIDE_OUTPUTPREFIX
|
|
449
|
+
|
|
450
|
+
def _actual_term(self, rhandler):
    """Compute the actual Resource Handler term.

    :param rhandler: a ResourceHandler whose resource has a ``term``
        attribute (and a ``date`` attribute when :attr:`basedate` is set).
    :return: the resource's term, shifted by ``resource.date - basedate``
        when a base date is configured (i.e. expressed relative to the
        coupling-generating run).
    """
    rterm = rhandler.resource.term
    if self.basedate is not None:
        rterm += rhandler.resource.date - self.basedate
    return rterm
|
|
456
|
+
|
|
457
|
+
def _add_output_mapping(self, outputs_mapping, i, out_re, out_fname):
    """Register the two output mappings (default format and GRIB) for input *i*.

    :param outputs_mapping: dict updated in place (compiled regex -> (name, fmt)).
    :param i: the input file index (fills the padded counter in *out_re*).
    :param out_re: regex template taking (prefix, index).
    :param out_fname: basename of the matching input file.
    """
    logged = []
    for prefix, suffix, fmt in (
            (self._MODELSIDE_OUTPUTPREFIX, self._o_suffix, self.defaultformat),
            (self._MODELSIDE_OUTPUTPREFIX_GRIB, self._o_grb_suffix, 'grib')):
        raw_re = out_re.format(prefix, i)
        target = (out_fname + suffix, fmt)
        outputs_mapping[re.compile(raw_re)] = target
        logged.extend([raw_re, target[0]])
    logger.info('Output %s mapped as %s. Output %s mapped as %s.', *logged)
|
|
469
|
+
|
|
470
|
+
def _link_input(self, iprefix, irh, i, inputs_mapping, outputs_mapping):
    """Link an input file and update the mappings dictionaries.

    :param iprefix: full model-side input prefix (e.g. ``ICMSH``).
    :param irh: the input's ResourceHandler.
    :param i: index of this input in the processing sequence.
    :param inputs_mapping: dict updated in place (local path -> c903 name).
    :param outputs_mapping: dict updated in place with the matching output
        name mappings (only for the first/default prefix).
    """
    sourcepath = irh.container.localpath()
    inputs_mapping[sourcepath] = self._i_fmt.format(iprefix, i)
    # Read-only copy under the name the c903 server expects
    self.system.cp(sourcepath, inputs_mapping[sourcepath], intent='in', fmt=irh.container.actualfmt)
    logger.info('%s copied as %s.', sourcepath, inputs_mapping[sourcepath])
    if iprefix == self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix:
        self._add_output_mapping(outputs_mapping, i, self._o_re_fmt,
                                 self.system.path.basename(sourcepath))
        if self.append_domain:
            # Pre-position the output for append_domain: the server will
            # append its data to a (rw) copy of the input file
            outputpath = self._o_raw_fmt.format(
                self._o_auto_prefix(irh.container.actualfmt), self.append_domain, i, ''
            )

            if self.outdirectories:
                todo = [self.system.path.join(d, outputpath) for d in self.outdirectories]
            else:
                todo = [outputpath, ]
            for a_outputpath in todo:
                self.system.cp(sourcepath, a_outputpath, intent='inout', fmt=irh.container.actualfmt)
                logger.info('output file prepared: %s copied (rw) to %s.', sourcepath, a_outputpath)
|
|
491
|
+
|
|
492
|
+
def _move_output_guess(self, iguess, i):
    """Move one pre-positioned output guess to its c903-imposed location.

    :param iguess: a ``guess_entry`` namedtuple (sdir/prefix/domain/suffix/sec).
    :param i: index of the associated input file.
    """
    guess_rh = iguess.sec.rh
    origin = guess_rh.container.localpath()
    target_name = self._o_raw_fmt.format(iguess.prefix, iguess.domain, i, iguess.suffix)
    target = self.system.path.join(iguess.sdir, target_name)
    self.system.mv(origin, target, fmt=guess_rh.container.actualfmt)
    logger.info('output guess %s was moved to %s.', origin, target)
|
|
501
|
+
|
|
502
|
+
def _link_xxt(self, todorh, i):
    """If necessary, link in the appropriate xxtNNNNNNMM file.

    :param todorh: the input ResourceHandler (its actual term selects the
        selection namelist).
    :param i: index of the input file, interpreted as a number of hours
        when building the xxt target name.
    """
    for sdir, tdict in self.xxtmapping.items():
        xxtrh = tdict.get(self._actual_term(todorh), None)
        if xxtrh is not None:
            # Relative symlink so the whole tree can be relocated
            xxtsource = self.system.path.relpath(xxtrh.container.abspath,
                                                 sdir)
            # The file is expected to follow the xxtDDDDHHMM syntax where DDDD
            # is the number of days
            days_hours = (i // 24) * 100 + i % 24
            xxttarget = 'xxt{:06d}00'.format(days_hours)
            xxttarget = self.system.path.join(sdir, xxttarget)
            self.system.symlink(xxtsource, xxttarget)
            logger.info('XXT %s linked in as %s.', xxtsource, xxttarget)
|
|
516
|
+
|
|
517
|
+
def _init_poll_and_move(self, outputs_mapping):
    """Deal with the PF*INIT file.

    Finds ``PF<xpname>*INIT`` files produced by the server, maps each to
    its final name through **outputs_mapping** and copies it there (unless
    the target already exists).

    :return: the list of newly created output file names.
    :raises AlgoComponentError: when a candidate matches no mapping entry.
    """
    sh = self.system
    candidates = self.system.glob('{:s}{:s}*INIT'.format(self._MODELSIDE_OUTPUTPREFIX, self.xpname))
    outputnames = list()
    for thisdata in candidates:
        mappeddata = None
        # First matching regex wins (same lookup as _poll_and_move)
        for out_re, data in outputs_mapping.items():
            m_re = out_re.match(thisdata)
            if m_re:
                mappeddata = (sh.path.join(sh.path.dirname(thisdata),
                                           data[0].format(m_re.group('fpdom'),
                                                          m_re.group('suffix'))),
                              data[1])
                break
        if mappeddata is None:
            raise AlgoComponentError('The mapping failed for {:s}.'.format(thisdata))
        # Already dealt with ?
        if not self.system.path.exists(mappeddata[0]):
            logger.info('Linking <%s> to <%s> (fmt=%s).', thisdata, mappeddata[0], mappeddata[1])
            outputnames.append(mappeddata[0])
            self.system.cp(thisdata, mappeddata[0], intent='in', fmt=mappeddata[1])
    return outputnames
|
|
540
|
+
|
|
541
|
+
def _poll_and_move(self, outputs_mapping):
    """Call **io_poll** and rename available output files.

    :param outputs_mapping: dict of compiled regex -> (name template, format).
    :return: the list of newly created output file names.
    :raises AlgoComponentError: when a polled file matches no mapping entry.
    """
    sh = self.system
    data = self.manual_flypolling()
    outputnames = list()
    for thisdata in data:
        mappeddata = None
        # NOTE(review): the loop variable `data` below shadows the polled
        # file list above; harmless because the outer for-loop already holds
        # its own iterator, but fragile if this code is ever reordered.
        for out_re, data in outputs_mapping.items():
            m_re = out_re.match(sh.path.basename(thisdata))
            if m_re:
                mappeddata = (sh.path.join(sh.path.dirname(thisdata),
                                           data[0].format(m_re.group('fpdom'),
                                                          m_re.group('suffix'))),
                              data[1])
                break
        if mappeddata is None:
            raise AlgoComponentError('The mapping failed for {:s}.'.format(thisdata))
        logger.info('Linking <%s> to <%s> (fmt=%s).', thisdata, mappeddata[0], mappeddata[1])
        outputnames.append(mappeddata[0])
        self.system.cp(thisdata, mappeddata[0], intent='in', fmt=mappeddata[1])
    return outputnames
|
|
562
|
+
|
|
563
|
+
def _deal_with_promises(self, outputs_mapping, pollingcb):
    """Poll for new outputs and immediately honour any matching promise.

    :param outputs_mapping: the output mapping handed to *pollingcb*.
    :param pollingcb: callable returning the list of newly available outputs.
    """
    if not self.promises:
        return
    for afile in pollingcb(outputs_mapping):
        target = self.system.path.abspath(afile)
        matching = [p for p in self.promises
                    if p.rh.container.abspath == target]
        if matching:
            logger.info('The output data is promised <%s>', afile)
            matching.pop().put(incache=True)
|
|
573
|
+
|
|
574
|
+
def prepare(self, rh, opts):
    """Various sanity checks + namelist tweaking.

    Successively: customises the per-object namelists (SUFFIXLEN macro),
    checks that all input data share a single geometry, that the input
    climatology (if any) matches it, that the initial condition (if any)
    matches it, warns about missing target climatologies, verifies the
    selection-namelists terms against the input terms, and finally copies
    the initial-condition file(s) under the conventional
    ``<prefix><xpname>INIT`` name.
    """
    super().prepare(rh, opts)

    if self.object_namelists:
        self.system.subtitle('Object Namelists customisation')
        for o_nam in self.object_namelists:
            # a/c cy44: &NAMFPIOS NFPDIGITS=__SUFFIXLEN__, /
            self._set_nam_macro(o_nam.contents, o_nam.container.localpath(), 'SUFFIXLEN',
                                self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN))
            # Only rewrite the file when the macro substitution changed it
            if o_nam.contents.dumps_needs_update:
                logger.info('Rewritting the %s namelists file.', o_nam.container.actualpath())
                o_nam.save()

    self.system.subtitle('Dealing with various input files')

    # Sanity check over climfiles and geometries
    input_geo = {sec.rh.resource.geometry
                 for sdict in self.inputs.tododata for sec in sdict.values()}
    if len(input_geo) == 0:
        raise AlgoComponentError('No input data are provided, ...')
    elif len(input_geo) > 1:
        raise AlgoComponentError('Multiple geometries are not allowed for input data.')
    else:
        # Exactly one geometry: keep it for the comparisons below
        input_geo = input_geo.pop()

    input_climgeo = {x.rh.resource.geometry
                     for x in self.context.sequence.effective_inputs(role=('InputClim',
                                                                           'InitialClim'))}
    if len(input_climgeo) == 0:
        logger.info('No input clim provided. Going on without it...')
    elif len(input_climgeo) > 1:
        raise AlgoComponentError('Multiple geometries are not allowed for input climatology.')
    else:
        if input_climgeo.pop() != input_geo:
            raise AlgoComponentError('The input data and input climatology geometries does not match.')

    # Initial Condition geometry sanity check
    if self.inputs.inidata and any([sec.rh.resource.geometry != input_geo
                                    for sec in self.inputs.inidata.values()]):
        raise AlgoComponentError('The Initial Condition geometry differs from other input data.')

    # Sanity check on target climatology files
    target_climgeos = {x.rh.resource.geometry
                       for x in self.context.sequence.effective_inputs(role='TargetClim')}
    if len(target_climgeos) == 0:
        logger.info('No target clim are provided. Going on without it...')

    # Sanity check on selection namelists
    if self.xxtmapping:
        for tdict in self.xxtmapping.values():
            # ``<`` is the proper-subset test: fail when the selection
            # namelists cover terms that no input data provides
            if ({self._actual_term(sec.rh)
                 for sdict in self.inputs.tododata
                 for sec in sdict.values()} < set(tdict.keys())):
                raise AlgoComponentError("The list of terms between input data and selection namelists differs")
    else:
        logger.info("No selection namelists detected. That's fine")

    # Link in the initial condition file (if necessary)
    for iprefix, isec in self.inputs.inidata.items():
        i_init = '{:s}{:s}INIT'.format(iprefix, self.xpname)
        # Copy only when the file is not already named as expected
        if isec.rh.container.basename != i_init:
            self.system.cp(isec.rh.container.localpath(), i_init,
                           intent='in', fmt=isec.rh.container.actualfmt)
            logger.info('Initial condition file %s copied as %s.',
                        isec.rh.container.localpath(), i_init)
|
|
640
|
+
|
|
641
|
+
def find_namelists(self, opts=None):
    """Find any namelists candidates in actual context inputs."""
    candidates = self.context.sequence.effective_inputs(role='Namelist',
                                                        kind='namelist')
    return [sec.rh for sec in candidates]
|
|
646
|
+
|
|
647
|
+
def prepare_namelist_delta(self, rh, namcontents, namlocal):
    """Substitute the Fullpos-server macros in the main namelist.

    Sets the SERVERSYNC_SCRIPT, IOPOLL_WHITNESSFILE, SUFFIXLEN,
    INPUT_SUFFIXLEN, INPUTDATALEN and (conditionally) FP_CMODEL macros in
    *namcontents*, and auto-generates a NAMFPOBJ block listing the
    per-object namelists when needed.

    :param rh: the namelist's resource handler (forwarded to ``super()``).
    :param namcontents: the parsed namelist contents being tweaked.
    :param namlocal: the local path of the namelist (for log messages).
    :return: ``True`` (the namelist contents were updated).
    """
    super().prepare_namelist_delta(rh, namcontents, namlocal)
    # With cy43: &NAMCT0 CSCRIPT_PPSERVER=__SERVERSYNC_SCRIPT__, /
    if self.inputs.anyexpected:
        self._set_nam_macro(namcontents, namlocal, 'SERVERSYNC_SCRIPT',
                            self.system.path.join('.', self.serversync_medium))
    else:
        # Do not harass the filesystem...
        self._set_nam_macro(namcontents, namlocal, 'SERVERSYNC_SCRIPT', ' ')
    # With cy43: &NAMCT0 CFPNCF=__IOPOLL_WHITNESSFILE__, /
    self._set_nam_macro(namcontents, namlocal, 'IOPOLL_WHITNESSFILE', self._MODELSIDE_TERMFILE)
    # With cy43: No matching namelist key
    # a/c cy44: &NAMFPIOS NFPDIGITS=__SUFFIXLEN__, /
    self._set_nam_macro(namcontents, namlocal, 'SUFFIXLEN',
                        self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN))
    # No matching namelist yet
    self._set_nam_macro(namcontents, namlocal, 'INPUT_SUFFIXLEN',
                        self.inputs.actual_suffixlen())
    # With cy43: &NAMCT0 NFRPOS=__INPUTDATALEN__, /
    # NOTE(review): the value is deliberately negated — presumably a
    # model-side NFRPOS convention; confirm against the IFS documentation.
    self._set_nam_macro(namcontents, namlocal, 'INPUTDATALEN', - len(self.inputs.tododata))
    # Auto generate the list of namelists for the various objects
    if self.object_namelists:
        # Only generate NAMFPOBJ if the user did not provide one already
        if 'NAMFPOBJ' not in namcontents or len(namcontents['NAMFPOBJ']) == 0:
            nb_o = NamelistBlock('NAMFPOBJ')
            nb_o['NFPOBJ'] = len(self.object_namelists)
            for i_nam, nam in enumerate(self.object_namelists):
                # Fortran arrays are 1-based, hence the ``i_nam + 1``
                if nam.resource.fp_conf:
                    nb_o['NFPCONF({:d})'.format(i_nam + 1)] = nam.resource.fp_conf
                nb_o['CNAMELIST({:d})'.format(i_nam + 1)] = nam.container.localpath()
            namcontents['NAMFPOBJ'] = nb_o
            logger.info('The following namelist block has been added to "%s":\n%s',
                        namlocal, nb_o.dumps())
        else:
            logger.warning('The NAMFPOBJ namelist in "%s" is not empty. Leaving it as it is',
                           namlocal)
    # Just in case FP_CMODEL is defined in the main namelist
    if self.outputid is not None and any(['FP_CMODEL' in nam_b.macros()
                                          for nam_b in namcontents.values()]):
        self._set_nam_macro(namcontents, namlocal, 'FP_CMODEL', self.outputid)
    return True
|
|
687
|
+
|
|
688
|
+
def spawn_pre_dirlisting(self):
    """Print a directory listing just before run."""
    super().spawn_pre_dirlisting()
    heading = '{:s} : {:s} sub-directory listing (pre-execution)'
    for subdir in self.outdirectories:
        self.system.subtitle(heading.format(self.realkind, subdir))
        self.system.dir(subdir, output=False, fatal=False)
|
|
695
|
+
|
|
696
|
+
def spawn_hook(self):
    """Usually a good habit to dump the fort.4 namelist."""
    super().spawn_hook()
    for obj_nam in self.object_namelists:
        nam_path = obj_nam.container.localpath()
        self.system.subtitle('{:s} : dump namelist <{:s}>'
                             .format(self.realkind, nam_path))
        self.system.cat(nam_path, output=False)
|
|
703
|
+
|
|
704
|
+
def execute(self, rh, opts):
    """Server still or Normal execution depending on the input sequence.

    When some inputs are still expected (``self.inputs.anyexpected``),
    the binary is driven as a server: an InputMonitor watches incoming
    data, gangs group the files belonging to one step, and the server is
    (re-)triggered each time a complete batch becomes available.
    Otherwise all links are created up-front and the binary is run once.
    """
    sh = self.system

    # Input and Output mapping
    inputs_mapping = dict()
    outputs_mapping = dict()

    # Initial condition file ?
    if self.inputs.inidata:
        for iprefix, isec in self.inputs.inidata.items():
            # The initial condition resource may be expected
            self.grab(isec)
            # Fix potential links and output mappings
            # NOTE(review): the copy source below is the container's
            # basename (not localpath()) — confirm they are equivalent here.
            sourcepath = isec.rh.container.basename
            if iprefix == self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix:
                self._add_output_mapping(outputs_mapping, 'INIT',
                                         self._o_init_re_fmt, sourcepath)
            i_init = '{:s}{:s}INIT'.format(iprefix, self.xpname)
            if isec.rh.container.basename != i_init:
                self.system.cp(sourcepath, i_init,
                               intent='in', fmt=isec.rh.container.actualfmt)
                logger.info('Initial condition file %s copied as %s.',
                            isec.rh.container.localpath(), i_init)
    else:
        if self.inputs.tododata:
            # Just in case the INIT file is transformed
            fakesource = self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix + self.xpname + 'INIT'
            self._add_output_mapping(outputs_mapping, 'INIT', self._o_init_re_fmt, fakesource)

    # Initialise the flying stuff
    self.flyput = False  # Do not use flyput every time...
    # Pick the io_poll prefixes from the promised files' extensions
    flyprefixes = set()
    for s in self.promises:
        lpath = s.rh.container.localpath()
        if lpath.endswith('.grib.out'):
            flyprefixes.add(self._MODELSIDE_OUTPUTPREFIX_GRIB)
        elif lpath.endswith('.out'):
            flyprefixes.add(self._MODELSIDE_OUTPUTPREFIX)
    self.io_poll_args = tuple(flyprefixes)
    self.io_poll_kwargs = dict(directories=tuple(set(self.outdirectories)))
    for directory in set(self.outdirectories):
        sh.mkdir(directory)  # Create possible output directories
    if self.flypoll == 'internal':
        # Use the in-process polling implementation instead of an
        # external io_poll utility
        self.io_poll_method = functools.partial(fullpos_server_flypoll, sh)
        self.io_poll_kwargs['termfile'] = sh.path.basename(self._MODELSIDE_TERMFILE)
    self.flymapping = True
    self._flyput_mapping_d = outputs_mapping

    # Deal with XXT files
    if self.xxtmapping:
        for i, istuff in enumerate(self.inputs.tododata):
            self._link_xxt(istuff[self._MODELSIDE_INPUTPREFIX0 +
                                  self.inputs.firstprefix].rh, i)

    if self.inputs.anyexpected:
        # Some server sync here...
        self.server_run = True
        self.system.subtitle('Starting computation with server_run=T')

        # Process the data in chronological order ?
        ordered_processing = (self.xxtmapping or
                              any([o_rh.resource.fp_terms is not None
                                   for o_rh in self.object_namelists]))
        if ordered_processing:
            logger.info('Input data will be processed chronologicaly.')

        # IO poll settings
        self.io_poll_kwargs['nthreads'] = self.maxpollingthreads

        # Is there already an Initial Condition file ?
        # If so, start the binary...
        if self.inputs.inidata:
            super().execute(rh, opts)
            # Did the server stopped ?
            if not self.server_alive():
                logger.error("Server initialisation failed.")
                return
            self._deal_with_promises(outputs_mapping, self._init_poll_and_move)

        # Setup the InputMonitor
        all_entries = set()
        metagang = _lmonitor.MetaGang()
        cur_term = None
        cur_term_gangs = set()
        prev_term_gangs = set()
        for istuff, iguesses in zip(self.inputs.tododata, self.inputs.guessdata):
            # One gang per step: the step's inputs plus its guess files
            iinputs = {_lmonitor.InputMonitorEntry(s) for s in istuff.values()}
            iinputs |= {_lmonitor.InputMonitorEntry(g.sec) for g in iguesses}
            iterm = self._actual_term(istuff[self._MODELSIDE_INPUTPREFIX0 +
                                             self.inputs.firstprefix].rh)
            all_entries.update(iinputs)
            bgang = _lmonitor.BasicGang()
            bgang.add_member(* iinputs)
            igang = _lmonitor.MetaGang()
            # info is consumed later when the gang becomes collectable
            igang.info = (istuff, iguesses, iterm)
            igang.add_member(bgang)
            # If needed, wait for the previous terms to complete
            if ordered_processing:
                if cur_term is not None and cur_term != iterm:
                    # Detect term's change
                    prev_term_gangs = cur_term_gangs
                    cur_term_gangs = set()
                if prev_term_gangs:
                    # Wait for the gangs of the previous terms
                    igang.add_member(* prev_term_gangs)
                # Save things up for the next time
                cur_term_gangs.add(igang)
                cur_term = iterm
            metagang.add_member(igang)
        bm = _lmonitor.ManualInputMonitor(self.context, all_entries,
                                          caching_freq=self.refreshtime,)

        # Start the InputMonitor
        tmout = False
        current_i = 0
        server_stopped = False
        with bm:
            while not bm.all_done or len(bm.available) > 0:

                # Fetch available inputs and sort them
                ibatch = list()
                while metagang.has_collectable():
                    thegang = metagang.pop_collectable()
                    ibatch.append(thegang.info)
                ibatch.sort(key=lambda item: item[2])  # Sort according to the term

                # Deal with the various available inputs
                for (istuff, iguesses, iterm) in ibatch:
                    sh.highlight("The Fullpos Server is triggered (step={:d})..."
                                 .format(current_i))

                    # Link for the init file (if needed)
                    if current_i == 0 and not self.inputs.inidata:
                        for iprefix, isec in istuff.items():
                            i_init = '{:s}{:s}INIT'.format(iprefix, self.xpname)
                            if not sh.path.exists(i_init):
                                sh.cp(isec.rh.container.localpath(), i_init,
                                      intent='in', fmt=isec.rh.container.actualfmt)
                                logger.info('%s copied as %s. For initialisation purposes only.',
                                            isec.rh.container.localpath(), i_init)
                        super().execute(rh, opts)
                        # Did the server stopped ?
                        if not self.server_alive():
                            logger.error("Server initialisation failed.")
                            return
                        self._deal_with_promises(outputs_mapping, self._init_poll_and_move)

                    # Link input files
                    for iprefix, isec in istuff.items():
                        self._link_input(iprefix, isec.rh, current_i,
                                         inputs_mapping, outputs_mapping)
                    for iguess in iguesses:
                        self._move_output_guess(iguess, current_i)

                    # Let's go...
                    super().execute(rh, opts)
                    self._deal_with_promises(outputs_mapping, self._poll_and_move)
                    current_i += 1

                    # Did the server stopped ?
                    if not self.server_alive():
                        server_stopped = True
                        if not bm.all_done:
                            logger.error("The server stopped but everything wasn't processed...")
                        break

                if server_stopped:
                    break

                if not (bm.all_done or metagang.has_collectable()):
                    # Timeout ?
                    tmout = bm.is_timedout(self.timeout)
                    if tmout:
                        break
                    # Wait a little bit :-)
                    time.sleep(1)
                    bm.health_check(interval=30)

        # Report inputs that never showed up
        for failed_file in [e.section.rh.container.localpath()
                            for e in bm.failed.values()]:
            logger.error("We were unable to fetch the following file: %s", failed_file)
            if self.fatal:
                self.delayed_exception_add(IOError("Unable to fetch {:s}".format(failed_file)),
                                           traceback=False)

        if tmout:
            raise OSError("The waiting loop timed out")

    else:
        # Direct Run !
        self.server_run = False
        self.system.subtitle('Starting computation with server_run=F')

        # Link for the inifile (if needed)
        if not self.inputs.inidata:
            for iprefix, isec in self.inputs.tododata[0].items():
                i_init = '{:s}{:s}INIT'.format(iprefix, self.xpname)
                if not sh.path.exists(i_init):
                    sh.cp(isec.rh.container.localpath(), i_init,
                          intent='in', fmt=isec.rh.container.actualfmt)
                    logger.info('%s copied as %s. For initialisation purposes only.',
                                isec.rh.container.localpath(), i_init)

        # Create all links well in advance
        for i, (iinputs, iguesses) in enumerate(zip(self.inputs.tododata,
                                                    self.inputs.guessdata)):
            for iprefix, isec in iinputs.items():
                self._link_input(iprefix, isec.rh, i, inputs_mapping, outputs_mapping)
            for iguess in iguesses:
                self._move_output_guess(iguess, i)

        # On the fly ?
        if self.promises:
            self.flyput = True

        # Let's roll !
        super().execute(rh, opts)

        # Map all outputs to destination (using io_poll)
        self.io_poll_args = tuple([self._MODELSIDE_OUTPUTPREFIX,
                                   self._MODELSIDE_OUTPUTPREFIX_GRIB, ])
        self._init_poll_and_move(outputs_mapping)
        self._poll_and_move(outputs_mapping)
|