vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1303 @@
1
+ """
2
+ AlgoComponents for the next generation of Fullpos runs (based on the 903
3
+ configuration).
4
+ """
5
+
6
+ import collections
7
+ import functools
8
+ import math
9
+ import re
10
+ from itertools import filterfalse
11
+ import time
12
+
13
+ from bronx.compat.functools import cached_property
14
+ from bronx.datagrip.namelist import NamelistBlock
15
+ from bronx.stdtypes.date import Time, Date
16
+ from bronx.fancies import loggers
17
+
18
+ import footprints
19
+
20
+ from vortex.algo.components import AlgoComponentError
21
+ import vortex.layout.monitor as _lmonitor
22
+
23
+ from .ifsroot import IFSParallel
24
+ from ..syntax.stdattrs import outputid_deco
25
+
26
+ #: No automatic export
27
+ __all__ = []
28
+
29
+ logger = loggers.getLogger(__name__)
30
+
31
+
32
#: Name of the pickle file holding the polling's persistent state
#: (written/read by :func:`fullpos_server_flypoll` in each output directory).
fullpos_server_flypoll_pickle = ".fullpos_server_flypoll"
33
+
34
+
35
class FullPosServerFlyPollPersistantState:
    """Persistent storage object for Fullpos's polling method.

    Instances are pickled to/from disk by :func:`fullpos_server_flypoll`
    so that successive polling calls remember what was already seen.
    """

    def __init__(self):
        # Latest processed term for each output prefix.  NB: a
        # functools.partial (not a lambda) is used as default factory so
        # that the defaultdict remains picklable; -9999 guarantees that
        # any real term compares greater than the initial cursor.
        self.cursor = collections.defaultdict(functools.partial(Time, -9999))
        # Output files already detected, grouped by output prefix.
        self.found = collections.defaultdict(list)
41
+
42
+
43
def fullpos_server_flypoll(
    sh, outputprefix, termfile, directories=(".",), **kwargs
):  # @UnusedVariable
    """Check sub-**directories** to determine whether new output files are available or not.

    :param sh: the System-like object used for all filesystem accesses
    :param outputprefix: prefix of the output files being polled
    :param termfile: name of the file incremented by the server that holds
        the latest completed term
    :param directories: the sub-directories to look into
    :return: the list of newly detected output files (paths relative to
        the current working directory)
    """
    new = list()
    for directory in directories:
        with sh.cdcontext(directory, create=True):
            # Restore (or create) the persistent polling state for this
            # directory.
            if sh.path.exists(fullpos_server_flypoll_pickle):
                fpoll_st = sh.pickle_load(fullpos_server_flypoll_pickle)
            else:
                fpoll_st = FullPosServerFlyPollPersistantState()
            try:
                if sh.path.exists(termfile):
                    # The term file's first line holds the latest term
                    # completed by the server.
                    with open(termfile) as wfh:
                        rawcursor = wfh.readline().rstrip("\n")
                    try:
                        cursor = Time(rawcursor)
                    except TypeError:
                        logger.warning(
                            'Unable to convert "%s" to a Time object',
                            rawcursor,
                        )
                        # Bail out early; the finally clause still saves
                        # the (unchanged) state.
                        return new
                    pre = re.compile(
                        r"^{:s}\w*\+(\d+(?::\d\d)?)(?:\.\w+)?$".format(
                            outputprefix
                        )
                    )
                    candidates = [pre.match(f) for f in sh.listdir()]
                    lnew = list()
                    for candidate in filterfalse(
                        lambda c: c is None, candidates
                    ):
                        # ".d" entries are working directories, not files.
                        if candidate.group(0).endswith(".d"):
                            continue
                        ctime = Time(candidate.group(1))
                        # Only report files newer than the previous cursor
                        # and not beyond the server's completed term.
                        if (
                            ctime > fpoll_st.cursor[outputprefix]
                            and ctime <= cursor
                        ):
                            lnew.append(candidate.group(0))
                    fpoll_st.cursor[outputprefix] = cursor
                    fpoll_st.found[outputprefix].extend(lnew)
                    new.extend(
                        [
                            sh.path.normpath(sh.path.join(directory, anew))
                            for anew in lnew
                        ]
                    )
            finally:
                # Always persist the state (even on early return).
                sh.pickle_dump(fpoll_st, fullpos_server_flypoll_pickle)
    return new
95
+
96
+
97
class FullposServerDiscoveredInputs:
    """Holds all kind of information on input files."""

    def __init__(self):
        # Initial-condition sections indexed by their model-side prefix.
        self.inidata = {}
        # One {prefix: section} mapping per model state to process.
        self.tododata = []
        # Output guesses paired with each *tododata* entry.
        self.guessdata = []
        # Number of model states available for each term.
        self.termscount = collections.defaultdict(int)
        # True as soon as at least one input is an expected resource.
        self.anyexpected = False
        # Minimum suffix length implied by the inputs' file format.
        self.inputsminlen = 0
        # Model-side prefix of the first model-state section.
        self.firstprefix = None

    def actual_suffixlen(self, minlen=None):
        """Find out the required suffixlen.

        :param minlen: lower bound on the result (defaults to
            :attr:`inputsminlen`)
        """
        effective_min = self.inputsminlen if minlen is None else minlen
        needed = int(math.floor(math.log10(len(self.tododata))))
        return max(effective_min, needed)
114
+
115
+
116
class FullPosServer(IFSParallel):
    """Fullpos Server for geometry transforms & post-processing in IFS-like Models.

    Input/Output files are labelled as follows:

    * Let ``INPUTFILE_0`` denote an input file name (the user can choose
      whichever name she/he wants provided that the associated input's
      section has the "ModelState" role).
    * For FA files: the corresponding output file will be
      ``INPUTFILE_0.domain.out`` where ``domain`` is the domain name chosen
      by the user in the namelist. If a Surfex output file is also created
      (it depends on the namelist) it will be named ``INPUTFILE_0.domain.sfx.out``
    * For GRIB files: the corresponding output file will be
      ``INPUTFILE_0.domain.grib.out``.
    * Some or all output files may be pre-positionned (data generated by the
      c903 will be appended on them). The corresponding section's role needs
      to be "OutputGuess". The local filename of the section needs to match
      the expected output filename. In our example it would be
      ``INPUTFILE_0.domain.out``.

    :note: To use this algocomponent, the c903's server needs to be activated
        in the namelist (NFPSERVER != 0).

    :note: With the current IFS/Arpege code, in order for the output's polling
        to work properly, an ``ECHFP`` witness file has to be incremented
        by the server, in each of the output directories.

    :note: Climatology files are not managed (only few sanity checks are
        performed). The user needs to name the input climatology file
        consistently with the c903's namelist. For role="InitialClim" sections,
        the filename should be something like ``Const.Clim.m[month]``. For
        role="TargetClim" sections the filename should be something like
        ``const.clim.[domain].m[month]`` where ``[domain]`` corresponds to
        the name of the output domain (as specified in the namelist file)
        and ``[month]`` corresponds to the month being dealt with (on 2 digits).

    Interesting features:

    * Input files can be expected (for on the fly processing)
    * Input files are dealt with in arbitrary order depending on their
      availability (useful for ensemble processing).
    * Output files can be promised

    """

    # Regexes matching input section roles (the optional capture group is
    # an extra model-side prefix, e.g. "ModelStateFOO" -> prefix "FOO").
    _INITIALCONDITION_ROLE = re.compile(r"InitialCondition((?:\w+)?)")
    _INPUTDATA_ROLE_STR = "ModelState"
    _INPUTDATA_ROLE = re.compile(r"ModelState((?:\w+)?)")
    _OUTPUTGUESS_ROLE = "OutputGuess"

    # File-name prefixes imposed by the c903 configuration.
    _MODELSIDE_INPUTPREFIX0 = "ICM"
    _MODELSIDE_INPUTPREFIX1 = "SH"
    _MODELSIDE_OUTPUTPREFIX = "PF"
    _MODELSIDE_OUTPUTPREFIX_GRIB = "GRIBPF"
    # File incremented by the server with the latest completed term.
    _MODELSIDE_TERMFILE = "./ECHFP"
    # Minimum number of digits in output/input numbering suffixes
    # (GRIB inputs require 6 digits).
    _MODELSIDE_OUT_SUFFIXLEN_MIN = 4
    _MODELSIDE_IND_SUFFIXLEN_MIN = 4
    _MODELSIDE_INE_SUFFIXLEN_MIN = dict(grib=6)

    # Server synchronisation behaviour flags (see the parent classes).
    _SERVERSYNC_RAISEONEXIT = False
    _SERVERSYNC_RUNONSTARTUP = False
    _SERVERSYNC_STOPONEXIT = False

    _footprint = [
        outputid_deco,
        dict(
            attr=dict(
                kind=dict(
                    values=[
                        "fpserver",
                    ],
                ),
                outdirectories=dict(
                    info="The list of possible output directories.",
                    type=footprints.stdtypes.FPList,
                    default=footprints.stdtypes.FPList(
                        [
                            ".",
                        ]
                    ),
                    optional=True,
                ),
                append_domain=dict(
                    # NOTE(review): the info string is missing its closing
                    # parenthesis (left as is: it is a runtime string).
                    info=(
                        "If defined, the output file for domain append_domain "
                        + "will be made a copy of the input file (prior to the "
                        + "server run"
                    ),
                    optional=True,
                ),
                basedate=dict(
                    info="The run date of the coupling generating process",
                    type=Date,
                    optional=True,
                ),
                xpname=dict(default="FPOS"),
                conf=dict(
                    default=903,
                ),
                timestep=dict(
                    default=1.0,
                ),
                timeout=dict(
                    type=int,
                    optional=True,
                    default=300,
                ),
                refreshtime=dict(
                    info="How frequently are the expected input files looked for ? (seconds)",
                    type=int,
                    optional=True,
                    default=20,
                ),
                server_run=dict(
                    # This is a rw attribute: it will be managed internally
                    values=[True, False]
                ),
                serversync_method=dict(
                    default="simple_socket",
                ),
                serversync_medium=dict(
                    default="nextfile_wait",
                ),
                maxpollingthreads=dict(
                    type=int,
                    optional=True,
                    default=8,
                ),
                flypoll=dict(
                    default="internal",
                ),
                defaultformat=dict(
                    info="Format for the legacy output files.",
                    default="fa",
                    optional=True,
                ),
            )
        ),
    ]
255
+
256
+ @property
257
+ def realkind(self):
258
+ return "fullpos"
259
+
260
+ def __init__(self, *args, **kw):
261
+ super().__init__(*args, **kw)
262
+ self._flyput_mapping_d = dict()
263
+
264
+ def flyput_outputmapping(self, item):
265
+ """Map an output file to its final name."""
266
+ sh = self.system
267
+ for out_re, data in self._flyput_mapping_d.items():
268
+ m_re = out_re.match(sh.path.basename(item))
269
+ if m_re:
270
+ return (
271
+ sh.path.join(
272
+ sh.path.dirname(item),
273
+ data[0].format(
274
+ m_re.group("fpdom"), m_re.group("suffix")
275
+ ),
276
+ ),
277
+ data[1],
278
+ )
279
+
280
    @cached_property
    def inputs(self):
        """Retrieve the lists in input sections/ResourceHandlers.

        Builds (once) a :class:`FullposServerDiscoveredInputs` object that
        gathers the initial conditions, the model states to process (sorted
        by term whenever possible), the per-term counts and the output
        guesses paired with their matching input file.
        """
        discovered = FullposServerDiscoveredInputs()

        # Initial conditions
        inisec = self.context.sequence.effective_inputs(
            role=self._INITIALCONDITION_ROLE
        )
        if inisec:
            for s in inisec:
                # The optional role suffix acts as a model-side prefix
                # (defaults to "SH").
                iprefix = (
                    self._INITIALCONDITION_ROLE.match(
                        s.alternate if s.role is None else s.role
                    ).group(1)
                    or self._MODELSIDE_INPUTPREFIX1
                )
                fprefix = self._MODELSIDE_INPUTPREFIX0 + iprefix
                if fprefix in discovered.inidata:
                    raise AlgoComponentError(
                        "Only one Initial Condition is allowed."
                    )
                else:
                    discovered.inidata[fprefix] = s

        # Model states
        todosec0 = self.context.sequence.effective_inputs(
            role=self._INPUTDATA_ROLE
        )
        todosec1 = collections.defaultdict(list)
        discovered.anyexpected = any(
            [isec.rh.is_expected() for isec in todosec0]
        )
        # Sorting by term is only possible when every resource has one.
        hasterms = all(
            [hasattr(isec.rh.resource, "term") for isec in todosec0]
        )
        # Sort things up (if possible)
        if hasterms:
            logger.info("Sorting input data based on the actual term.")
            todosec0 = sorted(todosec0, key=lambda s: self._actual_term(s.rh))
        if todosec0:
            for iseq, s in enumerate(todosec0):
                rprefix = (
                    self._INPUTDATA_ROLE.match(
                        s.alternate if s.role is None else s.role
                    ).group(1)
                    or self._MODELSIDE_INPUTPREFIX1
                )
                todosec1[rprefix].append(s)
                if iseq == 0:
                    # Find the "default" prefix and suffix len based on the first section
                    discovered.firstprefix = rprefix
                    discovered.inputsminlen = (
                        self._MODELSIDE_INE_SUFFIXLEN_MIN.get(
                            s.rh.container.actualfmt,
                            self._MODELSIDE_IND_SUFFIXLEN_MIN,
                        )
                    )
            iprefixes = sorted(todosec1.keys())
            if len(iprefixes) == 1:
                # Single prefix: one mapping per section.
                for s in todosec0:
                    discovered.tododata.append(
                        {self._MODELSIDE_INPUTPREFIX0 + iprefixes[0]: s}
                    )
            else:
                # Several prefixes: group the i-th section of each prefix
                # together (their counts must therefore be identical).
                if len({len(secs) for secs in todosec1.values()}) > 1:
                    raise AlgoComponentError(
                        "Inconsistent number of input data."
                    )
                for sections in zip(*[iter(todosec1[i]) for i in iprefixes]):
                    discovered.tododata.append(
                        {
                            self._MODELSIDE_INPUTPREFIX0 + k: v
                            for k, v in zip(iprefixes, sections)
                        }
                    )

        # Detect the number of terms based on the firstprefix
        if hasterms:
            for sections in discovered.tododata:
                act_term = self._actual_term(
                    sections[
                        self._MODELSIDE_INPUTPREFIX0 + discovered.firstprefix
                    ].rh
                )
                discovered.termscount[act_term] += 1

        # Look for guesses of output files
        guesses_sec0 = collections.defaultdict(list)
        guess_entry = collections.namedtuple(
            "guess_entry", ("sdir", "prefix", "domain", "suffix", "sec")
        )
        for sec in self.context.sequence.effective_inputs(
            role=self._OUTPUTGUESS_ROLE
        ):
            s_lpath = sec.rh.container.localpath()
            # Guesses must follow the BASE.domain[.sfx][.grib].out naming.
            s_match = self._o_algo_re.match(self.system.path.basename(s_lpath))
            if s_match:
                guesses_sec0[s_match.group("base")].append(
                    guess_entry(
                        self.system.path.dirname(s_lpath),
                        self._o_auto_prefix(
                            "grib"
                            if s_match.group("grib")
                            else self.defaultformat
                        ),
                        s_match.group("fpdom"),
                        s_match.group("suffix"),
                        sec,
                    )
                )
                discovered.anyexpected = (
                    discovered.anyexpected or sec.rh.is_expected()
                )
            else:
                logger.warning(
                    "Improper name for the following output guess < %s >. Ignoring it.",
                    s_lpath,
                )
        # Pair them with input file (based on their name)
        for iinput in discovered.tododata:
            isec = iinput[
                self._MODELSIDE_INPUTPREFIX0 + discovered.firstprefix
            ]
            discovered.guessdata.append(
                guesses_sec0.pop(
                    self.system.path.basename(isec.rh.container.localpath()),
                    (),
                )
            )
        if guesses_sec0:
            # Leftover guesses did not match any input file name.
            logger.warning(
                "Some input data were left unsed: < %s >", guesses_sec0
            )
        logger.info(
            "discovered guessdata are: < %s >", discovered.guessdata
        )

        return discovered
419
+
420
    @cached_property
    def object_namelists(self):
        """The list of object's namelists.

        Side effect: each namelist's content is updated (macros and, when
        ``fp_terms`` is given, the NFPOSTS entries of &NAMFPC) and saved
        back to disk if anything changed.
        """
        namrhs = [
            isec.rh
            for isec in self.context.sequence.effective_inputs(
                role="ObjectNamelist"
            )
            if isec.rh.resource.realkind == "namelist_fpobject"
        ]
        # Update the object's content
        for namrh in namrhs:
            namsave = False
            if namrh.resource.fp_cmodel is not None:
                self._set_nam_macro(
                    namrh.contents,
                    namrh.container.localpath(),
                    "FP_CMODEL",
                    namrh.resource.fp_cmodel,
                )
                namsave = True
            if namrh.resource.fp_lextern is not None:
                self._set_nam_macro(
                    namrh.contents,
                    namrh.container.localpath(),
                    "FP_LEXTERN",
                    namrh.resource.fp_lextern,
                )
                namsave = True
            if namrh.resource.fp_terms is not None:
                if not self.inputs.termscount:
                    raise AlgoComponentError(
                        "In this use case, all input data must have a term attribute"
                    )
                active_terms = {Time(t) for t in namrh.resource.fp_terms}
                # Generate the list of NFPOSTS
                # global_i runs over all model states (in term order); only
                # the indices of states whose term is active are retained.
                global_i = 0
                nfposts = list()
                for term, n_term in sorted(self.inputs.termscount.items()):
                    if term in active_terms:
                        nfposts.extend(range(global_i, global_i + n_term))
                    global_i += n_term
                # Get the NAMFPC block
                try:
                    nfpc = namrh.contents["NAMFPC"]
                except KeyError:
                    raise AlgoComponentError(
                        "NAMFPC should be defined in {:s}".format(
                            namrh.container.localpath()
                        )
                    )
                # Sanity check
                for k in nfpc.keys():
                    if k.startswith("NFPOSTS"):
                        raise AlgoComponentError(
                            "&NAMFPC NFPOSTS*(*) / entries should not be defined in {:s}".format(
                                namrh.container.localpath()
                            )
                        )
                # Write NFPOSTS to NAMFPC
                # NB: negative values — presumably the IFS convention for
                # "index" (as opposed to "time") entries; TODO confirm.
                nfpc["NFPOSTS(0)"] = -len(nfposts)
                for i, v in enumerate(nfposts):
                    nfpc["NFPOSTS({:d})".format(i + 1)] = -v
                logger.info(
                    "The NAMFPC namelist in %s was updated.",
                    namrh.container.localpath(),
                )
                logger.debug(
                    "The updated NAMFPC namelist in %s is:\n%s",
                    namrh.container.localpath(),
                    nfpc,
                )
                namsave = True
            if namsave:
                namrh.save()
        return namrhs
496
+
497
+ @cached_property
498
+ def xxtmapping(self):
499
+ """A handy dictionary about selection namelists."""
500
+ namxxrh = collections.defaultdict(dict)
501
+ for isec in self.context.sequence.effective_inputs(
502
+ role="FullPosSelection", kind="namselect"
503
+ ):
504
+ dpath = self.system.path.dirname(isec.rh.container.localpath())
505
+ namxxrh[dpath][isec.rh.resource.term] = isec.rh
506
+ if namxxrh and not self.inputs.termscount:
507
+ raise AlgoComponentError(
508
+ "In this use case, all input data must have a term attribute"
509
+ )
510
+ return namxxrh
511
+
512
+ @cached_property
513
+ def _i_fmt(self):
514
+ """The input files format (as expected by the c903)."""
515
+ return (
516
+ "{:s}"
517
+ + "{:s}+".format(self.xpname)
518
+ + "{:0"
519
+ + str(self.inputs.actual_suffixlen())
520
+ + "d}"
521
+ )
522
+
523
+ @cached_property
524
+ def _o_raw_fmt(self):
525
+ """The output files format (as imposed by the c903)."""
526
+ return (
527
+ "{:s}"
528
+ + "{:s}".format(self.xpname)
529
+ + "{:s}+"
530
+ + "{:0"
531
+ + str(
532
+ self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN)
533
+ )
534
+ + "d}{:s}"
535
+ )
536
+
537
+ @cached_property
538
+ def _o_re_fmt(self):
539
+ """The output files regex (as imposed by the c903)."""
540
+ return (
541
+ "^{:s}"
542
+ + "{:s}".format(self.xpname)
543
+ + r"(?P<fpdom>\w+)\+"
544
+ + "{:0"
545
+ + str(
546
+ self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN)
547
+ )
548
+ + r"d}(?P<suffix>(?:\.sfx)?)$"
549
+ )
550
+
551
+ @cached_property
552
+ def _o_init_re_fmt(self):
553
+ """The output files regex (as imposed by the c903)."""
554
+ return (
555
+ "^{:s}"
556
+ + "{:s}".format(self.xpname)
557
+ + r"(?P<fpdom>\w+){:s}(?P<suffix>(?:\.sfx)?)$"
558
+ )
559
+
560
+ @cached_property
561
+ def _o_algo_re(self):
562
+ """The regex for any output (as imposed by our AlgoComponent)."""
563
+ return re.compile(
564
+ r"(?P<base>.+)\.(?P<fpdom>\w+)(?P<suffix>(?:\.sfx)?)(?P<grib>(?:\.grib)?)\.out$"
565
+ )
566
+
567
    @cached_property
    def _o_suffix(self):
        """The FAs output suffix (as imposed by our AlgoComponent)."""
        # Template filled later with (domain, ".sfx" or "").
        return ".{:s}{:s}.out"
571
+
572
    @cached_property
    def _o_grb_suffix(self):
        """The GRIBs output suffix (as imposed by our AlgoComponent)."""
        # Template filled later with (domain, ".sfx" or "").
        return ".{:s}{:s}.grib.out"
576
+
577
+ def _o_auto_prefix(self, fmt):
578
+ """Return the appropriate output files prefix (as imposed by the c903)."""
579
+ return dict(grib=self._MODELSIDE_OUTPUTPREFIX_GRIB).get(
580
+ fmt, self._MODELSIDE_OUTPUTPREFIX
581
+ )
582
+
583
+ def _actual_term(self, rhandler):
584
+ """Compute the actual Resource Handler term."""
585
+ rterm = rhandler.resource.term
586
+ if self.basedate is not None:
587
+ rterm += rhandler.resource.date - self.basedate
588
+ return rterm
589
+
590
+ def _add_output_mapping(self, outputs_mapping, i, out_re, out_fname):
591
+ """Add mappings for output file."""
592
+ # FA/default file
593
+ re_default = out_re.format(self._MODELSIDE_OUTPUTPREFIX, i)
594
+ what_default = (out_fname + self._o_suffix, self.defaultformat)
595
+ outputs_mapping[re.compile(re_default)] = what_default
596
+ # GRIB files
597
+ re_grib = out_re.format(self._MODELSIDE_OUTPUTPREFIX_GRIB, i)
598
+ what_grib = (out_fname + self._o_grb_suffix, "grib")
599
+ outputs_mapping[re.compile(re_grib)] = what_grib
600
+ logger.info(
601
+ "Output %s mapped as %s. Output %s mapped as %s.",
602
+ re_default,
603
+ what_default[0],
604
+ re_grib,
605
+ what_grib[0],
606
+ )
607
+
608
    def _link_input(self, iprefix, irh, i, inputs_mapping, outputs_mapping):
        """Link an input file and update the mappings dictionaries.

        :param iprefix: the model-side input prefix (e.g. ``ICMSH``)
        :param irh: the input's resource handler
        :param i: the input's sequence number
        :param inputs_mapping: updated with localpath -> model-side name
        :param outputs_mapping: updated with the matching output patterns
        """
        sourcepath = irh.container.localpath()
        inputs_mapping[sourcepath] = self._i_fmt.format(iprefix, i)
        self.system.cp(
            sourcepath,
            inputs_mapping[sourcepath],
            intent="in",
            fmt=irh.container.actualfmt,
        )
        logger.info("%s copied as %s.", sourcepath, inputs_mapping[sourcepath])
        # Output mappings are only registered for the "reference" prefix.
        if iprefix == self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix:
            self._add_output_mapping(
                outputs_mapping,
                i,
                self._o_re_fmt,
                self.system.path.basename(sourcepath),
            )
            # Pre-position a copy of the input as the output file for the
            # *append_domain* domain (the server will append to it).
            if self.append_domain:
                outputpath = self._o_raw_fmt.format(
                    self._o_auto_prefix(irh.container.actualfmt),
                    self.append_domain,
                    i,
                    "",
                )

                if self.outdirectories:
                    todo = [
                        self.system.path.join(d, outputpath)
                        for d in self.outdirectories
                    ]
                else:
                    todo = [
                        outputpath,
                    ]
                for a_outputpath in todo:
                    self.system.cp(
                        sourcepath,
                        a_outputpath,
                        intent="inout",
                        fmt=irh.container.actualfmt,
                    )
                    logger.info(
                        "output file prepared: %s copied (rw) to %s.",
                        sourcepath,
                        a_outputpath,
                    )
655
+
656
+ def _move_output_guess(self, iguess, i):
657
+ """Move the output file guesses to their final location."""
658
+ sourcepath = iguess.sec.rh.container.localpath()
659
+ destpath = self.system.path.join(
660
+ iguess.sdir,
661
+ self._o_raw_fmt.format(
662
+ iguess.prefix, iguess.domain, i, iguess.suffix
663
+ ),
664
+ )
665
+ self.system.mv(
666
+ sourcepath, destpath, fmt=iguess.sec.rh.container.actualfmt
667
+ )
668
+ logger.info("output guess %s was moved to %s.", sourcepath, destpath)
669
+
670
+ def _link_xxt(self, todorh, i):
671
+ """If necessary, link in the appropriate xxtNNNNNNMM file."""
672
+ for sdir, tdict in self.xxtmapping.items():
673
+ xxtrh = tdict.get(self._actual_term(todorh), None)
674
+ if xxtrh is not None:
675
+ xxtsource = self.system.path.relpath(
676
+ xxtrh.container.abspath, sdir
677
+ )
678
+ # The file is expected to follow the xxtDDDDHHMM syntax where DDDD
679
+ # is the number of days
680
+ days_hours = (i // 24) * 100 + i % 24
681
+ xxttarget = "xxt{:06d}00".format(days_hours)
682
+ xxttarget = self.system.path.join(sdir, xxttarget)
683
+ self.system.symlink(xxtsource, xxttarget)
684
+ logger.info("XXT %s linked in as %s.", xxtsource, xxttarget)
685
+
686
    def _init_poll_and_move(self, outputs_mapping):
        """Deal with the PF*INIT file.

        Looks for ``PF<xpname>*INIT`` files, maps each one to its final
        name using *outputs_mapping* and copies it there (unless already
        done).  Returns the list of newly created output names.
        """
        sh = self.system
        candidates = self.system.glob(
            "{:s}{:s}*INIT".format(self._MODELSIDE_OUTPUTPREFIX, self.xpname)
        )
        outputnames = list()
        for thisdata in candidates:
            mappeddata = None
            # First matching pattern wins.
            for out_re, data in outputs_mapping.items():
                m_re = out_re.match(thisdata)
                if m_re:
                    mappeddata = (
                        sh.path.join(
                            sh.path.dirname(thisdata),
                            data[0].format(
                                m_re.group("fpdom"), m_re.group("suffix")
                            ),
                        ),
                        data[1],
                    )
                    break
            if mappeddata is None:
                raise AlgoComponentError(
                    "The mapping failed for {:s}.".format(thisdata)
                )
            # Already dealt with ?
            if not self.system.path.exists(mappeddata[0]):
                logger.info(
                    "Linking <%s> to <%s> (fmt=%s).",
                    thisdata,
                    mappeddata[0],
                    mappeddata[1],
                )
                outputnames.append(mappeddata[0])
                self.system.cp(
                    thisdata, mappeddata[0], intent="in", fmt=mappeddata[1]
                )
        return outputnames
725
+
726
+ def _poll_and_move(self, outputs_mapping):
727
+ """Call **io_poll** and rename available output files."""
728
+ sh = self.system
729
+ data = self.manual_flypolling()
730
+ outputnames = list()
731
+ for thisdata in data:
732
+ mappeddata = None
733
+ for out_re, data in outputs_mapping.items():
734
+ m_re = out_re.match(sh.path.basename(thisdata))
735
+ if m_re:
736
+ mappeddata = (
737
+ sh.path.join(
738
+ sh.path.dirname(thisdata),
739
+ data[0].format(
740
+ m_re.group("fpdom"), m_re.group("suffix")
741
+ ),
742
+ ),
743
+ data[1],
744
+ )
745
+ break
746
+ if mappeddata is None:
747
+ raise AlgoComponentError(
748
+ "The mapping failed for {:s}.".format(thisdata)
749
+ )
750
+ logger.info(
751
+ "Linking <%s> to <%s> (fmt=%s).",
752
+ thisdata,
753
+ mappeddata[0],
754
+ mappeddata[1],
755
+ )
756
+ outputnames.append(mappeddata[0])
757
+ self.system.cp(
758
+ thisdata, mappeddata[0], intent="in", fmt=mappeddata[1]
759
+ )
760
+ return outputnames
761
+
762
+ def _deal_with_promises(self, outputs_mapping, pollingcb):
763
+ if self.promises:
764
+ seen = pollingcb(outputs_mapping)
765
+ for afile in seen:
766
+ candidates = [
767
+ x
768
+ for x in self.promises
769
+ if x.rh.container.abspath
770
+ == self.system.path.abspath(afile)
771
+ ]
772
+ if candidates:
773
+ logger.info("The output data is promised <%s>", afile)
774
+ bingo = candidates.pop()
775
+ bingo.put(incache=True)
776
+
777
    def prepare(self, rh, opts):
        """Various sanity checks + namelist tweaking.

        Customises the object namelists, checks the geometry consistency
        of all input/climatology files, checks the selection namelists and
        links in the initial-condition file.
        """
        super().prepare(rh, opts)

        if self.object_namelists:
            self.system.subtitle("Object Namelists customisation")
            for o_nam in self.object_namelists:
                # a/c cy44: &NAMFPIOS NFPDIGITS=__SUFFIXLEN__, /
                self._set_nam_macro(
                    o_nam.contents,
                    o_nam.container.localpath(),
                    "SUFFIXLEN",
                    self.inputs.actual_suffixlen(
                        self._MODELSIDE_OUT_SUFFIXLEN_MIN
                    ),
                )
                if o_nam.contents.dumps_needs_update:
                    logger.info(
                        "Rewritting the %s namelists file.",
                        o_nam.container.actualpath(),
                    )
                    o_nam.save()

        self.system.subtitle("Dealing with various input files")

        # Sanity check over climfiles and geometries
        input_geo = {
            sec.rh.resource.geometry
            for sdict in self.inputs.tododata
            for sec in sdict.values()
        }
        if len(input_geo) == 0:
            raise AlgoComponentError("No input data are provided, ...")
        elif len(input_geo) > 1:
            raise AlgoComponentError(
                "Multiple geometries are not allowed for input data."
            )
        else:
            # Exactly one geometry: keep it for the checks below.
            input_geo = input_geo.pop()

        input_climgeo = {
            x.rh.resource.geometry
            for x in self.context.sequence.effective_inputs(
                role=("InputClim", "InitialClim")
            )
        }
        if len(input_climgeo) == 0:
            logger.info("No input clim provided. Going on without it...")
        elif len(input_climgeo) > 1:
            raise AlgoComponentError(
                "Multiple geometries are not allowed for input climatology."
            )
        else:
            if input_climgeo.pop() != input_geo:
                raise AlgoComponentError(
                    "The input data and input climatology geometries does not match."
                )

        # Initial Condition geometry sanity check
        if self.inputs.inidata and any(
            [
                sec.rh.resource.geometry != input_geo
                for sec in self.inputs.inidata.values()
            ]
        ):
            raise AlgoComponentError(
                "The Initial Condition geometry differs from other input data."
            )

        # Sanity check on target climatology files
        target_climgeos = {
            x.rh.resource.geometry
            for x in self.context.sequence.effective_inputs(role="TargetClim")
        }
        if len(target_climgeos) == 0:
            logger.info("No target clim are provided. Going on without it...")

        # Sanity check on selection namelists
        if self.xxtmapping:
            for tdict in self.xxtmapping.values():
                # Every selection term must have matching input data
                # (proper-subset test).
                if {
                    self._actual_term(sec.rh)
                    for sdict in self.inputs.tododata
                    for sec in sdict.values()
                } < set(tdict.keys()):
                    raise AlgoComponentError(
                        "The list of terms between input data and selection namelists differs"
                    )
        else:
            logger.info("No selection namelists detected. That's fine")

        # Link in the initial condition file (if necessary)
        for iprefix, isec in self.inputs.inidata.items():
            i_init = "{:s}{:s}INIT".format(iprefix, self.xpname)
            if isec.rh.container.basename != i_init:
                self.system.cp(
                    isec.rh.container.localpath(),
                    i_init,
                    intent="in",
                    fmt=isec.rh.container.actualfmt,
                )
                logger.info(
                    "Initial condition file %s copied as %s.",
                    isec.rh.container.localpath(),
                    i_init,
                )
883
+
884
+ def find_namelists(self, opts=None):
885
+ """Find any namelists candidates in actual context inputs."""
886
+ return [
887
+ x.rh
888
+ for x in self.context.sequence.effective_inputs(
889
+ role="Namelist", kind="namelist"
890
+ )
891
+ ]
892
+
893
    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        """Customise the main namelist prior to execution.

        Substitutes the macros this component relies on
        (``SERVERSYNC_SCRIPT``, ``IOPOLL_WHITNESSFILE``, ``SUFFIXLEN``,
        ``INPUT_SUFFIXLEN``, ``INPUTDATALEN`` and possibly ``FP_CMODEL``)
        and, when object namelists are provided, auto-generates the
        ``NAMFPOBJ`` namelist block.

        :param rh: the resource handler of the namelist being processed.
        :param namcontents: the namelist contents object to update in place.
        :param namlocal: the local path of the namelist (used for logging).
        :return: always ``True`` (i.e. the namelist was processed).
        """
        super().prepare_namelist_delta(rh, namcontents, namlocal)
        # With cy43: &NAMCT0 CSCRIPT_PPSERVER=__SERVERSYNC_SCRIPT__, /
        if self.inputs.anyexpected:
            # Expected inputs: the binary will run in server mode and needs
            # the synchronisation script's path.
            self._set_nam_macro(
                namcontents,
                namlocal,
                "SERVERSYNC_SCRIPT",
                self.system.path.join(".", self.serversync_medium),
            )
        else:
            # Do not harass the filesystem...
            self._set_nam_macro(
                namcontents, namlocal, "SERVERSYNC_SCRIPT", " "
            )
        # With cy43: &NAMCT0 CFPNCF=__IOPOLL_WHITNESSFILE__, /
        self._set_nam_macro(
            namcontents,
            namlocal,
            "IOPOLL_WHITNESSFILE",
            self._MODELSIDE_TERMFILE,
        )
        # With cy43: No matching namelist key
        # a/c cy44: &NAMFPIOS NFPDIGITS=__SUFFIXLEN__, /
        self._set_nam_macro(
            namcontents,
            namlocal,
            "SUFFIXLEN",
            self.inputs.actual_suffixlen(self._MODELSIDE_OUT_SUFFIXLEN_MIN),
        )
        # No matching namelist yet
        self._set_nam_macro(
            namcontents,
            namlocal,
            "INPUT_SUFFIXLEN",
            self.inputs.actual_suffixlen(),
        )
        # With cy43: &NAMCT0 NFRPOS=__INPUTDATALEN__, /
        # NOTE(review): the value is negated — presumably a model-side
        # convention for NFRPOS; confirm against the IFS/Arpege namelist doc.
        self._set_nam_macro(
            namcontents, namlocal, "INPUTDATALEN", -len(self.inputs.tododata)
        )
        # Auto generate the list of namelists for the various objects
        if self.object_namelists:
            # Only create NAMFPOBJ when it is absent or empty: a hand-written
            # non-empty block takes precedence.
            if (
                "NAMFPOBJ" not in namcontents
                or len(namcontents["NAMFPOBJ"]) == 0
            ):
                nb_o = NamelistBlock("NAMFPOBJ")
                nb_o["NFPOBJ"] = len(self.object_namelists)
                for i_nam, nam in enumerate(self.object_namelists):
                    # Namelist arrays are 1-based, hence the "+ 1" offsets.
                    if nam.resource.fp_conf:
                        nb_o["NFPCONF({:d})".format(i_nam + 1)] = (
                            nam.resource.fp_conf
                        )
                    nb_o["CNAMELIST({:d})".format(i_nam + 1)] = (
                        nam.container.localpath()
                    )
                namcontents["NAMFPOBJ"] = nb_o
                logger.info(
                    'The following namelist block has been added to "%s":\n%s',
                    namlocal,
                    nb_o.dumps(),
                )
            else:
                logger.warning(
                    'The NAMFPOBJ namelist in "%s" is not empty. Leaving it as it is',
                    namlocal,
                )
        # Just in case FP_CMODEL is defined in the main namelist
        if self.outputid is not None and any(
            ["FP_CMODEL" in nam_b.macros() for nam_b in namcontents.values()]
        ):
            self._set_nam_macro(
                namcontents, namlocal, "FP_CMODEL", self.outputid
            )
        return True
969
+
970
+ def spawn_pre_dirlisting(self):
971
+ """Print a directory listing just before run."""
972
+ super().spawn_pre_dirlisting()
973
+ for sdir in self.outdirectories:
974
+ self.system.subtitle(
975
+ "{:s} : {:s} sub-directory listing (pre-execution)".format(
976
+ self.realkind, sdir
977
+ )
978
+ )
979
+ self.system.dir(sdir, output=False, fatal=False)
980
+
981
+ def spawn_hook(self):
982
+ """Usually a good habit to dump the fort.4 namelist."""
983
+ super().spawn_hook()
984
+ for o_nam in self.object_namelists:
985
+ self.system.subtitle(
986
+ "{:s} : dump namelist <{:s}>".format(
987
+ self.realkind, o_nam.container.localpath()
988
+ )
989
+ )
990
+ self.system.cat(o_nam.container.localpath(), output=False)
991
+
992
    def execute(self, rh, opts):
        """Server still or Normal execution depending on the input sequence.

        Two mutually exclusive paths are taken:

        * When some inputs are *expected* resources, the binary is (re)started
          in server mode (``server_run=True``): an input monitor waits for the
          data to show up, feeds the server term by term, and output promises
          are honoured as results appear.
        * Otherwise a single direct run is performed (``server_run=False``):
          all input links are created in advance, the binary runs once, and
          outputs are mapped to their destinations afterwards with ``io_poll``.

        :param rh: the resource handler of the executable.
        :param opts: the dictionary of execution options.
        :raises OSError: when the input-waiting loop times out.
        """
        sh = self.system

        # Input and Output mapping
        inputs_mapping = dict()
        outputs_mapping = dict()

        # Initial condition file ?
        if self.inputs.inidata:
            for iprefix, isec in self.inputs.inidata.items():
                # The initial condition resource may be expected
                self.grab(isec)
                # Fix potential links and output mappings
                sourcepath = isec.rh.container.basename
                if (
                    iprefix
                    == self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix
                ):
                    self._add_output_mapping(
                        outputs_mapping,
                        "INIT",
                        self._o_init_re_fmt,
                        sourcepath,
                    )
                i_init = "{:s}{:s}INIT".format(iprefix, self.xpname)
                if isec.rh.container.basename != i_init:
                    self.system.cp(
                        sourcepath,
                        i_init,
                        intent="in",
                        fmt=isec.rh.container.actualfmt,
                    )
                    logger.info(
                        "Initial condition file %s copied as %s.",
                        isec.rh.container.localpath(),
                        i_init,
                    )
        else:
            if self.inputs.tododata:
                # Just in case the INIT file is transformed
                fakesource = (
                    self._MODELSIDE_INPUTPREFIX0
                    + self.inputs.firstprefix
                    + self.xpname
                    + "INIT"
                )
                self._add_output_mapping(
                    outputs_mapping, "INIT", self._o_init_re_fmt, fakesource
                )

        # Initialise the flying stuff
        self.flyput = False  # Do not use flyput every time...
        # Only poll for the output prefixes actually promised (plain vs grib)
        flyprefixes = set()
        for s in self.promises:
            lpath = s.rh.container.localpath()
            if lpath.endswith(".grib.out"):
                flyprefixes.add(self._MODELSIDE_OUTPUTPREFIX_GRIB)
            elif lpath.endswith(".out"):
                flyprefixes.add(self._MODELSIDE_OUTPUTPREFIX)
        self.io_poll_args = tuple(flyprefixes)
        self.io_poll_kwargs = dict(directories=tuple(set(self.outdirectories)))
        for directory in set(self.outdirectories):
            sh.mkdir(directory)  # Create possible output directories
        if self.flypoll == "internal":
            # Use the in-process polling function rather than an external tool
            self.io_poll_method = functools.partial(fullpos_server_flypoll, sh)
            self.io_poll_kwargs["termfile"] = sh.path.basename(
                self._MODELSIDE_TERMFILE
            )
        self.flymapping = True
        self._flyput_mapping_d = outputs_mapping

        # Deal with XXT files
        if self.xxtmapping:
            for i, istuff in enumerate(self.inputs.tododata):
                self._link_xxt(
                    istuff[
                        self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix
                    ].rh,
                    i,
                )

        if self.inputs.anyexpected:
            # Some server sync here...
            self.server_run = True
            self.system.subtitle("Starting computation with server_run=T")

            # Process the data in chronological order ?
            ordered_processing = self.xxtmapping or any(
                [
                    o_rh.resource.fp_terms is not None
                    for o_rh in self.object_namelists
                ]
            )
            if ordered_processing:
                logger.info("Input data will be processed chronologicaly.")

            # IO poll settings
            self.io_poll_kwargs["nthreads"] = self.maxpollingthreads

            # Is there already an Initial Condition file ?
            # If so, start the binary...
            if self.inputs.inidata:
                super().execute(rh, opts)
                # Did the server stopped ?
                if not self.server_alive():
                    logger.error("Server initialisation failed.")
                    return
                self._deal_with_promises(
                    outputs_mapping, self._init_poll_and_move
                )

            # Setup the InputMonitor: one BasicGang per term's input bundle,
            # wrapped in a MetaGang carrying the (inputs, guesses, term) info.
            all_entries = set()
            metagang = _lmonitor.MetaGang()
            cur_term = None
            cur_term_gangs = set()
            prev_term_gangs = set()
            for istuff, iguesses in zip(
                self.inputs.tododata, self.inputs.guessdata
            ):
                iinputs = {
                    _lmonitor.InputMonitorEntry(s) for s in istuff.values()
                }
                iinputs |= {
                    _lmonitor.InputMonitorEntry(g.sec) for g in iguesses
                }
                iterm = self._actual_term(
                    istuff[
                        self._MODELSIDE_INPUTPREFIX0 + self.inputs.firstprefix
                    ].rh
                )
                all_entries.update(iinputs)
                bgang = _lmonitor.BasicGang()
                bgang.add_member(*iinputs)
                igang = _lmonitor.MetaGang()
                igang.info = (istuff, iguesses, iterm)
                igang.add_member(bgang)
                # If needed, wait for the previous terms to complete
                if ordered_processing:
                    if cur_term is not None and cur_term != iterm:
                        # Detect term's change
                        prev_term_gangs = cur_term_gangs
                        cur_term_gangs = set()
                    if prev_term_gangs:
                        # Wait for the gangs of the previous terms
                        igang.add_member(*prev_term_gangs)
                    # Save things up for the next time
                    cur_term_gangs.add(igang)
                    cur_term = iterm
                metagang.add_member(igang)
            bm = _lmonitor.ManualInputMonitor(
                self.context,
                all_entries,
                caching_freq=self.refreshtime,
            )

            # Start the InputMonitor
            tmout = False
            current_i = 0
            server_stopped = False
            with bm:
                while not bm.all_done or len(bm.available) > 0:
                    # Fetch available inputs and sort them
                    ibatch = list()
                    while metagang.has_collectable():
                        thegang = metagang.pop_collectable()
                        ibatch.append(thegang.info)
                    ibatch.sort(
                        key=lambda item: item[2]
                    )  # Sort according to the term

                    # Deal with the various available inputs
                    for istuff, iguesses, iterm in ibatch:
                        sh.highlight(
                            "The Fullpos Server is triggered (step={:d})...".format(
                                current_i
                            )
                        )

                        # Link for the init file (if needed)
                        if current_i == 0 and not self.inputs.inidata:
                            for iprefix, isec in istuff.items():
                                i_init = "{:s}{:s}INIT".format(
                                    iprefix, self.xpname
                                )
                                if not sh.path.exists(i_init):
                                    sh.cp(
                                        isec.rh.container.localpath(),
                                        i_init,
                                        intent="in",
                                        fmt=isec.rh.container.actualfmt,
                                    )
                                    logger.info(
                                        "%s copied as %s. For initialisation purposes only.",
                                        isec.rh.container.localpath(),
                                        i_init,
                                    )
                            # First batch with no dedicated inidata: the
                            # server is started here instead.
                            super().execute(rh, opts)
                            # Did the server stopped ?
                            if not self.server_alive():
                                logger.error("Server initialisation failed.")
                                return
                            self._deal_with_promises(
                                outputs_mapping, self._init_poll_and_move
                            )

                        # Link input files
                        for iprefix, isec in istuff.items():
                            self._link_input(
                                iprefix,
                                isec.rh,
                                current_i,
                                inputs_mapping,
                                outputs_mapping,
                            )
                        for iguess in iguesses:
                            self._move_output_guess(iguess, current_i)

                        # Let's go...
                        super().execute(rh, opts)
                        self._deal_with_promises(
                            outputs_mapping, self._poll_and_move
                        )
                        current_i += 1

                        # Did the server stopped ?
                        if not self.server_alive():
                            server_stopped = True
                            if not bm.all_done:
                                logger.error(
                                    "The server stopped but everything wasn't processed..."
                                )
                            break

                    if server_stopped:
                        break

                    if not (bm.all_done or metagang.has_collectable()):
                        # Timeout ?
                        tmout = bm.is_timedout(self.timeout)
                        if tmout:
                            break
                        # Wait a little bit :-)
                        time.sleep(1)
                        bm.health_check(interval=30)

            # Report inputs that never showed up; make the failure fatal
            # (delayed) when requested.
            for failed_file in [
                e.section.rh.container.localpath() for e in bm.failed.values()
            ]:
                logger.error(
                    "We were unable to fetch the following file: %s",
                    failed_file,
                )
                if self.fatal:
                    self.delayed_exception_add(
                        IOError("Unable to fetch {:s}".format(failed_file)),
                        traceback=False,
                    )

            if tmout:
                raise OSError("The waiting loop timed out")

        else:
            # Direct Run !
            self.server_run = False
            self.system.subtitle("Starting computation with server_run=F")

            # Link for the inifile (if needed)
            if not self.inputs.inidata:
                for iprefix, isec in self.inputs.tododata[0].items():
                    i_init = "{:s}{:s}INIT".format(iprefix, self.xpname)
                    if not sh.path.exists(i_init):
                        sh.cp(
                            isec.rh.container.localpath(),
                            i_init,
                            intent="in",
                            fmt=isec.rh.container.actualfmt,
                        )
                        logger.info(
                            "%s copied as %s. For initialisation purposes only.",
                            isec.rh.container.localpath(),
                            i_init,
                        )

            # Create all links well in advance
            for i, (iinputs, iguesses) in enumerate(
                zip(self.inputs.tododata, self.inputs.guessdata)
            ):
                for iprefix, isec in iinputs.items():
                    self._link_input(
                        iprefix, isec.rh, i, inputs_mapping, outputs_mapping
                    )
                for iguess in iguesses:
                    self._move_output_guess(iguess, i)

            # On the fly ?
            if self.promises:
                self.flyput = True

            # Let's roll !
            super().execute(rh, opts)

            # Map all outputs to destination (using io_poll)
            self.io_poll_args = tuple(
                [
                    self._MODELSIDE_OUTPUTPREFIX,
                    self._MODELSIDE_OUTPUTPREFIX_GRIB,
                ]
            )
            self._init_poll_and_move(outputs_mapping)
            self._poll_and_move(outputs_mapping)