vortex_nwp-2.0.0-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/algo/coupling.py
@@ -0,0 +1,831 @@
+"""
+AlgoComponents dedicated to the coupling between NWP models.
+"""
+
+import re
+import footprints
+
+from bronx.compat.functools import cached_property
+from bronx.fancies import loggers
+from bronx.stdtypes import date
+
+from .ifsroot import IFSParallel
+from ..tools.drhook import DrHookDecoMixin
+from vortex.algo.components import AlgoComponentError, BlindRun, Parallel
+from vortex.algo.components import (
+    AlgoComponentDecoMixin,
+    algo_component_deco_mixin_autodoc,
+)
+from vortex.layout.dataflow import intent
+from vortex.tools.grib import EcGribDecoMixin
+
+from .forecasts import FullPos
+
+#: No automatic export
+__all__ = []
+
+logger = loggers.getLogger(__name__)
+
+
+coupling_basedate_fp = footprints.Footprint(
+    attr=dict(
+        basedate=dict(
+            info="The run date of the coupling generating process",
+            type=date.Date,
+            optional=True,
+        )
+    )
+)
+
+
+@algo_component_deco_mixin_autodoc
+class CouplingBaseDateNamMixin(AlgoComponentDecoMixin):
+    """Add a basedate attribute and make namelist substitution."""
+
+    _MIXIN_EXTRA_FOOTPRINTS = (coupling_basedate_fp,)
+
+    def _prepare_basedate_hook(self, rh, opts):
+        """Update the namelist with date information."""
+
+        def set_nam_macro(namrh, macro, value):
+            namrh.contents.setmacro(macro, value)
+            logger.info(
+                "Setup macro %s=%s in %s",
+                macro,
+                str(value),
+                namrh.container.actualpath(),
+            )
+
+        for namsec in self.context.sequence.effective_inputs(
+            kind=("namelist",)
+        ):
+            if self.basedate is not None:
+                set_nam_macro(namsec.rh, "YYYY", int(self.basedate.year))
+                set_nam_macro(namsec.rh, "MM", int(self.basedate.month))
+                set_nam_macro(namsec.rh, "DD", int(self.basedate.day))
+            if namsec.rh.contents.dumps_needs_update:
+                namsec.rh.save()
+
+    _MIXIN_PREPARE_HOOKS = (_prepare_basedate_hook,)
+
+
+class Coupling(FullPos):
+    """Coupling for IFS-like LAM Models.
+
+    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
+    """
+
+    _footprint = [
+        coupling_basedate_fp,
+        dict(
+            info="Create coupling files for a Limited Area Model.",
+            attr=dict(
+                kind=dict(
+                    values=["coupling"],
+                ),
+            ),
+        ),
+    ]
+
+    @property
+    def realkind(self):
+        return "coupling"
+
+    def prepare(self, rh, opts):
+        """Default pre-link for namelist file and domain change."""
+        super().prepare(rh, opts)
+        namsec = self.setlink(
+            initrole="Namelist", initkind="namelist", initname="fort.4"
+        )
+        for nam in [x.rh for x in namsec if "NAMFPC" in x.rh.contents]:
+            logger.info('Substitute "AREA" to CFPDOM namelist entry')
+            nam.contents["NAMFPC"]["CFPDOM(1)"] = "AREA"
+            nam.save()
+
+    def execute(self, rh, opts):
+        """Loop on the various initial conditions provided."""
+
+        sh = self.system
+
+        cplsec = self.context.sequence.effective_inputs(
+            role=("InitialCondition", "CouplingSource"),
+            kind=("historic", "analysis"),
+        )
+        cplsec.sort(key=lambda s: s.rh.resource.term)
+        ininc = self.naming_convention("ic", rh)
+        infile = ininc()
+        isMany = len(cplsec) > 1
+        outprefix = "PF{:s}AREA".format(self.xpname)
+
+        cplguess = self.context.sequence.effective_inputs(role="Guess")
+        cplguess.sort(key=lambda s: s.rh.resource.term)
+        guessing = bool(cplguess)
+
+        cplsurf = self.context.sequence.effective_inputs(
+            role=("SurfaceInitialCondition", "SurfaceCouplingSource")
+        )
+        cplsurf.sort(key=lambda s: s.rh.resource.term)
+        surfacing = bool(cplsurf)
+        inisurfnc = self.naming_convention("ic", rh, model="surfex")
+        infilesurf = inisurfnc()
+        if surfacing:
+            # Link in the Surfex's PGD
+            sclimnc = self.naming_convention(
+                kind="targetclim", rh=rh, model="surfex"
+            )
+            self.setlink(
+                initrole=("ClimPGD",),
+                initkind=("pgdfa", "pgdlfi"),
+                initname=sclimnc(area="AREA"),
+            )
+
+        for sec in cplsec:
+            r = sec.rh
+            sh.subtitle("Loop on {!s}".format(r.resource))
+
+            # First attempt to set actual date as the one of the source model
+            actualdate = r.resource.date + r.resource.term
+
+            # Expect the coupling source to be there...
+            self.grab(sec, comment="coupling source")
+
+            # Set the actual init file
+            if sh.path.exists(infile):
+                if isMany:
+                    logger.critical(
+                        "Cannot process multiple Historic files if %s exists.",
+                        infile,
+                    )
+            else:
+                sh.cp(
+                    r.container.localpath(),
+                    infile,
+                    fmt=r.container.actualfmt,
+                    intent=intent.IN,
+                )
+
+            # If the surface file is needed, set the actual initsurf file
+            if cplsurf:
+                # Expecting the coupling surface source to be there...
+                cplsurf_in = cplsurf.pop(0)
+                self.grab(cplsurf_in, comment="coupling surface source")
+                if sh.path.exists(infilesurf):
+                    if isMany:
+                        logger.critical(
+                            "Cannot process multiple surface historic files if %s exists.",
+                            infilesurf,
+                        )
+                else:
+                    sh.cp(
+                        cplsurf_in.rh.container.localpath(),
+                        infilesurf,
+                        fmt=cplsurf_in.rh.container.actualfmt,
+                        intent=intent.IN,
+                    )
+            elif surfacing:
+                logger.error("No more surface source to loop on for coupling")
+
+            # The output could be an input as well
+            if cplguess:
+                cplout = cplguess.pop(0)
+                cplpath = cplout.rh.container.localpath()
+                if sh.path.exists(cplpath):
+                    actualdateguess = (
+                        cplout.rh.resource.date + cplout.rh.resource.term
+                    )
+                    if actualdate == actualdateguess:
+                        logger.error(
+                            "The guess date, %s, is different from the source date %s, !",
+                            actualdateguess.reallynice(),
+                            actualdate.reallynice(),
+                        )
+                    # Expect the coupling guess to be there...
+                    self.grab(cplout, comment="coupling guess")
+                    logger.info("Coupling with existing guess <%s>", cplpath)
+                    inoutfile = outprefix + "+0000"
+                    if cplpath != inoutfile:
+                        sh.remove(inoutfile, fmt=cplout.rh.container.actualfmt)
+                        sh.move(
+                            cplpath,
+                            inoutfile,
+                            fmt=cplout.rh.container.actualfmt,
+                            intent=intent.INOUT,
+                        )
+                else:
+                    logger.warning(
+                        "Missing guess input for coupling <%s>", cplpath
+                    )
+            elif guessing:
+                logger.error("No more guess to loop on for coupling")
+
+            # Find out actual monthly climatological resource
+            actualmonth = date.Month(actualdate)
+            self.climfile_fixer(
+                rh,
+                convkind="modelclim",
+                month=actualmonth,
+                inputrole=("GlobalClim", "InitialClim"),
+                inputkind="clim_model",
+            )
+            self.climfile_fixer(
+                rh,
+                convkind="targetclim",
+                month=actualmonth,
+                inputrole=("LocalClim", "TargetClim"),
+                inputkind="clim_model",
+                area="AREA",
+            )
+
+            # Standard execution
+            super().execute(rh, opts)
+
+            # Set a local appropriate file
+            posfile = [
+                x
+                for x in sh.glob(outprefix + "+*")
+                if re.match(outprefix + r"\+\d+(?:\:\d+)?(?:\.sfx)?$", x)
+            ]
+            if len(posfile) > 1:
+                logger.critical(
+                    "Many "
+                    + outprefix
+                    + " files, do not know how to adress that"
+                )
+            posfile = posfile[0]
+            if self.basedate is None:
+                actualterm = r.resource.term
+            else:
+                actualterm = (actualdate - self.basedate).time()
+            actualname = (
+                re.sub(
+                    r"^.+?((?:_\d+)?)(?:\+[:\d]+)?$",
+                    r"CPLOUT\1+",
+                    r.container.localpath(),
+                )
+                + actualterm.fmthm
+            )
+            if isMany:
+                sh.move(
+                    sh.path.realpath(posfile),
+                    actualname,
+                    fmt=r.container.actualfmt,
+                )
+                if sh.path.exists(posfile):
+                    sh.rm(posfile)
+            else:
+                # This is here because of legacy with .sfx files
+                sh.cp(
+                    sh.path.realpath(posfile),
+                    actualname,
+                    fmt=r.container.actualfmt,
+                    intent=intent.IN,
+                )
+
+            # promises management
+            expected = [
+                x
+                for x in self.promises
+                if x.rh.container.localpath() == actualname
+            ]
+            if expected:
+                for thispromise in expected:
+                    thispromise.put(incache=True)
+
+            # The only one listing
+            if not self.server_run:
+                sh.cat("NODE.001_01", output="NODE.all")
+
+            # prepares the next execution
+            if isMany:
+                # Some cleaning
+                sh.rmall("PXFPOS*", fmt=r.container.actualfmt)
+                sh.remove(infile, fmt=r.container.actualfmt)
+                if cplsurf:
+                    sh.remove(infilesurf, fmt=r.container.actualfmt)
+                if not self.server_run:
+                    sh.rmall("ncf927", "dirlst", "NODE.[0123456789]*", "std*")
+
+
+class CouplingLAM(Coupling):
+    """Coupling for LAM to LAM Models (useless beyond cy40).
+
+    OBSOLETE a/c cy40.
+    """
+
+    _footprint = dict(
+        info="Create coupling files for a Limited Area Model (useless beyond cy40).",
+        attr=dict(
+            kind=dict(
+                values=["lamcoupling"],
+            ),
+        ),
+    )
+
+    def spawn_command_options(self):
+        """Dictionary provided for command line factory."""
+        opts = super().spawn_command_options()
+        opts["model"] = "aladin"
+        return opts
+
+
+@algo_component_deco_mixin_autodoc
+class PrepMixin(AlgoComponentDecoMixin):
+    """Coupling/Interpolation of Surfex files."""
+
+    _MIXIN_EXTRA_FOOTPRINTS = (
+        footprints.Footprint(
+            info="Coupling/Interpolation of Surfex files.",
+            attr=dict(
+                kind=dict(
+                    values=["prep"],
+                ),
+                underlyingformat=dict(
+                    info="The format of input data (as expected by the PREP executable).",
+                    values=["fa", "lfi", "netcdf"],
+                    optional=True,
+                    default="fa",
+                ),
+                underlyingoutputformat=dict(
+                    info=(
+                        "The format of output data (as expected by the PREP executable)."
+                        + "If omited, *underlyingformat* is used."
+                    ),
+                    values=["fa", "lfi", "netcdf", "txt"],
+                    optional=True,
+                ),
+                outputformat=dict(
+                    info=(
+                        "The format of output data (as expected by the user)."
+                        + "If omited, same as input data."
+                    ),
+                    values=["fa", "lfi", "netcdf", "txt"],
+                    optional=True,
+                ),
+            ),
+        ),
+    )
+
+    @cached_property
+    def _actual_u_output_format(self):
+        return (
+            self.underlyingoutputformat
+            if self.underlyingoutputformat is not None
+            else self.underlyingformat
+        )
+
+    def _actual_output_format(self, in_format):
+        return (
+            self.outputformat if self.outputformat is not None else in_format
+        )
+
+    @staticmethod
+    def _sfx_fmt_remap(fmt):
+        return dict(netcdf="nc").get(fmt, fmt)
+
+    @cached_property
+    def _has_sfx_lfi(self):
+        addon_checked = (
+            "sfx" in self.system.loaded_addons()
+            and "lfi" in self.system.loaded_addons()
+        )
+        if not addon_checked:
+            raise RuntimeError("The sfx addon is needed... please load it.")
+        return addon_checked
+
+    def _do_input_format_change(self, section, output_name, output_fmt):
+        (localpath, infmt) = (
+            section.rh.container.localpath(),
+            section.rh.container.actualfmt,
+        )
+        self.system.subtitle("Processing inputs/climatologies")
+        if section.rh.container.actualfmt != output_fmt:
+            if infmt == "fa" and output_fmt == "lfi" and self._has_sfx_lfi:
+                if self.system.path.exists(output_name):
+                    raise OSError(
+                        "The file {!r} already exists.".format(output_name)
+                    )
+                logger.info(
+                    "Calling sfxtools' fa2lfi from %s to %s.",
+                    localpath,
+                    output_name,
+                )
+                self.system.sfx_fa2lfi(localpath, output_name)
+            else:
+                raise RuntimeError(
+                    "Format conversion from {!r} to {!r} is not possible".format(
+                        infmt, output_fmt
+                    )
+                )
+        else:
+            if not self.system.path.exists(output_name):
+                logger.info("Linking %s to %s", localpath, output_name)
+                self.system.cp(
+                    localpath, output_name, intent=intent.IN, fmt=infmt
+                )
+
+    def _process_outputs(self, binrh, section, output_clim, output_name):
+        (radical, outfmt) = (
+            self.system.path.splitext(section.rh.container.localpath())[0],
+            self._actual_output_format(section.rh.container.actualfmt),
+        )
+        finaloutput = "{:s}_interpolated.{:s}".format(radical, outfmt)
+        finallisting = "{:s}_listing".format(radical)
+        self.system.subtitle("Processing outputs")
+        if outfmt != self._actual_u_output_format:
+            # There is a need for a format change
+            if (
+                outfmt == "fa"
+                and self._actual_u_output_format == "lfi"
+                and self._has_sfx_lfi
+            ):
+                logger.info(
+                    "Calling lfitools' faempty from %s to %s.",
+                    output_clim,
+                    finaloutput,
+                )
+                self.system.fa_empty(output_clim, finaloutput)
+                logger.info(
+                    "Calling sfxtools' lfi2fa from %s to %s.",
+                    output_name,
+                    finaloutput,
+                )
+                self.system.sfx_lfi2fa(output_name, finaloutput)
+                finallfi = "{:s}_interpolated.{:s}".format(
+                    radical, self._actual_u_output_format
+                )
+                self.system.mv(output_name, finallfi)
+            else:
+                raise RuntimeError(
+                    "Format conversion from {!r} to {!r} is not possible".format(
+                        self._actual_u_output_format, outfmt
+                    )
+                )
+        else:
+            # No format change needed
+            logger.info("Moving %s to %s", output_name, finaloutput)
+            self.system.mv(output_name, finaloutput, fmt=outfmt)
+        # Also rename the listing :-)
+        if binrh.resource.cycle < "cy48t1":
+            try:
+                self.system.mv("LISTING_PREP.txt", finallisting)
+            except OSError:
+                self.system.mv("LISTING_PREP0.txt", finallisting)
+        else:
+            self.system.mv("LISTING_PREP0.txt", finallisting)
+        return finaloutput
+
+    def _prepare_prep_hook(self, rh, opts):
+        """Default pre-link for namelist file and domain change."""
+        # Convert the initial clim if needed...
+        iniclim = self.context.sequence.effective_inputs(role=("InitialClim",))
+        if not (len(iniclim) == 1):
+            raise AlgoComponentError("One Initial clim have to be provided")
+        self._do_input_format_change(
+            iniclim[0],
+            "PGD1." + self._sfx_fmt_remap(self.underlyingformat),
+            self.underlyingformat,
+        )
+        # Convert the target clim if needed...
+        targetclim = self.context.sequence.effective_inputs(
+            role=("TargetClim",)
+        )
+        if not (len(targetclim) == 1):
+            raise AlgoComponentError("One Target clim have to be provided")
+        self._do_input_format_change(
+            targetclim[0],
+            "PGD2." + self._sfx_fmt_remap(self._actual_u_output_format),
+            self._actual_u_output_format,
+        )
+
+    _MIXIN_PREPARE_HOOKS = (_prepare_prep_hook,)
+
+    def _spawn_hook_prep_hook(self):
+        """Dump the namelists."""
+        for namsec in self.context.sequence.effective_inputs(
+            kind=("namelist",)
+        ):
+            self.system.subtitle(
+                "Here is the content of the {:s} namelist".format(
+                    namsec.rh.container.actualpath()
+                )
+            )
+            namsec.rh.container.cat()
+
+    _MIXIN_SPAWN_HOOKS = (_spawn_hook_prep_hook,)
+
+    def _execute_prep_common(self, rh, opts):
+        """Loop on the various initial conditions provided."""
+        sh = self.system
+
+        cplsec = self.context.sequence.effective_inputs(
+            role=("InitialCondition", "CouplingSource"),
+            kind=("historic", "analysis"),
+        )
+        cplsec.sort(key=lambda s: s.rh.resource.term)
+        infile = "PREP1.{:s}".format(
+            self._sfx_fmt_remap(self.underlyingformat)
+        )
+        outfile = "PREP2.{:s}".format(
+            self._sfx_fmt_remap(self._actual_u_output_format)
+        )
+        targetclim = self.context.sequence.effective_inputs(
+            role=("TargetClim",)
+        )
+        targetclim = targetclim[0].rh.container.localpath()
+
+        for sec in cplsec:
+            r = sec.rh
+            sh.header("Loop on {:s}".format(r.container.localpath()))
+
+            # Expect the coupling source to be there...
+            self.grab(sec, comment="coupling source")
+
+            # Set the actual init file
+            if sh.path.exists(infile):
+                logger.critical(
+                    "Cannot process input files if %s exists.", infile
+                )
+            self._do_input_format_change(sec, infile, self.underlyingformat)
+
+            # Standard execution
+            super(self.mixin_execute_companion(), self).execute(rh, opts)
+            sh.subtitle("Listing after PREP")
+            sh.dir(output=False, fatal=False)
+
+            # Deal with outputs
+            actualname = self._process_outputs(rh, sec, targetclim, outfile)
+
+            # promises management
+            expected = [
+                x
+                for x in self.promises
+                if x.rh.container.localpath() == actualname
+            ]
+            if expected:
+                for thispromise in expected:
+                    thispromise.put(incache=True)
+
+            # Some cleaning
+            sh.rmall("*.des")
+            sh.rmall("PREP1.*")
+
+    _MIXIN_EXECUTE_OVERWRITE = _execute_prep_common
+
+
+class Prep(
+    BlindRun,
+    PrepMixin,
+    CouplingBaseDateNamMixin,
+    DrHookDecoMixin,
+    EcGribDecoMixin,
+):
+    """Coupling/Interpolation of Surfex files (non-MPI version)."""
+
+    pass
+
+
+class ParallelPrep(
+    Parallel,
+    PrepMixin,
+    CouplingBaseDateNamMixin,
+    DrHookDecoMixin,
+    EcGribDecoMixin,
+):
+    """Coupling/Interpolation of Surfex files (MPI version)."""
+
+    pass
+
+
+class C901(IFSParallel):
+    """Run of C901 configuration."""
+
+    _footprint = dict(
+        info="Run C901 configuration",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "c901",
+                ]
+            ),
+            clim=dict(type=bool),
+            xpname=dict(default="a001"),
+        ),
+    )
+
+    SPECTRAL_FILE_SH = "ICMSH{prefix}INIT{suffix}"
+    GRIDPOINT_FILE_UA = "ICMUA{prefix}INIT{suffix}"
+    GRIDPOINT_FILE_GG = "ICMGG{prefix}INIT{suffix}"
+    OUTPUT_FILE_NAME = "CN90x{}INIT"
+    OUTPUT_LISTING_NAME = "NODE.001_01"
+    LIST_INPUT_FILES = [
+        ("SpectralFileSH", SPECTRAL_FILE_SH),
+        ("GridpointFileUA", GRIDPOINT_FILE_UA),
+        ("GridpointFileGG", GRIDPOINT_FILE_GG),
+    ]
+    LIST_CST_INPUT_FILES = [
+        ("ConstantSpectralFileSH", SPECTRAL_FILE_SH),
+        ("ConstantGridpointFileUA", GRIDPOINT_FILE_UA),
+        ("ConstantGridpointFileGG", GRIDPOINT_FILE_GG),
+    ]
+
+    @property
+    def realkind(self):
+        return "c901"
+
+    def sort_files_per_prefix(self, list_types, unique=False):
+        """Function used to sort the files according to their prefix in a given type"""
+        result = dict()
+        for file_role, file_template in list_types:
+            result[file_role] = dict()
+            input_files = self.context.sequence.effective_inputs(
+                role=file_role
+            )
+            template = file_template.format(
+                prefix=r"(?P<prefix>\S{4})", suffix=r"(?P<suffix>\S*)"
+            )
+            for file_s in input_files:
+                file_name = file_s.rh.container.filename
+                find_elements = re.search(template, file_name)
+                if find_elements is None:
+                    logger.error(
+                        "The name of the file %s do not follow the template %s.",
+                        file_name,
+                        template,
+                    )
+                    raise ValueError(
+                        "The name of the file do not follow the template."
+                    )
+                else:
+                    if find_elements.group("prefix") not in result[file_role]:
+                        result[file_role][find_elements.group("prefix")] = (
+                            list()
+                        )
+                    else:
+                        if unique:
+                            logger.error(
+                                "Only one file should be present for each type and each suffix."
+                            )
+                            raise ValueError(
+                                "Only one file should be present for each suffix."
+                            )
+                    result[file_role][find_elements.group("prefix")].append(
+                        file_s
+                    )
+            if result[file_role]:
+                for file_prefix in result[file_role]:
+                    result[file_role][file_prefix].sort(
+                        key=lambda s: s.rh.resource.date + s.rh.resource.term
+                    )
+            else:
+                del result[file_role]
+        return result
+
+    def execute(self, rh, opts):
+        """Loop on the various files provided"""
+
+        sh = self.system
+
+        # Create the template for files to be removed at each validity date and for the outputname
+        deleted_spectral_file_SH = self.SPECTRAL_FILE_SH.format(
+            prefix="*", suffix=""
+        )
+        deleted_gridpoint_file_UA = self.GRIDPOINT_FILE_UA.format(
+            prefix="*", suffix=""
+        )
+        deleted_gridpoint_file_GG = self.GRIDPOINT_FILE_GG.format(
+            prefix="*", suffix=""
+        )
+        output_name = self.OUTPUT_FILE_NAME.format(self.xpname.upper())
+
+        # Sort input files
+        sorted_cst_input_files = self.sort_files_per_prefix(
+            self.LIST_CST_INPUT_FILES, unique=True
+        )
+        sorted_input_files = self.sort_files_per_prefix(self.LIST_INPUT_FILES)
+
+        # Determine the validity present for each non constant input files,
+        # check that they are the same for all.
+        # Also create the list of the filenames that should be deleted
+        input_validity = list()
+        for file_role in sorted_input_files:
+            for file_prefix in sorted_input_files[file_role]:
+                input_validity.append(
+                    [
+                        s.rh.resource.date + s.rh.resource.term
+                        for s in sorted_input_files[file_role][file_prefix]
+                    ]
+                )
+        test_wrong_input_validity = True
+        for i in range(1, len(input_validity)):
+            test_wrong_input_validity = test_wrong_input_validity and (
+                input_validity[0] == input_validity[i]
+            )
+        self.algoassert(
+            test_wrong_input_validity,
+            "The files of each type must have the same validity dates.",
+        )
+
+        # Modify namelist
+        input_namelist = self.context.sequence.effective_inputs(
+            role="Namelist", kind="namelist"
+        )
+        for namelist in input_namelist:
+            namcontents = namelist.rh.contents
+            self._set_nam_macro(
+                namcontents,
+                namelist.rh.container.actualpath(),
+                "LLCLIM",
+                self.clim,
+            )
+            if namcontents.dumps_needs_update:
+                namcontents.rewrite(namelist.rh.container)
+
+        for current_validity in input_validity[0]:
+            # Deal with constant input files (gridpoint and spectral)
+            for file_role, file_template in self.LIST_CST_INPUT_FILES:
+                if file_role in sorted_cst_input_files:
+                    for file_prefix in sorted_cst_input_files[file_role]:
+                        file_name = file_template.format(
+                            prefix=file_prefix, suffix=""
+                        )
+                        current_file_input = sorted_cst_input_files[file_role][
+                            file_prefix
+                        ][0]
+                        self.algoassert(
+                            not sh.path.exists(file_name),
+                            "The file {} already exists. It should not.".format(
+                                file_name
+                            ),
+                        )
+                        sh.cp(
+                            current_file_input.rh.container.iotarget(),
+                            file_name,
+                            intent="in",
+                        )
+
+            # Deal with other input files (gridpoint and spectral)
+            for file_role, file_template in self.LIST_INPUT_FILES:
+                if file_role in sorted_input_files:
+                    for file_prefix in sorted_input_files[file_role]:
+                        file_name = file_template.format(
+                            prefix=file_prefix, suffix=""
+                        )
+                        current_file_input = sorted_input_files[file_role][
+                            file_prefix
+                        ].pop()
+                        self.algoassert(
+                            not sh.path.exists(file_name),
+                            "The file {} already exists. It should not.".format(
+                                file_name
+                            ),
+                        )
+                        sh.cp(
+                            current_file_input.rh.container.iotarget(),
+                            file_name,
+                            intent="in",
+                        )
+
+            if self.clim:
+                # Find the right climatology file
+                current_month = date.Month(current_validity)
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=current_month,
+                    inputrole=("GlobalClim", "InitialClim"),
+                    inputkind="clim_model",
+                )
+
+            # Standard execution
+            super().execute(rh, opts)
+            # Move the output file
+            current_term = current_file_input.rh.resource.term
+            sh.move(
+                output_name, output_name + "+{}".format(current_term.fmthm)
+            )
+            # Cat all the listings into a single one
+            sh.cat(self.OUTPUT_LISTING_NAME, output="NODE.all")
+            # Remove unneeded files
+            sh.rmall(
+                deleted_spectral_file_SH,
+                deleted_gridpoint_file_GG,
+                deleted_gridpoint_file_UA,
+                "std*",
+                self.OUTPUT_LISTING_NAME,
+            )
+
+
+class DomeoForcingAtmo(BlindRun, CouplingBaseDateNamMixin):
+    """Correct the Domeo forcing file."""
+
+    _footprint = dict(
+        info="Domeo Forcing Atmo",
+        attr=dict(
+            kind=dict(
+                values=["domeo_forcing"],
+            ),
+            basedate=dict(
+                optional=False,
+            ),
+        ),
+    )