vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/algo/eps.py ADDED
@@ -0,0 +1,785 @@
1
+ """
2
+ AlgoComponents dedicated to computations related to the Ensemble Prediction System.
3
+ """
4
+
5
+ import collections
6
+ import copy
7
+ import re
8
+
9
+ import footprints
10
+ from bronx.compat.itertools import pairwise
11
+ from bronx.fancies import loggers
12
+ from bronx.stdtypes.date import Time
13
+ from ..tools.drhook import DrHookDecoMixin
14
+ from vortex.algo.components import BlindRun
15
+ from vortex.layout.dataflow import intent
16
+ from vortex.tools.grib import EcGribDecoMixin
17
+ from vortex.util.structs import ShellEncoder
18
+
19
+ from .ifsroot import IFSParallel
20
+ from .stdpost import parallel_grib_filter
21
+
22
#: No automatic export of names from this module
__all__ = []

# Module-level logger, named after this module per the project convention.
logger = loggers.getLogger(__name__)
26
+
27
+
28
class Svect(IFSParallel):
    """IFS run dedicated to the computation of the singular vectors."""

    _footprint = {
        "info": "Computation of the singular vectors.",
        "attr": {
            "kind": {
                "values": ["svectors", "svector", "sv", "svect", "svarpe"],
                "remap": {"autoremap": "first"},
            },
            "conf": {
                "type": int,
                "optional": True,
                "default": 601,
            },
            "xpname": {
                "optional": True,
                "default": "SVEC",
            },
        },
    }

    @property
    def realkind(self):
        """Canonical kind identifier for this algo component."""
        return "svector"
53
+
54
+
55
class Combi(BlindRun, DrHookDecoMixin, EcGribDecoMixin):
    """Build the initial conditions of the EPS.

    Abstract base class: concrete subclasses must provide :attr:`nmod`, the
    mode identifier written into the NAMMOD namelist block.
    """

    _abstract = True

    def execute(self, rh, opts):
        """Standard Combi execution."""
        # Display the namelist content just before running the binary
        namsec = self.setlink(initrole="Namelist", initkind="namelist")
        namsec[0].rh.container.cat()
        super().execute(rh, opts)

    @property
    def nmod(self):
        # Mode identifier for the combi executable (NAMMOD/NMOD).
        # Must be overridden by concrete subclasses.
        raise NotImplementedError("Abstract property")

    def _addNmod(self, namrh, msg):
        """Write :attr:`nmod` into the NAMMOD/NMOD entry of namelist *namrh*."""
        namrh.contents["NAMMOD"]["NMOD"] = self.nmod
        logger.info("NMOD set to %d: %s.", self.nmod, msg)

    def _analysis_cp(self, nb, msg):
        """Make *nb* numbered copies (001 ... nb) of the analysis file.

        The copy names are built from the analysis local name with any
        trailing digits stripped, followed by a zero-padded number.
        """
        # Copy the analysis
        initsec = self.setlink(initkind="analysis")
        # Radical = analysis local name without its trailing digits (if any)
        radical = re.sub(
            r"^(.*?)\d+$", r"\1", initsec[0].rh.container.localpath()
        )
        # NOTE(review): rangex(1, nb) is presumably inclusive of both ends
        for num in footprints.util.rangex(1, nb):
            # intent=INOUT: copies are writable (not read-only)
            self.system.cp(
                initsec[0].rh.container.localpath(),
                radical + "{:03d}".format(num),
                fmt=initsec[0].rh.container.actualfmt,
                intent=intent.INOUT,
            )
        logger.info("Copy the analysis for the %d %s.", nb, msg)

    def _coeff_picking(self, kind, msg):
        """Extract the RCOEF<KIND> value from the namelist to a JSON file.

        *kind* is the coefficient suffix (e.g. ``"vs"`` or ``"bm"``); when a
        NAMCOEF<KIND> block is found, its RCOEF<KIND> value is dumped to
        ``coeff<kind>.out``.
        """
        # Pick up the coeff in the namelist
        for namsec in self.context.sequence.effective_inputs(kind="namelist"):
            # NOTE(review): reset_contents presumably re-reads the namelist
            # file updated by the executable — confirm
            namsec.rh.reset_contents()
            if "NAMCOEF" + kind.upper() in namsec.rh.contents:
                logger.info(
                    "Extract the "
                    + msg
                    + " coefficient from the updated namelist."
                )
                coeff = {
                    "rcoef" + kind: float(
                        namsec.rh.contents["NAMCOEF" + kind.upper()][
                            "RCOEF" + kind.upper()
                        ]
                    )
                }
                self.system.json_dump(
                    coeff, "coeff" + kind + ".out", indent=4, cls=ShellEncoder
                )
109
+
110
+
111
class CombiPert(Combi):
    """Build the initial perturbations of the EPS initial conditions."""

    _abstract = True
    _footprint = {
        "attr": {
            "nbpert": {"type": int},
        },
    }

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Propagate the ensemble size into every effective input namelist
        nam_inputs = self.context.sequence.effective_inputs(
            role=re.compile("Namelist"), kind="namelist"
        )
        for nam_section in nam_inputs:
            logger.info(
                "Add the NBPERT coefficient to the NAMENS namelist entry"
            )
            nam_section.rh.contents["NAMENS"]["NBPERT"] = self.nbpert
            nam_section.rh.save()
136
+
137
+
138
#: Named tuple describing, for one zone, how many singular vectors were
#: actually retrieved (``available``) versus requested (``expected``).
_SvInfoTuple = collections.namedtuple("SvInfoTuple", "available expected")
140
+
141
+
142
class CombiSV(CombiPert):
    """Combine the SV to create perturbations by gaussian sampling.

    During :meth:`prepare`:

    * the available/expected singular vectors are counted per zone and the
      available ones are linked with consecutive numbers;
    * this census is dumped into a JSON file (:attr:`info_fname`);
    * the namelists are updated accordingly (NVSZONE, NBVECT, NBZONE, ...);
    * the analysis is copied to give each perturbation a basis.
    """

    _abstract = True
    _footprint = dict(
        attr=dict(
            info_fname=dict(
                default="singular_vectors_info.json",
                optional=True,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Check the number of singular vectors and link them in succession
        nbVectTmp = collections.OrderedDict()
        totalVects = 0
        svec_sections = self.context.sequence.filtered_inputs(
            role="SingularVectors", kind="svector"
        )
        for svecsec in svec_sections:
            # Expected local name: radical + two "+"/","/"."-separated
            # chunks + optional suffix
            c_match = re.match(
                r"^([^+,.]+)[+,.][^+,.]+[+,.][^+,.]+(.*)$",
                svecsec.rh.container.localpath(),
            )
            if c_match is None:
                logger.critical(
                    "The SV name is not formated correctly: %s",
                    svecsec.rh.container.actualpath(),
                )
                # Fail right away with an explicit error: the original code
                # fell through and crashed on ``c_match.groups()`` with an
                # obscure AttributeError.
                raise ValueError(
                    "Singular vector name not formatted correctly: "
                    + svecsec.rh.container.actualpath()
                )
            (radical, suffix) = c_match.groups()
            zone = svecsec.rh.resource.zone
            nbVectTmp.setdefault(zone, [0, 0])
            nbVectTmp[zone][1] += 1  # Expected
            if svecsec.stage == "get":
                totalVects += 1
                nbVectTmp[zone][0] += 1  # Available
                # Link available vectors with a global consecutive number
                self.system.softlink(
                    svecsec.rh.container.localpath(),
                    radical + "{:03d}".format(totalVects) + suffix,
                )
        # Convert the temporary dictionary to a dictionary of tuples
        nbVect = collections.OrderedDict()
        for k, v in nbVectTmp.items():
            nbVect[k] = _SvInfoTuple(*v)
        logger.info(
            "Number of vectors :\n"
            + "\n".join(
                [
                    "- {0:8s}: {1.available:3d} ({1.expected:3d} expected).".format(
                        z, n
                    )
                    for z, n in nbVect.items()
                ]
            )
        )
        # Writing the singular vectors per areas in a json file
        self.system.json_dump(nbVect, self.info_fname)

        # Tweak the namelists
        namsecs = self.context.sequence.effective_inputs(
            role=re.compile("Namelist"), kind="namelist"
        )
        for namsec in namsecs:
            # Activate the SV mode only
            namsec.rh.contents["NAMMOD"]["LVS"] = True
            namsec.rh.contents["NAMMOD"]["LANAP"] = False
            namsec.rh.contents["NAMMOD"]["LBRED"] = False
            logger.info("Added to NVSZONE namelist entry")
            namsec.rh.contents["NAMOPTI"]["NVSZONE"] = [
                v.available for v in nbVect.values() if v.available
            ]  # Zones with 0 vectors are discarded

            nbVectNam = namsec.rh.contents["NAMENS"]["NBVECT"]
            if int(nbVectNam) != totalVects:
                logger.warning(
                    "%s singular vectors expected but only %d accounted for.",
                    nbVectNam,
                    totalVects,
                )
                logger.info(
                    "Update the total number of vectors in the NBVECT namelist entry"
                )
                namsec.rh.contents["NAMENS"]["NBVECT"] = totalVects

            actualZones = [
                k for k, v in nbVect.items() if v.available
            ]  # Zones with 0 vectors are discarded
            nbzone = len(actualZones)
            namsec.rh.contents["NAMOPTI"]["NBZONE"] = nbzone
            namsec.rh.contents["NAMOPTI"]["CNOMZONE"] = actualZones
            # Sanity checks: RC and RL arrays must match the zone count
            nbrc = len(namsec.rh.contents["NAMOPTI"].RC)
            if nbrc != nbzone:
                logger.critical(
                    "%d zones but NAMOPTI/RC has length %d" % (nbzone, nbrc)
                )
            nbrl = len(namsec.rh.contents["NAMOPTI"].RL)
            if nbrl != nbzone:
                logger.critical(
                    "%d zones but NAMOPTI/RL has length %d" % (nbzone, nbrl)
                )

            self._addNmod(namsec.rh, "combination of the SV")
            namsec.rh.save()

        # Copy the analysis to give all the perturbations a basis
        self._analysis_cp(self.nbpert, "perturbations")
251
+
252
+
253
class CombiSVunit(CombiSV):
    """Combine the unit SV to create the raw perturbations by gaussian sampling."""

    _footprint = {
        "attr": {
            "kind": {
                "values": ["sv2unitpert", "init", "combi_init"],
                "remap": {"combi_init": "init"},
            },
        },
    }

    @property
    def nmod(self):
        # Mode 1: combination of the unit singular vectors.
        return 1
274
+
275
+
276
class CombiSVnorm(CombiSV):
    """
    Compute a norm consistent with the background error
    and combine the normed SV to create the SV perturbations.
    """

    _footprint = {
        "attr": {
            "kind": {
                "values": ["sv2normedpert", "optim", "combi_optim"],
                "remap": {"autoremap": "first"},
            },
        },
    }

    @property
    def nmod(self):
        # Mode 2: combination of the normed singular vectors.
        return 2

    def postfix(self, rh, opts):
        """Post processing cleaning: export the SV coefficient to JSON."""
        self._coeff_picking("vs", "SV")
        super().postfix(rh, opts)
304
+
305
+
306
class CombiIC(Combi):
    """Combine the SV and AE or breeding perturbations to create the initial conditions."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=[
                    "pert2ic",
                    "sscales",
                    "combi_sscales",
                ],
                remap=dict(autoremap="first"),
            ),
            # Number of initial conditions to build
            nbic=dict(
                alias=("nbruns",),
                type=int,
            ),
            # Number of perturbations; 0 means "derive it from the inputs"
            nbpert=dict(
                type=int,
                optional=True,
                default=0,
            ),
        )
    )

    @property
    def nmod(self):
        # Mode 3: final combination of the perturbations.
        return 3

    def prepare(self, rh, opts):
        """Set some variables according to target definition.

        Detects which perturbation sources are present (SV, breeding, AE),
        activates the matching NAMMOD switches, derives the number of
        perturbations when not given, and updates the namelist accordingly.
        """
        super().prepare(rh, opts)

        # Tweak the namelist
        namsec = self.setlink(initrole="Namelist", initkind="namelist")
        nammod = namsec[0].rh.contents["NAMMOD"]

        # The footprint's value is always preferred to the calculated one
        # (0 is falsy, so "nbPert or ..." falls back to the computed counts)
        nbPert = self.nbpert

        # Dealing with singular vectors
        sv_sections = self.context.sequence.effective_inputs(role="CoeffSV")
        nammod["LVS"] = bool(sv_sections)
        if sv_sections:
            logger.info(
                "Add the SV coefficient to the NAMCOEFVS namelist entry."
            )
            namcoefvs = namsec[0].rh.contents.newblock("NAMCOEFVS")
            namcoefvs["RCOEFVS"] = sv_sections[0].rh.contents["rcoefvs"]
            # The mean value may be present among the SV inputs: remove it
            # (NOTE(review): the mean member presumably has a falsy
            # resource.number — confirm)
            svsecs = [
                sec
                for sec in self.context.sequence.effective_inputs(
                    role="SVPerturbedState"
                )
                or [
                    sec
                    for sec in self.context.sequence.effective_inputs(
                        role="PerturbedState"
                    )
                    if "ICHR" in sec.rh.container.filename
                ]
                if sec.rh.resource.number
            ]
            nbPert = nbPert or len(svsecs)

        # Dealing with breeding method's inputs
        bd_sections = self.context.sequence.effective_inputs(
            role="CoeffBreeding"
        )
        nammod["LBRED"] = bool(bd_sections)
        if bd_sections:
            logger.info(
                "Add the breeding coefficient to the NAMCOEFBM namelist entry."
            )
            namcoefbm = namsec[0].rh.contents.newblock("NAMCOEFBM")
            namcoefbm["RCOEFBM"] = bd_sections[0].rh.contents["rcoefbm"]
            nbBd = len(
                self.context.sequence.effective_inputs(
                    role="BreedingPerturbedState"
                )
                or [
                    sec
                    for sec in self.context.sequence.effective_inputs(
                        role="PerturbedState"
                    )
                    if "BMHR" in sec.rh.container.filename
                ]
            )
            # symmetric perturbations except if analysis: one more file
            # or zero if one control ic (hypothesis: odd nbic)
            nbPert = nbPert or (
                nbBd - 1
                if nbBd == self.nbic + 1
                or (nbBd == self.nbic and self.nbic % 2 != 0)
                else self.nbic // 2
            )

        # Dealing with initial conditions from the assimilation ensemble
        # the mean value may be present among the AE inputs: remove it
        aesecs = [
            sec
            for sec in self.context.sequence.effective_inputs(
                role=("AEPerturbedState", "ModelState")
            )
            if sec.rh.resource.number
        ]
        nammod["LANAP"] = bool(aesecs)
        nbAe = len(aesecs)
        nbPert = nbPert or nbAe
        # If fewer AE members (but not too few) than ic to build,
        # duplicate the first ones by linking them under new numbers
        if nbAe < nbPert <= 2 * nbAe:
            logger.info(
                "%d AE perturbations needed, %d AE members available: the first ones are duplicated.",
                nbPert,
                nbAe,
            )
            prefix = aesecs[0].rh.container.filename.split("_")[0]
            for num in range(nbAe, nbPert):
                self.system.softlink(
                    aesecs[num - nbAe].rh.container.filename,
                    prefix + "_{:03d}".format(num + 1),
                )

        logger.info(
            "NAMMOD namelist summary: LANAP=%s, LVS=%s, LBRED=%s.",
            *[nammod[k] for k in ("LANAP", "LVS", "LBRED")],
        )
        logger.info(
            "Add the NBPERT=%d coefficient to the NAMENS namelist entry.",
            nbPert,
        )
        namsec[0].rh.contents["NAMENS"]["NBPERT"] = nbPert

        # symmetric perturbations ?
        if nbPert < self.nbic - 1:
            namsec[0].rh.contents["NAMENS"]["LMIRROR"] = True
            logger.info("Add LMIRROR=.TRUE. to the NAMENS namelist entry.")
        elif (
            nbPert != 1
        ):  # 1 pert, 2 ic is possible without mirror adding the control
            namsec[0].rh.contents["NAMENS"]["LMIRROR"] = False
            logger.info("Add LMIRROR=.FALSE. to the NAMENS namelist entry.")

        self._addNmod(namsec[0].rh, "final combination of the perturbations")
        namsec[0].rh.save()

        # Copy the analysis to give all the members a basis
        self._analysis_cp(self.nbic - 1, "perturbed states")
455
+
456
+
457
class CombiBreeding(CombiPert):
    """Compute the coefficient of the bred modes (breeding method).

    NOTE(review): the original docstring was copy-pasted from
    :class:`CombiSVnorm` and described singular vectors; this class deals
    with the breeding method (see :attr:`nmod` and the LBRED switch below).
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=[
                    "fc2bredpert",
                    "breeding",
                    "combi_breeding",
                ],
                remap=dict(autoremap="first"),
            ),
        )
    )

    @property
    def nmod(self):
        # Mode 6: computation of the bred modes coefficient.
        return 6

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Consistent naming with the Fortran execution: link every input
        # file as <radical>001.grb, <radical>002.grb, ...
        hst_sections = self.context.sequence.effective_inputs(
            kind=("pert", "historic")
        )
        for num, hst in enumerate(hst_sections):
            self.system.softlink(
                hst.rh.container.localpath(),
                re.sub(r"^(.*?)\d+$", r"\1", hst.rh.container.localpath())
                + "{:03d}.grb".format(num + 1),
            )
        # Log the actual number of files once (the original logged the last
        # loop index — off by one — and raised NameError on empty inputs).
        logger.info(
            "Rename the %d grib files consecutively.", len(hst_sections)
        )

        # Tweak the namelist: activate the breeding method only
        namsec = self.setlink(initrole="Namelist", initkind="namelist")
        namsec[0].rh.contents["NAMMOD"]["LBRED"] = True
        namsec[0].rh.contents["NAMMOD"]["LANAP"] = False
        namsec[0].rh.contents["NAMMOD"]["LVS"] = False
        self._addNmod(
            namsec[0].rh, "compute the coefficient of the bred modes"
        )
        namsec[0].rh.save()

    def postfix(self, rh, opts):
        """Post processing cleaning: export the breeding coefficient to JSON."""
        # Pick up the coeff in the namelist
        self._coeff_picking("bm", "breeding")
        super().postfix(rh, opts)
511
+
512
+
513
class SurfCombiIC(BlindRun):
    """
    Combine the deterministic surface with the perturbed surface
    to create the initial surface conditions.
    """

    _footprint = {
        "attr": {
            "kind": {
                "values": ["surf_pert2ic", "surf2ic"],
                "remap": {"autoremap": "first"},
            },
            "member": {"type": int},
        },
    }

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        ic_sections = self.setlink(
            initrole=("SurfaceAnalysis", "SurfaceInitialCondition"),
            initkind="ic",
        )
        ic_date = ic_sections[0].rh.resource.date
        # Reproducible seed derived from the IC date and the member number
        seed = int(ic_date.ymdh) + (ic_date.hour + 1) * (self.member + 1)

        # Tweak the namelist
        nam_sections = self.setlink(initrole="Namelist", initkind="namelist")
        logger.info("ISEED added to NAMSFC namelist entry: %d", seed)
        nam_sections[0].rh.contents["NAMSFC"]["ISEED"] = seed
        nam_sections[0].rh.save()
550
+
551
+
552
class Clustering(BlindRun, EcGribDecoMixin):
    """Select by clustering a sample of members among the whole set.

    When the number of available members does not exceed the requested
    number of clusters, the binary is not run and fake outputs are
    generated instead (see :meth:`execute`).
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=[
                    "clustering",
                    "clust",
                ],
                remap=dict(autoremap="first"),
            ),
            # Name of the file listing the grib files handed to the binary
            fileoutput=dict(
                optional=True,
                default="_griblist",
            ),
            # Requested number of clusters
            nbclust=dict(
                type=int,
            ),
            # Number of available members (computed in prepare when a
            # population file is provided)
            nbmembers=dict(
                type=int,
                optional=True,
                access="rwx",
            ),
            gribfilter_tasks=dict(
                type=int,
                optional=True,
                default=8,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        grib_sections = self.context.sequence.effective_inputs(
            role="ModelState", kind="gridpoint"
        )
        avail_json = self.context.sequence.effective_inputs(
            role="AvailableMembers", kind="mbpopulation"
        )

        # If no population file is here, just do a sort on the file list,
        # otherwise use the population list
        if avail_json:
            population = avail_json[0].rh.contents.data["population"]
            self.nbmembers = len(population)
            file_list = list()
            terms_set = set()
            for elt in population:
                # Collect the indices of the gribs matching this member
                sublist_ids = list()
                for i, grib in enumerate(grib_sections):
                    # If the grib file matches, let's go
                    if all(
                        [
                            grib.rh.wide_key_lookup(key, exports=True) == value
                            for (key, value) in elt.items()
                        ]
                    ):
                        sublist_ids.append(i)
                # Stack the gribs in file_list
                file_list.extend(
                    sorted(
                        [
                            str(grib_sections[i].rh.container.localpath())
                            for i in sublist_ids
                        ]
                    )
                )
                terms_set.update(
                    [grib_sections[i].rh.resource.term for i in sublist_ids]
                )
                # Remove matched entries (reversed so indices stay valid)
                for i in reversed(sublist_ids):
                    del grib_sections[i]
        else:
            file_list = sorted(
                [str(grib.rh.container.localpath()) for grib in grib_sections]
            )
            terms_set = {grib.rh.resource.term for grib in grib_sections}

        # determine what terms are available to the clustering algorithm
        # (term 0 is excluded)
        terms = sorted(terms_set - {Time(0)})
        delta = {last - first for first, last in pairwise(terms)}
        if len(delta) == 1:
            cluststep = delta.pop().hour
        else:
            cluststep = -999
            logger.error("Terms are not evenly spaced. What should we do ?")
            logger.error("Terms=" + str(terms) + "delta=" + str(delta))
            logger.error(
                "Continuing with little hope and cluststep = %d", cluststep
            )
        clustdeb = terms[0].hour
        clustfin = terms[-1].hour
        logger.info(
            "clustering deb=%d fin=%d step=%d", clustdeb, clustfin, cluststep
        )

        # Deal with xGribs: concatenate/filter the gribs in parallel
        file_list_cat = [f + ".concatenated" for f in file_list]
        parallel_grib_filter(
            self.context,
            file_list,
            file_list_cat,
            cat=True,
            nthreads=self.gribfilter_tasks,
        )

        if self.nbmembers is None or self.nbmembers > self.nbclust:
            # Tweak the namelist
            namsec = self.setlink(initrole="Namelist", initkind="namelist")
            logger.info(
                "NBRCLUST added to NAMCLUST namelist entry: %d", self.nbclust
            )
            namsec[0].rh.contents["NAMCLUST"]["NBRCLUST"] = self.nbclust
            if self.nbmembers is not None:
                logger.info(
                    "NBRMB added to NAMCLUST namelist entry: %d",
                    self.nbmembers,
                )
                namsec[0].rh.contents["NAMCLUST"]["NBRMB"] = self.nbmembers
            logger.info(
                "Setting namelist macros ECHDEB=%d ECHFIN=%d ECHSTEP=%d",
                clustdeb,
                clustfin,
                cluststep,
            )
            namsec[0].rh.contents.setmacro("ECHDEB", clustdeb)
            namsec[0].rh.contents.setmacro("ECHFIN", clustfin)
            namsec[0].rh.contents.setmacro("ECHSTEP", cluststep)
            namsec[0].rh.save()
            namsec[0].rh.container.cat()

        with open(self.fileoutput, "w") as optFile:
            optFile.write("\n".join(file_list_cat))

    def execute(self, rh, opts):
        """Run the binary, or generate fake outputs for small ensembles."""
        # If the number of members is big enough -> normal processing
        if self.nbmembers is None or self.nbmembers > self.nbclust:
            # %s for members: nbmembers may be None on this branch (the
            # original %d made the log record fail to format).
            logger.info(
                "Normal clustering run (%s members, %d clusters)",
                self.nbmembers,
                self.nbclust,
            )
            super().execute(rh, opts)
        # if not, generate fake outputs (each member is its own cluster)
        else:
            logger.info(
                "Generating fake outputs with %d members", self.nbmembers
            )
            with open("ASCII_CLUST", "w") as fdcl:
                fdcl.write(
                    "\n".join(
                        [
                            "{0:3d} {1:3d} {0:3d}".format(i, 1)
                            for i in range(1, self.nbmembers + 1)
                        ]
                    )
                )
            with open("ASCII_RMCLUST", "w") as fdrm:
                fdrm.write(
                    "\n".join([str(i) for i in range(1, self.nbmembers + 1)])
                )
            with open("ASCII_POPCLUST", "w") as fdpop:
                fdpop.write("\n".join(["1"] * self.nbmembers))

    def postfix(self, rh, opts):
        """Create a JSON with all the clustering informations."""
        avail_json = self.context.sequence.effective_inputs(
            role="AvailableMembers", kind="mbpopulation"
        )
        # If no population file is here, does nothing
        if avail_json:
            logger.info("Creating a JSON output...")
            # Read the clustering information
            if self.system.path.exists("ASCII_CLUST"):
                # New format for clustering outputs: "member size member"
                with open("ASCII_CLUST") as fdcl:
                    cluster_members = list()
                    cluster_sizes = list()
                    for fields in (line.split() for line in fdcl):
                        cluster_members.append(int(fields[0]))
                        cluster_sizes.append(int(fields[1]))
            else:
                # Legacy format: members and sizes in two separate files
                with open("ASCII_RMCLUST") as fdrm:
                    cluster_members = [int(m) for m in fdrm.readlines()]
                with open("ASCII_POPCLUST") as fdpop:
                    cluster_sizes = [int(s) for s in fdpop.readlines()]
            # Update the population JSON
            mycontent = copy.deepcopy(avail_json[0].rh.contents)
            mycontent.data["resource_kind"] = "mbsample"
            mycontent.data["drawing"] = list()
            for member_no, cluster_size in zip(cluster_members, cluster_sizes):
                mycontent.data["drawing"].append(
                    copy.copy(mycontent.data["population"][member_no - 1])
                )
                mycontent.data["drawing"][-1]["cluster_size"] = cluster_size
            # Create a clustering output file
            new_container = footprints.proxy.container(
                filename="clustering_output.json", actualfmt="json"
            )
            mycontent.rewrite(new_container)

        super().postfix(rh, opts)
757
+
758
+
759
class Addpearp(BlindRun):
    """Add the selected PEARP perturbations to the deterministic AROME initial conditions."""

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=[
                    "addpearp",
                ],
                remap=dict(autoremap="first"),
            ),
            # Number of perturbations to add
            nbpert=dict(
                type=int,
            ),
        )
    )

    def prepare(self, rh, opts):
        """Set some variables according to target definition."""
        super().prepare(rh, opts)

        # Tweak the namelist
        namsec = self.setlink(initrole="Namelist", initkind="namelist")
        # The original message claimed "NBE added" while the code actually
        # sets NBPERT: report the key that is really written.
        logger.info("NBPERT added to NAMIC namelist entry: %d", self.nbpert)
        namsec[0].rh.contents["NAMIC"]["NBPERT"] = self.nbpert
        namsec[0].rh.save()
        namsec[0].rh.container.cat()