vortex_nwp-2.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
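For orientation: everything in this wheel lives under a single top-level vortex package, with the NWP-specific layer in vortex.nwp. Below is a minimal import smoke test; the module paths come from the file list above, while the install command assumes the distribution is published under the name vortex-nwp.

# Hypothetical smoke test after `pip install vortex-nwp`; only the module
# paths are taken from the file list above, the rest is an assumption.
import vortex        # top-level package (vortex/__init__.py)
import vortex.nwp    # NWP layer (vortex/nwp/__init__.py)

print(vortex.__file__, vortex.nwp.__file__)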
vortex/nwp/algo/ifsroot.py
@@ -0,0 +1,404 @@
+"""
+Abstract base class for any AlgoComponent leveraging the Arpege/IFS code.
+"""
+
+from bronx.fancies import loggers
+import footprints
+
+from vortex.algo.components import (
+    Parallel,
+    ParallelIoServerMixin,
+    AlgoComponentError,
+)
+from vortex.syntax.stdattrs import model
+from vortex.tools import grib
+
+# footprints import
+from . import ifsnaming as ifsnaming
+
+from ..syntax.stdattrs import algo_member
+from ..tools import satrad, drhook
+
+#: No automatic export
+__all__ = []
+
+logger = loggers.getLogger(__name__)
+
+
+class IFSParallel(
+    Parallel,
+    ParallelIoServerMixin,
+    satrad.SatRadDecoMixin,
+    drhook.DrHookDecoMixin,
+    grib.EcGribDecoMixin,
+):
+    """Abstract IFSModel parallel algo components."""
+
+    _abstract = True
+    _footprint = [
+        model,
+        algo_member,
+        dict(
+            info="Abstract AlgoComponent for anything based on Arpege/IFS.",
+            attr=dict(
+                kind=dict(
+                    info="The kind of processing we want the Arpege/IFS binary to perform.",
+                    default="ifsrun",
+                    doc_zorder=90,
+                ),
+                model=dict(
+                    values=[
+                        "arpege",
+                        "arp",
+                        "arp_court",
+                        "aladin",
+                        "ald",
+                        "arome",
+                        "aro",
+                        "aearp",
+                        "pearp",
+                        "ifs",
+                        "alaro",
+                        "harmoniearome",
+                    ]
+                ),
+                ioname=dict(
+                    default="nwpioserv",
+                ),
+                binarysingle=dict(
+                    default="basicnwp",
+                ),
+                conf=dict(
+                    info="The configuration number given to Arpege/IFS.",
+                    type=int,
+                    optional=True,
+                    default=1,
+                    doc_visibility=footprints.doc.visibility.ADVANCED,
+                ),
+                timescheme=dict(
+                    info="The timescheme that will be used by the Arpege/IFS model.",
+                    optional=True,
+                    default="sli",
+                    values=["eul", "eulerian", "sli", "semilag"],
+                    remap=dict(eulerian="eul", semilag="sli"),
+                    doc_visibility=footprints.doc.visibility.ADVANCED,
+                ),
+                timestep=dict(
+                    info="The timestep of the Arpege/IFS model.",
+                    type=float,
+                    optional=True,
+                    default=600.0,
+                ),
+                fcterm=dict(
+                    info="The forecast term of the Arpege/IFS model.",
+                    type=int,
+                    optional=True,
+                    default=0,
+                ),
+                fcunit=dict(
+                    info="The unit used in the *fcterm* attribute.",
+                    optional=True,
+                    default="h",
+                    values=["h", "hour", "t", "step", "timestep"],
+                    remap=dict(
+                        hour="h",
+                        step="t",
+                        timestep="t",
+                    ),
+                ),
+                xpname=dict(
+                    info="The default labelling of files used in the Arpege/IFS model.",
+                    optional=True,
+                    default="XPVT",
+                    doc_visibility=footprints.doc.visibility.ADVANCED,
+                ),
+            ),
+        ),
+    ]
+
+    def fstag(self):
+        """Extend default tag with ``kind`` value."""
+        return super().fstag() + "." + self.kind
+
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
+
+    def valid_executable(self, rh):
+        """Be sure that the specified executable is ifsmodel compatible."""
+        valid = super().valid_executable(rh)
+        try:
+            return valid and bool(rh.resource.realkind == "ifsmodel")
+        except (ValueError, TypeError):
+            return False
+
+    def spawn_hook(self):
+        """Usually a good habit to dump the fort.4 namelist."""
+        super().spawn_hook()
+        if self.system.path.exists("fort.4"):
+            self.system.subtitle(
+                "{:s} : dump namelist <fort.4>".format(self.realkind)
+            )
+            self.system.cat("fort.4", output=False)
+
+    def spawn_command_options(self):
+        """Dictionary provided for command line factory."""
+        return dict(
+            name=(self.xpname + "xxxx")[:4].upper(),
+            conf=self.conf,
+            timescheme=self.timescheme,
+            timestep=self.timestep,
+            fcterm=self.fcterm,
+            fcunit=self.fcunit,
+        )
+
+    def naming_convention(self, kind, rh, actualfmt=None, **kwargs):
+        """Create an appropriate :class:`IFSNamingConvention`.
+
+        :param str kind: The :class:`IFSNamingConvention` object kind.
+        :param rh: The binary's ResourceHandler.
+        :param actualfmt: The format of the target file.
+        :param dict kwargs: Any argument you may see fit.
+        """
+        nc_args = dict(model=self.model, conf=self.conf, xpname=self.xpname)
+        nc_args.update(kwargs)
+        nc = footprints.proxy.ifsnamingconv(
+            kind=kind, actualfmt=actualfmt, cycle=rh.resource.cycle, **nc_args
+        )
+        if nc is None:
+            raise AlgoComponentError("No IFSNamingConvention was found.")
+        return nc
+
+    def do_climfile_fixer(
+        self, rh, convkind, actualfmt=None, geo=None, **kwargs
+    ):
+        """Is it necessary to fix the climatology file? (i.e. link in the appropriate file).
+
+        :param rh: The binary's ResourceHandler.
+        :param str convkind: The :class:`IFSNamingConvention` object kind.
+        :param actualfmt: The format of the climatology file.
+        :param geo: The geometry of the desired climatology file.
+        :param dict kwargs: Any argument you may see fit (used to create and call
+            the IFSNamingConvention object).
+        """
+        nc = self.naming_convention(
+            kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs
+        )
+        nc_args = dict()
+        if geo:
+            nc_args["area"] = geo.area
+        nc_args.update(kwargs)
+        return not self.system.path.exists(nc(**nc_args))
+
+    def climfile_fixer(
+        self,
+        rh,
+        convkind,
+        month,
+        geo=None,
+        notgeo=None,
+        actualfmt=None,
+        inputrole=None,
+        inputkind=None,
+        **kwargs,
+    ):
+        """Fix the climatology files (by choosing the appropriate month, geometry, ...).
+
+        :param rh: The binary's ResourceHandler.
+        :param str convkind: The :class:`IFSNamingConvention` object kind.
+        :param ~bronx.stdtypes.date.Month month: The climatology file month.
+        :param geo: The climatology file geometry.
+        :param notgeo: Exclude these geometries during the climatology file lookup.
+        :param actualfmt: The format of the climatology file.
+        :param inputrole: The section's role in which climatology files are looked for.
+        :param inputkind: The section's realkind in which climatology files are looked for.
+        :param dict kwargs: Any argument you may see fit (used to create and call
+            the IFSNamingConvention object).
+        """
+        if geo is not None and notgeo is not None:
+            raise ValueError("*geo* and *notgeo* cannot be provided together.")
+
+        def check_month(actualrh):
+            return bool(
+                hasattr(actualrh.resource, "month")
+                and actualrh.resource.month == month
+            )
+
+        def check_month_and_geo(actualrh):
+            return (
+                check_month(actualrh)
+                and actualrh.resource.geometry.tag == geo.tag
+            )
+
+        def check_month_and_notgeo(actualrh):
+            return (
+                check_month(actualrh)
+                and actualrh.resource.geometry.tag != notgeo.tag
+            )
+
+        if geo:
+            checker = check_month_and_geo
+        elif notgeo:
+            checker = check_month_and_notgeo
+        else:
+            checker = check_month
+
+        nc = self.naming_convention(
+            kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs
+        )
+        nc_args = dict()
+        if geo:
+            nc_args["area"] = geo.area
+        nc_args.update(kwargs)
+        target_name = nc(**nc_args)
+
+        self.system.remove(target_name)
+
+        logger.info(
+            "Linking in the %s file (%s) for month %s.",
+            convkind,
+            target_name,
+            month,
+        )
+        rc = self.setlink(
+            initrole=inputrole,
+            initkind=inputkind,
+            inittest=checker,
+            initname=target_name,
+        )
+        return target_name if rc else None
+
+    def all_localclim_fixer(
+        self,
+        rh,
+        month,
+        convkind="targetclim",
+        actualfmt=None,
+        inputrole=("LocalClim", "TargetClim", "BDAPClim"),
+        inputkind="clim_bdap",
+        **kwargs,
+    ):
+        """Fix all the local/BDAP climatology files (by choosing the appropriate month).
+
+        :param rh: The binary's ResourceHandler.
+        :param ~bronx.stdtypes.date.Month month: The climatology file month.
+        :param str convkind: The :class:`IFSNamingConvention` object kind.
+        :param actualfmt: The format of the climatology file.
+        :param inputrole: The section's role in which climatology files are looked for.
+        :param inputkind: The section's realkind in which climatology files are looked for.
+        :param dict kwargs: Any argument you may see fit (used to create and call
+            the IFSNamingConvention object).
+        :return: The list of linked files.
+        """
+
+        def check_month(actualrh):
+            return bool(
+                hasattr(actualrh.resource, "month")
+                and actualrh.resource.month == month
+            )
+
+        nc = self.naming_convention(
+            kind=convkind, rh=rh, actualfmt=actualfmt, **kwargs
+        )
+        dealtwith = list()
+
+        for tclimrh in [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role=inputrole,
+                kind=inputkind,
+            )
+            if x.rh.resource.month == month
+        ]:
+            thisclim = tclimrh.container.localpath()
+            thisname = nc(area=tclimrh.resource.geometry.area)
+            if thisclim != thisname:
+                logger.info(
+                    "Linking in the %s to %s for month %s.",
+                    thisclim,
+                    thisname,
+                    month,
+                )
+                self.system.symlink(thisclim, thisname)
+                dealtwith.append(thisname)
+
+        return dealtwith
+
+    def find_namelists(self, opts=None):
+        """Find any namelist candidates in the actual context inputs."""
+        return [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                kind=("namelist", "namelistfp")
+            )
+        ]
+
+    def _set_nam_macro(self, namcontents, namlocal, macro, value):
+        """Set a namelist macro and log it!"""
+        namcontents.setmacro(macro, value)
+        logger.info("Setup macro %s=%s in %s", macro, str(value), namlocal)
+
+    def prepare_namelist_delta(self, rh, namcontents, namlocal):
+        """Apply a namelist delta depending on the cycle of the binary."""
+        # TODO: The mapping between the dict that contains the settings
+        # (i.e. self.spawn_command_options()) and the actual namelist keys
+        # should be done by an extra class ... and it could be generalized
+        # to the MPI setup by the way!
+        nam_updated = False
+        # From cy41 onward, replace some namelist macros with the command line
+        # arguments
+        if rh.resource.cycle >= "cy41":
+            if "NAMARG" in namcontents:
+                opts_arg = self.spawn_command_options()
+                self._set_nam_macro(
+                    namcontents, namlocal, "CEXP", opts_arg["name"]
+                )
+                self._set_nam_macro(
+                    namcontents, namlocal, "TIMESTEP", opts_arg["timestep"]
+                )
+                fcstop = "{:s}{:d}".format(
+                    opts_arg["fcunit"], opts_arg["fcterm"]
+                )
+                self._set_nam_macro(namcontents, namlocal, "FCSTOP", fcstop)
+                nam_updated = True
+            else:
+                logger.info("No NAMARG block in %s", namlocal)
+
+        if self.member is not None:
+            for macro_name in ("MEMBER", "PERTURB"):
+                self._set_nam_macro(
+                    namcontents, namlocal, macro_name, self.member
+                )
+            nam_updated = True
+        return nam_updated
+
+    def prepare_namelists(self, rh, opts=None):
+        """Update each of the namelists."""
+        namcandidates = self.find_namelists(opts)
+        self.system.subtitle("Namelist candidates")
+        for nam in namcandidates:
+            nam.quickview()
+        for namrh in namcandidates:
+            namc = namrh.contents
+            if self.prepare_namelist_delta(
+                rh, namc, namrh.container.actualpath()
+            ):
+                if namc.dumps_needs_update:
+                    logger.info(
+                        "Rewriting the %s namelist file.",
+                        namrh.container.actualpath(),
+                    )
+                    namc.rewrite(namrh.container)
+
+    def prepare(self, rh, opts):
+        """Set some variables according to the target definition."""
+        super().prepare(rh, opts)
+        # Namelist fixes
+        self.prepare_namelists(rh, opts)
+
+    def execute_single(self, rh, opts):
+        """Standard IFS-like parallel execution."""
+        if rh.resource.cycle < "cy46":
+            self.system.ls(output="dirlst")
+        super().execute_single(rh, opts)
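IFSParallel above is abstract (_abstract = True): concrete components are expected to subclass it, narrow the footprint attributes, and possibly extend spawn_command_options(). The sketch below illustrates that pattern; the class name, kind value and extra option are invented for illustration and are not part of this wheel, only the IFSParallel base and the footprint/spawn_command_options mechanism come from the ifsroot.py file above.

# Illustrative subclass of the abstract IFSParallel component shown above.
# Everything specific to "MyForecast" is hypothetical.
from vortex.nwp.algo.ifsroot import IFSParallel


class MyForecast(IFSParallel):
    """Toy concrete component: narrows the footprint and extends the CLI options."""

    _footprint = dict(
        info="Illustrative forecast component.",
        attr=dict(
            kind=dict(values=["myforecast"]),  # footprint value selecting this class
            fcterm=dict(optional=False),       # require an explicit forecast term
        ),
    )

    def spawn_command_options(self):
        # Start from the base dictionary (name, conf, timescheme, timestep,
        # fcterm, fcunit) and add one purely illustrative entry.
        opts = super().spawn_command_options()
        opts["model"] = self.model
        return opts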
vortex/nwp/algo/monitoring.py
@@ -0,0 +1,263 @@
+"""
+AlgoComponents dedicated to computations related to observations monitoring.
+"""
+
+from bronx.fancies import loggers
+
+from vortex.algo.components import Parallel
+from vortex.syntax.stdattrs import a_date, a_model, a_cutoff
+from ..tools import odb, drhook
+
+#: No automatic export
+__all__ = []
+
+logger = loggers.getLogger(__name__)
+
+
+class OdbMonitoring(
+    Parallel, odb.OdbComponentDecoMixin, drhook.DrHookDecoMixin
+):
+    """Compute monitoring statistics."""
+
+    _footprint = dict(
+        attr=dict(
+            kind=dict(
+                values=["monitoring"],
+            ),
+            npool=dict(
+                default=1,
+                optional=True,
+            ),
+            obs=dict(
+                values=["all", "used"],
+            ),
+            date=a_date,
+            model=a_model,
+            cutoff=a_cutoff,
+            start=dict(
+                type=bool,
+                default=False,
+                optional=True,
+            ),
+            cumul=dict(
+                type=bool,
+                default=True,
+                optional=True,
+            ),
+            extend=dict(
+                type=bool,
+                default=False,
+                optional=True,
+            ),
+            stage=dict(
+                values=["can", "surf", "surface", "atm", "atmospheric"],
+                remap=dict(can="surf", surface="surf", atmospheric="atm"),
+                info="The processing stage of the ODB base.",
+            ),
+        )
+    )
+
+    def _mpitool_attributes(self, opts):
+        conf_dict = super()._mpitool_attributes(opts)
+        conf_dict.update({"mplbased": True})
+        return conf_dict
+
+    def _fix_nam_macro(self, rh, macro, value):
+        """Set a given namelist macro and issue a log message."""
+        rh.contents.setmacro(macro, value)
+        logger.info(
+            "Setup %s macro to %s in %s",
+            macro,
+            value,
+            rh.container.actualpath(),
+        )
+
+    def prepare(self, rh, opts):
+        """Update some variables in the namelist and check the presence of the accumulated statistics file."""
+
+        sh = self.system
+
+        # Looking for input observations
+
+        # Virtual upper-air observations database
+        obsatm_virt = [
+            x
+            for x in self.lookupodb(fatal=False)
+            if (
+                x.rh.resource.stage.startswith("matchup")
+                or x.rh.resource.stage.startswith("screening")
+            )
+            and x.rh.resource.part == "virtual"
+        ]
+
+        # Single upper-air observations database
+        obsatm_single = [
+            x
+            for x in self.lookupodb(fatal=False)
+            if x.rh.resource.stage.startswith("matchup")
+            or x.rh.resource.stage.startswith("screening")
+        ]
+        if len(obsatm_single) > 1:
+            obsatm_single = []
+
+        # Surface observations database
+        obssurf = [
+            x
+            for x in self.lookupodb(fatal=False)
+            if x.rh.resource.stage.startswith("canari")
+            and (
+                x.rh.resource.part == "surf" or x.rh.resource.part == "ground"
+            )
+        ]
+
+        # One database at a time
+        if not (obsatm_virt or obsatm_single) and self.stage == "atm":
+            raise ValueError(
+                "Could not find any ODB matchup or screening ECMA database"
+            )
+        if not obssurf and self.stage == "surf":
+            raise ValueError("Could not find any ODB surface ECMA database")
+
+        # Set actual ODB paths
+        if obsatm_virt:
+            ecma = obsatm_virt.pop(0)
+        elif obsatm_single:
+            ecma = obsatm_single.pop(0)
+        else:
+            ecma = obssurf.pop(0)
+        ecma_path = sh.path.abspath(ecma.rh.container.localpath())
+        self.odb.fix_db_path(ecma.rh.resource.layout, ecma_path)
+        self.env.IOASSIGN = sh.path.join(ecma_path, "IOASSIGN")
+        logger.info(
+            "Setting ODB env %s = %s.",
+            "IOASSIGN",
+            sh.path.join(ecma_path, "IOASSIGN"),
+        )
+
+        # Let the ancestors handle most of the environment setting
+        super().prepare(rh, opts)
+
+        # Force the start of a new accumulated statistics file on the first day and first hour of the month
+        mnt_start = self.start
+
+        if (
+            not mnt_start
+            and int(self.date.day) == 1
+            and int(self.date.hh) == 0
+            and not self.extend
+        ):
+            logger.info(
+                "First day and first hour of the month: force the start attribute to True."
+            )
+            mnt_start = True
+
+        mnt_cumul = self.cumul
+        if self.cutoff == "production":
+            mnt_cumul = False
+            logger.info(
+                "No output accumulated statistics file will be produced because "
+                "cutoff = production: force cumul to False."
+            )
+
+        # Monitoring namelist
+        namrh = self.context.sequence.effective_inputs(
+            role="Namelist",
+            kind="namelist",
+        )
+        if len(namrh) != 1:
+            logger.critical(
+                "There must be exactly one namelist for monitoring. Stop."
+            )
+            raise ValueError(
+                "There must be exactly one namelist for monitoring. Stop."
+            )
+        namrh = namrh[0].rh
+
+        # Cumulated statistics file
+        cumulrh = self.context.sequence.effective_inputs(
+            role="Cumulated monitoring statistics",
+            kind="accumulated_stats",
+        )
+
+        if len(cumulrh) > 1:
+            logger.critical(
+                "There must be at most one accumulated statistics file. Stop."
+            )
+            raise ValueError(
+                "There must be one accumulated statistics file or none. Stop."
+            )
+        else:
+            if len(cumulrh) == 0:
+                if not mnt_start:
+                    if mnt_cumul:
+                        logger.critical(
+                            "There must be one input accumulated statistics file. Stop."
+                        )
+                        raise ValueError(
+                            "There must be one input accumulated statistics file. Stop."
+                        )
+                    else:
+                        logger.info(
+                            "No input accumulated statistics file is necessary."
+                        )
+                        logger.info(
+                            "No output accumulated statistics file will be produced."
+                        )
+                else:
+                    if mnt_cumul:
+                        logger.info(
+                            "No input accumulated statistics file. It will be created by the binary."
+                        )
+                    else:
+                        logger.info(
+                            "No output accumulated statistics file will be produced."
+                        )
+            else:
+                cumulrh = cumulrh[0].rh
+                if not mnt_cumul:
+                    logger.info(
+                        "No input accumulated statistics file is necessary (start=False)."
+                    )
+                    cumulrh.container.clear()
+                else:
+                    if mnt_start:
+                        logger.info(
+                            "No input accumulated statistics file is necessary (start=True)."
+                        )
+                        cumulrh.container.clear()
+
+        self._fix_nam_macro(namrh, "JOUR", int(self.date.ymd))
+        self._fix_nam_macro(namrh, "RES", int(self.date.hh))
+
+        self._fix_nam_macro(namrh, "LLADMON", mnt_cumul)
+        self._fix_nam_macro(namrh, "LLADAJ", mnt_cumul and not mnt_start)
+
+        self._fix_nam_macro(namrh, "LLFLAG", self.obs != "all")
+
+        self._fix_nam_macro(namrh, "LLARO", self.model == "arome")
+        self._fix_nam_macro(namrh, "LLVRP", self.model == "varpack")
+        self._fix_nam_macro(namrh, "LLCAN", self.stage == "surf")
+
+        if namrh.contents.dumps_needs_update:
+            namrh.contents.rewrite(namrh.container)
+        namrh.container.cat()
+
+    def postfix(self, rh, opts):
+        """Remove all empty files and find out if any special resources have been produced."""
+
+        sh = self.system
+        self.system.dir(output=False, fatal=False)
+        allfiles = sh.ls()
+        for f in allfiles:
+            if self.system.path.getsize(f) == 0:
+                logger.info("Remove %s because size of %s is zero.", f, f)
+                sh.remove(f)
+
+        obspoint_out = sh.ls("point.*")
+        if obspoint_out:
+            dest = "obslocationpack"
+            logger.info("Creating an OBSLOCATION pack: %s", dest)
+            sh.mkdir(dest)
+            for fname in obspoint_out:
+                sh.mv(fname, dest)
+        self.system.dir(output=False, fatal=False)
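As a reading aid for the prepare() method above, the start/cumul bookkeeping can be restated in isolation. The helper below is not part of the wheel; its name and signature are invented, but the decision rule mirrors the code above: start is forced on the first day and first hour of the month unless extend is set, and production cutoffs never update the accumulated statistics file.

# Hypothetical restatement of the start/cumul logic from OdbMonitoring.prepare().
def monitoring_flags(start, cumul, extend, day, hour, cutoff):
    """Return the effective (mnt_start, mnt_cumul) pair driving the namelist macros."""
    mnt_start = start
    # A new accumulated statistics file is started on the first day and first
    # hour of the month, unless an explicit extension is requested.
    if not mnt_start and day == 1 and hour == 0 and not extend:
        mnt_start = True
    mnt_cumul = cumul
    # Production cutoffs never update the accumulated statistics file.
    if cutoff == "production":
        mnt_cumul = False
    return mnt_start, mnt_cumul


# Example: 1st of the month at 00 UTC, assimilation cutoff.
assert monitoring_flags(False, True, False, 1, 0, "assim") == (True, True)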