vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,886 @@
1
+ """
2
+ AlgoComponents dedicated to NWP direct forecasts.
3
+ """
4
+
5
+ import math
6
+ import re
7
+ from collections import defaultdict
8
+
9
+ from bronx.fancies import loggers
10
+ from bronx.stdtypes.date import Time, Month, Period
11
+ import footprints
12
+
13
+ from vortex.algo.components import AlgoComponentError, Parallel
14
+ from vortex.layout.dataflow import intent
15
+ from vortex.syntax.stdattrs import model
16
+ from vortex.util.structs import ShellEncoder
17
+ from .ifsroot import IFSParallel
18
+ from ..tools.drhook import DrHookDecoMixin
19
+ from ..syntax.stdattrs import outputid_deco
20
+
21
+ from typing import Any, Callable, Iterable
22
+ from vortex.data.handlers import Handler
23
+ from vortex.layout.dataflow import Section
24
+
25
+
26
+ #: No automatic export
27
+ __all__ = []
28
+
29
+ logger = loggers.getLogger(__name__)
30
+
31
+
32
class Forecast(IFSParallel):
    """Forecast for IFS-like Models.

    Runs the direct model integration and, through the ``*_terms``
    attributes, drives which historical/surface/post-processed outputs are
    produced at which terms (via namelist updates).
    """

    _footprint = [
        outputid_deco,
        dict(
            info="Run a forecast with Arpege/IFS.",
            attr=dict(
                kind=dict(
                    values=["forecast", "fc"], remap=dict(forecast="fc")
                ),
                hist_terms=dict(
                    info="The list of terms when historical file production is requested.",
                    type=footprints.FPList,
                    optional=True,
                ),
                surfhist_terms=dict(
                    info="The list of terms when surface file production is requested.",
                    type=footprints.FPList,
                    optional=True,
                ),
                pos_terms=dict(
                    info="The list of terms when post-processed data is requested.",
                    type=footprints.FPList,
                    optional=True,
                ),
                s_norm_terms=dict(
                    info="The list of terms when spectal norms should be computed.",
                    type=footprints.FPList,
                    optional=True,
                ),
                flyargs=dict(
                    default=("ICMSH", "PF"),
                ),
                xpname=dict(default="FCST"),
                ddhpack=dict(
                    info="After run, gather the DDH output file in directories.",
                    type=bool,
                    optional=True,
                    default=False,
                    doc_zorder=-5,
                ),
            ),
        ),
    ]

    @property
    def realkind(self) -> str:
        """Kind string identifying this component."""
        return "forecast"

    def _outputs_configurator(self, bin_rh):
        # Build the footprint-resolved helper that turns the *_terms
        # attributes into namelist settings for this model/cycle.
        return footprints.proxy.ifsoutputs_configurator(
            model=self.model,
            cycle=bin_rh.resource.cycle,
            fcterm_unit=self.fcunit,
        )

    def prepare(self, rh, opts):
        """Default pre-link for the initial condition file"""
        super().prepare(rh, opts)

        # Link the initial condition/analysis under the name expected by
        # the binary (naming convention "ic").
        ininc = self.naming_convention("ic", rh)
        analysis = self.setlink(
            initrole=("InitialCondition", "Analysis"), initname=ininc()
        )

        if analysis:
            analysis = analysis.pop()
            thismonth = analysis.rh.resource.date.month

            # Possibly fix the model clim
            if self.do_climfile_fixer(rh, convkind="modelclim"):
                self.climfile_fixer(
                    rh,
                    convkind="modelclim",
                    month=thismonth,
                    inputrole=("GlobalClim", "InitialClim"),
                    inputkind="clim_model",
                )

            # Possibly fix post-processing clim files
            self.all_localclim_fixer(rh, thismonth)

        # File linking for IAU increments
        #
        # In the case of a forecast with IAU, the IFS executable
        # expects to find input increment files (both analysis and
        # background counterpart) names suffixed according to the
        # order by which they are to be applied. In practice
        # input files are not renamed but links with correct names
        # are created pointing to them instead. Both analysed and
        # background states are required: to inject analysis
        # increments over multiple timesteps, the IAU algorithm
        # must be able to compute a difference between analysis
        # and background states.
        #
        # TODO: Clarify where both regexp keys are coming from
        guesses = self.context.sequence.effective_inputs(
            role=re.compile(r"IAU_(Background|Guess)", flags=re.IGNORECASE)
        )
        analyses = self.context.sequence.effective_inputs(
            role=re.compile(r"IAU_(Analysis|Ic)", flags=re.IGNORECASE)
        )

        def key(s: Section):
            # Increment files are sorted according to date, then
            # effective term.
            return (
                s.rh.resource.date,
                s.rh.resource.date + s.rh.resource.term,
            )

        self._create_ordered_links(
            bin_handler=rh,
            sections=analyses,
            sort_key=key,
            nameconv_kind="iau_analysis",
        )
        self._create_ordered_links(
            bin_handler=rh,
            sections=guesses,
            sort_key=key,
            nameconv_kind="iau_background",
        )

        # Promises should be nicely managed by a co-proccess
        if self.promises:
            # Derive the io_poll prefixes from the promised resources so
            # that on-the-fly polling only looks at relevant files.
            prefixes_set = set()
            for pr_res in [pr.rh.resource for pr in self.promises]:
                if pr_res.realkind == "historic":
                    prefixes_set.add("ICMSH")
                if pr_res.realkind == "gridpoint":
                    prefixes_set.add(
                        "{:s}PF".format(
                            "GRIB" if pr_res.nativefmt == "grib" else ""
                        )
                    )
            self.io_poll_args = tuple(prefixes_set)
            self.flyput = len(self.io_poll_args) > 0

    def _create_ordered_links(
        self,
        bin_handler: Handler,
        sections: Iterable[Section],
        sort_key: Callable[[Section], Any],
        nameconv_kind: str,
    ):
        """Create links to local files, with ordered names

        For an iterable of sections objects, this function creates
        symlinks to the corresponding local files (described by the
        associated "container" object).

        Link names are suffixed by a number string based on their
        order after sorting sections by the sort key. Example:
        ICIAUFCSTBK01,
        ICIAUFCSTBK02,
        ICIAUFCSTBK03...
        """
        for i, sec in enumerate(sorted(sections, key=sort_key)):
            nameconv = self.naming_convention(
                nameconv_kind,
                bin_handler,
                actualfmt=sec.rh.container.actualfmt,
            )
            # Link names are numbered from 1 (not 0).
            target = nameconv(number=(i + 1))
            link_name = sec.rh.container.localpath()
            if self.system.path.exists(target):
                # Never clobber an existing file: warn and skip this link.
                logger.warning(
                    "%s should be linked to %s but %s already exists.",
                    link_name,
                    target,
                    target,
                )
                continue
            logger.info("Linking %s to %s.", link_name, target)
            self.grab(sec, comment=nameconv_kind)
            self.system.softlink(link_name, target)

    def find_namelists(self, opts=None):
        """Find any namelists candidates in actual context inputs."""
        return [
            x.rh
            for x in self.context.sequence.effective_inputs(
                role="Namelist", kind="namelist"
            )
        ]

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        """Apply the output-term settings to the main namelist (fort.4).

        Returns True when *namcontents* was modified.
        """
        nam_updated = super().prepare_namelist_delta(rh, namcontents, namlocal)
        if namlocal == "fort.4":
            # Inject the requested output terms into the main namelist.
            o_conf = self._outputs_configurator(rh)
            o_conf.modelstate = self.hist_terms
            o_conf.surf_modelstate = self.surfhist_terms
            o_conf.post_processing = self.pos_terms
            o_conf.spectral_diag = self.s_norm_terms
            nam_updated_bis = o_conf(namcontents, namlocal)
            nam_updated = nam_updated or nam_updated_bis
        return nam_updated

    def postfix(self, rh, opts):
        """Find out if any special resources have been produced."""

        sh = self.system

        # Look up for the gridpoint files
        gp_out = sh.ls("PF{}*".format(self.xpname))
        gp_map = defaultdict(list)
        if gp_out:
            # Expected names look like PF<xpname><domain>+<term>.
            re_pf = re.compile(
                r"^PF{}(\w+)\+(\d+(?::\d+)?)$".format(self.xpname)
            )
            for fname in gp_out:
                match_pf = re_pf.match(fname)
                if match_pf:
                    gp_map[match_pf.group(1).lower()].append(
                        Time(match_pf.group(2))
                    )
            for k, v in gp_map.items():
                v.sort()
                logger.info(
                    "Gridpoint files found: domain=%s, terms=%s",
                    k,
                    ",".join([str(t) for t in v]),
                )
        if len(gp_map) == 0:
            logger.info("No gridpoint file was found.")
        # Dump the domain -> terms map for downstream tasks.
        sh.json_dump(gp_map, "gridpoint_map.out", indent=4, cls=ShellEncoder)

        # Gather DDH in folders
        if self.ddhpack:
            ddhmap = dict(DL="dlimited", GL="global", ZO="zonal")
            for prefix, ddhkind in ddhmap.items():
                flist = sh.glob("DHF{}{}+*".format(prefix, self.xpname))
                if flist:
                    dest = "ddhpack_{}".format(ddhkind)
                    logger.info("Creating a DDH pack: %s", dest)
                    sh.mkdir(dest)
                    for lfa in flist:
                        sh.mv(lfa, dest, fmt="lfa")

        super().postfix(rh, opts)
274
+
275
+
276
class LAMForecast(Forecast):
    """Forecast for IFS-like Limited Area Models.

    On top of the generic :class:`Forecast` behaviour, boundary-condition
    files are pre-linked in term order and, when some of them are still
    expected, a synchronisation helper is set up to fetch them on the fly.
    """

    _footprint = dict(
        info="Run a forecast with an Arpege/IFS like Limited Area Model.",
        attr=dict(
            kind=dict(
                values=["lamfc", "lamforecast"],
                remap=dict(lamforecast="lamfc"),
            ),
        ),
    )

    # Name of the coupling-file synchronisation tool and of its template.
    synctool = "atcp.alad"
    synctpl = "sync-fetch.tpl"

    def spawn_command_options(self):
        """Dictionary provided for command line factory."""
        cmd_opts = {
            "name": (self.xpname + "xxxx")[:4].upper(),
            "timescheme": self.timescheme,
            "timestep": self.timestep,
            "fcterm": self.fcterm,
            "fcunit": self.fcunit,
            "model": "aladin",
        }
        return cmd_opts

    def prepare(self, rh, opts):
        """Default pre-link for boundary conditions files."""
        super().prepare(rh, opts)

        sh = self.system

        # Gather the boundary-condition handlers, sorted by validity date.
        boundaries = [
            sec.rh
            for sec in self.context.sequence.effective_inputs(
                role="BoundaryConditions", kind="boundary"
            )
        ]
        boundaries.sort(key=lambda b_rh: b_rh.resource.date + b_rh.resource.term)

        first_sync = None
        sh.header("Check boundaries...")
        if any(b_rh.is_expected() for b_rh in boundaries):
            logger.info("Some boundaries conditions are still expected")
            self.mksync = True
        else:
            logger.info("All boundaries conditions available")
            self.mksync = False

        # Ordered pre-linking of the boundaries; when needed, build one
        # synchronisation helper per coupling file.
        for num, bound_rh in enumerate(boundaries):
            local_name = bound_rh.container.localpath()
            lbcnc = self.naming_convention(
                "lbc", rh, actualfmt=bound_rh.container.actualfmt
            )
            sh.softlink(local_name, lbcnc(number=num))
            if self.mksync:
                sync_name = self.synctool + ".{:03d}".format(num)
                bound_rh.mkgetpr(pr_getter=sync_name)
                if first_sync is None:
                    first_sync = sync_name

        # Set up the first synchronization step
        if first_sync is not None:
            sh.symlink(first_sync, self.synctool)

    def postfix(self, rh, opts):
        """Post forecast information and cleaning."""
        sh = self.system

        # Display the synchronisation log, if any was produced.
        if self.mksync:
            synclog = self.synctool + ".log"
            if sh.path.exists(synclog):
                sh.subtitle(synclog)
                sh.cat(synclog, output=False)

        super().postfix(rh, opts)
356
+
357
+
358
class DFIForecast(LAMForecast):
    """OBSOLETE CODE: do not use."""

    _footprint = dict(
        info="Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
        attr=dict(
            kind=dict(
                values=["fcdfi"],
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Pre-link boundary conditions as special DFI files."""
        super().prepare(rh, opts)
        ic_nameconv = self.naming_convention("ic", rh)
        lbc_nameconv = self.naming_convention("lbc", rh, actualfmt="fa")
        # The initial condition file is also exposed under the pseudo-term
        # coupling names (999, 0 and 1) expected by the DFI setup.
        for fake_term in (999, 0, 1):
            self.system.softlink(ic_nameconv(), lbc_nameconv(number=fake_term))
377
+
378
+
379
class FullPos(IFSParallel):
    """Common base for fullpos geometry-transform components of IFS-like models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _abstract = True
    _footprint = dict(
        attr=dict(
            # Fullpos runs use their own experiment name and never fly-put.
            xpname=dict(default="FPOS"),
            flyput=dict(default=False, values=[False]),
            server_run=dict(values=[True, False]),
            serversync_method=dict(default="simple_socket"),
            serversync_medium=dict(default="cnt3_wait"),
        )
    )

    @property
    def realkind(self):
        """Kind string identifying this component."""
        return "fullpos"
408
+
409
+
410
class FullPosGeo(FullPos):
    """Fullpos for geometries transforms in IFS-like Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _footprint = dict(
        info="Run a fullpos to interpolate to a new geometry",
        attr=dict(
            kind=dict(
                values=["l2h", "h2l"],
            ),
        ),
    )

    # Directory where per-member outputs are stored between executions.
    _RUNSTORE = "RUNOUT"

    def _compute_target_name(self, r):
        # Derive the output name from the input one: ICMSH<x>INIT<y> -> PF<x><y>.
        # NOTE(review): the trailing .format(self.xpname) is a no-op unless the
        # local path contains literal braces — presumably kept for templated
        # input names; confirm before removing.
        return "PF" + re.sub(
            "^(?:ICMSH)(.*?)(?:INIT)(.*)$", r"\1\2", r.container.localpath()
        ).format(self.xpname)

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided."""

        sh = self.system

        # All input states eligible for interpolation.
        initrh = [
            x.rh
            for x in self.context.sequence.effective_inputs(
                role=("Analysis", "Guess", "InitialCondition"),
                kind=(
                    "analysis",
                    "historic",
                    "ic",
                    re.compile("(stp|ana)min"),
                    re.compile("pert"),
                ),
            )
        ]

        # is there one (deterministic forecast) or many (ensemble forecast) fullpos to perform ?
        isMany = len(initrh) > 1
        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")
        do_fix_output_clim = self.do_climfile_fixer(
            rh, convkind="targetclim", area="000"
        )
        ininc = self.naming_convention("ic", rh)
        infile = ininc()

        for num, r in enumerate(initrh):
            str_subtitle = "Fullpos execution on {}".format(
                r.container.localpath()
            )
            sh.subtitle(str_subtitle)

            # Set the actual init file (copy the current input unless a file
            # with the expected name is already present).
            if sh.path.exists(infile):
                if isMany:
                    # NOTE(review): only logs — execution carries on with the
                    # pre-existing file.
                    logger.critical(
                        "Cannot process multiple Historic files if %s exists.",
                        infile,
                    )
            else:
                sh.cp(
                    r.container.localpath(),
                    infile,
                    fmt=r.container.actualfmt,
                    intent=intent.IN,
                )

            # Fix links for climatology files (month of the input's validity date)
            actualmonth = Month(r.resource.date + r.resource.term)
            startingclim = r.resource.geometry

            if do_fix_input_clim:
                self.climfile_fixer(
                    rh,
                    convkind="modelclim",
                    month=actualmonth,
                    geo=startingclim,
                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
                    inputkind="clim_model",
                )

            if do_fix_output_clim:
                # Target clim: any clim whose geometry differs from the input's.
                self.climfile_fixer(
                    rh,
                    convkind="targetclim",
                    month=actualmonth,
                    notgeo=startingclim,
                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
                    inputkind="clim_model",
                    area="000",
                )

            # Standard execution
            super().execute(rh, opts)

            # Find the output filename (exactly one PF file is expected)
            output_file = [x for x in sh.glob("PF{:s}*+*".format(self.xpname))]
            if len(output_file) != 1:
                raise AlgoComponentError("No or multiple output files found.")
            output_file = output_file[0]

            # prepares the next execution
            if isMany:
                # Set a local storage place
                sh.mkdir(self._RUNSTORE)
                # Freeze the current output
                sh.move(
                    output_file,
                    sh.path.join(self._RUNSTORE, "pfout_{:d}".format(num)),
                    fmt=r.container.actualfmt,
                )
                sh.remove(infile, fmt=r.container.actualfmt)
                # Cleaning/Log management
                if not self.server_run:
                    # The only one listing
                    sh.cat("NODE.001_01", output="NODE.all")
                # Some cleaning
                sh.rmall("ncf927", "dirlst")
            else:
                # Link the output files to new style names
                sh.cp(
                    output_file,
                    self._compute_target_name(r),
                    fmt=r.container.actualfmt,
                    intent="in",
                )
                # Link the listing to NODE.all
                sh.cp("NODE.001_01", "NODE.all", intent="in")

    def postfix(self, rh, opts):
        """Post processing cleaning."""
        sh = self.system

        # Re-list the inputs (same selection as in execute) to restore the
        # frozen outputs under their final names.
        initrh = [
            x.rh
            for x in self.context.sequence.effective_inputs(
                role=("Analysis", "Guess", "InitialCondition"),
                kind=(
                    "analysis",
                    "historic",
                    "ic",
                    re.compile("(stp|ana)min"),
                    re.compile("pert"),
                ),
            )
        ]
        if len(initrh) > 1:
            for num, r in enumerate(initrh):
                sh.move(
                    "{:s}/pfout_{:d}".format(self._RUNSTORE, num),
                    self._compute_target_name(r),
                    fmt=r.container.actualfmt,
                )

        super().postfix(rh, opts)
569
+
570
+
571
class FullPosBDAP(FullPos):
    """Post-processing for IFS-like Models.

    OBSOLETE a/c cy46 (use the 903 configuration / fullpos server instead).
    """

    _footprint = dict(
        info="Run a fullpos to post-process raw model outputs",
        attr=dict(
            kind=dict(values=["fullpos", "fp"], remap=dict(fp="fullpos")),
            fcterm=dict(
                values=[
                    0,
                ],
            ),
            outputid=dict(
                info="The identifier for the encoding of post-processed fields.",
                optional=True,
            ),
            server_run=dict(
                values=[
                    False,
                ],
            ),
        ),
    )

    def prepare(self, rh, opts):
        """Some additional checks."""
        # The selection namelist is symlinked per-term in execute(): a
        # pre-existing xxt00000000 would shadow it.
        if self.system.path.exists("xxt00000000"):
            raise AlgoComponentError(
                "There should be no file named xxt00000000 in the working directory"
            )
        super().prepare(rh, opts)

    def execute(self, rh, opts):
        """Loop on the various initial conditions provided."""

        sh = self.system

        # Fullpos namelists (one per term).
        namrh = [
            x.rh
            for x in self.context.sequence.effective_inputs(kind="namelistfp")
        ]

        # Optional selection namelists (one per term).
        namxx = [
            x.rh
            for x in self.context.sequence.effective_inputs(
                role="FullPosSelection",
                kind="namselect",
            )
        ]

        # Model states to post-process, ordered by term.
        initsec = [
            x
            for x in self.context.sequence.effective_inputs(
                role=("InitialCondition", "ModelState"),
                kind="historic",
            )
        ]
        initsec.sort(key=lambda sec: sec.rh.resource.term)

        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")

        ininc = self.naming_convention("ic", rh)
        infile = ininc()

        for sec in initsec:
            r = sec.rh
            sh.subtitle("Loop on {:s}".format(r.resource.term.fmthm))

            # Clim files must match the month of the state's validity date.
            thisdate = r.resource.date + r.resource.term
            thismonth = thisdate.month
            logger.info("Fullpos <month:%s>" % thismonth)

            if do_fix_input_clim:
                self.climfile_fixer(
                    rh,
                    convkind="modelclim",
                    month=thismonth,
                    geo=r.resource.geometry,
                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
                    inputkind="clim_model",
                )

            thesenames = self.all_localclim_fixer(rh, thismonth)

            # Set a local storage place
            runstore = "RUNOUT" + r.resource.term.fmtraw
            sh.mkdir(runstore)

            # Define an input namelist
            try:
                # Pick the namelist whose term matches the current state.
                namfp = [
                    x for x in namrh if x.resource.term == r.resource.term
                ].pop()
                namfplocal = namfp.container.localpath()
                if self.outputid is not None:
                    self._set_nam_macro(
                        namfp.contents, namfplocal, "OUTPUTID", self.outputid
                    )
                namfp.contents.rewrite(namfp.container)
                sh.remove("fort.4")
                sh.symlink(namfplocal, "fort.4")
            except Exception:
                logger.critical(
                    "Could not get a fullpos namelist for term %s",
                    r.resource.term,
                )
                raise

            # Define an selection namelist
            if namxx:
                namxt = [
                    x for x in namxx if x.resource.term == r.resource.term
                ]
                if namxt:
                    sh.remove("xxt00000000")
                    sh.symlink(
                        namxt.pop().container.localpath(), "xxt00000000"
                    )
                else:
                    logger.critical(
                        "Could not get a selection namelist for term %s",
                        r.resource.term,
                    )
                    raise AlgoComponentError()
            else:
                logger.info("No selection namelist are provided.")

            # Finally set the actual init file
            sh.remove(infile)
            self.grab(
                sec,
                comment="Fullpos source (term={:s})".format(
                    r.resource.term.fmthm
                ),
            )
            sh.softlink(r.container.localpath(), infile)

            # Standard execution
            super().execute(rh, opts)

            # Freeze the current output (both FA/LFI and GRIB outputs)
            for posfile in [
                x
                for x in (
                    sh.glob("PF{:s}*+*".format(self.xpname))
                    + sh.glob("GRIBPF{:s}*+*".format(self.xpname))
                )
            ]:
                # Strip the trailing zeros of the raw term before appending
                # the formatted one.
                rootpos = re.sub("0+$", "", posfile)
                fmtpos = "grib" if posfile.startswith("GRIB") else "lfi"
                targetfile = sh.path.join(
                    runstore, rootpos + r.resource.term.fmthm
                )
                targetbase = sh.path.basename(targetfile)

                # Deal with potential promises
                expected = [
                    x
                    for x in self.promises
                    if x.rh.container.localpath() == targetbase
                ]
                if expected:
                    logger.info(
                        "Start dealing with promises for: %s.",
                        ", ".join(
                            [x.rh.container.localpath() for x in expected]
                        ),
                    )
                    # Promised files must bear their final (base) name
                    # before being put.
                    if posfile != targetbase:
                        sh.move(posfile, targetbase, fmt=fmtpos)
                        posfile = targetbase
                    for thispromise in expected:
                        thispromise.put(incache=True)

                sh.move(posfile, targetfile, fmt=fmtpos)

            # Store the listings alongside the outputs.
            for logfile in sh.glob("NODE.*", "std*"):
                sh.move(logfile, sh.path.join(runstore, logfile))

            # Some cleaning
            sh.rmall("PX{:s}*".format(self.xpname), fmt="lfi")
            sh.rmall("ncf927", "dirlst")
            for clim in thesenames:
                sh.rm(clim)

    def postfix(self, rh, opts):
        """Post processing cleaning."""
        sh = self.system

        # Bring the per-term frozen outputs back to the working directory.
        for fpfile in [
            x
            for x in (
                sh.glob("RUNOUT*/PF{:s}*".format(self.xpname))
                + sh.glob("RUNOUT*/GRIBPF{:s}*+*".format(self.xpname))
            )
            if sh.path.isfile(x)
        ]:
            sh.move(
                fpfile,
                sh.path.basename(fpfile),
                fmt="grib" if "GRIBPF" in fpfile else "lfi",
            )
        # Concatenate all listings into a single NODE.all file.
        sh.cat("RUNOUT*/NODE.001_01", output="NODE.all")

        super().postfix(rh, opts)
779
+
780
+
781
class OfflineSurfex(Parallel, DrHookDecoMixin):
    """Run a forecast with the SURFEX's offline binary.

    The run/final dates are derived from the first initial-condition input
    and injected into the namelists through macros (TSTEP, TSTEP_OUTPUTS,
    FINAL_STOP, NB_READS).
    """

    _footprint = [
        model,
        dict(
            info="Run a forecast with the SURFEX's offline binary.",
            attr=dict(
                kind=dict(
                    values=[
                        "offline_forecast",
                    ],
                ),
                model=dict(
                    values=[
                        "surfex",
                    ],
                ),
                model_tstep=dict(
                    info="The timestep of the model",
                    type=Period,
                ),
                diag_tstep=dict(
                    info="The timestep for writing diagnostics outputs",
                    type=Period,
                ),
                fcterm=dict(
                    info="The forecast's term",
                    type=Period,
                ),
                forcing_read_interval=dict(
                    info="Read the forcing file every...",
                    type=Period,
                    default=Period("PT12H"),
                    optional=True,
                ),
            ),
        ),
    ]

    def valid_executable(self, rh):
        """Check the executable's resource.

        Only a SURFEX "offline" binary is acceptable for this component.
        """
        bmodel = getattr(rh.resource, "model", None)
        rc = bmodel == "surfex" and rh.resource.realkind == "offline"
        if not rc:
            logger.error("Inapropriate binary provided")
        return rc and super().valid_executable(rh)

    @staticmethod
    def _fix_nam_macro(sec, macro, value):
        """Set a given namelist macro and issue a log message."""
        sec.rh.contents.setmacro(macro, value)
        logger.info("Setup %s macro to %s.", macro, str(value))

    def prepare(self, rh, opts):
        """Setup the appropriate namelist macros."""
        self.system.subtitle("Offline SURFEX Settings.")
        # Find the run/final date from the first initial-condition input
        ic = self.context.sequence.effective_inputs(
            role=("InitialConditions", "ModelState", "Analysis")
        )
        if ic:
            if len(ic) > 1:
                logger.warning(
                    "Multiple initial conditions, using only the first one..."
                )
            rundate = ic[0].rh.resource.date
            if hasattr(ic[0].rh.resource, "term"):
                rundate += ic[0].rh.resource.term
            # BUGFIX: keep the stop date as a date object. The previous code
            # rebound *finaldate* to a plain list before computing *nbreads*,
            # so "(finaldate - rundate)" raised a TypeError at run time.
            stopdate = rundate + self.fcterm
            # FINAL_STOP expects [year, month, day, seconds-in-day].
            finaldate = [
                stopdate.year,
                stopdate.month,
                stopdate.day,
                stopdate.hour * 3600
                + stopdate.minute * 60
                + stopdate.second,
            ]
            logger.info("The final date is : %s", str(finaldate))
            # Number of forcing-file reads needed to cover the whole run.
            nbreads = int(
                math.ceil(
                    (stopdate - rundate).length
                    / self.forcing_read_interval.length
                )
            )
        else:
            logger.warning(
                "No initial conditions were found. Hope you know what you are doing..."
            )
            finaldate = None
        # Ok, let's find the namelist(s) and inject the macros
        namsecs = self.context.sequence.effective_inputs(
            role=("Namelist", "Namelistsurf")
        )
        for namsec in namsecs:
            logger.info("Processing: %s", namsec.rh.container.localpath())
            self._fix_nam_macro(namsec, "TSTEP", self.model_tstep.length)
            self._fix_nam_macro(
                namsec, "TSTEP_OUTPUTS", self.diag_tstep.length
            )
            if finaldate:
                self._fix_nam_macro(namsec, "FINAL_STOP", finaldate)
                self._fix_nam_macro(namsec, "NB_READS", nbreads)
            if namsec.rh.contents.dumps_needs_update:
                namsec.rh.save()
            logger.info("Namelist dump: \n%s", namsec.rh.container.read())
+ logger.info("Namelist dump: \n%s", namsec.rh.container.read())