vortex_nwp-2.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/tools/drhook.py
@@ -0,0 +1,66 @@
+ """
+ Common interest classes to help setup the DrHook library environment.
+ """
+
+ import footprints
+ from bronx.fancies import loggers
+
+ from vortex.algo.components import (
+     AlgoComponentDecoMixin,
+     Parallel,
+     algo_component_deco_mixin_autodoc,
+ )
+
+ #: No automatic export
+ __all__ = []
+
+ logger = loggers.getLogger(__name__)
+
+
+ @algo_component_deco_mixin_autodoc
+ class DrHookDecoMixin(AlgoComponentDecoMixin):
+     """Handle DrHook settings in AlgoComponents.
+
+     This mixin class is intended to be used with AlgoComponent classes. It will
+     automatically add footprints' arguments related to DrHook (namely the
+     drhookprof boolean attribute that is optional and False by default),
+     and set up DrHook environment variables (:meth:`_drhook_varexport`) depending
+     on the context (MPI run or not).
+     """
+
+     _MIXIN_EXTRA_FOOTPRINTS = [
+         footprints.Footprint(
+             attr=dict(
+                 drhookprof=dict(
+                     info="Activate the DrHook profiling.",
+                     optional=True,
+                     type=bool,
+                     default=False,
+                     doc_zorder=-50,
+                 ),
+             ),
+         )
+     ]
+
+     def _drhook_varexport(self, rh, opts):  # @UnusedVariable
+         """Export proper DrHook variables"""
+         drhook_vars = (
+             [
+                 ("DR_HOOK", "1"),
+                 ("DR_HOOK_OPT", "prof"),
+                 ("DR_HOOK_IGNORE_SIGNALS", "-1"),
+             ]
+             if self.drhookprof
+             else [("DR_HOOK", "0"), ("DR_HOOK_IGNORE_SIGNALS", "-1")]
+         )
+         if not isinstance(self, Parallel):
+             drhook_vars += [
+                 ("DR_HOOK_SILENT", "1"),
+                 ("DR_HOOK_NOT_MPI", "1"),
+                 ("DR_HOOK_ASSERT_MPI_INITIALIZED", "0"),
+             ]
+         for k, v in drhook_vars:
+             logger.info("Setting DRHOOK env %s = %s", k, v)
+             self.env[k] = v
+
+     _MIXIN_PREPARE_HOOKS = (_drhook_varexport,)
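
A minimal usage sketch of the mixin above. The HypotheticalForecast class and its footprint values are invented for illustration; only DrHookDecoMixin, Parallel and the drhookprof attribute come from the file itself.

# Illustrative only: a toy Parallel AlgoComponent that pulls in the DrHook settings.
from vortex.algo.components import Parallel
from vortex.nwp.tools.drhook import DrHookDecoMixin


class HypotheticalForecast(Parallel, DrHookDecoMixin):
    """Parallel binary whose environment receives DR_HOOK* variables at prepare time."""

    _footprint = dict(
        info="Toy component used only for this example.",
        attr=dict(
            kind=dict(values=["toy_forecast"]),  # invented kind value
        ),
    )

# Because DrHookDecoMixin lists _drhook_varexport in _MIXIN_PREPARE_HOOKS, the
# prepare step exports DR_HOOK=1 / DR_HOOK_OPT=prof when drhookprof=True and
# DR_HOOK=0 otherwise; the DR_HOOK_NOT_MPI block only applies to non-Parallel runs.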
vortex/nwp/tools/grib.py
@@ -0,0 +1,294 @@
+ """
+ TODO: Module documentation.
+ """
+
+ import json
+
+ import footprints
+ from bronx.fancies import loggers
+ from vortex import sessions
+ from vortex.algo.components import AlgoComponentError
+ from vortex.layout.contexts import Context
+
+ #: No automatic export
+ __all__ = []
+
+ logger = loggers.getLogger(__name__)
+
+
+ class _GenericFilter:
+     """This class could be the start of filtering classes for different formats."""
+
+     def __init__(self):
+         """
+
+         No parameters.
+         """
+         self._filters = list()
+         self._sh = sessions.system()
+
+     def add_filters(self, *filters):
+         """Add one or more filters to the filters list.
+
+         :param filters: a list of filters
+
+         Filters are described using dictionaries. Here is an example of such a
+         dictionary::
+
+             {
+                 "fields_include": [
+                     {
+                         "indicatorOfTypeOfLevel": 100,
+                         "shortName": "t",
+                         "level": [
+                             850,
+                             500,
+                             300
+                         ]
+                     }
+                 ],
+                 "fid_format": "GRIB1",
+                 "filter_name": "toto"
+             }
+
+         The **fields_include** or **fields_exclude** lists depend on the data
+         format specified with the **fid_format** key.
+
+         The filters argument of this function accepts dictionaries but also
+         strings or Context objects:
+
+         * If a string is provided, it will be converted to a dictionary
+           using json.loads
+         * If a Context object is provided, the Context's sequence will be
+           used to find available resources of filtering_request kind. The
+           content of such resources will be used as a filter.
+
+         """
+
+         for a_filter in filters:
+             if isinstance(a_filter, dict):
+                 self._filters.append(a_filter)
+             elif isinstance(a_filter, str):
+                 self._filters.append(json.loads(a_filter))
+             elif isinstance(a_filter, Context):
+                 for a_request in a_filter.sequence.effective_inputs(
+                     kind="filtering_request"
+                 ):
+                     self._filters.append(a_request.rh.contents.data)
+
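
A short sketch of the simplest input forms accepted by add_filters(), using the concrete GRIBFilter subclass defined further down in this file; the filter content mirrors the docstring example above, and the Context form is only hinted at in a comment.

# Sketch only: the same filter passed as a dict and as a JSON string.
import json
from vortex.nwp.tools.grib import GRIBFilter

t_levels_filter = {
    "fields_include": [
        {"indicatorOfTypeOfLevel": 100, "shortName": "t", "level": [850, 500, 300]}
    ],
    "fid_format": "GRIB1",
    "filter_name": "toto",
}

gfilter = GRIBFilter(concatenate=False)
gfilter.add_filters(t_levels_filter)              # plain dictionary
gfilter.add_filters(json.dumps(t_levels_filter))  # JSON string, decoded with json.loads
# gfilter.add_filters(current_context)            # a Context: its "filtering_request"
#                                                 # input resources become filters
print(len(gfilter))  # -> 2 active filters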
+     def __len__(self):
+         """Returns the number of active filters."""
+         return len(self._filters)
+
+     @staticmethod
+     def _is_dict_superset(full, subset):
+         """Finds out if the full dictionary contains and matches subset."""
+         superset_ok = True
+         for k, v in subset.items():
+             # Ignore the comments...
+             if k.startswith("comment"):
+                 continue
+             # Check for the key inside the full dictionary
+             try:
+                 fullvalue = full[str(k)]
+             except KeyError:
+                 superset_ok = False
+                 break
+             # Does the key match?
+             if isinstance(v, (list, tuple)):
+                 if fullvalue not in v:
+                     superset_ok = False
+                     break
+             else:
+                 if fullvalue != v:
+                     superset_ok = False
+                     break
+         return superset_ok
+
+     def _filter_process(self, fid, a_filter):
+         """Check if the data's fid complies with the filter."""
+         includes = a_filter.get("fields_include", [])
+         excludes = a_filter.get("fields_exclude", [])
+         try:
+             fid = fid[a_filter["fid_format"]]
+         except KeyError:
+             raise ValueError(
+                 "Please specify a valid fid_format in the filter description"
+             )
+         # First process includes
+         includes_ok = True
+         for include in includes:
+             includes_ok = self._is_dict_superset(fid, include)
+             if includes_ok:
+                 break
+         # Process the excludes if necessary
+         if includes_ok:
+             for exclude in excludes:
+                 includes_ok = not self._is_dict_superset(fid, exclude)
+                 if not includes_ok:
+                     break
+         return includes_ok
+
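
To make the matching rule concrete, a small sketch with invented values: every non-comment key of a filter entry must be present in the fid and either equal the fid value or, for list/tuple values, contain it.

# Sketch of the rule implemented by the (private) _is_dict_superset() helper.
from vortex.nwp.tools.grib import _GenericFilter

fid = {"indicatorOfTypeOfLevel": 100, "shortName": "t", "level": 500}
include = {
    "indicatorOfTypeOfLevel": 100,
    "shortName": "t",
    "level": [850, 500, 300],
    "comment": "temperature on standard pressure levels",  # ignored by the check
}

print(_GenericFilter._is_dict_superset(fid, include))            # True: 500 is in the list
print(_GenericFilter._is_dict_superset(fid, {"shortName": "z"}))  # False: value mismatch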
+     def __call__(self, inputfile, outfile_fmt):
+         """Apply the various filters on *inputfile*. Should be implemented..."""
+         raise NotImplementedError("This method has to be overwritten.")
+
+
+ class GRIBFilter(_GenericFilter):
+     """Class in charge of filtering GRIB files."""
+
+     CONCATENATE_FILTER = "concatenate"
+
+     def __init__(self, concatenate=False):
+         """
+
+         :param bool concatenate: Whether to generate a concatenated GRIB file
+         """
+         super().__init__()
+         self.concatenate = concatenate
+         self._xgrib_support = "grib" in self._sh.loaded_addons()
+
+     def __len__(self):
+         """Returns the number of active filters (concatenate included)."""
+         return super().__len__() + (1 if self.concatenate else 0)
+
+     def _simple_cat(self, gribfile, outfile_fmt, intent):
+         """Just concatenate a multipart GRIB."""
+         if self._xgrib_support and self._sh.is_xgrib(gribfile):
+             self._sh.xgrib_pack(
+                 gribfile,
+                 outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                 intent=intent,
+             )
+         else:
+             # Just make a copy with the appropriate name...
+             self._sh.cp(
+                 gribfile,
+                 outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                 intent=intent,
+                 fmt="grib",
+             )
+
+     def __call__(self, gribfile, outfile_fmt, intent="in"):
+         """Apply the various filters on *gribfile*.
+
+         :param gribfile: The path to the input GRIB file
+         :param outfile_fmt: The path of output files
+
+         The *outfile_fmt* must be a format string such as
+         **GRIBOUTPUT_{filtername:s}.grib** where **filtername** will be replaced
+         by the name of the filter.
+         """
+
+         if not self._sh.path.exists(gribfile):
+             raise OSError("{!s} doesn't exist".format(gribfile))
+
+         # We just want to concatenate files...
+         if not self._filters:
+             if self.concatenate:
+                 self._simple_cat(gribfile, outfile_fmt, intent=intent)
+                 return [
+                     outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
+                 ]
+             else:
+                 raise ValueError("Set concatenate=True or provide a filter.")
+
+         # Open the input file using Epygram
+         from ..util import usepygram
+
+         if not usepygram.epygram_checker.is_available(version="1.0.0"):
+             raise AlgoComponentError("Epygram (v1.0.0) needs to be available")
+
+         if self._xgrib_support and self._sh.is_xgrib(gribfile):
+             idx = self._sh.xgrib_index_get(gribfile)
+             in_data = [
+                 footprints.proxy.dataformat(
+                     filename=self._sh.path.realpath(a_gribfile),
+                     openmode="r",
+                     format="GRIB",
+                 )
+                 for a_gribfile in idx
+             ]
+         else:
+             in_data = [
+                 footprints.proxy.dataformat(
+                     filename=self._sh.path.realpath(gribfile),
+                     openmode="r",
+                     format="GRIB",
+                 ),
+             ]
+
+         # Open output files
+         out_data = list()
+         out_filelist = list()
+         for a_filter in self._filters:
+             f_name = outfile_fmt.format(filtername=a_filter["filter_name"])
+             out_filelist.append(f_name)
+             # It would be a lot better to use io.open but grib_api is very annoying!
+             out_data.append(open(f_name, "wb"))
+         if self.concatenate:
+             f_name = outfile_fmt.format(filtername=self.CONCATENATE_FILTER)
+             out_filelist.append(f_name)
+             # It would be a lot better to use io.open but grib_api is very annoying!
+             out_cat = open(f_name, "wb")
+
+         with usepygram.epy_env_prepare(sessions.current()):
+             for a_in_data in in_data:
+                 msg = a_in_data.iter_messages(headers_only=False)
+                 while msg is not None:
+                     for a_out_data, a_filter in zip(out_data, self._filters):
+                         thefid = msg.genfid()
+                         if self._filter_process(thefid, a_filter):
+                             logger.debug(
+                                 "Selection succeeded for filter %s: %s",
+                                 a_filter["filter_name"],
+                                 thefid,
+                             )
+                             msg.write_to_file(a_out_data)
+                     if self.concatenate:
+                         msg.write_to_file(out_cat)
+                     msg = a_in_data.iter_messages(headers_only=False)
+
+         # Close output files
+         for a_in_data in in_data:
+             a_in_data.close()
+         for a_out_data in out_data:
+             a_out_data.close()
+         if self.concatenate:
+             out_cat.close()
+
+         return out_filelist
+
+
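
A hedged end-to-end sketch of the call above: the input path is invented, and Epygram >= 1.0.0 (plus the "grib" shell addon for multipart inputs) must be available, as GRIBFilter.__call__ itself enforces.

# Sketch only (paths invented): one output per filter plus a concatenated copy.
from vortex.nwp.tools.grib import GRIBFilter

gfilter = GRIBFilter(concatenate=True)
gfilter.add_filters({
    "fid_format": "GRIB1",
    "filter_name": "toto",
    "fields_include": [{"shortName": "t", "level": [850, 500, 300]}],
})
produced = gfilter("ICMSHFCST+0006.grib", "GRIBOUTPUT_{filtername:s}.grib")
# produced == ["GRIBOUTPUT_toto.grib", "GRIBOUTPUT_concatenate.grib"]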
+ def grib_inplace_cat(t, rh):
+     """Ensure that a GRIB file is a usual single file (if not, concatenate it).
+
+     This function is designed to be used as a hook function.
+
+     :param t: A :class:`vortex.sessions.Ticket` object
+     :param rh: A :class:`vortex.data.handlers.Handler` object
+     """
+     xgrib_support = "grib" in t.sh.loaded_addons()
+     if xgrib_support:
+         if t.sh.is_xgrib(rh.container.localpath()):
+             # Some cleanup...
+             rh.reset_contents()
+             rh.container.close()
+             # Move the index file prior to the concatenation
+             tmpfile = (
+                 rh.container.localpath() + "_concat" + t.sh.safe_filesuffix()
+             )
+             t.sh.move(rh.container.localpath(), tmpfile)
+             # Concatenate
+             t.sh.xgrib_pack(tmpfile, rh.container.localpath())
+             # Remove the multipart file
+             t.sh.grib_remove(tmpfile)
+             logger.info("The multipart GRIB has been concatenated.")
+         else:
+             logger.info(
+                 "The localpath is not a multipart GRIB: nothing to do."
+             )
+     else:
+         logger.info(
+             "Multipart GRIB support is not activated: nothing can be done."
+         )
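
A minimal sketch of how grib_inplace_cat() gets its arguments: a session Ticket and a resource Handler. The handler below is a placeholder; in real scripts it comes from the toolbox/dataflow layer, where this function is meant to be plugged in as a hook.

# Sketch only: direct invocation once an input GRIB is in place.
from vortex import sessions
from vortex.nwp.tools.grib import grib_inplace_cat

t = sessions.current()   # current session ticket (t.sh is its shell interface)
rh = ...                 # placeholder: a vortex.data.handlers.Handler for a local GRIB
grib_inplace_cat(t, rh)  # repacks the file in place only if it is a multipart GRIB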
vortex/nwp/tools/gribdiff.py
@@ -0,0 +1,104 @@
+ """
+ TODO: Module documentation.
+ """
+
+ import footprints
+
+ from vortex.tools.grib import GRIBAPI_Tool
+
+ #: No automatic export
+ __all__ = []
+
+
+ class _GRIBDIFF_Plus_St:
+     """Status of the GRIB comparison."""
+
+     def __init__(self, rc, result):
+         self.rc = rc
+         self._result = result
+
+     def __str__(self):
+         return "{:s} | rc={:d}>".format(repr(self).rstrip(">"), self.rc)
+
+     @property
+     def result(self):
+         """The detailed result of the comparison."""
+         return self._result
+
+     def __bool__(self):
+         return self.rc
+
+
+ class _GRIBDIFF_Plus_Res:
+     """Detailed result of the GRIB comparison."""
+
+     def __init__(self, gapi, epydiff, epydiff_res):
+         self._gapi = gapi
+         self._epydiff = epydiff
+         self._epydiff_res = epydiff_res
+
+     def __str__(self):
+         return "{0:s} | gribapi_rc={1:d} epydiff_done={2:d}>".format(
+             repr(self).rstrip(">"), self._gapi, self._epydiff
+         )
+
+     def differences(self):
+         """Print detailed information about the diff."""
+         print(self._epydiff_res)
+
+
+ class GRIBDIFF_Plus(GRIBAPI_Tool):
+     """
+     Interface to gribapi commands + epygram diff (designed as a shell Addon).
+     """
+
+     _footprint = dict(
+         info="Default GRIBAPI system interface",
+         attr=dict(
+             maxepydiff=dict(
+                 info="Epygram diffs are costly, they will run only maxepydiff times",
+                 type=int,
+                 default=2,
+                 optional=True,
+             ),
+         ),
+         priority=dict(
+             level=footprints.priorities.top.TOOLBOX  # @UndefinedVariable
+         ),
+     )
+
+     def __init__(self, *kargs, **kwargs):
+         super().__init__(*kargs, **kwargs)
+         self._epycount = 0
+         self._epyavail = None
+
+     def _actual_diff(self, grib1, grib2, skipkeys, **kw):
+         rc = super()._actual_diff(grib1, grib2, skipkeys, **kw)
+         if not rc:
+             if self._epyavail is None:
+                 from ..util.usepygram import epygram_checker
+
+                 self._epyavail = epygram_checker.is_available(version="1.0.0")
+             if self._epyavail:
+                 if self._epycount < self.maxepydiff:
+                     from ..util.diffpygram import EpyGribDiff
+
+                     gdiff = EpyGribDiff(grib2, grib1)  # Ref file is first...
+                     self._epycount += 1
+                     res = _GRIBDIFF_Plus_Res(rc, True, str(gdiff))
+                     # Save the detailed diff
+                     with open(grib1 + "_epygram_diffstats.log", "w") as outfh:
+                         outfh.write(gdiff.format_diff(detailed=True))
+                 else:
+                     res = _GRIBDIFF_Plus_Res(
+                         rc,
+                         False,
+                         "grib_compare failed (but the Epygram diffs max number is exceeded...)",
+                     )
+             else:
+                 res = _GRIBDIFF_Plus_Res(
+                     rc, False, "grib_compare failed (Epygram unavailable)"
+                 )
+         else:
+             res = _GRIBDIFF_Plus_Res(rc, False, "")
+         return _GRIBDIFF_Plus_St(rc, res)
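
A small sketch of how a failed comparison reads. Real status objects come from GRIBDIFF_Plus._actual_diff(); here the two small result classes are built directly, with invented values, to show the access pattern.

# Sketch only: a falsy status object carrying an explanatory result.
from vortex.nwp.tools.gribdiff import _GRIBDIFF_Plus_Res, _GRIBDIFF_Plus_St

res = _GRIBDIFF_Plus_Res(False, False, "grib_compare failed (Epygram unavailable)")
status = _GRIBDIFF_Plus_St(False, res)
if not status:                   # falsy when grib_compare reported differences
    print(status.rc)             # raw grib_compare return status
    status.result.differences()  # prints the stored Epygram/diagnostic text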
vortex/nwp/tools/ifstools.py
@@ -0,0 +1,203 @@
+ """
+ Various tools related to the IFS code.
+ """
+
+ import re
+
+ from bronx.fancies import loggers
+ from bronx.stdtypes.date import Time
+
+ import footprints
+
+ from vortex.syntax.stdattrs import model
+ from ..syntax.stdattrs import arpifs_cycle
+
+ logger = loggers.getLogger(__name__)
+
+
+ class _IfsOutputsTimesListDesc:
+     """Convert the input data to Time objects."""
+
+     def __init__(self, attr, doc):
+         self._attr = attr
+         self.__doc__ = doc
+
+     def __get__(self, instance, owner):
+         return instance._tlists_store.get(self._attr, None)
+
+     def __set__(self, instance, value):
+         if value is None:
+             instance._tlists_store.pop(self._attr, None)
+         else:
+             if not isinstance(value, list):
+                 raise ValueError("**value** should be a list.")
+             instance._tlists_store[self._attr] = [Time(t) for t in value]
+
+     def __delete__(self, instance):
+         instance._tlists_store.pop(self._attr, None)
+
+
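
A brief illustration of what the descriptor does: list values assigned to the configurator attributes defined below are normalized to bronx Time objects. The attribute values passed to the constructor are invented and must satisfy the footprint.

# Sketch only: Time normalization performed by _IfsOutputsTimesListDesc.
from vortex.nwp.tools.ifstools import IfsOutputsConfigurator

cfg = IfsOutputsConfigurator(model="arpege", cycle="cy46t1", fcterm_unit="h")
cfg.modelstate = [0, 3, "06:30"]  # stored as [Time(0), Time(3), Time("06:30")]
print(cfg.modelstate)             # Time objects, not the raw inputs
cfg.modelstate = None             # clears the entry from the internal store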
+ class IfsOutputsAbstractConfigurator(footprints.FootprintBase):
+     """Abstract utility class to configure the IFS model regarding output data."""
+
+     _abstract = True
+     _collector = ("ifsoutputs_configurator",)
+     _footprint = [
+         model,
+         arpifs_cycle,
+         dict(
+             attr=dict(
+                 fcterm_unit=dict(
+                     info="The unit used in the *fcterm* attribute.",
+                     values=["h", "t"],
+                 ),
+             )
+         ),
+     ]
+
+     def __init__(self, *kargs, **kwargs):
+         super().__init__(*kargs, **kwargs)
+         self._tlists_store = dict()
+
+     modelstate = _IfsOutputsTimesListDesc(
+         "modelstate", "The list of terms for modelstate outputs."
+     )
+
+     surf_modelstate = _IfsOutputsTimesListDesc(
+         "surf_modelstate",
+         "The list of terms for surface scheme modelstate outputs.",
+     )
+
+     spectral_diag = _IfsOutputsTimesListDesc(
+         "spectral_diag",
+         "The list of terms for spectral space diagnostics outputs.",
+     )
+
+     post_processing = _IfsOutputsTimesListDesc(
+         "post_processing",
+         "The list of terms for inline post-processing outputs.",
+     )
+
+     def _setup_nam_obj(self, namelist_object, namelist_name):
+         """Actually tweak the IFS namelist."""
+         raise NotImplementedError()
+
+     def __call__(self, namelist_object, namelist_name):
+         """Tweak **namelist_object** that was read from the **namelist_name** file."""
+         if self._tlists_store:
+             self._setup_nam_obj(namelist_object, namelist_name)
+             return True
+         else:
+             return False
+
+
+ class IfsOutputsConfigurator(IfsOutputsAbstractConfigurator):
+     """Utility class to configure the IFS model regarding output data."""
+
+     @staticmethod
+     def _get_namblock(namelist_object, nam_block):
+         """Get or create a **nam_block** namelist."""
+         if nam_block in namelist_object:
+             return namelist_object[nam_block]
+         else:
+             return namelist_object.newblock(nam_block)
+
+     @staticmethod
+     def _set_namvar_value(namblock, var, value, namname):
+         """Set a value in a **namblock** namelist and log it."""
+         namblock[var] = value
+         logger.info(
+             "Setup &%s %s=%s / (file: %s)",
+             namblock.name,
+             var,
+             namblock.nice(value),
+             namname,
+         )
+
+     @staticmethod
+     def _clean_namvar(namblock, var, namname):
+         """Clean the **var** value from the **namblock** namelist."""
+         todo = {k for k in namblock.keys() if re.match(var + r"($|\(|%)", k)}
+         if todo:
+             for k in todo:
+                 namblock.delvar(k)
+             logger.info(
+                 "Cleaning %s variable in namelist &%s (file: %s)",
+                 var,
+                 namblock.name,
+                 namname,
+             )
+
+     def _generic_terms_setup(self, namct0, namct1, what, terms, namname):
+         """Setup a given kind of output data (in a generic way)."""
+         if terms is not None:
+             sign = -1 if self.fcterm_unit == "h" else 1
+             with_minutes = any([t.minute > 0 for t in terms])
+             self._clean_namvar(namct0, "NFR{:s}".format(what), namname)
+             self._clean_namvar(namct0, "N{:s}TS".format(what), namname)
+             self._clean_namvar(namct0, "N{:s}TSMIN".format(what), namname)
+             self._set_namvar_value(
+                 namct1, "N1{:s}".format(what), 1 if terms else 0, namname
+             )
+             if terms:
+                 self._set_namvar_value(
+                     namct0,
+                     "N{:s}TS(0)".format(what),
+                     sign * len(terms),
+                     namname,
+                 )
+                 if with_minutes:
+                     if (
+                         "cy46" <= self.cycle < "cy47"
+                     ):  # Temporary fix for cy46 only
+                         self._set_namvar_value(
+                             namct0,
+                             "N{:s}TSMIN(0)".format(what),
+                             len(terms),
+                             namname,
+                         )
+                 logger.info(
+                     "Setting up N%sTS and N%sTSMIN in &%s (file: %s)",
+                     what,
+                     what,
+                     namct0.name,
+                     namname,
+                 )
+                 for i, t in enumerate(terms):
+                     namct0["N{:s}TS({:d})".format(what, i + 1)] = sign * t.hour
+                 if with_minutes:
+                     for i, t in enumerate(terms):
+                         namct0["N{:s}TSMIN({:d})".format(what, i + 1)] = (
+                             t.minute
+                         )
+
+     def _setup_nam_obj(self, namelist_object, namelist_name):
+         """Actually tweak the IFS namelist."""
+         namoph = self._get_namblock(namelist_object, "NAMOPH")
+         namct0 = self._get_namblock(namelist_object, "NAMCT0")
+         namct1 = self._get_namblock(namelist_object, "NAMCT1")
+         # First take into account the **fcterm_unit**
+         self._set_namvar_value(
+             namoph, "LINC", self.fcterm_unit == "h", namelist_name
+         )
+         # Setup outputs
+         self._generic_terms_setup(
+             namct0, namct1, "HIS", self.modelstate, namelist_name
+         )
+         self._generic_terms_setup(
+             namct0, namct1, "SFXHIS", self.surf_modelstate, namelist_name
+         )
+         self._generic_terms_setup(
+             namct0, namct1, "SDI", self.spectral_diag, namelist_name
+         )
+         self._generic_terms_setup(
+             namct0, namct1, "POS", self.post_processing, namelist_name
+         )
+         # Extra fixup for fullpos
+         if self.post_processing is not None:
+             if not self.post_processing:
+                 self._set_namvar_value(namct0, "NFPOS", 0, namelist_name)
+             else:
+                 # Do not overwrite a pre-existing positive value:
+                 if "NFPOS" not in namct0 or namct0["NFPOS"] == 0:
+                     self._set_namvar_value(namct0, "NFPOS", 1, namelist_name)
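
A hedged sketch of applying the configurator to parsed namelist contents. The namelist object is assumed to offer the mapping-style interface used above (membership test, item access, newblock, delvar, nice); in vortex jobs it typically comes from a namelist resource handler's contents. All values are illustrative.

# Sketch only: tweak NAMCT0/NAMCT1/NAMOPH according to the requested output terms.
from vortex.nwp.tools.ifstools import IfsOutputsConfigurator

cfg = IfsOutputsConfigurator(model="arpege", cycle="cy46t1", fcterm_unit="h")
cfg.modelstate = [3, 6, "07:30"]  # model state outputs at these terms
cfg.post_processing = []          # explicitly disable inline Fullpos (NFPOS=0)

namcontents = ...                 # placeholder: parsed namelist contents (e.g. rh.contents)
if cfg(namcontents, "fort.4"):    # returns True when something was actually tweaked
    pass                          # rewrite/save the namelist afterwards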