vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,62 @@
1
+ """
2
+ Common interest classes to help setup the DrHook library environment.
3
+ """
4
+
5
+ import footprints
6
+ from bronx.fancies import loggers
7
+
8
+ from vortex.algo.components import AlgoComponentDecoMixin, Parallel, algo_component_deco_mixin_autodoc
9
+
10
+ #: No automatic export
11
+ __all__ = []
12
+
13
+ logger = loggers.getLogger(__name__)
14
+
15
+
16
@algo_component_deco_mixin_autodoc
class DrHookDecoMixin(AlgoComponentDecoMixin):
    """Add DrHook related settings to AlgoComponent classes.

    When mixed into an AlgoComponent class, this automatically extends the
    class' footprint with an optional *drhookprof* boolean attribute (False
    by default) and registers a prepare-hook that exports the relevant
    DrHook environment variables (see :meth:`_drhook_varexport`). Extra
    variables are exported for non-MPI (serial) runs.
    """

    _MIXIN_EXTRA_FOOTPRINTS = [
        footprints.Footprint(
            attr=dict(
                drhookprof=dict(
                    info='Activate the DrHook profiling.',
                    optional=True,
                    type=bool,
                    default=False,
                    doc_zorder=-50,
                ),
            ),
        )]

    def _drhook_varexport(self, rh, opts):  # @UnusedVariable
        """Export proper DrHook variables"""
        # Profiling on: enable DrHook with the "prof" option.
        # Profiling off: keep DrHook disabled (signals are ignored either way).
        if self.drhookprof:
            exports = [("DR_HOOK", "1"),
                       ("DR_HOOK_OPT", "prof"),
                       ("DR_HOOK_IGNORE_SIGNALS", "-1")]
        else:
            exports = [("DR_HOOK", "0"),
                       ("DR_HOOK_IGNORE_SIGNALS", "-1")]
        # For serial (non-Parallel) components, silence DrHook's MPI layer
        if not isinstance(self, Parallel):
            exports.extend([("DR_HOOK_SILENT", "1"),
                            ("DR_HOOK_NOT_MPI", "1"),
                            ("DR_HOOK_ASSERT_MPI_INITIALIZED", "0")])
        for varname, varvalue in exports:
            logger.info('Setting DRHOOK env %s = %s', varname, varvalue)
            self.env[varname] = varvalue

    _MIXIN_PREPARE_HOOKS = (_drhook_varexport, )
@@ -0,0 +1,268 @@
1
+ """
2
+ TODO: Module documentation.
3
+ """
4
+
5
+ import json
6
+
7
+ import footprints
8
+ from bronx.fancies import loggers
9
+ from vortex import sessions
10
+ from vortex.algo.components import AlgoComponentError
11
+ from vortex.layout.contexts import Context
12
+
13
+ #: No automatic export
14
+ __all__ = []
15
+
16
+ logger = loggers.getLogger(__name__)
17
+
18
+
19
class _GenericFilter:
    """Base class for data filtering classes (one subclass per data format).

    A filter selects fields/messages of an input file based on their
    format-specific identifier (*fid*).
    """

    def __init__(self):
        """No parameters: the current session's System object is captured."""
        self._filters = list()
        self._sh = sessions.system()

    def add_filters(self, *filters):
        """Add one or more filters to the filters list.

        :param filters: a list of filters

        Filters are described using dictionaries. Here is an example of such a
        dictionary::

            {
                "fields_include": [
                    {
                        "indicatorOfTypeOfLevel": 100,
                        "shortName": "t",
                        "level": [
                            850,
                            500,
                            300
                        ]
                    }
                ],
                "fid_format": "GRIB1",
                "filter_name": "toto"
            }

        **fields_include** or **fields_exclude** lists depends on the data
        format specified with the **fid_format** key.

        The filters argument of this function accepts dictionaries but also
        strings or Context objects :

            * If a string is provided, it will be converted to a dictionary
              using json.loads
            * If a Context object is provided, the Context's sequence will be
              used to find available resources of filtering_request kind. The
              content of such resources will be used as a filter.

        """
        for a_filter in filters:
            if isinstance(a_filter, dict):
                self._filters.append(a_filter)
            elif isinstance(a_filter, str):
                # JSON description of a single filter
                self._filters.append(json.loads(a_filter))
            elif isinstance(a_filter, Context):
                # Harvest filter definitions from the context's input sequence
                for a_request in a_filter.sequence.effective_inputs(kind='filtering_request'):
                    self._filters.append(a_request.rh.contents.data)

    def __len__(self):
        """Returns the number of active filters."""
        return len(self._filters)

    @staticmethod
    def _is_dict_superset(full, subset):
        """Finds out if the *full* dictionary contains and matches *subset*.

        Keys starting with ``comment`` are ignored in *subset*. A *subset*
        value may be a list/tuple of acceptable values.
        """
        for k, v in subset.items():
            # Ignore the comments...
            if k.startswith('comment'):
                continue
            # Check for the key inside the full dictionary
            try:
                fullvalue = full[str(k)]
            except KeyError:
                return False
            # Does the key match ?
            if isinstance(v, (list, tuple)):
                if fullvalue not in v:
                    return False
            elif fullvalue != v:
                return False
        return True

    def _filter_process(self, fid, a_filter):
        """Check if the data's fid complies with the filter.

        :raises ValueError: when the filter's *fid_format* is missing from
            *fid* (or from the filter description itself).
        """
        includes = a_filter.get('fields_include', [])
        excludes = a_filter.get('fields_exclude', [])
        try:
            fid = fid[a_filter['fid_format']]
        except KeyError as e:
            # Bugfix: the original code caught KeyboardInterrupt here, which
            # both swallowed Ctrl-C and let the intended KeyError escape.
            raise ValueError("Please specify a valid fid_format in the filter description") from e
        # First process includes: accepted as soon as one include matches
        # (an empty include list accepts everything)
        includes_ok = True
        for include in includes:
            includes_ok = self._is_dict_superset(fid, include)
            if includes_ok:
                break
        # Process the excludes if necessary: rejected if any exclude matches
        if includes_ok:
            for exclude in excludes:
                includes_ok = not self._is_dict_superset(fid, exclude)
                if not includes_ok:
                    break
        return includes_ok

    def __call__(self, inputfile, outfile_fmt):
        """Apply the various filters on *inputfile*. Should be implemented..."""
        raise NotImplementedError('This method has to be overwritten.')
131
+
132
+
133
class GRIBFilter(_GenericFilter):
    """Class in charge of filtering GRIB files."""

    #: Name of the pseudo-filter that simply concatenates the input GRIB.
    CONCATENATE_FILTER = 'concatenate'

    def __init__(self, concatenate=False):
        """

        :param bool concatenate: Whether to generate a concatenated GRIB file
        """
        super().__init__()
        self.concatenate = concatenate
        # True when the 'grib' shell addon is loaded (multipart/xgrib support)
        self._xgrib_support = 'grib' in self._sh.loaded_addons()

    def __len__(self):
        """Returns the number of active filters (concatenate included)."""
        return super().__len__() + (1 if self.concatenate else 0)

    def _simple_cat(self, gribfile, outfile_fmt, intent):
        """Just concatenate a multipart GRIB."""
        if self._xgrib_support and self._sh.is_xgrib(gribfile):
            # Multipart input: pack all the parts into a single file
            self._sh.xgrib_pack(gribfile,
                                outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
                                intent=intent)
        else:
            # Just make a copy with the appropriate name...
            self._sh.cp(gribfile,
                        outfile_fmt.format(filtername=self.CONCATENATE_FILTER),
                        intent=intent, fmt='grib')

    def __call__(self, gribfile, outfile_fmt, intent='in'):
        """Apply the various filters on *gribfile*.

        :param gribfile: The path to the input GRIB file
        :param outfile_fmt: The path of output files
        :param intent: forwarded to the copy/pack shell calls when only
            concatenating (unused when actual filters are active)
        :return: The list of created output files

        The *outfile_fmt* must be a format string such as
        **GRIBOUTPUT_{filtername:s}.grib** where **filtername** will be replaced
        by the name of the filter.
        """

        if not self._sh.path.exists(gribfile):
            raise OSError("{!s} doesn't exist".format(gribfile))

        # We just want to concatenate files...
        if not self._filters:
            if self.concatenate:
                self._simple_cat(gribfile, outfile_fmt, intent=intent)
                return [outfile_fmt.format(filtername=self.CONCATENATE_FILTER), ]
            else:
                raise ValueError("Set concatenate=True or provide a filter.")

        # Open the input file using Epygram
        from ..util import usepygram
        if not usepygram.epygram_checker.is_available(version='1.0.0'):
            raise AlgoComponentError("Epygram (v1.0.0) needs to be available")

        # A multipart (xgrib) input is opened as one data object per part
        if self._xgrib_support and self._sh.is_xgrib(gribfile):
            idx = self._sh.xgrib_index_get(gribfile)
            in_data = [footprints.proxy.dataformat(
                filename=self._sh.path.realpath(a_gribfile),
                openmode='r',
                format='GRIB',
            ) for a_gribfile in idx]
        else:
            in_data = [footprints.proxy.dataformat(
                filename=self._sh.path.realpath(gribfile),
                openmode='r',
                format='GRIB',
            ), ]

        # Open output files (one per filter, plus the optional concatenated one)
        out_data = list()
        out_filelist = list()
        for a_filter in self._filters:
            f_name = outfile_fmt.format(filtername=a_filter['filter_name'])
            out_filelist.append(f_name)
            # It would be a lot better to use io.open but grib_api is very annoying !
            out_data.append(open(f_name, 'wb'))
        if self.concatenate:
            f_name = outfile_fmt.format(filtername=self.CONCATENATE_FILTER)
            out_filelist.append(f_name)
            # It would be a lot better to use io.open but grib_api is very annoying !
            out_cat = open(f_name, 'wb')

        # Iterate over all GRIB messages; write each one to the output file of
        # every filter it matches (and to the concatenated file, if requested).
        with usepygram.epy_env_prepare(sessions.current()):
            for a_in_data in in_data:
                msg = a_in_data.iter_messages(headers_only=False)
                while msg is not None:
                    for (a_out_data, a_filter) in zip(out_data, self._filters):
                        thefid = msg.genfid()
                        if self._filter_process(thefid, a_filter):
                            logger.debug("Select succeed for filter %s: %s",
                                         a_filter['filter_name'], thefid)
                            msg.write_to_file(a_out_data)
                    if self.concatenate:
                        msg.write_to_file(out_cat)
                    # iter_messages returns None when the input is exhausted
                    msg = a_in_data.iter_messages(headers_only=False)

        # Close input and output files
        for a_in_data in in_data:
            a_in_data.close()
        for a_out_data in out_data:
            a_out_data.close()
        if self.concatenate:
            out_cat.close()

        return out_filelist
241
+
242
+
243
def grib_inplace_cat(t, rh):
    """Ensure that a GRIB file is a usual single file (if not, concatenate it).

    This function is designed to be used as a hook function.

    :param t: A :class:`vortex.sessions.Ticket` object
    :param rh: A :class:`vortex.data.handlers.Handler` object
    """
    # Guard: multipart handling requires the 'grib' shell addon
    if 'grib' not in t.sh.loaded_addons():
        logger.info("Multipart GRIB support is not activated: nothing can be done.")
        return
    # Guard: nothing to do for a plain (non multipart) GRIB file
    if not t.sh.is_xgrib(rh.container.localpath()):
        logger.info("The localpath is not a multipart GRIB: nothing to do.")
        return
    # Some cleanup...
    rh.reset_contents()
    rh.container.close()
    # Move the index file prior to the concatenation
    tmpfile = rh.container.localpath() + '_concat' + t.sh.safe_filesuffix()
    t.sh.move(rh.container.localpath(), tmpfile)
    # Concatenate
    t.sh.xgrib_pack(tmpfile, rh.container.localpath())
    # Remove the multipart file
    t.sh.grib_remove(tmpfile)
    logger.info("The multipart GRIB has been concatenated.")
@@ -0,0 +1,99 @@
1
+ """
2
+ TODO: Module documentation.
3
+ """
4
+
5
+ import footprints
6
+
7
+ from vortex.tools.grib import GRIBAPI_Tool
8
+
9
+ #: No automatic export
10
+ __all__ = []
11
+
12
+
13
class _GRIBDIFF_Plus_St:
    """Status of the GRIB comparison.

    The object's truth-value reflects the comparison return code; the
    :attr:`result` property gives access to the detailed result object.
    """

    def __init__(self, rc, result):
        """
        :param rc: The grib_compare return code
        :param result: The detailed result object (e.g. _GRIBDIFF_Plus_Res)
        """
        self.rc = rc
        self._result = result

    def __str__(self):
        return '{:s} | rc={:d}>'.format(repr(self).rstrip('>'), self.rc)

    @property
    def result(self):
        """Indicates whether the diff succeeded or not."""
        return self._result

    def __bool__(self):
        # Bugfix: __bool__ must return a genuine bool. Returning a raw int
        # return code triggers "TypeError: __bool__ should return bool" at
        # truth-testing time.
        return bool(self.rc)
30
+
31
+
32
class _GRIBDIFF_Plus_Res:
    """Detailed result of the GRIB comparison."""

    def __init__(self, gapi, epydiff, epydiff_res):
        """
        :param gapi: The grib_compare return code
        :param epydiff: Whether an Epygram based diff was actually performed
        :param epydiff_res: The Epygram diff result (or an explanatory message)
        """
        self._gapi = gapi
        self._epydiff = epydiff
        self._epydiff_res = epydiff_res

    def __str__(self):
        header = repr(self).rstrip('>')
        return f'{header:s} | gribapi_rc={self._gapi:d} epydiff_done={self._epydiff:d}>'

    def differences(self):
        """Print detailed informations about the diff."""
        print(self._epydiff_res)
47
+
48
+
49
class GRIBDIFF_Plus(GRIBAPI_Tool):
    """
    Interface to gribapi commands + epygram diff (designed as a shell Addon).

    The usual grib_compare based diff is run first; whenever it fails, a
    (costly) Epygram based diff is computed to provide detailed statistics,
    at most *maxepydiff* times per addon instance.
    """

    _footprint = dict(
        info = 'Default GRIBAPI system interface',
        attr = dict(
            maxepydiff = dict(
                info = 'Epygram diffs are costfull, they will run only maxepydiff times',
                type = int,
                default = 2,
                optional = True,
            ),
        ),
        priority = dict(
            level = footprints.priorities.top.TOOLBOX  # @UndefinedVariable
        ),
    )

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)
        # Number of Epygram diffs already performed (capped by maxepydiff)
        self._epycount = 0
        # Lazily computed Epygram availability (None = not yet checked)
        self._epyavail = None

    def _actual_diff(self, grib1, grib2, skipkeys, **kw):
        """Run the usual grib_compare; on failure add an Epygram based diff.

        :return: A :class:`_GRIBDIFF_Plus_St` object whose truth-value
            reflects the grib_compare return code and whose *result*
            attribute carries a :class:`_GRIBDIFF_Plus_Res` object.
        """
        # Run the standard grib_compare based diff first
        rc = super()._actual_diff(grib1, grib2, skipkeys, **kw)
        if not rc:
            # Check Epygram availability only once (result is cached)
            if self._epyavail is None:
                from ..util.usepygram import epygram_checker
                self._epyavail = epygram_checker.is_available(version='1.0.0')
            if self._epyavail:
                if self._epycount < self.maxepydiff:
                    from ..util.diffpygram import EpyGribDiff
                    gdiff = EpyGribDiff(grib2, grib1)  # Ref file is first...
                    self._epycount += 1
                    res = _GRIBDIFF_Plus_Res(rc, True, str(gdiff))
                    # Save the detailed diff
                    with open(grib1 + "_epygram_diffstats.log", "w") as outfh:
                        outfh.write(gdiff.format_diff(detailed=True))
                else:
                    # Too many Epygram diffs already performed: skip it
                    res = _GRIBDIFF_Plus_Res(
                        rc, False,
                        "grib_compare failed (but the Epygram diffs max number is exceeded...)"
                    )
            else:
                res = _GRIBDIFF_Plus_Res(rc, False,
                                         "grib_compare failed (Epygram unavailable)")
        else:
            # grib_compare succeeded: no Epygram diff needed
            res = _GRIBDIFF_Plus_Res(rc, False, "")
        return _GRIBDIFF_Plus_St(rc, res)
@@ -0,0 +1,163 @@
1
+ """
2
+ Various tools related to the IFS code.
3
+ """
4
+
5
+ import re
6
+
7
+ from bronx.fancies import loggers
8
+ from bronx.stdtypes.date import Time
9
+
10
+ import footprints
11
+
12
+ from vortex.syntax.stdattrs import model
13
+ from ..syntax.stdattrs import arpifs_cycle
14
+
15
+ logger = loggers.getLogger(__name__)
16
+
17
+
18
class _IfsOutputsTimesListDesc:
    """Data descriptor that converts the input data to a list of Time objects.

    Values are stored in the owner instance's ``_tlists_store`` dictionary
    under the *attr* key; an unset attribute reads back as ``None``.
    """

    def __init__(self, attr, doc):
        """
        :param str attr: The key used in the instance's ``_tlists_store``
        :param str doc: The attribute's documentation string
        """
        self._attr = attr
        self.__doc__ = doc

    def __get__(self, instance, owner):
        if instance is None:
            # Bugfix: class-level access (e.g. introspection/doc generation)
            # used to crash with AttributeError on None._tlists_store.
            # Returning the descriptor itself is the conventional behaviour.
            return self
        return instance._tlists_store.get(self._attr, None)

    def __set__(self, instance, value):
        if value is None:
            # Assigning None unsets the attribute
            instance._tlists_store.pop(self._attr, None)
        else:
            if not isinstance(value, list):
                raise ValueError('**value** should be a list.')
            instance._tlists_store[self._attr] = [Time(t) for t in value]

    def __delete__(self, instance):
        instance._tlists_store.pop(self._attr, None)
38
+
39
+
40
class IfsOutputsAbstractConfigurator(footprints.FootprintBase):
    """Abstract utility class to configure the IFS model regarding output data.

    Concrete subclasses implement :meth:`_setup_nam_obj`, which actually
    modifies the namelist object. The various output-term lists are exposed
    through :class:`_IfsOutputsTimesListDesc` descriptors, all backed by the
    instance's ``_tlists_store`` dictionary.
    """

    _abstract = True
    _collector = ('ifsoutputs_configurator', )
    _footprint = [
        model,
        arpifs_cycle,
        dict(
            attr=dict(
                fcterm_unit = dict(
                    info = 'The unit used in the *fcterm* attribute.',
                    values = ['h', 't']
                ),
            )
        )
    ]

    def __init__(self, * kargs, ** kwargs):
        super().__init__(* kargs, ** kwargs)
        # Backing store for the _IfsOutputsTimesListDesc descriptors below
        self._tlists_store = dict()

    # Each descriptor converts assigned lists to lists of Time objects;
    # unset descriptors read back as None (i.e. "leave the namelist alone").
    modelstate = _IfsOutputsTimesListDesc(
        "modelstate",
        "The list of terms for modelstate outputs."
    )

    surf_modelstate = _IfsOutputsTimesListDesc(
        "surf_modelstate",
        "The list of terms for surface scheme modelstate outputs.")

    spectral_diag = _IfsOutputsTimesListDesc(
        "spectral_diag",
        "The list of terms for spectral space diagnostics outputs.")

    post_processing = _IfsOutputsTimesListDesc(
        "post_processing",
        "The list of terms for inline post-processing outputs."
    )

    def _setup_nam_obj(self, namelist_object, namelist_name):
        """Actually tweak the IFS namelist (to be implemented by subclasses)."""
        raise NotImplementedError()

    def __call__(self, namelist_object, namelist_name):
        """Tweak **namelist_object** that was read from the **namelist_name** file.

        :return: True if the namelist was modified, False otherwise.
        """
        if self._tlists_store:
            self._setup_nam_obj(namelist_object, namelist_name)
            return True
        else:
            # No output-term list was set: nothing to do
            return False
91
+
92
+
93
class IfsOutputsConfigurator(IfsOutputsAbstractConfigurator):
    """Utility class to configure the IFS model regarding output data."""

    @staticmethod
    def _get_namblock(namelist_object, nam_block):
        """Get or create a **nam_block** namelist."""
        if nam_block in namelist_object:
            return namelist_object[nam_block]
        else:
            return namelist_object.newblock(nam_block)

    @staticmethod
    def _set_namvar_value(namblock, var, value, namname):
        """Set a value in a **namblock** namelist and log it."""
        namblock[var] = value
        logger.info('Setup &%s %s=%s / (file: %s)',
                    namblock.name, var, namblock.nice(value), namname)

    @staticmethod
    def _clean_namvar(namblock, var, namname):
        """Clean the **var** value from the **namblock** namelist.

        Removes *var* itself as well as any indexed (``VAR(...)``) or
        derived-type (``VAR%...``) entries.
        """
        todo = {k for k in namblock.keys() if re.match(var + r'($|\(|%)', k)}
        if todo:
            for k in todo:
                namblock.delvar(k)
            logger.info('Cleaning %s variable in namelist &%s (file: %s)',
                        var, namblock.name, namname)

    def _generic_terms_setup(self, namct0, namct1, what, terms, namname):
        """Setup a given kind of output data (in a generic way).

        :param namct0: The NAMCT0 namelist block
        :param namct1: The NAMCT1 namelist block
        :param str what: The namelist variables' radical (e.g. ``HIS``, ``POS``)
        :param terms: The list of output terms, or None to leave things alone
        :param str namname: The namelist file name (for logging purposes only)
        """
        if terms is not None:
            # NOTE(review): the sign convention apparently encodes the unit —
            # negative for hours, positive for timesteps; confirm against the
            # IFS/ARPEGE namelist documentation.
            sign = -1 if self.fcterm_unit == 'h' else 1
            with_minutes = any([t.minute > 0 for t in terms])
            # Remove any pre-existing frequency/term settings for this output kind
            self._clean_namvar(namct0, 'NFR{:s}'.format(what), namname)
            self._clean_namvar(namct0, 'N{:s}TS'.format(what), namname)
            self._clean_namvar(namct0, 'N{:s}TSMIN'.format(what), namname)
            # Activate (1) or deactivate (0) this output kind in NAMCT1
            self._set_namvar_value(namct1, 'N1{:s}'.format(what), 1 if terms else 0, namname)
            if terms:
                # Index 0 holds the (signed) number of explicit terms
                self._set_namvar_value(namct0, 'N{:s}TS(0)'.format(what), sign * len(terms), namname)
                if with_minutes:
                    if 'cy46' <= self.cycle < 'cy47':  # Temporary fix for cy46 only
                        self._set_namvar_value(namct0, 'N{:s}TSMIN(0)'.format(what),
                                               len(terms), namname)
                logger.info('Setting up N%sTS and N%sTSMIN in &%s (file: %s)',
                            what, what, namct0.name, namname)
                # One indexed entry per output term (hours, then minutes)
                for i, t in enumerate(terms):
                    namct0['N{:s}TS({:d})'.format(what, i + 1)] = sign * t.hour
                if with_minutes:
                    for i, t in enumerate(terms):
                        namct0['N{:s}TSMIN({:d})'.format(what, i + 1)] = t.minute

    def _setup_nam_obj(self, namelist_object, namelist_name):
        """Actually tweak the IFS namelist."""
        namoph = self._get_namblock(namelist_object, 'NAMOPH')
        namct0 = self._get_namblock(namelist_object, 'NAMCT0')
        namct1 = self._get_namblock(namelist_object, 'NAMCT1')
        # First take into account the **fcterm_unit**
        self._set_namvar_value(namoph, 'LINC', self.fcterm_unit == 'h', namelist_name)
        # Setup outputs (one call per output kind)
        self._generic_terms_setup(namct0, namct1, 'HIS', self.modelstate, namelist_name)
        self._generic_terms_setup(namct0, namct1, 'SFXHIS', self.surf_modelstate, namelist_name)
        self._generic_terms_setup(namct0, namct1, 'SDI', self.spectral_diag, namelist_name)
        self._generic_terms_setup(namct0, namct1, 'POS', self.post_processing, namelist_name)
        # Extra fixup for fullpos
        if self.post_processing is not None:
            if not self.post_processing:
                # Empty list: explicitly switch fullpos off
                self._set_namvar_value(namct0, 'NFPOS', 0, namelist_name)
            else:
                # Do not overwrite a pre-existing positive value:
                if 'NFPOS' not in namct0 or namct0['NFPOS'] == 0:
                    self._set_namvar_value(namct0, 'NFPOS', 1, namelist_name)