vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/nwp/util/diffpygram.py ADDED
@@ -0,0 +1,359 @@
1
+ """
2
+ Usage of the EPyGrAM package to compute diffs.
3
+ """
4
+
5
+ import collections
6
+ import copy
7
+ import functools
8
+ import hashlib
9
+ import io
10
+ import json
11
+ import operator
12
+ import pprint
13
+
14
+ import footprints
15
+ from vortex import sessions
16
+ from . import usepygram
17
+
18
+
19
+ class HGeoDesc:
20
+ """Holds Epygram's horizontal geometry data."""
21
+
22
+ def __init__(self, epyfield):
23
+ """
24
+ :param epyfield: An epygram field object.
25
+ """
26
+ geo = epyfield.geometry
27
+ self.grid = geo.grid
28
+ self.dimensions = geo.dimensions
29
+ self.name = geo.name
30
+ self.projection = None if not geo.projected_geometry else geo.projection
31
+ sio = io.StringIO()
32
+ geo.what(out=sio, vertical_geometry=False)
33
+ sio.seek(0)
34
+ self._what = sio.readlines()[3:]
35
+
36
+ def __eq__(self, other):
37
+ return ((self.grid == other.grid) and
38
+ (self.dimensions == other.dimensions) and
39
+ (self.name == other.name) and
40
+ (self.projection == other.projection))
41
+
42
+ def __str__(self):
43
+ return ''.join(self._what)
44
+
45
+
46
+ class DataDesc:
47
+ """Holds information about an Epygram's field data (basic stats + checksum)."""
48
+
49
+ def __init__(self, epyfield):
50
+ """
51
+ :param epyfield: An epygram field object.
52
+ """
53
+ self.stats = epyfield.stats()
54
+ self.stats.pop('quadmean', None) # We do not want quadmean
55
+ s256 = hashlib.sha256()
56
+ s256.update(epyfield.data.tobytes())
57
+ self.checksum = s256.digest()
58
+
59
+ def __eq__(self, other):
60
+ return self.checksum == other.checksum
61
+
62
+ def __str__(self):
63
+ return ', '.join(['{:s}={!s}'.format(k, v) for k, v in self.stats.items()])
64
+
65
+
66
+ class HGeoLibrary:
67
+ """A collection/library of :class:`HGeoDesc` objects."""
68
+
69
+ def __init__(self):
70
+ self._geolist = list()
71
+
72
+ def register(self, hgeo_desc):
73
+ """Check if an :class:`HGeoDesc` object is already in the library.
74
+
75
+ If the *hgeo_desc* object is not already in the library, it is inserted.
76
+ In any case, the index of the *hgeo_desc* geometry within the library is
77
+ returned.
78
+ """
79
+ found = (None, None)
80
+ for i, g in enumerate(self._geolist):
81
+ if hgeo_desc == g:
82
+ found = (i, g)
83
+ break
84
+ if found == (None, None):
85
+ found = (len(self._geolist), hgeo_desc)
86
+ self._geolist.append(hgeo_desc)
87
+ return found[0]
88
+
89
+ def __str__(self):
90
+ outstr = ''
91
+ for i, g in enumerate(self._geolist):
92
+ outstr += 'HORIZONTAL GEOMETRY #{:d}\n\n'.format(i)
93
+ outstr += str(g)
94
+ outstr += '\n'
95
+ return outstr
96
+
97
+
98
+ class FieldDesc:
99
+ """Holds various information about an Epygram field."""
100
+
101
+ def __init__(self, hgeoid, vgeo, datadesc, fid, valid):
102
+ self.hgeoid = hgeoid
103
+ self.vgeo = vgeo
104
+ self.datadesc = datadesc
105
+ self.fid = fid
106
+ self.valid = valid
107
+
108
+ def ranking(self, other):
109
+ """
110
+ Compute the comparison score of the present field with respect to a
111
+ reference one (*other*).
112
+ """
113
+ fidscore = functools.reduce(operator.add,
114
+ [int(self.fid[k] == other.fid[k])
115
+ for k in self.fid.keys() if k in other.fid])
116
+ fidscore = 5. * float(fidscore) / float(max(len(self.fid), len(other.fid)))
117
+ return (int(self.valid != other.valid) * -5. +
118
+ int(self.hgeoid != other.hgeoid) * -5. +
119
+ int(self.vgeo != other.vgeo) * -4. +
120
+ int(self.datadesc == other.datadesc) * 5. +
121
+ fidscore)
122
+
123
+ def ranking_summary(self, other):
124
+ """Returns detailed comparison information (including the ranking)."""
125
+ return (self.datadesc == other.datadesc,
126
+ self.valid == other.valid,
127
+ self.hgeoid == other.hgeoid and self.vgeo == other.vgeo,
128
+ self.ranking(other))
129
+
130
+ def __str__(self):
131
+ out = "HGeo=#{:d} ; Validity={!s} ; metadata are:\n".format(self.hgeoid, self.valid)
132
+ out += pprint.pformat(self.fid) + "\n"
133
+ out += "Data: {!s}".format(self.datadesc)
134
+ return out
135
+
136
+ def prefixed_str(self, prefix):
137
+ """A representation of this object prefixed with the *prefix* string."""
138
+ return '\n'.join([prefix + l for l in str(self).split('\n')])
139
+
140
+
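
As an aside, a minimal sketch of the ranking arithmetic defined above, with made-up field properties (both fields share validity, geometries and data, and 4 of their 5 fid keys match):

    # Hedged sketch: the weights are those of FieldDesc.ranking(); the inputs
    # below are hypothetical, chosen only to illustrate the computation.
    same_valid, same_hgeo, same_vgeo, same_data = True, True, True, True
    fid_matches, fid_len = 4, 5
    fidscore = 5.0 * fid_matches / fid_len                       # 4.0
    score = ((not same_valid) * -5.0 + (not same_hgeo) * -5.0 +
             (not same_vgeo) * -4.0 + same_data * 5.0 + fidscore)
    print(score)  # 9.0 -> above the pairing threshold of 3 used later
                  #        in EpyGribDiff._compute_diff
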
141
+ class FieldBundle:
142
+ """A collection of FieldDesc objects."""
143
+
144
+ def __init__(self, hgeolib):
145
+ self._hgeolib = hgeolib
146
+ self._fields = list()
147
+
148
+ @property
149
+ def fields(self):
150
+ """The list of fields in the present collection."""
151
+ return self._fields
152
+
153
+ def _common_processing(self, fld, fid):
154
+ hgeo = HGeoDesc(fld)
155
+ vgeo = fld.geometry.vcoordinate
156
+ valid = fld.validity.get()
157
+ ddesc = DataDesc(fld)
158
+ hgeo_id = self._hgeolib.register(hgeo)
159
+ fid = copy.copy(fid)
160
+ fid['datebasis'] = fld.validity.getbasis()
161
+ fid['term'] = fld.validity.term()
162
+ fid['cumulativeduration'] = fld.validity.cumulativeduration()
163
+ return FieldDesc(hgeo_id, vgeo, ddesc, fid, valid)
164
+
165
+ @usepygram.epygram_checker.disabled_if_unavailable(version='1.0.0')
166
+ def read_grib(self, filename):
167
+ """Read in a GRIB file."""
168
+ with usepygram.epy_env_prepare(sessions.current()):
169
+ gribdata = footprints.proxy.dataformat(filename=filename,
170
+ openmode='r', format='GRIB')
171
+ fld = gribdata.iter_fields(get_info_as_json=('centre', 'subCentre'))
172
+ while fld:
173
+ fid = fld.fid.get('GRIB2', fld.fid.get('GRIB1'))
174
+ fid.update(json.loads(fld.comment))
175
+ self._fields.append(self._common_processing(fld, fid))
176
+ fld = gribdata.iter_fields(get_info_as_json=('centre', 'subCentre'))
177
+
178
+
179
+ class FieldBundles:
180
+ """A collection of :class:`FieldBundle` objects."""
181
+
182
+ def __init__(self):
183
+ self._hgeolib = HGeoLibrary()
184
+ self._bundles = dict()
185
+
186
+ @property
187
+ def hgeo_library(self):
188
+ """The :class:`HGeoLibrary` onject being used in this collection."""
189
+ return self._hgeolib
190
+
191
+ def new_bundle(self, name):
192
+ """Create a new :class:`FieldBundle` object in this collection."""
193
+ fbd = FieldBundle(self._hgeolib)
194
+ self._bundles[name] = fbd
195
+ return fbd
196
+
197
+ @property
198
+ def bundles(self):
199
+ """The dictionary of bundles in the present collection."""
200
+ return self._bundles
201
+
202
+
203
+ class EpyGribDiff(FieldBundles):
204
+ """A specialised version of :class:`FieldBundles` that deals with GRIB files."""
205
+
206
+ _FMT_COUNTER = '[{:04d}] '
207
+ _HEAD_COUNTER = ' ' * len(_FMT_COUNTER.format(0))
208
+
209
+ _FMT_SHORT = "#{n:>4d} id={id:16s} l={level:<6d} c={centre:<3d},{scentre:3d}"
210
+ _HEAD_SHORT = "Mess. ParamId/ShortN Level Centre,S "
211
+
212
+ _FMT_MIDDLE = " | {0:1s} {1:1s} {2:1s} {3:6s} | "
213
+ _HEAD_MIDDLE = " | {:1s} {:1s} {:1s} {:5s} | "
214
+ _ELTS_MIDDLE = ('data', 'valid', 'geo', 'score')
215
+
216
+ _SPACER = (_HEAD_COUNTER +
217
+ 'REF ' + '-' * (len(_HEAD_SHORT) - 4) +
218
+ ' | ----- ----- | ' +
219
+ 'NEW ' + '-' * (len(_HEAD_SHORT) - 4))
220
+
221
+ _DETAILED_SUMARY = 'Data: {0:1s} ; Validity Date: {1:1s} ; HGeometry: {2:s} ; Score: {3:6s}'
222
+
223
+ def __init__(self, ref, new):
224
+ """
225
+ :param str ref: Path to the reference GRIB file
226
+ :param str new: Path to the new GRIB file
227
+ """
228
+ super().__init__()
229
+ self._new = self.new_bundle('New')
230
+ self._new.read_grib(new)
231
+ self._ref = self.new_bundle('Ref')
232
+ self._ref.read_grib(ref)
233
+
234
+ def _compute_diff(self):
235
+ """Explore all possible field combinations and find the closest match.
236
+
237
+ :return: tuple (newfield_id, list of matching reffield_ids, rankingscore,
238
+ list of ranking_summaries)
239
+ """
240
+ found = set()
241
+ couples = list()
242
+ for i, field in enumerate(self._new.fields):
243
+ rscore = collections.defaultdict(list)
244
+ rsummary = collections.defaultdict(list)
245
+ for j, rfield in enumerate(self._ref.fields):
246
+ tsummary = field.ranking_summary(rfield)
247
+ rscore[tsummary[-1]].append(j)
248
+ rsummary[tsummary[-1]].append(tsummary)
249
+ highest = max(rscore.keys())
250
+ # If the score is >= 3 the fields are paired...
251
+ # Note: There might be several field combinations with the same
252
+ # ranking score
253
+ if highest >= 3.:
254
+ refs = rscore[highest]
255
+ couples.append((i, refs, highest, rsummary[highest]))
256
+ found.update(refs)
257
+ else:
258
+ couples.append((i, (), None, None))
259
+ missings = set(range(len(self._ref.fields))) - found
260
+ if missings:
261
+ couples.append((None, list(missings), None, None))
262
+ return couples
263
+
264
+ @classmethod
265
+ def _str_header(cls):
266
+ """Returns the comparison table header."""
267
+ out = cls._SPACER + "\n"
268
+ e_len = max([len(e) for e in cls._ELTS_MIDDLE])
269
+ e_new = [('{:>' + str(e_len) + 's}').format(e.upper()) for e in cls._ELTS_MIDDLE]
270
+ if e_len > 1:
271
+ for i in range(e_len - 1):
272
+ out += (cls._HEAD_COUNTER + ' ' * len(cls._HEAD_SHORT) +
273
+ cls._HEAD_MIDDLE.format(* [e[i] for e in e_new]) +
274
+ ' ' * len(cls._HEAD_SHORT) + "\n")
275
+ out += (cls._HEAD_COUNTER + cls._HEAD_SHORT +
276
+ cls._HEAD_MIDDLE.format(* [e[-1] for e in e_new]) +
277
+ cls._HEAD_SHORT + "\n") + cls._SPACER + "\n"
278
+ return out
279
+
280
+ @classmethod
281
+ def _str_field_summary(cls, n, field):
282
+ """Returns a string that summarise a field properties."""
283
+ if 'paramId' in field.fid:
284
+ # GRIB1
285
+ sid = str(field.fid['paramId']) + '/' + field.fid['shortName']
286
+ else:
287
+ # GRIB2
288
+ sid = (str(field.fid['parameterCategory']) + '-' +
289
+ str(field.fid['parameterNumber']) + '/' +
290
+ field.fid['shortName'])
291
+ if len(sid) > 16: # Truncate if the string is too long
292
+ sid = sid[:15] + '*'
293
+ return cls._FMT_SHORT.format(n=n, id=sid, level=field.fid.get('level', -99),
294
+ centre=field.fid['centre'],
295
+ scentre=field.fid.get('subCentre', -99))
296
+
297
+ @classmethod
298
+ def _str_rsummary_format(cls, rsum, fmt):
299
+ """Format the ranking_summary output."""
300
+ dmap = {True: '=', False: '!'}
301
+ return fmt.format(dmap[rsum[0]], dmap[rsum[1]], dmap[rsum[2]],
302
+ '======' if rsum[3] == 10 else '{:6.2f}'.format(rsum[3]))
303
+
304
+ @staticmethod
305
+ def _embedded_counter(c):
306
+ """Return the formatted comparison counter."""
307
+ return '[{:04d}] '.format(c)
308
+
309
+ def format_diff(self, detailed=True):
310
+ """Return a string that contains the comparison results.
311
+
312
+ :param bool detailed: If False, just returns the comparison table.
313
+ """
314
+ out = ''
315
+ counter = 0
316
+ for couple in self._compute_diff():
317
+ if couple[0] is None:
318
+ for n in couple[1]:
319
+ counter += 1
320
+ out += self._embedded_counter(counter)
321
+ if detailed:
322
+ out += 'Unmatched reference field\n'
323
+ out += self.bundles['Ref'].fields[n].prefixed_str(' REF| ') + "\n"
324
+ else:
325
+ out += self._str_field_summary(n, self.bundles['Ref'].fields[n])
326
+ out += self._FMT_MIDDLE.format('?', '?', '?', ' ?') + '\n'
327
+ else:
328
+ new = self.bundles['New'].fields[couple[0]]
329
+ if len(couple[1]):
330
+ for i, n in enumerate(couple[1]):
331
+ counter += 1
332
+ ref = self.bundles['Ref'].fields[n]
333
+ out += self._embedded_counter(counter)
334
+ if detailed:
335
+ out += self._str_rsummary_format(couple[3][i], self._DETAILED_SUMARY) + "\n"
336
+ out += ref.prefixed_str(' REF| ') + "\n vs\n" + new.prefixed_str(' NEW| ') + "\n"
337
+ else:
338
+ out += self._str_field_summary(n, ref)
339
+ out += self._str_rsummary_format(couple[3][i], self._FMT_MIDDLE)
340
+ out += self._str_field_summary(couple[0], new) if i == 0 else ' idem.'
341
+ out += '\n'
342
+ else:
343
+ counter += 1
344
+ out += self._embedded_counter(counter)
345
+ if detailed:
346
+ out += 'Unmatched new field \n'
347
+ out += self.bundles['New'].fields[couple[0]].prefixed_str(' NEW| ') + "\n"
348
+ else:
349
+ out += ' ' * len(self._HEAD_SHORT)
350
+ out += self._FMT_MIDDLE.format('?', '?', '?', ' ?')
351
+ out += self._str_field_summary(couple[0], new) + '\n'
352
+ out += "\n" if detailed else ''
353
+ if detailed:
354
+ out += 'LIST OF HORIZONTAL GEOMETRIES:\n\n'
355
+ out += str(self.hgeo_library)
356
+ return out
357
+
358
+ def __str__(self):
359
+ return self._str_header() + self.format_diff(detailed=False)
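
For readers who want to try the module, a minimal usage sketch (the GRIB file names are hypothetical; epygram and an initialised vortex session are assumed to be available):

    # Hedged usage sketch of EpyGribDiff: compare a new GRIB file against a
    # reference one and print both the compact table and the detailed report.
    from vortex.nwp.util.diffpygram import EpyGribDiff

    gdiff = EpyGribDiff('reference.grib', 'new.grib')
    print(gdiff)                              # compact comparison table
    print(gdiff.format_diff(detailed=True))   # per-field details + geometries
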
vortex/nwp/util/ens.py ADDED
@@ -0,0 +1,198 @@
1
+ """
2
+ A collection of utility functions used in the context of Ensemble forecasts.
3
+ """
4
+
5
+ import io
6
+ import json
7
+ import re
8
+ import time
9
+
10
+ from bronx.compat import random
11
+ from bronx.fancies import loggers
12
+ from bronx.stdtypes.date import Period
13
+
14
+ from vortex import sessions
15
+ from vortex.data.stores import FunctionStoreCallbackError
16
+ from vortex.util import helpers
17
+
18
+ #: No automatic export
19
+ __all__ = []
20
+
21
+ logger = loggers.getLogger(__name__)
22
+
23
+
24
+ def drawingfunction(options):
25
+ """Draw a random sample from a *set* of values.
26
+
27
+ This function is designed to be executed by a
28
+ :obj:`vortex.data.stores.FunctionStore` object.
29
+
30
+ The *set* of values is given by the resource's *population* attribute
32
+ (it must be a non-empty list).
33
+
34
+ The size of the sample is given by the resource's *nbsample* attribute
35
+ (it must be a non-zero integer).
35
+
36
+ The random generator is initialised using the resource's date. Consequently,
37
+ for a given date, the drawing is reproducible.
38
+
39
+ :param dict options: All the options passed to the store plus anything from
40
+ the query part of the URI.
41
+
42
+ :return: Content of a :obj:`nwp.data.ens.Sample` resource
43
+
44
+ :rtype: A file-like object
45
+ """
46
+ rhdict = options.get('rhandler', None)
47
+ if rhdict:
48
+ date = rhdict['resource']['date']
49
+ rgen = random.Random()
50
+ rgen.seed(int(date[:-2]))
51
+ nbsample = rhdict['resource'].get('nbsample', 0)
52
+ if not nbsample:
53
+ raise FunctionStoreCallbackError('The resource must hold a non-null nbsample attribute')
54
+ population = rhdict['resource'].get('population', [])
55
+ if not population:
56
+ raise FunctionStoreCallbackError('The resource must hold a non-empty population attribute')
57
+ nbset = len(population)
58
+
59
+ tirage = (rgen.sample(population * (nbsample // nbset), (nbsample // nbset) * nbset) +
60
+ rgen.sample(population, nbsample % nbset))
61
+ logger.info('List of random elements: %s', ', '.join([str(x) for x in tirage]))
62
+ else:
63
+ raise FunctionStoreCallbackError("no resource handler here :-(")
64
+ # NB: The result has to be a file-like object!
65
+ outdict = dict(vapp=rhdict['provider'].get('vapp', None),
66
+ vconf=rhdict['provider'].get('vconf', None),
67
+ cutoff=rhdict['resource'].get('cutoff', None),
68
+ date=rhdict['resource'].get('date', None),
69
+ resource_kind=rhdict['resource'].get('kind', None),
70
+ drawing=tirage,
71
+ population=population)
72
+ if rhdict['provider'].get('experiment', None) is not None:
73
+ outdict['experiment'] = rhdict['provider']['experiment']
74
+ return io.BytesIO(json.dumps(outdict, indent=4).encode(encoding='utf_8'))
75
+
76
+
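
A minimal sketch of the drawing scheme used above, with made-up numbers (the standard random module is used here purely for illustration; the function itself relies on bronx.compat.random):

    # With a population of 4 members and nbsample=10, the draw is two full
    # shuffled copies of the population (8 elements) plus 2 extra members
    # picked without replacement.
    import random

    population, nbsample = ['mb001', 'mb002', 'mb003', 'mb004'], 10
    nbset = len(population)
    rgen = random.Random()
    rgen.seed(2024061300)   # seeding with the date makes the draw reproducible
    tirage = (rgen.sample(population * (nbsample // nbset), (nbsample // nbset) * nbset) +
              rgen.sample(population, nbsample % nbset))
    print(len(tirage))      # 10
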
77
+ def _checkingfunction_dict(options):
78
+ """
79
+ Internal function that returns the list that describes the available
80
+ inputs.
81
+ """
82
+ rhdict = options.get('rhandler', None)
83
+ if rhdict:
84
+ # If no nbsample is provided, easy to achieve...
85
+ nbsample = rhdict['resource'].get('nbsample', None)
86
+ # ...and if no explicit minimum of resources, nbsample is the minimum
87
+ nbmin = int(options.get('min', [(0 if nbsample is None else nbsample), ]).pop())
88
+ if nbsample is not None and nbsample < nbmin:
89
+ logger.warning('%d resources needed, %d required: sin of gluttony ?', nbsample, nbmin)
90
+ # What to look for ?
91
+ checkrole = rhdict['resource'].get('checkrole', None)
92
+ if not checkrole:
93
+ raise FunctionStoreCallbackError('The resource must hold a non-empty checkrole attribute')
94
+ rolematch = re.match(r'(\w+)(?:\+(\w+))?$', checkrole)
95
+ cur_t = sessions.current()
96
+ if rolematch:
97
+ ctx = cur_t.context
98
+ checklist = [sec.rh for sec in ctx.sequence.filtered_inputs(role=rolematch.group(1))]
99
+ mandatorylist = ([sec.rh for sec in ctx.sequence.filtered_inputs(role=rolematch.group(2))]
100
+ if rolematch.group(2) else [])
101
+ else:
102
+ raise FunctionStoreCallbackError('checkrole is not properly formatted')
103
+ # Other options
104
+ nretries = int(options.get('nretries', [0, ]).pop())
105
+ retry_wait = Period(options.get('retry_wait', ['PT5M', ]).pop())
106
+ comp_delay = Period(options.get('comp_delay', [0, ]).pop())
107
+ fakecheck = options.get('fakecheck', [False, ]).pop()
108
+
109
+ def _retry_cond(the_ntries, the_acceptable_time):
110
+ return ((the_acceptable_time is None and
111
+ the_ntries <= nretries) or
112
+ (the_acceptable_time and
113
+ (time.time() - the_acceptable_time) < comp_delay.total_seconds()))
114
+
115
+ # Ok let's work...
116
+ ntries = 0
117
+ acceptable_time = None
118
+ found = []
119
+ while _retry_cond(ntries, acceptable_time):
120
+ if ntries:
121
+ logger.info("Let's sleep %d sec. before the next check round...",
122
+ retry_wait.total_seconds())
123
+ cur_t.sh.sleep(retry_wait.total_seconds())
124
+ ntries += 1
125
+ try:
126
+ logger.info("Starting an input check...")
127
+ found, candidates = helpers.colorfull_input_checker(nbmin,
128
+ checklist,
129
+ mandatory=mandatorylist,
130
+ fakecheck=fakecheck)
131
+ if acceptable_time is None and (found or nbmin == 0):
132
+ acceptable_time = time.time()
133
+ if comp_delay.total_seconds() and len(found) != len(candidates):
134
+ logger.info("The minimum required size was reached (nbmin=%d). " +
135
+ "That's great but we are waiting a little longer " +
136
+ "(for at most %d sec.)",
137
+ nbmin, comp_delay.total_seconds())
138
+
139
+ if len(found) == len(candidates):
140
+ # No need to wait any longer...
141
+ break
142
+ except helpers.InputCheckerError as e:
143
+ if not _retry_cond(ntries, acceptable_time):
144
+ raise FunctionStoreCallbackError('The input checker failed ({!s})'.format(e))
145
+ return found
146
+ else:
147
+ raise FunctionStoreCallbackError("no resource handler here :-(\n")
148
+
149
+
150
+ def checkingfunction(options):
151
+ """Check what are the available resources and returns the list.
152
+
153
+ This function is designed to be executed by a
154
+ :obj:`vortex.data.stores.FunctionStore` object.
155
+
156
+ The *checkrole* resource attribute is used to look into the current context
157
+ in order to establish the list of resources that will be checked.
158
+
159
+ :param dict options: All the options passed to the store plus anything from
160
+ the query part of the URI.
161
+
162
+ :return: Content of a :obj:`nwp.data.ens.PopulationList` resource
163
+
164
+ :rtype: A file-like object
165
+ """
166
+ rhdict = options.get('rhandler', None)
167
+ avail_list = _checkingfunction_dict(options)
168
+ outdict = dict(vapp=rhdict['provider'].get('vapp', None),
169
+ vconf=rhdict['provider'].get('vconf', None),
170
+ cutoff=rhdict['resource'].get('cutoff', None),
171
+ date=rhdict['resource'].get('date', None),
172
+ resource_kind=rhdict['resource'].get('kind', None),
173
+ population=avail_list)
174
+ if rhdict['provider'].get('experiment', None) is not None:
175
+ outdict['experiment'] = rhdict['provider']['experiment']
176
+ return io.BytesIO(json.dumps(outdict, indent=4).encode(encoding='utf_8'))
177
+
178
+
179
+ def safedrawingfunction(options):
180
+ """Combined called to :func:`checkingfunction` and :func:`drawingfunction`.
181
+
182
+ See the documentation of these two functions for more details.
183
+ """
184
+ checkedlist = _checkingfunction_dict(options)
185
+ options['rhandler']['resource']['population'] = checkedlist
186
+ return drawingfunction(options)
187
+
188
+
189
+ def unsafedrawingfunction(options):
190
+ """Combined called to :func:`checkingfunction` and :func:`drawingfunction`...
191
+ but with a big lie on the checking: no real check, all the resources are assumed ok.
192
+
193
+ See the documentation of these two functions for more details.
194
+ """
195
+ options['fakecheck'] = [True, ]
196
+ checkedlist = _checkingfunction_dict(options)
197
+ options['rhandler']['resource']['population'] = checkedlist
198
+ return drawingfunction(options)
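
A hedged sketch of the kind of options dictionary these functions receive when exercised outside of a FunctionStore (every value below is made up):

    from vortex.nwp.util.ens import drawingfunction

    options = {
        'rhandler': {
            'resource': {'kind': 'mbsample', 'date': '2024061300',
                         'cutoff': 'production', 'nbsample': 10,
                         'population': ['mb{:03d}'.format(i) for i in range(1, 5)]},
            'provider': {'vapp': 'arome', 'vconf': 'pefrance', 'experiment': 'ABCD'},
        },
    }
    fh = drawingfunction(options)    # file-like object holding the JSON sample
    print(fh.read().decode('utf-8'))
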
vortex/nwp/util/hooks.py ADDED
@@ -0,0 +1,128 @@
1
+ """
2
+ Some useful hooks.
3
+ """
4
+
5
+ import collections.abc
6
+ import functools
7
+
8
+ from bronx.fancies import loggers
9
+ from bronx.stdtypes.date import Date, Period, Time
10
+
11
+ from ..data.query import StaticCutoffDispenser
12
+
13
+ #: No automatic export
14
+ __all__ = []
15
+
16
+ logger = loggers.getLogger(__name__)
17
+
18
+
19
+ def update_namelist(t, rh, *completive_rh):
20
+ """Update namelist with resource handler(s) given in **completive_rh**."""
21
+ touched = False
22
+ for crh in completive_rh:
23
+ if not isinstance(crh, (list, tuple)):
24
+ crh = [crh, ]
25
+ for arh in crh:
26
+ logger.info('Merging: {!r} :\n{:s}'.format(arh.container,
27
+ arh.contents.dumps()))
28
+ rh.contents.merge(arh.contents)
29
+ touched = True
30
+ if touched:
31
+ rh.save()
32
+
33
+
34
+ def concatenate(t, rh, *rhlist):
35
+ """Concatenate *rhlist* after *rh*."""
36
+ blocksize = 32 * 1024 * 1024 # 32Mb
37
+ rh.container.close()
38
+ with rh.container.iod_context():
39
+ myfh = rh.container.iodesc(mode='ab')
40
+ for crh in rhlist:
41
+ if not isinstance(crh, (list, tuple)):
42
+ crh = [crh, ]
43
+ for arh in crh:
44
+ logger.info('Appending %s to self.', str(arh.container))
45
+ with arh.container.iod_context():
46
+ afh = arh.container.iodesc(mode='rb')
47
+ stuff = afh.read(blocksize)
48
+ while stuff:
49
+ myfh.write(stuff)
50
+ stuff = afh.read(blocksize)
51
+
52
+
53
+ def insert_cutoffs(t, rh, rh_cutoff_source, fuse_per_obstype=False):
54
+ """Read the cutoff from *rh_cutoff_source* and feed them into *rh*.
55
+
56
+ If *fuse_per_obstype* is ``True``, the latest cutoff of a given obstype
57
+ will be used for all the occurences of this obstype.
58
+ """
59
+ # rh_cutoff_source may be a list
60
+ if isinstance(rh_cutoff_source, list):
61
+ if rh_cutoff_source:
62
+ rh_cutoff_source = rh_cutoff_source[0]
63
+ else:
64
+ ValueError("The resource handler's list is empty.")
65
+ # Get the CutoffDispenser
66
+ import vortex.tools.listings
67
+ assert vortex.tools.listings
68
+ if rh_cutoff_source.container.actualfmt == 'bdmbufr_listing':
69
+ c_disp_callback = functools.partial(
70
+ rh_cutoff_source.contents.data.cutoffs_dispenser,
71
+ fuse_per_obstype=fuse_per_obstype
72
+ )
73
+ else:
74
+ raise RuntimeError("Incompatible < {!s} > ressource handler".format(rh_cutoff_source))
75
+ # Fill the gaps in the original request
76
+ rh.contents.add_cutoff_info(c_disp_callback())
77
+ # Actually save the result to file
78
+ rh.save()
79
+
80
+
81
+ def _new_static_cutoff_dispencer(base_date, cutoffs_def):
82
+
83
+ def x_period(p):
84
+ try:
85
+ return Period(p)
86
+ except ValueError:
87
+ return Period(Time(p))
88
+
89
+ if not isinstance(base_date, Date):
90
+ base_date = Date(base_date)
91
+ if isinstance(cutoffs_def, collections.abc.Mapping):
92
+ cutoffs_def = {(k if isinstance(k, Period) else x_period(k)): v
93
+ for k, v in cutoffs_def.items()}
94
+ cutoffs = {base_date + k: v for k, v in cutoffs_def.items()}
95
+ c_disp = StaticCutoffDispenser(max(cutoffs.keys()), cutoffs)
96
+ else:
97
+ if not isinstance(cutoffs_def, Period):
98
+ cutoffs_def = x_period(cutoffs_def)
99
+ c_disp = StaticCutoffDispenser(base_date + cutoffs_def)
100
+ return c_disp
101
+
102
+
103
+ def insert_static_cutoffs(t, rh, base_date, cutoffs_def):
104
+ """Compute the cutoff from *cutoffs_def* and feed them into *rh*.
105
+
106
+ :param base_date: The current analysis time
107
+ :param cutoffs_def: The cutoff time represented as time offset with respect
108
+ to *base_date*. *cutoffs_defs* may be a single value or
109
+ a dictionary. If *cutoffs_def* is a dictionary, it
110
+ associates a cutoff with a list of `obstypes`.
111
+ """
112
+ # Fill the gaps in the original request
113
+ rh.contents.add_cutoff_info(_new_static_cutoff_dispencer(base_date, cutoffs_def))
114
+ # Actually save the result to files
115
+ rh.save()
116
+
117
+
118
+ def arpifs_obs_error_correl_legacy2oops(t, rh):
119
+ """Convert a constant file that contains observation errors correlations."""
120
+ if rh.resource.realkind != 'correlations':
121
+ raise ValueError('Incompatible resource: {!s}'.format(rh))
122
+ if rh.contents[0].startswith("SIGMAO"):
123
+ logger.warning("Non conversion is needed...")
124
+ else:
125
+ rh.contents[:0] = ["SIGMAO unused\n",
126
+ "1 1.2\n",
127
+ "CORRELATIONS\n"]
128
+ rh.save()
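
To close, a hedged sketch of the two shapes accepted by *cutoffs_def* in insert_static_cutoffs (values are made up; *t* stands for the current session ticket and *rh* for an already existing resource handler on an observation request file):

    from bronx.stdtypes.date import Date
    from vortex.nwp.util.hooks import insert_static_cutoffs

    base_date = Date('2024061300')

    # 1. A single offset: every obstype gets the same cutoff (base_date + 2h10).
    insert_static_cutoffs(t, rh, base_date, 'PT2H10M')

    # 2. A mapping offset -> obstypes: each listed obstype gets its own cutoff,
    #    computed as base_date + offset.
    insert_static_cutoffs(t, rh, base_date,
                          {'PT1H30M': ['conv', 'synop'],
                           'PT3H': ['iasi', 'atms']})
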