vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,81 @@
1
+ """
2
+ A collection of Tasks decorators (to add usual inputs/outputs to existing classes).
3
+ """
4
+
5
import functools

from bronx.fancies import loggers

from vortex import toolbox
8
+
9
+ #: No automatic export
10
+ __all__ = []
11
+
12
+ logger = loggers.getLogger(__name__)
13
+
14
+
15
def process_needs_lfi_stuff(*kargs, **kwargs):
    """
    Decorator that updates the task's ``process`` method in order to retrieve
    the things needed with the FA/LFI file format.

    It can be used bare (``@process_needs_lfi_stuff``) or parametrised with a
    ``cyclekey`` keyword that names the ``self.conf`` attribute holding the
    Genv/Uenv cycle (default: ``'cycle'``).

    Example (the self.conf.cycle Genv/Uenv cycle will be used)::

        @process_needs_lfi_stuff
        class MyTask(Task):

            def process(self)
                pass

    Example (the self.conf.arpege_cycle Genv/Uenv cycle will be used)::

        @process_needs_lfi_stuff(cyclekey='arpege_cycle')
        class MyOtherTask(Task):

            def process(self)
                pass

    """
    cyclekey = kwargs.pop('cyclekey', 'cycle')

    def decorate_process(cls):
        """Decorator for Task: get LFI stuff before calling process."""
        original_process = getattr(cls, 'process', None)
        if original_process is not None:
            # functools.wraps preserves __name__, __doc__, __module__ and
            # __wrapped__ (the previous version only copied __doc__)
            @functools.wraps(original_process)
            def process(self, *args, **kwargs):
                _get_lfi_stuff(self, cyclekey)
                # Propagate the original return value (most Task.process
                # implementations return None, but do not rely on it)
                return original_process(self, *args, **kwargs)
            cls.process = process
        return cls

    # Bare usage: the class itself is the single positional argument
    if kargs:
        return decorate_process(kargs[0])
    else:
        return decorate_process
54
+
55
+
56
def _get_lfi_stuff(self, cyclekey):
    """Fetch the FA/LFI companion tools (called from a decorated ``process``).

    During the 'early-fetch'/'fetch' steps, three toolbox inputs are issued
    against the Genv/Uenv cycle named by ``getattr(self.conf, cyclekey)``:
    the LFI scripts, the IO polling script and the lfitools binary.
    """
    # Only act during the fetch-like steps of the task
    if 'early-fetch' in self.steps or 'fetch' in self.steps:

        # The Genv/Uenv cycle to fetch the tools from
        actualcycle = getattr(self.conf, cyclekey)

        self.sh.title('Toolbox input tblfiscripts')
        toolbox.input(role='LFIScripts',
                      genv=actualcycle,
                      kind='lfiscripts',
                      local='usualtools/tools.lfi.tgz',
                      )
        self.sh.title('Toolbox input tbiopoll')
        toolbox.input(role='IOPoll',
                      format='unknown',
                      genv=actualcycle,
                      kind='iopoll',
                      language='perl',
                      local='usualtools/io_poll',
                      )
        self.sh.title('Toolbox input tblfitools')
        toolbox.input(role='LFITOOLS',
                      genv=actualcycle,
                      kind='lfitools',
                      local='usualtools/lfitools'
                      )
@@ -0,0 +1,591 @@
1
+ """
2
+ Usage of EPyGrAM package.
3
+
4
+ When loaded, this module discards any FootprintBase resource collected as a container
5
+ in EPyGrAM package.
6
+ """
7
+
8
+ import copy
9
+
10
+ import footprints
11
+ from bronx.fancies import loggers
12
+ from bronx.stdtypes import date
13
+ from bronx.stdtypes.date import Date, Period, Time
14
+ from bronx.syntax.externalcode import ExternalCodeImportChecker
15
+ from footprints import proxy as fpx
16
+ from vortex import sessions
17
+ from vortex.data.contents import MetaDataReader
18
+ from vortex.data.handlers import Handler
19
+
20
logger = loggers.getLogger(__name__)

# Register the epygram package (if available) and record its capabilities
epygram_checker = ExternalCodeImportChecker('epygram')
with epygram_checker as ec_register:
    import epygram  # @UnusedImport

    try:
        ec_register.update(version=epygram.__version__)
    except AttributeError:
        raise ImportError('Improper epygram module.')
    # Does this epygram build provide the FA and GRIB formats?
    hasFA = hasattr(epygram.formats, 'FA')
    ec_register.update(needFA=hasFA)
    hasGRIB = hasattr(epygram.formats, 'GRIB')
    ec_register.update(needGRIB=hasGRIB)
    logger.info('Epygram %s loaded (GRIB support=%s, FA support=%s).',
                epygram.__version__, hasGRIB, hasFA)

# numpy is needed by some of the hooks below
np_checker = ExternalCodeImportChecker('numpy')
with np_checker as npregister:
    import numpy as np
    npregister.update(version=np.__version__)

# Discard any FootprintBase resource collected as a container in epygram
footprints.proxy.containers.discard_package('epygram', verbose=False)

#: No automatic export
__all__ = []
53
+
54
+
55
+ def _sources_and_names_fixup(sources, names=None):
56
+ """Fix **sources** and **names** lists."""
57
+ # Prepare sources names
58
+ if not isinstance(sources, (list, tuple, set)):
59
+ sources = [sources, ]
60
+ sources = [source.upper() for source in sources]
61
+ # Prepare output names
62
+ if names is None:
63
+ names = sources
64
+ else:
65
+ if not isinstance(names, (list, tuple, set)):
66
+ names = [names, ]
67
+ names = [name.upper().replace(' ', '.') for name in names]
68
+ # Fill the sources list if necessary
69
+ if len(sources) == 1 and len(names) > 1:
70
+ sources *= len(names)
71
+ if len(sources) != len(names):
72
+ raise ValueError('Sizes of sources and names do not fit the requirements.')
73
+ return sources, names
74
+
75
+
76
@epygram_checker.disabled_if_unavailable
def clone_fields(datain, dataout, sources, names=None, value=None, pack=None, overwrite=False):
    """Clone any existing field ending with ``source`` to some new field.

    :param datain: input epygram resource (opened if necessary)
    :param dataout: output epygram resource (may be **datain** itself)
    :param sources: suffix(es) identifying the fields to clone
    :param names: target suffix(es) (defaults to **sources**)
    :param value: if not None, fill the cloned fields with this constant
    :param pack: optional compression overrides for the written fields
    :param overwrite: allow overwriting pre-existing output fields
    :return: the number of fields actually added
    """
    datain.open()
    sources, names = _sources_and_names_fixup(sources, names)

    tablein = datain.listfields()
    tableout = dataout.listfields()
    addedfields = 0

    # Look for the input fields,
    for source, name in zip(sources, names):
        fx = None
        comprpack = None
        for fieldname in [x for x in sorted(tablein) if x.endswith(source)]:
            newfield = fieldname.replace(source, '') + name
            if not overwrite and newfield in tableout:
                logger.warning('Field <%s> already in output file', newfield)
            else:
                # If the values are to be overwritten : do not read the input
                # field several times...
                if value is None or fx is None or comprpack is None:
                    fx = datain.readfield(fieldname)
                    # fieldscompression may not know this field -> can be None
                    comprpack = datain.fieldscompression.get(fieldname)
                    if pack is not None:
                        # Work on a copy (do not mutate the cached dict) and
                        # tolerate a missing input compression
                        comprpack = dict(comprpack or {})
                        comprpack.update(pack)
                fy = fx.clone({x: newfield for x in fx.fid.keys()})
                if value is not None:
                    fy.data.fill(value)
                # If fy is re-used, change the field names
                if value is not None:
                    for fidk in fx.fid.keys():
                        fy.fid[fidk] = newfield
                # On the first append, re-open the output file in append mode
                if addedfields == 0:
                    dataout.close()
                    dataout.open(openmode='a')
                # Actually add the new field (lazy % formatting for the logger)
                logger.info('Add field %s pack=%s', fy.fid, comprpack)
                dataout.writefield(fy, compression=comprpack)
                addedfields += 1

    if addedfields:
        dataout.close()
    datain.close()
    return addedfields
122
+
123
+
124
def epy_env_prepare(t):
    """Return a clone of the session environment suited for epygram calls.

    The clone forces single-threaded OpenMP (unless already set), silences
    DrHook, and drops any GRIB/ecCodes path customisations.
    """
    env = t.sh.env.clone()
    env.verbose(True, t.sh)
    # Do not spawn OpenMP threads unless explicitly requested by the caller
    if env.OMP_NUM_THREADS is None:
        env.OMP_NUM_THREADS = 1
    env.update(
        LFI_HNDL_SPEC=':1',
        DR_HOOK_SILENT=1,
        DR_HOOK_NOT_MPI=1,
    )
    # Clean trash...
    for trash in ('GRIB_SAMPLES_PATH', 'GRIB_DEFINITION_PATH',
                  'ECCODES_SAMPLES_PATH', 'ECCODES_DEFINITION_PATH'):
        delattr(env, trash)
    return env
140
+
141
+
142
@epygram_checker.disabled_if_unavailable
def addfield(t, rh, fieldsource, fieldtarget, constvalue, pack=None):
    """Provider hook for adding a field through cloning."""
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning('Try to add field on a missing resource <%s>',
                       rh.container.localpath())
        return
    with epy_env_prepare(t):
        # Clone within the same file, filling the new field with constvalue
        clone_fields(rh.contents.data, rh.contents.data,
                     fieldsource, names=fieldtarget,
                     value=constvalue, pack=pack)
153
+
154
+
155
@epygram_checker.disabled_if_unavailable
def copyfield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
    """Provider hook for copying fields between FA files (but do not overwrite existing fields)."""
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning('Try to copy field on a missing resource <%s>',
                       rh.container.localpath())
        return
    with epy_env_prepare(t):
        clone_fields(rhsource.contents.data, rh.contents.data,
                     fieldsource, fieldtarget, pack=pack)
165
+
166
+
167
@epygram_checker.disabled_if_unavailable
def overwritefield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
    """Provider hook for copying fields between FA files (overwrite existing fields)."""
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning('Try to copy field on a missing resource <%s>',
                       rh.container.localpath())
        return
    with epy_env_prepare(t):
        clone_fields(rhsource.contents.data, rh.contents.data,
                     fieldsource, fieldtarget, overwrite=True, pack=pack)
177
+
178
+
179
@np_checker.disabled_if_unavailable
@epygram_checker.disabled_if_unavailable
def updatefield(t, rh, rhsource, fieldsource, fieldtarget, masktype, *kargs):
    """
    Provider hook for updating fields in the **rh** FA files.

    The content (not the field itself) of **fieldsource** will be copied to
    **fieldtarget**. Some kind of masking is performed. Depending on
    **masktype**, only a subset of the field content might be updated.
    **masktype** can take the following values:

    * ``none``: no mask, the whole content is copied;
    * ``np.ma.masked``: masked values are ignored during the copy.

    :param t: the session ticket (used to set up the epygram environment)
    :param rh: resource handler of the FA file to update
    :param rhsource: resource handler of the FA file the data is read from
    :param fieldsource: suffix(es) of the fields to read in **rhsource**
    :param fieldtarget: suffix(es) of the fields to update in **rh**
    :param masktype: masking strategy (see above)
    :return: the number of updated fields (None if **rh** is missing)

    NOTE(review): extra positional arguments (*kargs) are accepted but never
    used — presumably kept for hook-signature compatibility; confirm.
    """
    if rh.container.exists():
        with epy_env_prepare(t):
            # Various initialisations
            fieldsource, fieldtarget = _sources_and_names_fixup(fieldsource, fieldtarget)
            datain = rhsource.contents.data
            datain.open()
            dataout = rh.contents.data
            # Re-open the output resource in append mode
            dataout.close()
            dataout.open(openmode='a')
            tablein = datain.listfields()
            tableout = dataout.listfields()
            updatedfields = 0

            # Function that creates the subset of elements to update
            if masktype == 'none':

                def subsetfunc(epyobj):
                    # Whole-array selection
                    return Ellipsis

                elif masktype == 'np.ma.masked':

                def subsetfunc(epyobj):
                    # Only copy where the source data is NOT masked
                    if np.ma.is_masked(epyobj.data):
                        return np.logical_not(epyobj.data.mask)
                    else:
                        return Ellipsis

            else:
                raise ValueError('Unsupported masktype in the updatefield hook.')

            # Look for the input fields and update them
            for source, target in zip(fieldsource, fieldtarget):
                for fieldname in [x for x in sorted(tablein) if x.endswith(source)]:
                    targetfield = fieldname.replace(source, '') + target
                    if targetfield in tableout:
                        # Copy only the selected subset of the source data
                        fx = datain.readfield(fieldname)
                        fy = dataout.readfield(targetfield)
                        subset = subsetfunc(fx)
                        fy.data[subset] = fx.data[subset]
                        dataout.writefield(fy)
                        updatedfields += 1
                    else:
                        logger.warning('Field <%s> is missing in the output file', targetfield)

            dataout.close()
            datain.close()
            return updatedfields
    else:
        logger.warning('Try to copy field on a missing resource <%s>',
                       rh.container.localpath())
244
+
245
+
246
class EpygramMetadataReader(MetaDataReader):
    """Abstract MetaDataReader for file formats handled by epygram."""

    _abstract = True
    _footprint = dict(
        info = 'Abstract MetaDataReader for formats handled by epygram',
    )

    def _do_delayed_init(self):
        """Open the underlying epygram resource and extract date/term metadata."""
        epyf = self._content_in
        if not epyf.isopen:
            epyf.open()
        date_epy, term_epy = self._process_epy(epyf)
        self._datahide = {
            'date': Date(date_epy) if date_epy else date_epy,
            # NOTE(review): term_epy is assumed to be timedelta-like (it must
            # provide total_seconds) — confirm for every subclass
            'term': Time(hour=int(term_epy.total_seconds() / 3600),
                         minute=int(term_epy.total_seconds() / 60) % 60)
        }

    def _process_epy(self, epyf):
        """Abstract method that does the actual processing using epygram."""
        raise NotImplementedError("Abstract method")
267
+
268
+
269
@epygram_checker.disabled_if_unavailable
class FaMetadataReader(EpygramMetadataReader):
    """MetaDataReader for FA files (relies on epygram's FA support)."""

    _footprint = dict(
        info = 'MetaDataReader for the FA file format',
        attr = dict(
            format = dict(
                values = ('FA',)
            )
        )
    )

    def _process_epy(self, epyf):
        """Return the (basis date, term) pair read from the FA validity."""
        # Just call the epygram function !
        with epy_env_prepare(sessions.current()):
            return epyf.validity.getbasis(), epyf.validity.term()
285
+
286
+
287
@epygram_checker.disabled_if_unavailable(version='1.0.0')
class GribMetadataReader(EpygramMetadataReader):
    """MetaDataReader for GRIB files (relies on epygram's GRIB support)."""

    _footprint = dict(
        info = 'MetaDataReader for the GRIB file format',
        attr = dict(
            format = dict(
                values = ('GRIB',)
            )
        )
    )

    def _process_epy(self, epyf):
        """Return the unique (basis date, term) pair found in the GRIB file.

        All messages are scanned; if several date/term pairs coexist an error
        is logged and an arbitrary one is returned. An empty file yields
        ``(None, 0)``.
        """
        # Loop over the fields and check the unicity of date/term
        bundle = set()
        with epy_env_prepare(sessions.current()):
            epyfld = epyf.iter_fields(getdata=False)
            while epyfld:
                bundle.add((epyfld.validity.getbasis(), epyfld.validity.term()))
                epyfld = epyf.iter_fields(getdata=False)
        if len(bundle) > 1:
            # Typos fixed in the log messages below
            logger.error("The GRIB file contains fields with different dates and terms.")
        if len(bundle) == 0:
            logger.warning("The GRIB file doesn't contain any fields")
            return None, 0
        else:
            return bundle.pop()
313
+
314
+
315
@epygram_checker.disabled_if_unavailable(version='1.2.11')
def mk_pgdfa923_from_pgdlfi(t, rh_pgdlfi, nam923blocks,
                            outname=None,
                            fieldslist=None,
                            field_prefix='S1D_',
                            pack=None):
    """
    Hook to convert fields from a PGD.lfi to well-formatted for clim923 FA format.

    :param t: session ticket
    :param rh_pgdlfi: resource handler of source PGD.lfi to process
    :param nam923blocks: namelist blocks of geometry for clim923
    :param outname: output filename (defaults to the source path + '.fa923')
    :param fieldslist: list of fields to convert
    :param field_prefix: prefix to add to field name in FA
    :param pack: packing for fields to write
    """
    dm = epygram.geometries.domain_making

    def sfxlfi2fa_field(fld, geom):
        # Re-tag the LFI field with a FA fid and drop the one-point LFI border
        fldout = fpx.fields.almost_clone(fld,
                                         geometry=geom,
                                         fid={'FA': field_prefix + fld.fid['LFI']})
        fldout.setdata(fld.data[1:-1, 1:-1])
        return fldout

    if fieldslist is None:
        fieldslist = ['ZS', 'COVER001', 'COVER002']
    if pack is None:
        # Packing spec handed straight to epygram's writefield
        pack = {'KNGRIB': -1}
    if outname is None:
        outname = rh_pgdlfi.container.abspath + '.fa923'
    if not t.sh.path.exists(outname):
        with epy_env_prepare(t):
            pgdin = fpx.dataformats.almost_clone(rh_pgdlfi.contents.data,
                                                 true3d=True)
            geom, spgeom = dm.build.build_geom_from_e923nam(nam923blocks)  # TODO: Arpege case
            validity = epygram.base.FieldValidity(date_time=Date(1994, 5, 31, 0),  # Date of birth of ALADIN
                                                  term=Period(0))
            pgdout = epygram.formats.resource(filename=outname,
                                              openmode='w',
                                              fmt='FA',
                                              processtype='initialization',
                                              validity=validity,
                                              geometry=geom,
                                              spectral_geometry=spgeom)
            for f in fieldslist:
                fldout = sfxlfi2fa_field(pgdin.readfield(f), geom)
                pgdout.writefield(fldout, compression=pack)
    else:
        logger.warning('Try to create an already existing resource <%s>',
                       outname)
367
+
368
+
369
@epygram_checker.disabled_if_unavailable(version='1.0.0')
def empty_fa(t, rh, empty_name):
    """
    Create an empty FA file with fieldname **empty_name**,
    creating header from given existing FA resource handler **rh**.

    :param t: the session ticket
    :param rh: resource handler of an existing FA file (used as template)
    :param empty_name: filename of the new, empty FA file
    :return: the empty epygram resource, closed
    :raises OSError: if **rh** does not exist on disk
    :raises ValueError: if **empty_name** already exists
    """
    if not rh.container.exists():
        raise OSError('Try to copy header from a missing resource <{!s}>'.format(rh.container.localpath()))
    with epy_env_prepare(t):
        rh.contents.data.open()
        # Explicit check instead of an assert: asserts vanish under python -O
        if t.sh.path.exists(empty_name):
            raise ValueError('Empty target filename already exists: {}'.format(empty_name))
        e = epygram.formats.resource(empty_name, 'w', fmt='FA',
                                     headername=rh.contents.data.headername,
                                     validity=rh.contents.data.validity,
                                     processtype=rh.contents.data.processtype,
                                     cdiden=rh.contents.cdiden)
        e.close()
        rh.contents.data.close()
        return e
392
+
393
+
394
@epygram_checker.disabled_if_unavailable(version='1.0.0')
def geopotentiel2zs(t, rh, rhsource, pack=None):
    """Copy surface geopotential from clim to zs in PGD."""
    from bronx.meteo.constants import g0
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning('Try to copy field on a missing resource <%s>',
                       rh.container.localpath())
        return
    with epy_env_prepare(t):
        # Convert the geopotential to a height and rename it for SURFEX
        zsfield = rhsource.contents.data.readfield('SURFGEOPOTENTIEL')
        zsfield.operation('/', g0)
        zsfield.fid['FA'] = 'SFX.ZS'
        target = rh.contents.data
        target.close()
        target.open(openmode='a')
        target.writefield(zsfield, compression=pack)
409
+
410
+
411
@epygram_checker.disabled_if_unavailable(version='1.3.4')
def add_poles_to_GLOB_file(filename):
    """
    DEPRECATED: please use add_poles_to_reglonlat_file instead
    Add poles to a GLOB* regular FA Lon/Lat file that do not contain them.

    :param filename: path of the input FA file; the completed file is written
        alongside it with a ``+poles`` suffix.
    """
    import numpy
    rin = epygram.formats.resource(filename, 'r')
    filename_out = filename + '+poles'
    rout = epygram.formats.resource(filename_out, 'w', fmt=rin.format,
                                    validity=epygram.base.FieldValidity(
                                        date_time=date.today(),
                                        term=date.Period(0, 0, 0)),
                                    processtype=rin.processtype,
                                    cdiden=rin.cdiden)
    # Refuse to run if the top row already sits on the pole
    assert rin.geometry.gimme_corners_ll()['ul'][1] < 90., \
        'This file already contains poles.'
    for f in rin.listfields():
        if f == 'SPECSURFGEOPOTEN':
            # Spectral orography is skipped (not a H2D lon/lat field)
            continue
        fld = rin.readfield(f)
        write_args = {}
        if isinstance(fld, epygram.fields.H2DField):
            # create new geometry: two extra latitude rows (one per pole)
            newdims = copy.deepcopy(fld.geometry.dimensions)
            newdims['Y'] += 2
            newgrid = copy.deepcopy(fld.geometry.grid)
            newgrid['input_position'] = (newgrid['input_position'][0],
                                         newgrid['input_position'][1] + 1)
            newgeom = fpx.geometrys.almost_clone(fld.geometry,
                                                 dimensions=newdims,
                                                 grid=newgrid)
            # compute poles data value as mean of last latitude circle
            newdata = numpy.zeros((newdims['Y'], newdims['X']))
            newdata[1:-1, :] = fld.data[...]
            newdata[0, :] = newdata[1, :].mean()
            newdata[-1, :] = newdata[-2, :].mean()
            # clone field with new geometry
            fld = fpx.fields.almost_clone(fld, geometry=newgeom)
            fld.data = newdata
            # get initial compression
            write_args = dict(compression=rin.fieldscompression[fld.fid['FA']])
        rout.writefield(fld, **write_args)
454
+
455
+
456
@epygram_checker.disabled_if_unavailable(version='1.3.4')
def add_poles_to_reglonlat_file(filename):
    """
    Add pole(s) to a regular FA Lon/Lat file that do not contain them.

    :param filename: path of the input FA file; the completed file is written
        alongside it with a ``+poles`` suffix.
    :raises epygram.epygramError: if neither pole can (or needs to) be added.
    """
    import numpy
    rin = epygram.formats.resource(filename, 'r')
    filename_out = filename + '+poles'
    rout = epygram.formats.resource(filename_out, 'w', fmt=rin.format,
                                    validity=epygram.base.FieldValidity(
                                        date_time=rin.validity.get(),
                                        term=date.Period(0, 0, 0)
                                    ),
                                    processtype=rin.processtype,
                                    cdiden=rin.cdiden)
    assert rin.geometry.name == 'regular_lonlat', \
        "This file's geometry is not regular lon/lat, cannot add pole(s)."
    # determine what is to be done
    resolution = rin.geometry.grid['Y_resolution'].get('degrees')
    latmin = rin.geometry.gimme_corners_ll()['ll'][1]
    latmax = rin.geometry.gimme_corners_ll()['ul'][1]
    # south: add only if the border is exactly one grid step away from the pole
    south = False
    if abs(-90. - latmin) <= epygram.config.epsilon:
        logger.info("This file already contains south pole")
    elif abs((-90. + resolution) - latmin) <= epygram.config.epsilon:
        south = True
    else:
        logger.info("This file south border is too far from south pole to add it.")
    # north: same criterion
    north = False
    if abs(90. - latmax) <= epygram.config.epsilon:
        logger.info("This file already contains north pole")
    elif abs((90. - resolution) - latmax) <= epygram.config.epsilon:
        north = True
    else:
        logger.info("This file north border is too far from north pole to add it.")
    if not north and not south:
        raise epygram.epygramError("Nothing to do")
    # prepare new geom
    geom = rin.readfield('SURFGEOPOTENTIEL').geometry
    newdims = copy.deepcopy(geom.dimensions)
    newgrid = copy.deepcopy(geom.grid)
    if north and south:
        newdims['Y'] += 2
    else:
        newdims['Y'] += 1
    if south:
        # shift the grid origin one row southwards, onto the pole
        newgrid['input_lon'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][0], 'degrees')
        newgrid['input_lat'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][1] - resolution, 'degrees')
        newgrid['input_position'] = (0, 0)
    else:  # north only: 0,0 has not changed
        newgrid['input_lon'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][0], 'degrees')
        newgrid['input_lat'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][1], 'degrees')
        newgrid['input_position'] = (0, 0)
    newgeom = fpx.geometrys.almost_clone(geom,
                                         dimensions=newdims,
                                         grid=newgrid)
    # loop on fields
    for f in rin.listfields():
        if f == 'SPECSURFGEOPOTEN':
            continue  # meaningless in lonlat clims
        fld = rin.readfield(f)
        write_args = {}
        if isinstance(fld, epygram.fields.H2DField):
            # compute poles data value as mean of last latitude circle
            newdata = numpy.zeros((newdims['Y'], newdims['X']))
            if south and north:
                newdata[1:-1, :] = fld.data[...]
                newdata[0, :] = newdata[1, :].mean()
                newdata[-1, :] = newdata[-2, :].mean()
            elif south:
                newdata[1:, :] = fld.data[...]
                newdata[0, :] = newdata[1, :].mean()
            elif north:
                newdata[:-1, :] = fld.data[...]
                newdata[-1, :] = newdata[-2, :].mean()
            # clone field with new geometry
            fld = fpx.fields.almost_clone(fld, geometry=newgeom)
            fld.data = newdata
            # get initial compression
            write_args = dict(compression=rin.fieldscompression[fld.fid['FA']])
        rout.writefield(fld, **write_args)
539
+
540
+
541
@epygram_checker.disabled_if_unavailable()
def split_errgrib_on_shortname(t, rh):
    """Split a Background Error GRIB file into pieces (based on the GRIB shortName).

    :param t: the session ticket
    :param rh: resource handler of the input ``bgstderr`` GRIB file
    :raises ValueError: if **rh** is not a suitable resource
    :raises OSError: if a GRIB message exposes no usable shortName
    """
    # Sanity checks
    if rh.resource.realkind != 'bgstderr' or getattr(rh.resource, 'variable', None) is not None:
        raise ValueError('Incompatible resource: {!s}'.format(rh))

    def create_section(sn):
        """Create a new section object for a given shortName (**sn**)."""
        sn_r = fpx.resource(variable=sn, ** rh.resource.footprint_as_shallow_dict())
        sn_p = fpx.provider(magic='magic:///')
        sn_c = fpx.container(filename=rh.container.localpath() + sn,
                             format='grib', mode='ab+')
        secs = t.context.sequence.input(rh=Handler(dict(resource=sn_r,
                                                        provider=sn_p,
                                                        container=sn_c)),
                                        role='BackgroundStdError')
        secs[0].get()
        return secs[0]

    # Iterate over the GRIB messages
    gribs = rh.contents.data
    sections = dict()
    try:
        grb = gribs.iter_messages(headers_only=False)
        while grb is not None:
            # Find the ShortName (sn is pre-set so that an empty fid
            # raises the intended OSError instead of a NameError)
            sn = None
            fid = grb.genfid()
            for k in sorted(fid.keys()):
                sn = fid[k].get('shortName', None)
                if sn is not None:
                    break
            if sn is None:
                raise OSError('No ShortName was found')
            # Set up the appropriate section
            if sn not in sections:
                sections[sn] = create_section(sn)
            # Write the field
            grb.write_to_file(sections[sn].rh.container.iodesc())
            # Next field (if any)
            grb = gribs.iter_messages(headers_only=False)
    finally:
        # Always close the output containers, even on error
        for sec in sections.values():
            sec.rh.container.close()

    # Summary
    if sections:
        logger.info('%d new sections created. See details below:', len(sections))
        for i, sec in enumerate(sorted(sections.values(),
                                       key=lambda s: s.rh.resource.variable)):
            sec.rh.quickview(nb=i)