vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,697 @@
1
+ """
2
+ Usage of EPyGrAM package.
3
+
4
+ When loaded, this module discards any FootprintBase resource collected as a container
5
+ in EPyGrAM package.
6
+ """
7
+
8
+ import copy
9
+
10
+ import footprints
11
+ from bronx.fancies import loggers
12
+ from bronx.stdtypes import date
13
+ from bronx.stdtypes.date import Date, Period, Time
14
+ from bronx.syntax.externalcode import ExternalCodeImportChecker
15
+ from footprints import proxy as fpx
16
+ from vortex import sessions
17
+ from vortex.data.contents import MetaDataReader
18
+ from vortex.data.handlers import Handler
19
+
20
+ logger = loggers.getLogger(__name__)
21
+
22
# Probe the availability of the epygram package and record its capabilities
# (version, FA support, GRIB support) in the checker's register.
epygram_checker = ExternalCodeImportChecker("epygram")
with epygram_checker as ec_register:
    import epygram  # @UnusedImport

    try:
        ec_register.update(version=epygram.__version__)
    except AttributeError:
        # Fixed typo in the original message ("eypgram" -> "epygram")
        raise ImportError("Improper epygram module.")
    try:
        u_unused = epygram.formats.FA
        hasFA = True
    except AttributeError:
        hasFA = False
    ec_register.update(needFA=hasFA)
    try:
        u_unused = epygram.formats.GRIB
        hasGRIB = True
    except AttributeError:
        hasGRIB = False
    ec_register.update(needGRIB=hasGRIB)
    logger.info(
        "Epygram %s loaded (GRIB support=%s, FA support=%s).",
        epygram.__version__,
        hasGRIB,
        hasFA,
    )

# Probe the availability of numpy (used by the updatefield hook)
np_checker = ExternalCodeImportChecker("numpy")
with np_checker as npregister:
    import numpy as np

    npregister.update(version=np.__version__)

# As stated in the module docstring: discard any FootprintBase resource
# collected as a container in the epygram package.
footprints.proxy.containers.discard_package("epygram", verbose=False)

__all__ = []
58
+
59
+
60
+ def _sources_and_names_fixup(sources, names=None):
61
+ """Fix **sources** and **names** lists."""
62
+ # Prepare sources names
63
+ if not isinstance(sources, (list, tuple, set)):
64
+ sources = [
65
+ sources,
66
+ ]
67
+ sources = [source.upper() for source in sources]
68
+ # Prepare output names
69
+ if names is None:
70
+ names = sources
71
+ else:
72
+ if not isinstance(names, (list, tuple, set)):
73
+ names = [
74
+ names,
75
+ ]
76
+ names = [name.upper().replace(" ", ".") for name in names]
77
+ # Fill the sources list if necessary
78
+ if len(sources) == 1 and len(names) > 1:
79
+ sources *= len(names)
80
+ if len(sources) != len(names):
81
+ raise ValueError(
82
+ "Sizes of sources and names do not fit the requirements."
83
+ )
84
+ return sources, names
85
+
86
+
87
@epygram_checker.disabled_if_unavailable
def clone_fields(
    datain,
    dataout,
    sources,
    names=None,
    value=None,
    pack=None,
    overwrite=False,
):
    """Clone any existing fields ending with ``source`` to some new field.

    :param datain: input epygram resource (opened here)
    :param dataout: output epygram resource; may be the same object as
        **datain** (it is re-opened in append mode on the first write)
    :param sources: field-name suffix(es) to look for in **datain**
    :param names: suffix(es) of the cloned fields (defaults to **sources**)
    :param value: when given, the cloned fields are filled with this
        constant value instead of keeping the original data
    :param pack: optional compression overrides merged into the input
        field's packing
    :param overwrite: allow overwriting of pre-existing output fields
    :return: the number of fields actually added
    """
    datain.open()
    sources, names = _sources_and_names_fixup(sources, names)

    tablein = datain.listfields()
    tableout = dataout.listfields()
    addedfields = 0

    # Look for the input fields,
    for source, name in zip(sources, names):
        # fx/comprpack are lazily read and re-used across iterations
        # whenever a constant value is requested
        fx = None
        comprpack = None
        for fieldname in [x for x in sorted(tablein) if x.endswith(source)]:
            newfield = fieldname.replace(source, "") + name
            if not overwrite and newfield in tableout:
                logger.warning("Field <%s> already in output file", newfield)
            else:
                # If the values are to be overwritten : do not read the input
                # field several times...
                if value is None or fx is None or comprpack is None:
                    fx = datain.readfield(fieldname)
                    # NOTE(review): fieldscompression.get() may return None
                    # for an unknown field, which would break update() below
                    # when pack is given — confirm upstream guarantees.
                    comprpack = datain.fieldscompression.get(fieldname)
                    if pack is not None:
                        comprpack.update(pack)
                    fy = fx.clone({x: newfield for x in fx.fid.keys()})
                    if value is not None:
                        fy.data.fill(value)
                # If fy is re-used, change the field names
                if value is not None:
                    for fidk in fx.fid.keys():
                        fy.fid[fidk] = newfield
                # On the first append, open the output file
                if addedfields == 0:
                    dataout.close()
                    dataout.open(openmode="a")
                # Actually add the new field
                logger.info("Add field {} pack={}".format(fy.fid, comprpack))
                dataout.writefield(fy, compression=comprpack)
                addedfields += 1

    if addedfields:
        dataout.close()
    datain.close()
    return addedfields
141
+
142
+
143
def epy_env_prepare(t):
    """Return a cloned environment suitable for epygram calls.

    The clone is verbose, enforces serial DrHook/LFI settings and drops
    any GRIB/ecCodes path overrides inherited from the job environment.

    :param t: the session ticket providing the system shell (``t.sh``)
    """
    env = t.sh.env.clone()
    env.verbose(True, t.sh)
    # Default to a single OpenMP thread unless the caller already chose
    if env.OMP_NUM_THREADS is None:
        env.OMP_NUM_THREADS = 1
    env.update(
        LFI_HNDL_SPEC=":1",
        DR_HOOK_SILENT=1,
        DR_HOOK_NOT_MPI=1,
    )
    # Clean trash: remove any pre-set GRIB/ecCodes definition paths
    for trash in (
        "GRIB_SAMPLES_PATH",
        "GRIB_DEFINITION_PATH",
        "ECCODES_SAMPLES_PATH",
        "ECCODES_DEFINITION_PATH",
    ):
        delattr(env, trash)
    return env
159
+
160
+
161
@epygram_checker.disabled_if_unavailable
def addfield(t, rh, fieldsource, fieldtarget, constvalue, pack=None):
    """Provider hook for adding a field through cloning.

    The field(s) matching **fieldsource** in **rh** are cloned in place
    under the **fieldtarget** name(s), filled with **constvalue**.
    """
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning(
            "Try to add field on a missing resource <%s>",
            rh.container.localpath(),
        )
        return
    with epy_env_prepare(t):
        clone_fields(
            rh.contents.data,
            rh.contents.data,
            fieldsource,
            names=fieldtarget,
            value=constvalue,
            pack=pack,
        )
179
+
180
+
181
@epygram_checker.disabled_if_unavailable
def copyfield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
    """Provider hook for copying fields between FA files (but do not overwrite existing fields)."""
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning(
            "Try to copy field on a missing resource <%s>",
            rh.container.localpath(),
        )
        return
    with epy_env_prepare(t):
        clone_fields(
            rhsource.contents.data,
            rh.contents.data,
            fieldsource,
            fieldtarget,
            pack=pack,
        )
198
+
199
+
200
@epygram_checker.disabled_if_unavailable
def overwritefield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
    """Provider hook for copying fields between FA files (overwrite existing fields)."""
    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning(
            "Try to copy field on a missing resource <%s>",
            rh.container.localpath(),
        )
        return
    with epy_env_prepare(t):
        clone_fields(
            rhsource.contents.data,
            rh.contents.data,
            fieldsource,
            fieldtarget,
            overwrite=True,
            pack=pack,
        )
218
+
219
+
220
@np_checker.disabled_if_unavailable
@epygram_checker.disabled_if_unavailable
def updatefield(t, rh, rhsource, fieldsource, fieldtarget, masktype, *kargs):
    """
    Provider hook for updating fields in the **rh** FA files.

    The content (not the field itself) of **fieldsource** will be copied to
    **fieldtarget**. Some kind of masking is performed. Depending on
    **masktype**, only a subset of the field content might be updated.
    **masktype** can take the following values:

    * ``none``: no mask, the whole content is copied;
    * ``np.ma.masked``: masked values are ignored during the copy.

    :return: the number of updated fields (``None`` when the target
        resource is missing)
    """
    # NOTE(review): *kargs is accepted but never used here — presumably
    # kept for hook-signature compatibility; confirm with callers.
    if rh.container.exists():
        with epy_env_prepare(t):
            # Various initialisations
            fieldsource, fieldtarget = _sources_and_names_fixup(
                fieldsource, fieldtarget
            )
            datain = rhsource.contents.data
            datain.open()
            dataout = rh.contents.data
            # Re-open the output in append mode so fields can be rewritten
            dataout.close()
            dataout.open(openmode="a")
            tablein = datain.listfields()
            tableout = dataout.listfields()
            updatedfields = 0

            # Function that creates the subset of elements to update
            if masktype == "none":

                def subsetfunc(epyobj):
                    # Ellipsis selects the whole array
                    return Ellipsis

            elif masktype == "np.ma.masked":

                def subsetfunc(epyobj):
                    # Only copy where the source data is NOT masked
                    if np.ma.is_masked(epyobj.data):
                        return np.logical_not(epyobj.data.mask)
                    else:
                        return Ellipsis

            else:
                raise ValueError(
                    "Unsupported masktype in the updatefield hook."
                )

            # Look for the input fields and update them
            for source, target in zip(fieldsource, fieldtarget):
                for fieldname in [
                    x for x in sorted(tablein) if x.endswith(source)
                ]:
                    targetfield = fieldname.replace(source, "") + target
                    if targetfield in tableout:
                        fx = datain.readfield(fieldname)
                        fy = dataout.readfield(targetfield)
                        # Copy only the selected subset of the data
                        subset = subsetfunc(fx)
                        fy.data[subset] = fx.data[subset]
                        dataout.writefield(fy)
                        updatedfields += 1
                    else:
                        logger.warning(
                            "Field <%s> is missing in the output file",
                            targetfield,
                        )

            dataout.close()
            datain.close()
            return updatedfields
    else:
        logger.warning(
            "Try to copy field on a missing resource <%s>",
            rh.container.localpath(),
        )
296
+
297
+
298
class EpygramMetadataReader(MetaDataReader):
    """Abstract metadata reader relying on epygram to extract date/term."""

    # Abstract footprint class: concrete readers must implement _process_epy
    _abstract = True
    _footprint = dict(
        info="Abstract MetaDataReader for formats handled by epygram",
    )

    def _do_delayed_init(self):
        """Open the epygram content and cache its date and term."""
        epyf = self._content_in
        if not epyf.isopen:
            epyf.open()
        date_epy, term_epy = self._process_epy(epyf)
        # NOTE(review): term_epy is assumed to be timedelta-like (it must
        # provide total_seconds()); GribMetadataReader may return a plain
        # 0 for an empty file — confirm this path is never hit here.
        self._datahide = {
            "date": Date(date_epy) if date_epy else date_epy,
            "term": Time(
                hour=int(term_epy.total_seconds() / 3600),
                minute=int(term_epy.total_seconds() / 60) % 60,
            ),
        }

    def _process_epy(self, epyf):
        """Abstract method that does the actual processing using epygram."""
        raise NotImplementedError("Abstract method")
320
+
321
+
322
@epygram_checker.disabled_if_unavailable
class FaMetadataReader(EpygramMetadataReader):
    """Metadata reader for FA files (delegates to epygram's validity)."""

    _footprint = dict(
        info="MetaDataReader for the FA file format",
        attr=dict(format=dict(values=("FA",))),
    )

    def _process_epy(self, epyf):
        # Just call the epygram function !
        with epy_env_prepare(sessions.current()):
            return epyf.validity.getbasis(), epyf.validity.term()
333
+
334
+
335
@epygram_checker.disabled_if_unavailable(version="1.0.0")
class GribMetadataReader(EpygramMetadataReader):
    """Metadata reader for GRIB files (iterates over all messages)."""

    _footprint = dict(
        info="MetaDataReader for the GRIB file format",
        attr=dict(format=dict(values=("GRIB",))),
    )

    def _process_epy(self, epyf):
        """Collect the (basis, term) pair of every field and check unicity.

        :return: a single ``(basis, term)`` tuple, or ``(None, 0)`` when
            the file holds no field at all.
        """
        # Loop over the fields and check the unicity of date/term
        bundle = set()
        with epy_env_prepare(sessions.current()):
            epyfld = epyf.iter_fields(getdata=False)
            while epyfld:
                bundle.add(
                    (epyfld.validity.getbasis(), epyfld.validity.term())
                )
                epyfld = epyf.iter_fields(getdata=False)
        if len(bundle) > 1:
            # Fixed log message: "fileds" -> "fields"
            logger.error(
                "The GRIB file contains fields with different date and terms."
            )
        if len(bundle) == 0:
            # Fixed grammar: "doesn't contains" -> "doesn't contain"
            logger.warning("The GRIB file doesn't contain any fields")
            return None, 0
        else:
            return bundle.pop()
361
+
362
+
363
@epygram_checker.disabled_if_unavailable(version="1.2.11")
def mk_pgdfa923_from_pgdlfi(
    t,
    rh_pgdlfi,
    nam923blocks,
    outname=None,
    fieldslist=None,
    field_prefix="S1D_",
    pack=None,
):
    """
    Hook to convert fields from a PGD.lfi to well-formatted for clim923 FA format.

    :param t: session ticket
    :param rh_pgdlfi: resource handler of source PGD.lfi to process
    :param nam923blocks: namelist blocks of geometry for clim923
    :param outname: output filename (defaults to the source path plus
        a ``.fa923`` suffix)
    :param fieldslist: list of fields to convert (defaults to
        ``["ZS", "COVER001", "COVER002"]``)
    :param field_prefix: prefix to add to field name in FA
    :param pack: packing for fields to write (defaults to ``{"KNGRIB": -1}``)
    """
    dm = epygram.geometries.domain_making

    def sfxlfi2fa_field(fld, geom):
        # Clone the LFI field onto the FA geometry with a renamed fid
        fldout = fpx.fields.almost_clone(
            fld, geometry=geom, fid={"FA": field_prefix + fld.fid["LFI"]}
        )
        # Drop the one-point border of the LFI data on each side
        fldout.setdata(fld.data[1:-1, 1:-1])
        return fldout

    if fieldslist is None:
        fieldslist = ["ZS", "COVER001", "COVER002"]
    if pack is None:
        pack = {"KNGRIB": -1}
    if outname is None:
        outname = rh_pgdlfi.container.abspath + ".fa923"
    if not t.sh.path.exists(outname):
        with epy_env_prepare(t):
            pgdin = fpx.dataformats.almost_clone(
                rh_pgdlfi.contents.data, true3d=True
            )
            geom, spgeom = dm.build.build_geom_from_e923nam(
                nam923blocks
            )  # TODO: Arpege case
            validity = epygram.base.FieldValidity(
                date_time=Date(1994, 5, 31, 0),  # Date of birth of ALADIN
                term=Period(0),
            )
            pgdout = epygram.formats.resource(
                filename=outname,
                openmode="w",
                fmt="FA",
                processtype="initialization",
                validity=validity,
                geometry=geom,
                spectral_geometry=spgeom,
            )
            for f in fieldslist:
                fldout = sfxlfi2fa_field(pgdin.readfield(f), geom)
                pgdout.writefield(fldout, compression=pack)
    else:
        logger.warning(
            "Try to create an already existing resource <%s>", outname
        )
427
+
428
+
429
@epygram_checker.disabled_if_unavailable(version="1.0.0")
def empty_fa(t, rh, empty_name):
    """
    Create an empty FA file with fieldname **empty_name**,
    creating header from given existing FA resource handler **rh**.

    :return: the empty epygram resource, closed
    :raises OSError: when the **rh** resource does not exist locally
    """
    # Guard clause: a template resource is mandatory
    if not rh.container.exists():
        raise OSError(
            "Try to copy header from a missing resource <{!s}>".format(
                rh.container.localpath()
            )
        )
    with epy_env_prepare(t):
        template = rh.contents.data
        template.open()
        assert not t.sh.path.exists(empty_name), (
            "Empty target filename already exist: {}".format(empty_name)
        )
        # Build the new FA resource re-using the template's header data
        empty = epygram.formats.resource(
            empty_name,
            "w",
            fmt="FA",
            headername=template.headername,
            validity=template.validity,
            processtype=template.processtype,
            cdiden=rh.contents.cdiden,
        )
        empty.close()
        template.close()
        return empty
461
+
462
+
463
@epygram_checker.disabled_if_unavailable(version="1.0.0")
def geopotentiel2zs(t, rh, rhsource, pack=None):
    """Copy surface geopotential from clim to zs in PGD."""
    from bronx.meteo.constants import g0

    # Guard clause: nothing to do on a missing resource
    if not rh.container.exists():
        logger.warning(
            "Try to copy field on a missing resource <%s>",
            rh.container.localpath(),
        )
        return
    with epy_env_prepare(t):
        # Geopotential divided by g0 gives the surface height
        zs = rhsource.contents.data.readfield("SURFGEOPOTENTIEL")
        zs.operation("/", g0)
        zs.fid["FA"] = "SFX.ZS"
        # Re-open the target in append mode and write the new field
        target = rh.contents.data
        target.close()
        target.open(openmode="a")
        target.writefield(zs, compression=pack)
481
+
482
+
483
@epygram_checker.disabled_if_unavailable(version="1.3.4")
def add_poles_to_GLOB_file(filename):
    """
    DEPRECATED: please use add_poles_to_reglonlat_file instead
    Add poles to a GLOB* regular FA Lon/Lat file that do not contain them.

    The output is written next to the input with a ``+poles`` suffix.
    """
    import numpy

    rin = epygram.formats.resource(filename, "r")
    filename_out = filename + "+poles"
    rout = epygram.formats.resource(
        filename_out,
        "w",
        fmt=rin.format,
        validity=epygram.base.FieldValidity(
            date_time=date.today(), term=date.Period(0, 0, 0)
        ),
        processtype=rin.processtype,
        cdiden=rin.cdiden,
    )
    # Upper-left corner latitude below 90 means the poles are absent
    assert rin.geometry.gimme_corners_ll()["ul"][1] < 90.0, (
        "This file already contains poles."
    )
    for f in rin.listfields():
        if f == "SPECSURFGEOPOTEN":
            # Spectral field skipped (see add_poles_to_reglonlat_file:
            # meaningless in lonlat clims)
            continue
        fld = rin.readfield(f)
        write_args = {}
        if isinstance(fld, epygram.fields.H2DField):
            # create new geometry: two extra latitude rows (both poles)
            newdims = copy.deepcopy(fld.geometry.dimensions)
            newdims["Y"] += 2
            newgrid = copy.deepcopy(fld.geometry.grid)
            # Shift the reference position one row to account for the
            # added southern row
            newgrid["input_position"] = (
                newgrid["input_position"][0],
                newgrid["input_position"][1] + 1,
            )
            newgeom = fpx.geometrys.almost_clone(
                fld.geometry, dimensions=newdims, grid=newgrid
            )
            # compute poles data value as mean of last latitude circle
            newdata = numpy.zeros((newdims["Y"], newdims["X"]))
            newdata[1:-1, :] = fld.data[...]
            newdata[0, :] = newdata[1, :].mean()
            newdata[-1, :] = newdata[-2, :].mean()
            # clone field with new geometry
            fld = fpx.fields.almost_clone(fld, geometry=newgeom)
            fld.data = newdata
            # get initial compression
            write_args = dict(compression=rin.fieldscompression[fld.fid["FA"]])
        rout.writefield(fld, **write_args)
534
+
535
+
536
@epygram_checker.disabled_if_unavailable(version="1.3.4")
def add_poles_to_reglonlat_file(filename):
    """
    Add pole(s) to a regular FA Lon/Lat file that do not contain them.

    The output is written next to the input with a ``+poles`` suffix.
    A pole row is added on a side only when the file's border is exactly
    one grid step away from that pole.

    :raises epygram.epygramError: when neither pole can be added.
    """
    import numpy

    rin = epygram.formats.resource(filename, "r")
    filename_out = filename + "+poles"
    rout = epygram.formats.resource(
        filename_out,
        "w",
        fmt=rin.format,
        validity=epygram.base.FieldValidity(
            date_time=rin.validity.get(), term=date.Period(0, 0, 0)
        ),
        processtype=rin.processtype,
        cdiden=rin.cdiden,
    )
    assert rin.geometry.name == "regular_lonlat", (
        "This file's geometry is not regular lon/lat, cannot add pole(s)."
    )
    # determine what is to be done
    resolution = rin.geometry.grid["Y_resolution"].get("degrees")
    latmin = rin.geometry.gimme_corners_ll()["ll"][1]
    latmax = rin.geometry.gimme_corners_ll()["ul"][1]
    # south
    south = False
    if abs(-90.0 - latmin) <= epygram.config.epsilon:
        logger.info("This file already contains south pole")
    elif abs((-90.0 + resolution) - latmin) <= epygram.config.epsilon:
        # Border is exactly one row away from the south pole: add it
        south = True
    else:
        logger.info(
            "This file south border is too far from south pole to add it."
        )
    # north
    north = False
    if abs(90.0 - latmax) <= epygram.config.epsilon:
        logger.info("This file already contains north pole")
    elif abs((90.0 - resolution) - latmax) <= epygram.config.epsilon:
        # Border is exactly one row away from the north pole: add it
        north = True
    else:
        logger.info(
            "This file north border is too far from north pole to add it."
        )
    if not north and not south:
        raise epygram.epygramError("Nothing to do")
    # prepare new geom (based on an arbitrary H2D field's geometry)
    geom = rin.readfield("SURFGEOPOTENTIEL").geometry
    newdims = copy.deepcopy(geom.dimensions)
    newgrid = copy.deepcopy(geom.grid)
    if north and south:
        newdims["Y"] += 2
    else:
        newdims["Y"] += 1
    if south:
        # Move the grid origin one row south, onto the pole
        newgrid["input_lon"] = epygram.util.Angle(
            geom.gimme_corners_ll()["ll"][0], "degrees"
        )
        newgrid["input_lat"] = epygram.util.Angle(
            geom.gimme_corners_ll()["ll"][1] - resolution, "degrees"
        )
        newgrid["input_position"] = (0, 0)
    else:  # north only: 0,0 has not changed
        newgrid["input_lon"] = epygram.util.Angle(
            geom.gimme_corners_ll()["ll"][0], "degrees"
        )
        newgrid["input_lat"] = epygram.util.Angle(
            geom.gimme_corners_ll()["ll"][1], "degrees"
        )
        newgrid["input_position"] = (0, 0)
    newgeom = fpx.geometrys.almost_clone(
        geom, dimensions=newdims, grid=newgrid
    )
    # loop on fields
    for f in rin.listfields():
        if f == "SPECSURFGEOPOTEN":
            continue  # meaningless in lonlat clims
        fld = rin.readfield(f)
        write_args = {}
        if isinstance(fld, epygram.fields.H2DField):
            # compute poles data value as mean of last latitude circle
            newdata = numpy.zeros((newdims["Y"], newdims["X"]))
            if south and north:
                newdata[1:-1, :] = fld.data[...]
                newdata[0, :] = newdata[1, :].mean()
                newdata[-1, :] = newdata[-2, :].mean()
            elif south:
                newdata[1:, :] = fld.data[...]
                newdata[0, :] = newdata[1, :].mean()
            elif north:
                newdata[:-1, :] = fld.data[...]
                newdata[-1, :] = newdata[-2, :].mean()
            # clone field with new geometry
            fld = fpx.fields.almost_clone(fld, geometry=newgeom)
            fld.data = newdata
            # get initial compression
            write_args = dict(compression=rin.fieldscompression[fld.fid["FA"]])
        rout.writefield(fld, **write_args)
636
+
637
+
638
@epygram_checker.disabled_if_unavailable()
def split_errgrib_on_shortname(t, rh):
    """Split a Background Error GRIB file into pieces (based on the GRIB shortName).

    :param t: session ticket (its context sequence receives one new
        input section per distinct shortName)
    :param rh: resource handler of the GRIB file to split; its resource
        must be of realkind ``bgstderr`` with no ``variable`` set
    :raises ValueError: when **rh** is not a splittable resource
    :raises OSError: when a message carries no shortName
    """
    # Sanity checks
    if (
        rh.resource.realkind != "bgstderr"
        or getattr(rh.resource, "variable", None) is not None
    ):
        raise ValueError("Incompatible resource: {!s}".format(rh))

    def create_section(sn):
        """Create a new section object for a given shortName (**sn**)."""
        # Same resource as rh, but with the variable attribute set
        sn_r = fpx.resource(
            variable=sn, **rh.resource.footprint_as_shallow_dict()
        )
        sn_p = fpx.provider(magic="magic:///")
        sn_c = fpx.container(
            filename=rh.container.localpath() + sn, format="grib", mode="ab+"
        )
        secs = t.context.sequence.input(
            rh=Handler(dict(resource=sn_r, provider=sn_p, container=sn_c)),
            role="BackgroundStdError",
        )
        secs[0].get()
        return secs[0]

    # Iterate over the GRIB messages
    gribs = rh.contents.data
    sections = dict()
    try:
        grb = gribs.iter_messages(headers_only=False)
        while grb is not None:
            # Find the ShortName (first fid entry that provides one)
            fid = grb.genfid()
            for k in sorted(fid.keys()):
                sn = fid[k].get("shortName", None)
                if sn is not None:
                    break
            if sn is None:
                raise OSError("No ShortName was found")
            # Set up the appropriate section
            if sn not in sections:
                sections[sn] = create_section(sn)
            # Write the field
            grb.write_to_file(sections[sn].rh.container.iodesc())
            # Next field (if any)
            grb = gribs.iter_messages(headers_only=False)
    finally:
        # Always close every per-shortName container
        for sec in sections.values():
            sec.rh.container.close()

    # Summary
    if sections:
        logger.info(
            "%d new sections created. See details below:", len(sections)
        )
        for i, sec in enumerate(
            sorted(sections.values(), key=lambda s: s.rh.resource.variable)
        ):
            sec.rh.quickview(nb=i)