vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. vortex/__init__.py +59 -45
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +940 -614
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/serversynctools.py +34 -33
  6. vortex/config.py +19 -22
  7. vortex/data/__init__.py +9 -3
  8. vortex/data/abstractstores.py +593 -655
  9. vortex/data/containers.py +217 -162
  10. vortex/data/contents.py +65 -39
  11. vortex/data/executables.py +93 -102
  12. vortex/data/flow.py +40 -34
  13. vortex/data/geometries.py +228 -132
  14. vortex/data/handlers.py +428 -225
  15. vortex/data/outflow.py +15 -15
  16. vortex/data/providers.py +185 -163
  17. vortex/data/resources.py +48 -42
  18. vortex/data/stores.py +544 -413
  19. vortex/gloves.py +114 -87
  20. vortex/layout/__init__.py +1 -8
  21. vortex/layout/contexts.py +150 -84
  22. vortex/layout/dataflow.py +353 -202
  23. vortex/layout/monitor.py +264 -128
  24. vortex/nwp/__init__.py +5 -2
  25. vortex/nwp/algo/__init__.py +14 -5
  26. vortex/nwp/algo/assim.py +205 -151
  27. vortex/nwp/algo/clim.py +683 -517
  28. vortex/nwp/algo/coupling.py +447 -225
  29. vortex/nwp/algo/eda.py +437 -229
  30. vortex/nwp/algo/eps.py +403 -231
  31. vortex/nwp/algo/forecasts.py +420 -271
  32. vortex/nwp/algo/fpserver.py +683 -307
  33. vortex/nwp/algo/ifsnaming.py +205 -145
  34. vortex/nwp/algo/ifsroot.py +210 -122
  35. vortex/nwp/algo/monitoring.py +132 -76
  36. vortex/nwp/algo/mpitools.py +321 -191
  37. vortex/nwp/algo/odbtools.py +617 -353
  38. vortex/nwp/algo/oopsroot.py +449 -273
  39. vortex/nwp/algo/oopstests.py +90 -56
  40. vortex/nwp/algo/request.py +287 -206
  41. vortex/nwp/algo/stdpost.py +878 -522
  42. vortex/nwp/data/__init__.py +22 -4
  43. vortex/nwp/data/assim.py +125 -137
  44. vortex/nwp/data/boundaries.py +121 -68
  45. vortex/nwp/data/climfiles.py +193 -211
  46. vortex/nwp/data/configfiles.py +73 -69
  47. vortex/nwp/data/consts.py +426 -401
  48. vortex/nwp/data/ctpini.py +59 -43
  49. vortex/nwp/data/diagnostics.py +94 -66
  50. vortex/nwp/data/eda.py +50 -51
  51. vortex/nwp/data/eps.py +195 -146
  52. vortex/nwp/data/executables.py +440 -434
  53. vortex/nwp/data/fields.py +63 -48
  54. vortex/nwp/data/gridfiles.py +183 -111
  55. vortex/nwp/data/logs.py +250 -217
  56. vortex/nwp/data/modelstates.py +180 -151
  57. vortex/nwp/data/monitoring.py +72 -99
  58. vortex/nwp/data/namelists.py +254 -202
  59. vortex/nwp/data/obs.py +400 -308
  60. vortex/nwp/data/oopsexec.py +22 -20
  61. vortex/nwp/data/providers.py +90 -65
  62. vortex/nwp/data/query.py +71 -82
  63. vortex/nwp/data/stores.py +49 -36
  64. vortex/nwp/data/surfex.py +136 -137
  65. vortex/nwp/syntax/__init__.py +1 -1
  66. vortex/nwp/syntax/stdattrs.py +173 -111
  67. vortex/nwp/tools/__init__.py +2 -2
  68. vortex/nwp/tools/addons.py +22 -17
  69. vortex/nwp/tools/agt.py +24 -12
  70. vortex/nwp/tools/bdap.py +16 -5
  71. vortex/nwp/tools/bdcp.py +4 -1
  72. vortex/nwp/tools/bdm.py +3 -0
  73. vortex/nwp/tools/bdmp.py +14 -9
  74. vortex/nwp/tools/conftools.py +728 -378
  75. vortex/nwp/tools/drhook.py +12 -8
  76. vortex/nwp/tools/grib.py +65 -39
  77. vortex/nwp/tools/gribdiff.py +22 -17
  78. vortex/nwp/tools/ifstools.py +82 -42
  79. vortex/nwp/tools/igastuff.py +167 -143
  80. vortex/nwp/tools/mars.py +14 -2
  81. vortex/nwp/tools/odb.py +234 -125
  82. vortex/nwp/tools/partitioning.py +61 -37
  83. vortex/nwp/tools/satrad.py +27 -12
  84. vortex/nwp/util/async.py +83 -55
  85. vortex/nwp/util/beacon.py +10 -10
  86. vortex/nwp/util/diffpygram.py +174 -86
  87. vortex/nwp/util/ens.py +144 -63
  88. vortex/nwp/util/hooks.py +30 -19
  89. vortex/nwp/util/taskdeco.py +28 -24
  90. vortex/nwp/util/usepygram.py +278 -172
  91. vortex/nwp/util/usetnt.py +31 -17
  92. vortex/sessions.py +72 -39
  93. vortex/syntax/__init__.py +1 -1
  94. vortex/syntax/stdattrs.py +410 -171
  95. vortex/syntax/stddeco.py +31 -22
  96. vortex/toolbox.py +327 -192
  97. vortex/tools/__init__.py +11 -2
  98. vortex/tools/actions.py +125 -59
  99. vortex/tools/addons.py +111 -92
  100. vortex/tools/arm.py +42 -22
  101. vortex/tools/compression.py +72 -69
  102. vortex/tools/date.py +11 -4
  103. vortex/tools/delayedactions.py +242 -132
  104. vortex/tools/env.py +75 -47
  105. vortex/tools/folder.py +342 -171
  106. vortex/tools/grib.py +311 -149
  107. vortex/tools/lfi.py +423 -216
  108. vortex/tools/listings.py +109 -40
  109. vortex/tools/names.py +218 -156
  110. vortex/tools/net.py +632 -298
  111. vortex/tools/parallelism.py +93 -61
  112. vortex/tools/prestaging.py +55 -31
  113. vortex/tools/schedulers.py +172 -105
  114. vortex/tools/services.py +402 -333
  115. vortex/tools/storage.py +293 -358
  116. vortex/tools/surfex.py +24 -24
  117. vortex/tools/systems.py +1211 -631
  118. vortex/tools/targets.py +156 -100
  119. vortex/util/__init__.py +1 -1
  120. vortex/util/config.py +377 -327
  121. vortex/util/empty.py +2 -2
  122. vortex/util/helpers.py +56 -24
  123. vortex/util/introspection.py +18 -12
  124. vortex/util/iosponge.py +8 -4
  125. vortex/util/roles.py +4 -6
  126. vortex/util/storefunctions.py +39 -13
  127. vortex/util/structs.py +3 -3
  128. vortex/util/worker.py +29 -17
  129. vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
  130. vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
  131. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
  132. vortex/layout/appconf.py +0 -109
  133. vortex/layout/jobs.py +0 -1276
  134. vortex/layout/nodes.py +0 -1424
  135. vortex/layout/subjobs.py +0 -464
  136. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  137. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  138. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
  139. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
@@ -19,14 +19,14 @@ from vortex.data.handlers import Handler
19
19
 
20
20
  logger = loggers.getLogger(__name__)
21
21
 
22
- epygram_checker = ExternalCodeImportChecker('epygram')
22
+ epygram_checker = ExternalCodeImportChecker("epygram")
23
23
  with epygram_checker as ec_register:
24
24
  import epygram # @UnusedImport
25
25
 
26
26
  try:
27
27
  ec_register.update(version=epygram.__version__)
28
28
  except AttributeError:
29
- raise ImportError('Improper eypgram module.')
29
+ raise ImportError("Improper eypgram module.")
30
30
  try:
31
31
  u_unused = epygram.formats.FA
32
32
  hasFA = True
@@ -39,15 +39,20 @@ with epygram_checker as ec_register:
39
39
  except AttributeError:
40
40
  hasGRIB = False
41
41
  ec_register.update(needGRIB=hasGRIB)
42
- logger.info('Epygram %s loaded (GRIB support=%s, FA support=%s).',
43
- epygram.__version__, hasGRIB, hasFA)
42
+ logger.info(
43
+ "Epygram %s loaded (GRIB support=%s, FA support=%s).",
44
+ epygram.__version__,
45
+ hasGRIB,
46
+ hasFA,
47
+ )
44
48
 
45
- np_checker = ExternalCodeImportChecker('numpy')
49
+ np_checker = ExternalCodeImportChecker("numpy")
46
50
  with np_checker as npregister:
47
51
  import numpy as np
52
+
48
53
  npregister.update(version=np.__version__)
49
54
 
50
- footprints.proxy.containers.discard_package('epygram', verbose=False)
55
+ footprints.proxy.containers.discard_package("epygram", verbose=False)
51
56
 
52
57
  __all__ = []
53
58
 
@@ -56,25 +61,39 @@ def _sources_and_names_fixup(sources, names=None):
56
61
  """Fix **sources** and **names** lists."""
57
62
  # Prepare sources names
58
63
  if not isinstance(sources, (list, tuple, set)):
59
- sources = [sources, ]
64
+ sources = [
65
+ sources,
66
+ ]
60
67
  sources = [source.upper() for source in sources]
61
68
  # Prepare output names
62
69
  if names is None:
63
70
  names = sources
64
71
  else:
65
72
  if not isinstance(names, (list, tuple, set)):
66
- names = [names, ]
67
- names = [name.upper().replace(' ', '.') for name in names]
73
+ names = [
74
+ names,
75
+ ]
76
+ names = [name.upper().replace(" ", ".") for name in names]
68
77
  # Fill the sources list if necessary
69
78
  if len(sources) == 1 and len(names) > 1:
70
79
  sources *= len(names)
71
80
  if len(sources) != len(names):
72
- raise ValueError('Sizes of sources and names do not fit the requirements.')
81
+ raise ValueError(
82
+ "Sizes of sources and names do not fit the requirements."
83
+ )
73
84
  return sources, names
74
85
 
75
86
 
76
87
  @epygram_checker.disabled_if_unavailable
77
- def clone_fields(datain, dataout, sources, names=None, value=None, pack=None, overwrite=False):
88
+ def clone_fields(
89
+ datain,
90
+ dataout,
91
+ sources,
92
+ names=None,
93
+ value=None,
94
+ pack=None,
95
+ overwrite=False,
96
+ ):
78
97
  """Clone any existing fields ending with``source`` to some new field."""
79
98
  datain.open()
80
99
  sources, names = _sources_and_names_fixup(sources, names)
@@ -88,9 +107,9 @@ def clone_fields(datain, dataout, sources, names=None, value=None, pack=None, ov
88
107
  fx = None
89
108
  comprpack = None
90
109
  for fieldname in [x for x in sorted(tablein) if x.endswith(source)]:
91
- newfield = fieldname.replace(source, '') + name
110
+ newfield = fieldname.replace(source, "") + name
92
111
  if not overwrite and newfield in tableout:
93
- logger.warning('Field <%s> already in output file', newfield)
112
+ logger.warning("Field <%s> already in output file", newfield)
94
113
  else:
95
114
  # If the values are to be overwritten : do not read the input
96
115
  # field several times...
@@ -109,9 +128,9 @@ def clone_fields(datain, dataout, sources, names=None, value=None, pack=None, ov
109
128
  # On the first append, open the output file
110
129
  if addedfields == 0:
111
130
  dataout.close()
112
- dataout.open(openmode='a')
131
+ dataout.open(openmode="a")
113
132
  # Actually add the new field
114
- logger.info('Add field {} pack={}'.format(fy.fid, comprpack))
133
+ logger.info("Add field {} pack={}".format(fy.fid, comprpack))
115
134
  dataout.writefield(fy, compression=comprpack)
116
135
  addedfields += 1
117
136
 
@@ -127,7 +146,7 @@ def epy_env_prepare(t):
127
146
  if localenv.OMP_NUM_THREADS is None:
128
147
  localenv.OMP_NUM_THREADS = 1
129
148
  localenv.update(
130
- LFI_HNDL_SPEC=':1',
149
+ LFI_HNDL_SPEC=":1",
131
150
  DR_HOOK_SILENT=1,
132
151
  DR_HOOK_NOT_MPI=1,
133
152
  )
@@ -144,12 +163,19 @@ def addfield(t, rh, fieldsource, fieldtarget, constvalue, pack=None):
144
163
  """Provider hook for adding a field through cloning."""
145
164
  if rh.container.exists():
146
165
  with epy_env_prepare(t):
147
- clone_fields(rh.contents.data, rh.contents.data,
148
- fieldsource, names=fieldtarget, value=constvalue,
149
- pack=pack)
166
+ clone_fields(
167
+ rh.contents.data,
168
+ rh.contents.data,
169
+ fieldsource,
170
+ names=fieldtarget,
171
+ value=constvalue,
172
+ pack=pack,
173
+ )
150
174
  else:
151
- logger.warning('Try to add field on a missing resource <%s>',
152
- rh.container.localpath())
175
+ logger.warning(
176
+ "Try to add field on a missing resource <%s>",
177
+ rh.container.localpath(),
178
+ )
153
179
 
154
180
 
155
181
  @epygram_checker.disabled_if_unavailable
@@ -157,11 +183,18 @@ def copyfield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
157
183
  """Provider hook for copying fields between FA files (but do not overwrite existing fields)."""
158
184
  if rh.container.exists():
159
185
  with epy_env_prepare(t):
160
- clone_fields(rhsource.contents.data, rh.contents.data,
161
- fieldsource, fieldtarget, pack=pack)
186
+ clone_fields(
187
+ rhsource.contents.data,
188
+ rh.contents.data,
189
+ fieldsource,
190
+ fieldtarget,
191
+ pack=pack,
192
+ )
162
193
  else:
163
- logger.warning('Try to copy field on a missing resource <%s>',
164
- rh.container.localpath())
194
+ logger.warning(
195
+ "Try to copy field on a missing resource <%s>",
196
+ rh.container.localpath(),
197
+ )
165
198
 
166
199
 
167
200
  @epygram_checker.disabled_if_unavailable
@@ -169,11 +202,19 @@ def overwritefield(t, rh, rhsource, fieldsource, fieldtarget, pack=None):
169
202
  """Provider hook for copying fields between FA files (overwrite existing fields)."""
170
203
  if rh.container.exists():
171
204
  with epy_env_prepare(t):
172
- clone_fields(rhsource.contents.data, rh.contents.data,
173
- fieldsource, fieldtarget, overwrite=True, pack=pack)
205
+ clone_fields(
206
+ rhsource.contents.data,
207
+ rh.contents.data,
208
+ fieldsource,
209
+ fieldtarget,
210
+ overwrite=True,
211
+ pack=pack,
212
+ )
174
213
  else:
175
- logger.warning('Try to copy field on a missing resource <%s>',
176
- rh.container.localpath())
214
+ logger.warning(
215
+ "Try to copy field on a missing resource <%s>",
216
+ rh.container.localpath(),
217
+ )
177
218
 
178
219
 
179
220
  @np_checker.disabled_if_unavailable
@@ -194,23 +235,25 @@ def updatefield(t, rh, rhsource, fieldsource, fieldtarget, masktype, *kargs):
194
235
  if rh.container.exists():
195
236
  with epy_env_prepare(t):
196
237
  # Various initialisations
197
- fieldsource, fieldtarget = _sources_and_names_fixup(fieldsource, fieldtarget)
238
+ fieldsource, fieldtarget = _sources_and_names_fixup(
239
+ fieldsource, fieldtarget
240
+ )
198
241
  datain = rhsource.contents.data
199
242
  datain.open()
200
243
  dataout = rh.contents.data
201
244
  dataout.close()
202
- dataout.open(openmode='a')
245
+ dataout.open(openmode="a")
203
246
  tablein = datain.listfields()
204
247
  tableout = dataout.listfields()
205
248
  updatedfields = 0
206
249
 
207
250
  # Function that creates the subset of elements to update
208
- if masktype == 'none':
251
+ if masktype == "none":
209
252
 
210
253
  def subsetfunc(epyobj):
211
254
  return Ellipsis
212
255
 
213
- elif masktype == 'np.ma.masked':
256
+ elif masktype == "np.ma.masked":
214
257
 
215
258
  def subsetfunc(epyobj):
216
259
  if np.ma.is_masked(epyobj.data):
@@ -219,12 +262,16 @@ def updatefield(t, rh, rhsource, fieldsource, fieldtarget, masktype, *kargs):
219
262
  return Ellipsis
220
263
 
221
264
  else:
222
- raise ValueError('Unsupported masktype in the updatefield hook.')
265
+ raise ValueError(
266
+ "Unsupported masktype in the updatefield hook."
267
+ )
223
268
 
224
269
  # Look for the input fields and update them
225
270
  for source, target in zip(fieldsource, fieldtarget):
226
- for fieldname in [x for x in sorted(tablein) if x.endswith(source)]:
227
- targetfield = fieldname.replace(source, '') + target
271
+ for fieldname in [
272
+ x for x in sorted(tablein) if x.endswith(source)
273
+ ]:
274
+ targetfield = fieldname.replace(source, "") + target
228
275
  if targetfield in tableout:
229
276
  fx = datain.readfield(fieldname)
230
277
  fy = dataout.readfield(targetfield)
@@ -233,21 +280,25 @@ def updatefield(t, rh, rhsource, fieldsource, fieldtarget, masktype, *kargs):
233
280
  dataout.writefield(fy)
234
281
  updatedfields += 1
235
282
  else:
236
- logger.warning('Field <%s> is missing in the output file', targetfield)
283
+ logger.warning(
284
+ "Field <%s> is missing in the output file",
285
+ targetfield,
286
+ )
237
287
 
238
288
  dataout.close()
239
289
  datain.close()
240
290
  return updatedfields
241
291
  else:
242
- logger.warning('Try to copy field on a missing resource <%s>',
243
- rh.container.localpath())
292
+ logger.warning(
293
+ "Try to copy field on a missing resource <%s>",
294
+ rh.container.localpath(),
295
+ )
244
296
 
245
297
 
246
298
  class EpygramMetadataReader(MetaDataReader):
247
-
248
299
  _abstract = True
249
300
  _footprint = dict(
250
- info = 'Abstract MetaDataReader for formats handled by epygram',
301
+ info="Abstract MetaDataReader for formats handled by epygram",
251
302
  )
252
303
 
253
304
  def _do_delayed_init(self):
@@ -256,9 +307,11 @@ class EpygramMetadataReader(MetaDataReader):
256
307
  epyf.open()
257
308
  date_epy, term_epy = self._process_epy(epyf)
258
309
  self._datahide = {
259
- 'date': Date(date_epy) if date_epy else date_epy,
260
- 'term': Time(hour=int(term_epy.total_seconds() / 3600),
261
- minute=int(term_epy.total_seconds() / 60) % 60)
310
+ "date": Date(date_epy) if date_epy else date_epy,
311
+ "term": Time(
312
+ hour=int(term_epy.total_seconds() / 3600),
313
+ minute=int(term_epy.total_seconds() / 60) % 60,
314
+ ),
262
315
  }
263
316
 
264
317
  def _process_epy(self, epyf):
@@ -268,14 +321,9 @@ class EpygramMetadataReader(MetaDataReader):
268
321
 
269
322
  @epygram_checker.disabled_if_unavailable
270
323
  class FaMetadataReader(EpygramMetadataReader):
271
-
272
324
  _footprint = dict(
273
- info = 'MetaDataReader for the FA file format',
274
- attr = dict(
275
- format = dict(
276
- values = ('FA',)
277
- )
278
- )
325
+ info="MetaDataReader for the FA file format",
326
+ attr=dict(format=dict(values=("FA",))),
279
327
  )
280
328
 
281
329
  def _process_epy(self, epyf):
@@ -284,15 +332,11 @@ class FaMetadataReader(EpygramMetadataReader):
284
332
  return epyf.validity.getbasis(), epyf.validity.term()
285
333
 
286
334
 
287
- @epygram_checker.disabled_if_unavailable(version='1.0.0')
335
+ @epygram_checker.disabled_if_unavailable(version="1.0.0")
288
336
  class GribMetadataReader(EpygramMetadataReader):
289
337
  _footprint = dict(
290
- info = 'MetaDataReader for the GRIB file format',
291
- attr = dict(
292
- format = dict(
293
- values = ('GRIB',)
294
- )
295
- )
338
+ info="MetaDataReader for the GRIB file format",
339
+ attr=dict(format=dict(values=("GRIB",))),
296
340
  )
297
341
 
298
342
  def _process_epy(self, epyf):
@@ -301,10 +345,14 @@ class GribMetadataReader(EpygramMetadataReader):
301
345
  with epy_env_prepare(sessions.current()):
302
346
  epyfld = epyf.iter_fields(getdata=False)
303
347
  while epyfld:
304
- bundle.add((epyfld.validity.getbasis(), epyfld.validity.term()))
348
+ bundle.add(
349
+ (epyfld.validity.getbasis(), epyfld.validity.term())
350
+ )
305
351
  epyfld = epyf.iter_fields(getdata=False)
306
352
  if len(bundle) > 1:
307
- logger.error("The GRIB file contains fileds with different date and terms.")
353
+ logger.error(
354
+ "The GRIB file contains fileds with different date and terms."
355
+ )
308
356
  if len(bundle) == 0:
309
357
  logger.warning("The GRIB file doesn't contains any fields")
310
358
  return None, 0
@@ -312,12 +360,16 @@ class GribMetadataReader(EpygramMetadataReader):
312
360
  return bundle.pop()
313
361
 
314
362
 
315
- @epygram_checker.disabled_if_unavailable(version='1.2.11')
316
- def mk_pgdfa923_from_pgdlfi(t, rh_pgdlfi, nam923blocks,
317
- outname=None,
318
- fieldslist=None,
319
- field_prefix='S1D_',
320
- pack=None):
363
+ @epygram_checker.disabled_if_unavailable(version="1.2.11")
364
+ def mk_pgdfa923_from_pgdlfi(
365
+ t,
366
+ rh_pgdlfi,
367
+ nam923blocks,
368
+ outname=None,
369
+ fieldslist=None,
370
+ field_prefix="S1D_",
371
+ pack=None,
372
+ ):
321
373
  """
322
374
  Hook to convert fields from a PGD.lfi to well-formatted for clim923 FA format.
323
375
 
@@ -332,41 +384,49 @@ def mk_pgdfa923_from_pgdlfi(t, rh_pgdlfi, nam923blocks,
332
384
  dm = epygram.geometries.domain_making
333
385
 
334
386
  def sfxlfi2fa_field(fld, geom):
335
- fldout = fpx.fields.almost_clone(fld,
336
- geometry=geom,
337
- fid={'FA': field_prefix + fld.fid['LFI']})
387
+ fldout = fpx.fields.almost_clone(
388
+ fld, geometry=geom, fid={"FA": field_prefix + fld.fid["LFI"]}
389
+ )
338
390
  fldout.setdata(fld.data[1:-1, 1:-1])
339
391
  return fldout
340
392
 
341
393
  if fieldslist is None:
342
- fieldslist = ['ZS', 'COVER001', 'COVER002']
394
+ fieldslist = ["ZS", "COVER001", "COVER002"]
343
395
  if pack is None:
344
- pack = {'KNGRIB': -1}
396
+ pack = {"KNGRIB": -1}
345
397
  if outname is None:
346
- outname = rh_pgdlfi.container.abspath + '.fa923'
398
+ outname = rh_pgdlfi.container.abspath + ".fa923"
347
399
  if not t.sh.path.exists(outname):
348
400
  with epy_env_prepare(t):
349
- pgdin = fpx.dataformats.almost_clone(rh_pgdlfi.contents.data,
350
- true3d=True)
351
- geom, spgeom = dm.build.build_geom_from_e923nam(nam923blocks) # TODO: Arpege case
352
- validity = epygram.base.FieldValidity(date_time=Date(1994, 5, 31, 0), # Date of birth of ALADIN
353
- term=Period(0))
354
- pgdout = epygram.formats.resource(filename=outname,
355
- openmode='w',
356
- fmt='FA',
357
- processtype='initialization',
358
- validity=validity,
359
- geometry=geom,
360
- spectral_geometry=spgeom)
401
+ pgdin = fpx.dataformats.almost_clone(
402
+ rh_pgdlfi.contents.data, true3d=True
403
+ )
404
+ geom, spgeom = dm.build.build_geom_from_e923nam(
405
+ nam923blocks
406
+ ) # TODO: Arpege case
407
+ validity = epygram.base.FieldValidity(
408
+ date_time=Date(1994, 5, 31, 0), # Date of birth of ALADIN
409
+ term=Period(0),
410
+ )
411
+ pgdout = epygram.formats.resource(
412
+ filename=outname,
413
+ openmode="w",
414
+ fmt="FA",
415
+ processtype="initialization",
416
+ validity=validity,
417
+ geometry=geom,
418
+ spectral_geometry=spgeom,
419
+ )
361
420
  for f in fieldslist:
362
421
  fldout = sfxlfi2fa_field(pgdin.readfield(f), geom)
363
422
  pgdout.writefield(fldout, compression=pack)
364
423
  else:
365
- logger.warning('Try to create an already existing resource <%s>',
366
- outname)
424
+ logger.warning(
425
+ "Try to create an already existing resource <%s>", outname
426
+ )
367
427
 
368
428
 
369
- @epygram_checker.disabled_if_unavailable(version='1.0.0')
429
+ @epygram_checker.disabled_if_unavailable(version="1.0.0")
370
430
  def empty_fa(t, rh, empty_name):
371
431
  """
372
432
  Create an empty FA file with fieldname **empty_name**,
@@ -377,71 +437,91 @@ def empty_fa(t, rh, empty_name):
377
437
  if rh.container.exists():
378
438
  with epy_env_prepare(t):
379
439
  rh.contents.data.open()
380
- assert not t.sh.path.exists(empty_name), \
381
- 'Empty target filename already exist: {}'.format(empty_name)
382
- e = epygram.formats.resource(empty_name, 'w', fmt='FA',
383
- headername=rh.contents.data.headername,
384
- validity=rh.contents.data.validity,
385
- processtype=rh.contents.data.processtype,
386
- cdiden=rh.contents.cdiden)
440
+ assert not t.sh.path.exists(empty_name), (
441
+ "Empty target filename already exist: {}".format(empty_name)
442
+ )
443
+ e = epygram.formats.resource(
444
+ empty_name,
445
+ "w",
446
+ fmt="FA",
447
+ headername=rh.contents.data.headername,
448
+ validity=rh.contents.data.validity,
449
+ processtype=rh.contents.data.processtype,
450
+ cdiden=rh.contents.cdiden,
451
+ )
387
452
  e.close()
388
453
  rh.contents.data.close()
389
454
  return e
390
455
  else:
391
- raise OSError('Try to copy header from a missing resource <{!s}>'.format(rh.container.localpath()))
456
+ raise OSError(
457
+ "Try to copy header from a missing resource <{!s}>".format(
458
+ rh.container.localpath()
459
+ )
460
+ )
392
461
 
393
462
 
394
- @epygram_checker.disabled_if_unavailable(version='1.0.0')
463
+ @epygram_checker.disabled_if_unavailable(version="1.0.0")
395
464
  def geopotentiel2zs(t, rh, rhsource, pack=None):
396
465
  """Copy surface geopotential from clim to zs in PGD."""
397
466
  from bronx.meteo.constants import g0
467
+
398
468
  if rh.container.exists():
399
469
  with epy_env_prepare(t):
400
- orog = rhsource.contents.data.readfield('SURFGEOPOTENTIEL')
401
- orog.operation('/', g0)
402
- orog.fid['FA'] = 'SFX.ZS'
470
+ orog = rhsource.contents.data.readfield("SURFGEOPOTENTIEL")
471
+ orog.operation("/", g0)
472
+ orog.fid["FA"] = "SFX.ZS"
403
473
  rh.contents.data.close()
404
- rh.contents.data.open(openmode='a')
474
+ rh.contents.data.open(openmode="a")
405
475
  rh.contents.data.writefield(orog, compression=pack)
406
476
  else:
407
- logger.warning('Try to copy field on a missing resource <%s>',
408
- rh.container.localpath())
477
+ logger.warning(
478
+ "Try to copy field on a missing resource <%s>",
479
+ rh.container.localpath(),
480
+ )
409
481
 
410
482
 
411
- @epygram_checker.disabled_if_unavailable(version='1.3.4')
483
+ @epygram_checker.disabled_if_unavailable(version="1.3.4")
412
484
  def add_poles_to_GLOB_file(filename):
413
485
  """
414
486
  DEPRECATED: please use add_poles_to_reglonlat_file instead
415
487
  Add poles to a GLOB* regular FA Lon/Lat file that do not contain them.
416
488
  """
417
489
  import numpy
418
- rin = epygram.formats.resource(filename, 'r')
419
- filename_out = filename + '+poles'
420
- rout = epygram.formats.resource(filename_out, 'w', fmt=rin.format,
421
- validity=epygram.base.FieldValidity(
422
- date_time=date.today(),
423
- term=date.Period(0, 0, 0)),
424
- processtype=rin.processtype,
425
- cdiden=rin.cdiden)
426
- assert rin.geometry.gimme_corners_ll()['ul'][1] < 90., \
427
- 'This file already contains poles.'
490
+
491
+ rin = epygram.formats.resource(filename, "r")
492
+ filename_out = filename + "+poles"
493
+ rout = epygram.formats.resource(
494
+ filename_out,
495
+ "w",
496
+ fmt=rin.format,
497
+ validity=epygram.base.FieldValidity(
498
+ date_time=date.today(), term=date.Period(0, 0, 0)
499
+ ),
500
+ processtype=rin.processtype,
501
+ cdiden=rin.cdiden,
502
+ )
503
+ assert rin.geometry.gimme_corners_ll()["ul"][1] < 90.0, (
504
+ "This file already contains poles."
505
+ )
428
506
  for f in rin.listfields():
429
- if f == 'SPECSURFGEOPOTEN':
507
+ if f == "SPECSURFGEOPOTEN":
430
508
  continue
431
509
  fld = rin.readfield(f)
432
510
  write_args = {}
433
511
  if isinstance(fld, epygram.fields.H2DField):
434
512
  # create new geometry
435
513
  newdims = copy.deepcopy(fld.geometry.dimensions)
436
- newdims['Y'] += 2
514
+ newdims["Y"] += 2
437
515
  newgrid = copy.deepcopy(fld.geometry.grid)
438
- newgrid['input_position'] = (newgrid['input_position'][0],
439
- newgrid['input_position'][1] + 1)
440
- newgeom = fpx.geometrys.almost_clone(fld.geometry,
441
- dimensions=newdims,
442
- grid=newgrid)
516
+ newgrid["input_position"] = (
517
+ newgrid["input_position"][0],
518
+ newgrid["input_position"][1] + 1,
519
+ )
520
+ newgeom = fpx.geometrys.almost_clone(
521
+ fld.geometry, dimensions=newdims, grid=newgrid
522
+ )
443
523
  # compute poles data value as mean of last latitude circle
444
- newdata = numpy.zeros((newdims['Y'], newdims['X']))
524
+ newdata = numpy.zeros((newdims["Y"], newdims["X"]))
445
525
  newdata[1:-1, :] = fld.data[...]
446
526
  newdata[0, :] = newdata[1, :].mean()
447
527
  newdata[-1, :] = newdata[-2, :].mean()
@@ -449,77 +529,94 @@ def add_poles_to_GLOB_file(filename):
449
529
  fld = fpx.fields.almost_clone(fld, geometry=newgeom)
450
530
  fld.data = newdata
451
531
  # get initial compression
452
- write_args = dict(compression=rin.fieldscompression[fld.fid['FA']])
532
+ write_args = dict(compression=rin.fieldscompression[fld.fid["FA"]])
453
533
  rout.writefield(fld, **write_args)
454
534
 
455
535
 
456
- @epygram_checker.disabled_if_unavailable(version='1.3.4')
536
+ @epygram_checker.disabled_if_unavailable(version="1.3.4")
457
537
  def add_poles_to_reglonlat_file(filename):
458
538
  """
459
539
  Add pole(s) to a regular FA Lon/Lat file that do not contain them.
460
540
  """
461
541
  import numpy
462
- rin = epygram.formats.resource(filename, 'r')
463
- filename_out = filename + '+poles'
464
- rout = epygram.formats.resource(filename_out, 'w', fmt=rin.format,
465
- validity=epygram.base.FieldValidity(
466
- date_time=rin.validity.get(),
467
- term=date.Period(0, 0, 0)
468
- ),
469
- processtype=rin.processtype,
470
- cdiden=rin.cdiden)
471
- assert rin.geometry.name == 'regular_lonlat', \
542
+
543
+ rin = epygram.formats.resource(filename, "r")
544
+ filename_out = filename + "+poles"
545
+ rout = epygram.formats.resource(
546
+ filename_out,
547
+ "w",
548
+ fmt=rin.format,
549
+ validity=epygram.base.FieldValidity(
550
+ date_time=rin.validity.get(), term=date.Period(0, 0, 0)
551
+ ),
552
+ processtype=rin.processtype,
553
+ cdiden=rin.cdiden,
554
+ )
555
+ assert rin.geometry.name == "regular_lonlat", (
472
556
  "This file's geometry is not regular lon/lat, cannot add pole(s)."
557
+ )
473
558
  # determine what is to be done
474
- resolution = rin.geometry.grid['Y_resolution'].get('degrees')
475
- latmin = rin.geometry.gimme_corners_ll()['ll'][1]
476
- latmax = rin.geometry.gimme_corners_ll()['ul'][1]
559
+ resolution = rin.geometry.grid["Y_resolution"].get("degrees")
560
+ latmin = rin.geometry.gimme_corners_ll()["ll"][1]
561
+ latmax = rin.geometry.gimme_corners_ll()["ul"][1]
477
562
  # south
478
563
  south = False
479
- if abs(-90. - latmin) <= epygram.config.epsilon:
564
+ if abs(-90.0 - latmin) <= epygram.config.epsilon:
480
565
  logger.info("This file already contains south pole")
481
- elif abs((-90. + resolution) - latmin) <= epygram.config.epsilon:
566
+ elif abs((-90.0 + resolution) - latmin) <= epygram.config.epsilon:
482
567
  south = True
483
568
  else:
484
- logger.info("This file south border is too far from south pole to add it.")
569
+ logger.info(
570
+ "This file south border is too far from south pole to add it."
571
+ )
485
572
  # north
486
573
  north = False
487
- if abs(90. - latmax) <= epygram.config.epsilon:
574
+ if abs(90.0 - latmax) <= epygram.config.epsilon:
488
575
  logger.info("This file already contains north pole")
489
- elif abs((90. - resolution) - latmax) <= epygram.config.epsilon:
576
+ elif abs((90.0 - resolution) - latmax) <= epygram.config.epsilon:
490
577
  north = True
491
578
  else:
492
- logger.info("This file north border is too far from north pole to add it.")
579
+ logger.info(
580
+ "This file north border is too far from north pole to add it."
581
+ )
493
582
  if not north and not south:
494
583
  raise epygram.epygramError("Nothing to do")
495
584
  # prepare new geom
496
- geom = rin.readfield('SURFGEOPOTENTIEL').geometry
585
+ geom = rin.readfield("SURFGEOPOTENTIEL").geometry
497
586
  newdims = copy.deepcopy(geom.dimensions)
498
587
  newgrid = copy.deepcopy(geom.grid)
499
588
  if north and south:
500
- newdims['Y'] += 2
589
+ newdims["Y"] += 2
501
590
  else:
502
- newdims['Y'] += 1
591
+ newdims["Y"] += 1
503
592
  if south:
504
- newgrid['input_lon'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][0], 'degrees')
505
- newgrid['input_lat'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][1] - resolution, 'degrees')
506
- newgrid['input_position'] = (0, 0)
593
+ newgrid["input_lon"] = epygram.util.Angle(
594
+ geom.gimme_corners_ll()["ll"][0], "degrees"
595
+ )
596
+ newgrid["input_lat"] = epygram.util.Angle(
597
+ geom.gimme_corners_ll()["ll"][1] - resolution, "degrees"
598
+ )
599
+ newgrid["input_position"] = (0, 0)
507
600
  else: # north only: 0,0 has not changed
508
- newgrid['input_lon'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][0], 'degrees')
509
- newgrid['input_lat'] = epygram.util.Angle(geom.gimme_corners_ll()['ll'][1], 'degrees')
510
- newgrid['input_position'] = (0, 0)
511
- newgeom = fpx.geometrys.almost_clone(geom,
512
- dimensions=newdims,
513
- grid=newgrid)
601
+ newgrid["input_lon"] = epygram.util.Angle(
602
+ geom.gimme_corners_ll()["ll"][0], "degrees"
603
+ )
604
+ newgrid["input_lat"] = epygram.util.Angle(
605
+ geom.gimme_corners_ll()["ll"][1], "degrees"
606
+ )
607
+ newgrid["input_position"] = (0, 0)
608
+ newgeom = fpx.geometrys.almost_clone(
609
+ geom, dimensions=newdims, grid=newgrid
610
+ )
514
611
  # loop on fields
515
612
  for f in rin.listfields():
516
- if f == 'SPECSURFGEOPOTEN':
613
+ if f == "SPECSURFGEOPOTEN":
517
614
  continue # meaningless in lonlat clims
518
615
  fld = rin.readfield(f)
519
616
  write_args = {}
520
617
  if isinstance(fld, epygram.fields.H2DField):
521
618
  # compute poles data value as mean of last latitude circle
522
- newdata = numpy.zeros((newdims['Y'], newdims['X']))
619
+ newdata = numpy.zeros((newdims["Y"], newdims["X"]))
523
620
  if south and north:
524
621
  newdata[1:-1, :] = fld.data[...]
525
622
  newdata[0, :] = newdata[1, :].mean()
@@ -534,7 +631,7 @@ def add_poles_to_reglonlat_file(filename):
534
631
  fld = fpx.fields.almost_clone(fld, geometry=newgeom)
535
632
  fld.data = newdata
536
633
  # get initial compression
537
- write_args = dict(compression=rin.fieldscompression[fld.fid['FA']])
634
+ write_args = dict(compression=rin.fieldscompression[fld.fid["FA"]])
538
635
  rout.writefield(fld, **write_args)
539
636
 
540
637
 
@@ -542,19 +639,25 @@ def add_poles_to_reglonlat_file(filename):
542
639
  def split_errgrib_on_shortname(t, rh):
543
640
  """Split a Background Error GRIB file into pieces (based on the GRIB shortName)."""
544
641
  # Sanity checks
545
- if rh.resource.realkind != 'bgstderr' or getattr(rh.resource, 'variable', None) is not None:
546
- raise ValueError('Incompatible resource: {!s}'.format(rh))
642
+ if (
643
+ rh.resource.realkind != "bgstderr"
644
+ or getattr(rh.resource, "variable", None) is not None
645
+ ):
646
+ raise ValueError("Incompatible resource: {!s}".format(rh))
547
647
 
548
648
  def create_section(sn):
549
649
  """Create a new section object for a given shortName (**sn**)."""
550
- sn_r = fpx.resource(variable=sn, ** rh.resource.footprint_as_shallow_dict())
551
- sn_p = fpx.provider(magic='magic:///')
552
- sn_c = fpx.container(filename=rh.container.localpath() + sn,
553
- format='grib', mode='ab+')
554
- secs = t.context.sequence.input(rh=Handler(dict(resource=sn_r,
555
- provider=sn_p,
556
- container=sn_c)),
557
- role='BackgroundStdError')
650
+ sn_r = fpx.resource(
651
+ variable=sn, **rh.resource.footprint_as_shallow_dict()
652
+ )
653
+ sn_p = fpx.provider(magic="magic:///")
654
+ sn_c = fpx.container(
655
+ filename=rh.container.localpath() + sn, format="grib", mode="ab+"
656
+ )
657
+ secs = t.context.sequence.input(
658
+ rh=Handler(dict(resource=sn_r, provider=sn_p, container=sn_c)),
659
+ role="BackgroundStdError",
660
+ )
558
661
  secs[0].get()
559
662
  return secs[0]
560
663
 
@@ -567,11 +670,11 @@ def split_errgrib_on_shortname(t, rh):
567
670
  # Find the ShortName
568
671
  fid = grb.genfid()
569
672
  for k in sorted(fid.keys()):
570
- sn = fid[k].get('shortName', None)
673
+ sn = fid[k].get("shortName", None)
571
674
  if sn is not None:
572
675
  break
573
676
  if sn is None:
574
- raise OSError('No ShortName was found')
677
+ raise OSError("No ShortName was found")
575
678
  # Set up the appropriate section
576
679
  if sn not in sections:
577
680
  sections[sn] = create_section(sn)
@@ -585,7 +688,10 @@ def split_errgrib_on_shortname(t, rh):
585
688
 
586
689
  # Summary
587
690
  if sections:
588
- logger.info('%d new sections created. See details below:', len(sections))
589
- for i, sec in enumerate(sorted(sections.values(),
590
- key=lambda s: s.rh.resource.variable)):
691
+ logger.info(
692
+ "%d new sections created. See details below:", len(sections)
693
+ )
694
+ for i, sec in enumerate(
695
+ sorted(sections.values(), key=lambda s: s.rh.resource.variable)
696
+ ):
591
697
  sec.rh.quickview(nb=i)