vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. vortex/__init__.py +59 -45
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +940 -614
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/serversynctools.py +34 -33
  6. vortex/config.py +19 -22
  7. vortex/data/__init__.py +9 -3
  8. vortex/data/abstractstores.py +593 -655
  9. vortex/data/containers.py +217 -162
  10. vortex/data/contents.py +65 -39
  11. vortex/data/executables.py +93 -102
  12. vortex/data/flow.py +40 -34
  13. vortex/data/geometries.py +228 -132
  14. vortex/data/handlers.py +428 -225
  15. vortex/data/outflow.py +15 -15
  16. vortex/data/providers.py +185 -163
  17. vortex/data/resources.py +48 -42
  18. vortex/data/stores.py +544 -413
  19. vortex/gloves.py +114 -87
  20. vortex/layout/__init__.py +1 -8
  21. vortex/layout/contexts.py +150 -84
  22. vortex/layout/dataflow.py +353 -202
  23. vortex/layout/monitor.py +264 -128
  24. vortex/nwp/__init__.py +5 -2
  25. vortex/nwp/algo/__init__.py +14 -5
  26. vortex/nwp/algo/assim.py +205 -151
  27. vortex/nwp/algo/clim.py +683 -517
  28. vortex/nwp/algo/coupling.py +447 -225
  29. vortex/nwp/algo/eda.py +437 -229
  30. vortex/nwp/algo/eps.py +403 -231
  31. vortex/nwp/algo/forecasts.py +420 -271
  32. vortex/nwp/algo/fpserver.py +683 -307
  33. vortex/nwp/algo/ifsnaming.py +205 -145
  34. vortex/nwp/algo/ifsroot.py +210 -122
  35. vortex/nwp/algo/monitoring.py +132 -76
  36. vortex/nwp/algo/mpitools.py +321 -191
  37. vortex/nwp/algo/odbtools.py +617 -353
  38. vortex/nwp/algo/oopsroot.py +449 -273
  39. vortex/nwp/algo/oopstests.py +90 -56
  40. vortex/nwp/algo/request.py +287 -206
  41. vortex/nwp/algo/stdpost.py +878 -522
  42. vortex/nwp/data/__init__.py +22 -4
  43. vortex/nwp/data/assim.py +125 -137
  44. vortex/nwp/data/boundaries.py +121 -68
  45. vortex/nwp/data/climfiles.py +193 -211
  46. vortex/nwp/data/configfiles.py +73 -69
  47. vortex/nwp/data/consts.py +426 -401
  48. vortex/nwp/data/ctpini.py +59 -43
  49. vortex/nwp/data/diagnostics.py +94 -66
  50. vortex/nwp/data/eda.py +50 -51
  51. vortex/nwp/data/eps.py +195 -146
  52. vortex/nwp/data/executables.py +440 -434
  53. vortex/nwp/data/fields.py +63 -48
  54. vortex/nwp/data/gridfiles.py +183 -111
  55. vortex/nwp/data/logs.py +250 -217
  56. vortex/nwp/data/modelstates.py +180 -151
  57. vortex/nwp/data/monitoring.py +72 -99
  58. vortex/nwp/data/namelists.py +254 -202
  59. vortex/nwp/data/obs.py +400 -308
  60. vortex/nwp/data/oopsexec.py +22 -20
  61. vortex/nwp/data/providers.py +90 -65
  62. vortex/nwp/data/query.py +71 -82
  63. vortex/nwp/data/stores.py +49 -36
  64. vortex/nwp/data/surfex.py +136 -137
  65. vortex/nwp/syntax/__init__.py +1 -1
  66. vortex/nwp/syntax/stdattrs.py +173 -111
  67. vortex/nwp/tools/__init__.py +2 -2
  68. vortex/nwp/tools/addons.py +22 -17
  69. vortex/nwp/tools/agt.py +24 -12
  70. vortex/nwp/tools/bdap.py +16 -5
  71. vortex/nwp/tools/bdcp.py +4 -1
  72. vortex/nwp/tools/bdm.py +3 -0
  73. vortex/nwp/tools/bdmp.py +14 -9
  74. vortex/nwp/tools/conftools.py +728 -378
  75. vortex/nwp/tools/drhook.py +12 -8
  76. vortex/nwp/tools/grib.py +65 -39
  77. vortex/nwp/tools/gribdiff.py +22 -17
  78. vortex/nwp/tools/ifstools.py +82 -42
  79. vortex/nwp/tools/igastuff.py +167 -143
  80. vortex/nwp/tools/mars.py +14 -2
  81. vortex/nwp/tools/odb.py +234 -125
  82. vortex/nwp/tools/partitioning.py +61 -37
  83. vortex/nwp/tools/satrad.py +27 -12
  84. vortex/nwp/util/async.py +83 -55
  85. vortex/nwp/util/beacon.py +10 -10
  86. vortex/nwp/util/diffpygram.py +174 -86
  87. vortex/nwp/util/ens.py +144 -63
  88. vortex/nwp/util/hooks.py +30 -19
  89. vortex/nwp/util/taskdeco.py +28 -24
  90. vortex/nwp/util/usepygram.py +278 -172
  91. vortex/nwp/util/usetnt.py +31 -17
  92. vortex/sessions.py +72 -39
  93. vortex/syntax/__init__.py +1 -1
  94. vortex/syntax/stdattrs.py +410 -171
  95. vortex/syntax/stddeco.py +31 -22
  96. vortex/toolbox.py +327 -192
  97. vortex/tools/__init__.py +11 -2
  98. vortex/tools/actions.py +125 -59
  99. vortex/tools/addons.py +111 -92
  100. vortex/tools/arm.py +42 -22
  101. vortex/tools/compression.py +72 -69
  102. vortex/tools/date.py +11 -4
  103. vortex/tools/delayedactions.py +242 -132
  104. vortex/tools/env.py +75 -47
  105. vortex/tools/folder.py +342 -171
  106. vortex/tools/grib.py +311 -149
  107. vortex/tools/lfi.py +423 -216
  108. vortex/tools/listings.py +109 -40
  109. vortex/tools/names.py +218 -156
  110. vortex/tools/net.py +632 -298
  111. vortex/tools/parallelism.py +93 -61
  112. vortex/tools/prestaging.py +55 -31
  113. vortex/tools/schedulers.py +172 -105
  114. vortex/tools/services.py +402 -333
  115. vortex/tools/storage.py +293 -358
  116. vortex/tools/surfex.py +24 -24
  117. vortex/tools/systems.py +1211 -631
  118. vortex/tools/targets.py +156 -100
  119. vortex/util/__init__.py +1 -1
  120. vortex/util/config.py +377 -327
  121. vortex/util/empty.py +2 -2
  122. vortex/util/helpers.py +56 -24
  123. vortex/util/introspection.py +18 -12
  124. vortex/util/iosponge.py +8 -4
  125. vortex/util/roles.py +4 -6
  126. vortex/util/storefunctions.py +39 -13
  127. vortex/util/structs.py +3 -3
  128. vortex/util/worker.py +29 -17
  129. vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
  130. vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
  131. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
  132. vortex/layout/appconf.py +0 -109
  133. vortex/layout/jobs.py +0 -1276
  134. vortex/layout/nodes.py +0 -1424
  135. vortex/layout/subjobs.py +0 -464
  136. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  137. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  138. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
  139. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
vortex/nwp/__init__.py CHANGED
@@ -3,9 +3,12 @@ The NWP VORTEX extension package.
 """
 
 # Recursive inclusion of packages with potential FootprintBase classes
-from . import algo, data, tools, syntax
+from . import algo as algo
+from . import data as data
+from . import tools as tools
+from . import syntax as syntax
 
 #: No automatic export
 __all__ = []
 
-__tocinfoline__ = 'The NWP VORTEX extension'
+__tocinfoline__ = "The NWP VORTEX extension"
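Note: throughout 2.0.0b2 the combined imports are replaced by one "from . import x as x" statement per module. The redundant-looking alias is the standard explicit re-export idiom: strict type checkers (e.g. mypy with no_implicit_reexport, or pyright) treat only "import x as x" as re-exporting the name. A minimal sketch, not taken from the package (the _internal module below is hypothetical):

    # pkg/__init__.py -- illustrative only, not vortex code
    from . import algo as algo  # explicit re-export: pkg.algo stays public
    from . import _internal     # hypothetical module; under strict re-export
                                # checking this name is NOT considered public

    __all__ = []                # as in the hunk above: no automatic export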
vortex/nwp/algo/__init__.py CHANGED
@@ -3,10 +3,19 @@ AlgoComponents for NWP
 """
 
 # Recursive inclusion of packages with potential FootprintBase classes
-from . import forecasts, fpserver, coupling, mpitools, odbtools, stdpost, assim, \
-    eps, eda, request, monitoring, clim
-from . import oopsroot, oopstests
+from . import forecasts as forecasts
+from . import fpserver as fpserver
+from . import coupling as coupling
+from . import mpitools as mpitools
+from . import odbtools as odbtools
+from . import stdpost as stdpost
+from . import assim as assim
+from . import eps as eps
+from . import eda as eda
+from . import request as request
+from . import monitoring as monitoring
+from . import clim as clim
+from . import oopsroot as oopsroot
+from . import oopstests as oopstests
 
-
-#: No automatic export
 __all__ = []
vortex/nwp/algo/assim.py CHANGED
@@ -24,13 +24,13 @@ class MergeVarBC(Parallel):
     """
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(
-                values = ['mergevarbc'],
+        attr=dict(
+            kind=dict(
+                values=["mergevarbc"],
             ),
-            varbcout = dict(
-                optional = True,
-                default = 'VARBC.cycle_out',
+            varbcout=dict(
+                optional=True,
+                default="VARBC.cycle_out",
             ),
         )
     )
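Note: most hunks in this file only reformat `_footprint` blocks (Black-style double quotes, no spaces around keyword `=`, trailing commas) without changing their meaning. These blocks follow the declarative pattern of the footprints package on which VORTEX is built; below is a minimal sketch, assuming the conventional footprints base class and reusing attribute names from the hunk above (the component class name is illustrative, not part of the package):

    # illustrative footprint declaration in the new (b2) formatting style
    import footprints

    class SomeComponent(footprints.FootprintBase):
        _footprint = dict(
            info="Short description of the component",
            attr=dict(
                kind=dict(
                    values=["somecomponent"],  # values selecting this class
                ),
                varbcout=dict(
                    optional=True,  # a default makes the attribute optional
                    default="VARBC.cycle_out",
                ),
            ),
        )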
@@ -50,21 +50,21 @@ class Anamix(IFSParallel):
     """Merge the surface and atmospheric analyses into a single file"""
 
     _footprint = dict(
-        info='Merge surface and atmospheric analyses',
+        info="Merge surface and atmospheric analyses",
         attr=dict(
             kind=dict(
-                values=['anamix'],
+                values=["anamix"],
             ),
             conf=dict(
                 default=701,
             ),
             xpname=dict(
-                default='CANS',
+                default="CANS",
             ),
             timestep=dict(
                 default=1,
-            )
-        )
+            ),
+        ),
     )
 
 
@@ -72,19 +72,19 @@ class SstAnalysis(IFSParallel):
     """SST (Sea Surface Temperature) Analysis"""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(
-                values = ['sstana', 'sst_ana', 'sst_analysis', 'c931'],
-                remap = dict(autoremap = 'first'),
+        attr=dict(
+            kind=dict(
+                values=["sstana", "sst_ana", "sst_analysis", "c931"],
+                remap=dict(autoremap="first"),
             ),
-            conf = dict(
-                default = 931,
+            conf=dict(
+                default=931,
             ),
-            xpname = dict(
-                default = 'ANAL',
+            xpname=dict(
+                default="ANAL",
             ),
-            timestep = dict(
-                default = '1.',
+            timestep=dict(
+                default="1.",
             ),
         )
     )
@@ -94,36 +94,36 @@ class SeaIceAnalysis(IFSParallel):
     """Sea Ice Analysis"""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(
-                values = ['seaiceana', 'seaice_ana', 'seaice_analysis', 'c932'],
-                remap = dict(autoremap = 'first'),
+        attr=dict(
+            kind=dict(
+                values=["seaiceana", "seaice_ana", "seaice_analysis", "c932"],
+                remap=dict(autoremap="first"),
+            ),
+            conf=dict(
+                default=932,
             ),
-            conf = dict(
-                default = 932,
+            xpname=dict(
+                default="ANAL",
             ),
-            xpname = dict(
-                default = 'ANAL',
+            timestep=dict(
+                default="1.",
             ),
-            timestep = dict(
-                default = '1.',
+            date=dict(
+                type=Date,
             ),
-            date = dict(
-                type = Date,
-            )
         )
     )
 
     def find_namelists(self, opts=None):
         namrh_list = super().find_namelists(opts)
         if not namrh_list:
-            logger.critical('No namelist was found.')
-            raise ValueError('No namelist was found for seaice analysis')
+            logger.critical("No namelist was found.")
+            raise ValueError("No namelist was found for seaice analysis")
         return namrh_list
 
     def prepare_namelist_delta(self, rh, namcontents, namlocal):
         super().prepare_namelist_delta(rh, namcontents, namlocal)
-        self._set_nam_macro(namcontents, namlocal, 'IDAT', int(self.date.ymd))
+        self._set_nam_macro(namcontents, namlocal, "IDAT", int(self.date.ymd))
         return True
 
 
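Note: in `prepare_namelist_delta` above, the `date` attribute is turned into the integer IDAT namelist macro via `int(self.date.ymd)`. A self-contained sketch of that conversion, assuming `Date.ymd` is the YYYYMMDD string (the Date class below is a stand-in, not the package's type):

    # stand-in Date type, for illustration only
    class Date:
        def __init__(self, ymd):
            self.ymd = ymd

    d = Date("20240131")
    idat = int(d.ymd)  # value substituted for the IDAT namelist macro
    assert idat == 20240131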
@@ -131,21 +131,21 @@ class Canari(IFSParallel, odb.OdbComponentDecoMixin):
     """Surface analysis."""
 
     _footprint = dict(
-        info = 'Surface assimilation based on optimal interpolation',
-        attr = dict(
-            kind = dict(
-                values = ['canari'],
+        info="Surface assimilation based on optimal interpolation",
+        attr=dict(
+            kind=dict(
+                values=["canari"],
             ),
-            binarysingle = dict(
-                default = 'basicnwpobsort',
+            binarysingle=dict(
+                default="basicnwpobsort",
             ),
-            conf = dict(
-                default = 701,
+            conf=dict(
+                default=701,
             ),
-            xpname = dict(
-                default = 'CANS',
+            xpname=dict(
+                default="CANS",
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
@@ -153,18 +153,30 @@ class Canari(IFSParallel, odb.OdbComponentDecoMixin):
         super().prepare(rh, opts)
 
         # Looking for input observations
-        obsodb = [x for x in self.lookupodb() if x.rh.resource.part.startswith('surf')]
+        obsodb = [
+            x
+            for x in self.lookupodb()
+            if x.rh.resource.part.startswith("surf")
+        ]
         if not obsodb:
-            raise ValueError('No surface obsdata for canari')
+            raise ValueError("No surface obsdata for canari")
         self.odb_date_and_layout_from_sections(obsodb)
 
         # Find the unique input ODb database
         ssurf = obsodb.pop()
         if obsodb:
-            logger.error('More than one surface obsdata provided')
-        logger.error('Using : %s / %s', ssurf.rh.resource.layout, ssurf.rh.resource.part)
+            logger.error("More than one surface obsdata provided")
+        logger.error(
+            "Using : %s / %s",
+            ssurf.rh.resource.layout,
+            ssurf.rh.resource.part,
+        )
         for sobs in obsodb:
-            logger.error('Skip : %s / %s', sobs.rh.resource.layout, sobs.rh.resource.part)
+            logger.error(
+                "Skip : %s / %s",
+                sobs.rh.resource.layout,
+                sobs.rh.resource.part,
+            )
 
         # Fix paths + generate a global IOASSING file
         cma_path = self.system.path.abspath(ssurf.rh.container.localpath())
@@ -188,25 +200,25 @@ class Screening(IFSParallel, odb.OdbComponentDecoMixin):
     """Observation screening."""
 
     _footprint = dict(
-        info = 'Observations screening.',
-        attr = dict(
-            kind = dict(
-                values = ['screening', 'screen', 'thinning'],
-                remap = dict(autoremap = 'first'),
+        info="Observations screening.",
+        attr=dict(
+            kind=dict(
+                values=["screening", "screen", "thinning"],
+                remap=dict(autoremap="first"),
             ),
-            binarysingle = dict(
-                default = 'basicnwpobsort',
+            binarysingle=dict(
+                default="basicnwpobsort",
             ),
-            ioassign = dict(
-                optional = False,
+            ioassign=dict(
+                optional=False,
             ),
-            conf = dict(
-                default = 2,
+            conf=dict(
+                default=2,
             ),
-            xpname = dict(
-                default = 'SCRE',
+            xpname=dict(
+                default="SCRE",
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
@@ -220,11 +232,11 @@ class Screening(IFSParallel, odb.OdbComponentDecoMixin):
         # Perform the pre-merging stuff (this will create the ECMA virtual DB)
         virtualdb_path = self.odb_merge_if_needed(allodb)
         # Prepare the CCMA DB
-        ccma_path = self.odb_create_db(layout='CCMA')
+        ccma_path = self.odb_create_db(layout="CCMA")
 
         # Fix paths + generate a global IOASSING file
         self.odb.fix_db_path(self.virtualdb, virtualdb_path)
-        self.odb.fix_db_path('CCMA', ccma_path)
+        self.odb.fix_db_path("CCMA", ccma_path)
         self.odb.ioassign_gather(virtualdb_path, ccma_path)
 
         # Some extra settings
@@ -235,7 +247,7 @@ class Screening(IFSParallel, odb.OdbComponentDecoMixin):
         self.odb_handle_raw_dbs()
 
         # Fix the input databases intent
-        self.odb_rw_or_overwrite_method(* allodb)
+        self.odb_rw_or_overwrite_method(*allodb)
 
         # Look for channels namelists and set appropriate links
         self.setchannels()
@@ -246,12 +258,12 @@ class IFSODBCCMA(IFSParallel, odb.OdbComponentDecoMixin):
 
     _abstract = True
    _footprint = dict(
-        attr = dict(
-            virtualdb = dict(
-                default = 'ccma',
+        attr=dict(
+            virtualdb=dict(
+                default="ccma",
             ),
-            binarysingle = dict(
-                default = 'basicnwpobsort',
+            binarysingle=dict(
+                default="basicnwpobsort",
             ),
         )
     )
@@ -264,17 +276,23 @@ class IFSODBCCMA(IFSParallel, odb.OdbComponentDecoMixin):
 
         # Looking for input observations
         allodb = self.lookupodb()
-        allccma = [x for x in allodb if x.rh.resource.layout.lower() == 'ccma']
+        allccma = [x for x in allodb if x.rh.resource.layout.lower() == "ccma"]
         if allccma:
             if len(allccma) > 1:
-                logger.error('Multiple CCMA databases detected: only the first one is taken into account')
+                logger.error(
+                    "Multiple CCMA databases detected: only the first one is taken into account"
+                )
         else:
-            raise ValueError('Missing CCMA input data for ' + self.kind)
+            raise ValueError("Missing CCMA input data for " + self.kind)
 
         # Set env and IOASSIGN
         ccma = allccma.pop()
         ccma_path = sh.path.abspath(ccma.rh.container.localpath())
-        self.odb_date_and_layout_from_sections([ccma, ])
+        self.odb_date_and_layout_from_sections(
+            [
+                ccma,
+            ]
+        )
         self.odb.fix_db_path(ccma.rh.resource.layout, ccma_path)
         self.odb.ioassign_gather(ccma_path)
 
@@ -289,19 +307,19 @@ class Minim(IFSODBCCMA):
     """Observation minimisation."""
 
     _footprint = dict(
-        info='Minimisation in the assimilation process.',
+        info="Minimisation in the assimilation process.",
         attr=dict(
             kind=dict(
-                values=['minim', 'min', 'minimisation'],
-                remap=dict(autoremap='first'),
+                values=["minim", "min", "minimisation"],
+                remap=dict(autoremap="first"),
             ),
             conf=dict(
                 default=131,
             ),
             xpname=dict(
-                default='MINI',
+                default="MINI",
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
@@ -309,28 +327,44 @@ class Minim(IFSODBCCMA):
         super().prepare(rh, opts)
 
         # Check if a preconditioning EV map is here
-        evmaprh = self.context.sequence.effective_inputs(role=('PreconEVMap',
-                                                               'PreconditionningEVMap'),
-                                                         kind='precevmap')
+        evmaprh = self.context.sequence.effective_inputs(
+            role=("PreconEVMap", "PreconditionningEVMap"), kind="precevmap"
+        )
         if evmaprh:
             if len(evmaprh) > 1:
-                logger.warning("Several preconditioning EV maps provided. Using the first one.")
-            nprec_ev = evmaprh[0].rh.contents.data['evlen']
+                logger.warning(
+                    "Several preconditioning EV maps provided. Using the first one."
+                )
+            nprec_ev = evmaprh[0].rh.contents.data["evlen"]
             # If there are preconditioning EV: update the namelist
             if nprec_ev > 0:
-                for namrh in [x.rh for x in self.context.sequence.effective_inputs(role='Namelist',
-                                                                                   kind='namelist',)]:
+                for namrh in [
+                    x.rh
+                    for x in self.context.sequence.effective_inputs(
+                        role="Namelist",
+                        kind="namelist",
+                    )
+                ]:
                     namc = namrh.contents
                     try:
-                        namc['NAMVAR'].NPCVECS = nprec_ev
+                        namc["NAMVAR"].NPCVECS = nprec_ev
                         namc.rewrite(namrh.container)
                     except Exception:
-                        logger.critical('Could not fix NAMVAR in %s', namrh.container.actualpath())
+                        logger.critical(
+                            "Could not fix NAMVAR in %s",
+                            namrh.container.actualpath(),
+                        )
                         raise
-                logger.info("%d preconditioning EV will by used (NPCVECS=%d).", nprec_ev, nprec_ev)
+                logger.info(
+                    "%d preconditioning EV will by used (NPCVECS=%d).",
+                    nprec_ev,
+                    nprec_ev,
+                )
             else:
-                logger.warning("A preconditioning EV map was found, " +
-                               "but no preconditioning EV are available.")
+                logger.warning(
+                    "A preconditioning EV map was found, "
+                    + "but no preconditioning EV are available."
+                )
         else:
             logger.info("No preconditioning EV were found.")
 
@@ -339,11 +373,11 @@ class Minim(IFSODBCCMA):
         sh = self.system
 
         # Look up for PREConditionning Eigen Vectors
-        prec = sh.ls('MEMINI*')
+        prec = sh.ls("MEMINI*")
         if prec:
             prec_info = dict(evlen=len(prec))
-            prec_info['evnum'] = [int(x[6:]) for x in prec]
-            sh.json_dump(prec_info, 'precev_map.out', indent=4)
+            prec_info["evnum"] = [int(x[6:]) for x in prec]
+            sh.json_dump(prec_info, "precev_map.out", indent=4)
 
         super().postfix(rh, opts)
 
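Note: `postfix` above serialises the map of preconditioning eigenvectors (MEMINI* files) that a later `Minim.prepare` reads back through `contents.data["evlen"]`. A minimal sketch of that round trip, assuming `sh.json_dump` writes plain JSON; the file name and keys come from the hunks above:

    import json

    # postfix() side: MEMINI0001, MEMINI0002, ... -> precev_map.out
    prec = ["MEMINI0001", "MEMINI0002"]
    prec_info = {"evlen": len(prec), "evnum": [int(x[6:]) for x in prec]}
    with open("precev_map.out", "w") as fh:
        json.dump(prec_info, fh, indent=4)

    # prepare() side of the next minimisation cycle
    with open("precev_map.out") as fh:
        nprec_ev = json.load(fh)["evlen"]
    assert nprec_ev == 2  # would set NPCVECS=2 in the NAMVAR block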
@@ -352,29 +386,30 @@ class Trajectory(IFSODBCCMA):
     """Observation trajectory."""
 
     _footprint = dict(
-        info='Trajectory in the assimilation process.',
+        info="Trajectory in the assimilation process.",
         attr=dict(
             kind=dict(
-                values=['traj', 'trajectory'],
-                remap=dict(autoremap='first'),
+                values=["traj", "trajectory"],
+                remap=dict(autoremap="first"),
             ),
             conf=dict(
                 default=2,
             ),
             xpname=dict(
-                default='TRAJ',
+                default="TRAJ",
             ),
-        )
+        ),
     )
 
 
 class PseudoTrajectory(BlindRun, drhook.DrHookDecoMixin):
     """Copy a few fields from the Guess file into the Analysis file"""
+
     _footprint = dict(
-        attr = dict(
-            kind = dict(
-                values = ['pseudotraj', 'traj', 'trajectory'],
-                remap = dict(autoremap = 'first'),
+        attr=dict(
+            kind=dict(
+                values=["pseudotraj", "traj", "trajectory"],
+                remap=dict(autoremap="first"),
             ),
         )
     )
@@ -382,35 +417,43 @@ class PseudoTrajectory(BlindRun, drhook.DrHookDecoMixin):
 
 class SstGrb2Ascii(BlindRun):
     """Transform sst grib files from the BDAP into ascii files"""
+
     _footprint = dict(
-        info = 'Binary to change the format of sst BDAP files.',
-        attr = dict(
-            kind = dict(
-                values = ['lect_bdap'],
+        info="Binary to change the format of sst BDAP files.",
+        attr=dict(
+            kind=dict(
+                values=["lect_bdap"],
             ),
-            date = a_date,
-            nlat = dict(
-                default = 0,
+            date=a_date,
+            nlat=dict(
+                default=0,
             ),
-            nlon = dict(
-                default = 0,
-            )
-        )
+            nlon=dict(
+                default=0,
+            ),
+        ),
     )
 
     def prepare(self, rh, opts):
         """Add namelist delta, prepare the environment and build the arguments needed."""
         super().prepare(rh, opts)
-        for namrh in [x.rh for x in self.context.sequence.effective_inputs(role='Namelist',
-                                                                           kind='namelist', )]:
+        for namrh in [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="Namelist",
+                kind="namelist",
+            )
+        ]:
             namc = namrh.contents
             try:
-                namc.newblock('NAMFILE')
-                namc['NAMFILE'].NBFICH = 1
-                namc['NAMFILE']['CCNFICH(1)'] = 'GRIB_SST'
+                namc.newblock("NAMFILE")
+                namc["NAMFILE"].NBFICH = 1
+                namc["NAMFILE"]["CCNFICH(1)"] = "GRIB_SST"
                 namc.rewrite(namrh.container)
             except Exception:
-                logger.critical('Could not fix NAMFILE in %s', namrh.container.actualpath())
+                logger.critical(
+                    "Could not fix NAMFILE in %s", namrh.container.actualpath()
+                )
                 raise
 
     def spawn_command_options(self):
@@ -427,49 +470,60 @@ class SstGrb2Ascii(BlindRun):
 
 class IceNetCDF2Ascii(BlindRun):
     """Transform ice NetCDF files from the BDPE into ascii files"""
+
     _footprint = dict(
-        info = 'Binary to change the format of ice BDPE files.',
-        attr = dict(
-            kind = dict(
-                values = ['ice_nc2ascii'],
-            ),
-            output_file = dict(
-                optional = True,
-                default = "ice_concent"
+        info="Binary to change the format of ice BDPE files.",
+        attr=dict(
+            kind=dict(
+                values=["ice_nc2ascii"],
             ),
-            param = dict(
-                optional = True,
-                default = "ice_conc",
+            output_file=dict(optional=True, default="ice_concent"),
+            param=dict(
+                optional=True,
+                default="ice_conc",
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
         super().prepare(rh, opts)
         # Look for the input files
-        list_netcdf = self.context.sequence.effective_inputs(role='NetCDFfiles',
-                                                             kind='observations')
-        hn_file = ''
-        hs_file = ''
+        list_netcdf = self.context.sequence.effective_inputs(
+            role="NetCDFfiles", kind="observations"
+        )
+        hn_file = ""
+        hs_file = ""
         for sect in list_netcdf:
             part = sect.rh.resource.part
             filename = sect.rh.container.filename
             if part == "ice_hn":
-                if hn_file == '':
+                if hn_file == "":
                     hn_file = filename
-                    logger.info('The input file for the North hemisphere is: %s.', hn_file)
+                    logger.info(
+                        "The input file for the North hemisphere is: %s.",
+                        hn_file,
+                    )
                 else:
-                    logger.warning('There was already one file for the North hemisphere. '
-                                   'The following one, %s, is not used.', filename)
+                    logger.warning(
+                        "There was already one file for the North hemisphere. "
+                        "The following one, %s, is not used.",
+                        filename,
+                    )
             elif part == "ice_hs":
-                if hs_file == '':
+                if hs_file == "":
                     hs_file = filename
-                    logger.info('The input file for the South hemisphere is: %s.', hs_file)
+                    logger.info(
+                        "The input file for the South hemisphere is: %s.",
+                        hs_file,
+                    )
                 else:
-                    logger.warning('There was already one file for the South hemisphere. '
-                                   'The following one, %s, is not used.', filename)
+                    logger.warning(
+                        "There was already one file for the South hemisphere. "
+                        "The following one, %s, is not used.",
+                        filename,
+                    )
             else:
-                logger.warning('The following file is not used: %s.', filename)
+                logger.warning("The following file is not used: %s.", filename)
         self.input_file_hn = hn_file
         self.input_file_hs = hs_file
 
@@ -479,5 +533,5 @@ class IceNetCDF2Ascii(BlindRun):
             file_in_hn=self.input_file_hn,
             file_in_hs=self.input_file_hs,
             param=self.param,
-            file_out=self.output_file
+            file_out=self.output_file,
         )