vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
@@ -12,7 +12,10 @@ from bronx.stdtypes import date
12
12
  from .ifsroot import IFSParallel
13
13
  from ..tools.drhook import DrHookDecoMixin
14
14
  from vortex.algo.components import AlgoComponentError, BlindRun, Parallel
15
- from vortex.algo.components import AlgoComponentDecoMixin, algo_component_deco_mixin_autodoc
15
+ from vortex.algo.components import (
16
+ AlgoComponentDecoMixin,
17
+ algo_component_deco_mixin_autodoc,
18
+ )
16
19
  from vortex.layout.dataflow import intent
17
20
  from vortex.tools.grib import EcGribDecoMixin
18
21
 
@@ -29,7 +32,7 @@ coupling_basedate_fp = footprints.Footprint(
29
32
  basedate=dict(
30
33
  info="The run date of the coupling generating process",
31
34
  type=date.Date,
32
- optional=True
35
+ optional=True,
33
36
  )
34
37
  )
35
38
  )
@@ -39,25 +42,31 @@ coupling_basedate_fp = footprints.Footprint(
39
42
  class CouplingBaseDateNamMixin(AlgoComponentDecoMixin):
40
43
  """Add a basedate attribute and make namelist substitution."""
41
44
 
42
- _MIXIN_EXTRA_FOOTPRINTS = (coupling_basedate_fp, )
45
+ _MIXIN_EXTRA_FOOTPRINTS = (coupling_basedate_fp,)
43
46
 
44
47
  def _prepare_basedate_hook(self, rh, opts):
45
48
  """Update the namelist with date information."""
46
49
 
47
50
  def set_nam_macro(namrh, macro, value):
48
51
  namrh.contents.setmacro(macro, value)
49
- logger.info('Setup macro %s=%s in %s', macro, str(value),
50
- namrh.container.actualpath())
52
+ logger.info(
53
+ "Setup macro %s=%s in %s",
54
+ macro,
55
+ str(value),
56
+ namrh.container.actualpath(),
57
+ )
51
58
 
52
- for namsec in self.context.sequence.effective_inputs(kind=('namelist',)):
59
+ for namsec in self.context.sequence.effective_inputs(
60
+ kind=("namelist",)
61
+ ):
53
62
  if self.basedate is not None:
54
- set_nam_macro(namsec.rh, 'YYYY', int(self.basedate.year))
55
- set_nam_macro(namsec.rh, 'MM', int(self.basedate.month))
56
- set_nam_macro(namsec.rh, 'DD', int(self.basedate.day))
63
+ set_nam_macro(namsec.rh, "YYYY", int(self.basedate.year))
64
+ set_nam_macro(namsec.rh, "MM", int(self.basedate.month))
65
+ set_nam_macro(namsec.rh, "DD", int(self.basedate.day))
57
66
  if namsec.rh.contents.dumps_needs_update:
58
67
  namsec.rh.save()
59
68
 
60
- _MIXIN_PREPARE_HOOKS = (_prepare_basedate_hook, )
69
+ _MIXIN_PREPARE_HOOKS = (_prepare_basedate_hook,)
61
70
 
62
71
 
63
72
  class Coupling(FullPos):
@@ -69,26 +78,28 @@ class Coupling(FullPos):
69
78
  _footprint = [
70
79
  coupling_basedate_fp,
71
80
  dict(
72
- info = "Create coupling files for a Limited Area Model.",
73
- attr = dict(
74
- kind = dict(
75
- values = ['coupling'],
81
+ info="Create coupling files for a Limited Area Model.",
82
+ attr=dict(
83
+ kind=dict(
84
+ values=["coupling"],
76
85
  ),
77
- )
78
- )
86
+ ),
87
+ ),
79
88
  ]
80
89
 
81
90
  @property
82
91
  def realkind(self):
83
- return 'coupling'
92
+ return "coupling"
84
93
 
85
94
  def prepare(self, rh, opts):
86
95
  """Default pre-link for namelist file and domain change."""
87
96
  super().prepare(rh, opts)
88
- namsec = self.setlink(initrole='Namelist', initkind='namelist', initname='fort.4')
89
- for nam in [x.rh for x in namsec if 'NAMFPC' in x.rh.contents]:
97
+ namsec = self.setlink(
98
+ initrole="Namelist", initkind="namelist", initname="fort.4"
99
+ )
100
+ for nam in [x.rh for x in namsec if "NAMFPC" in x.rh.contents]:
90
101
  logger.info('Substitute "AREA" to CFPDOM namelist entry')
91
- nam.contents['NAMFPC']['CFPDOM(1)'] = 'AREA'
102
+ nam.contents["NAMFPC"]["CFPDOM(1)"] = "AREA"
92
103
  nam.save()
93
104
 
94
105
  def execute(self, rh, opts):
@@ -97,142 +108,202 @@ class Coupling(FullPos):
97
108
  sh = self.system
98
109
 
99
110
  cplsec = self.context.sequence.effective_inputs(
100
- role=('InitialCondition', 'CouplingSource'),
101
- kind=('historic', 'analysis')
111
+ role=("InitialCondition", "CouplingSource"),
112
+ kind=("historic", "analysis"),
102
113
  )
103
114
  cplsec.sort(key=lambda s: s.rh.resource.term)
104
- ininc = self.naming_convention('ic', rh)
115
+ ininc = self.naming_convention("ic", rh)
105
116
  infile = ininc()
106
117
  isMany = len(cplsec) > 1
107
- outprefix = 'PF{:s}AREA'.format(self.xpname)
118
+ outprefix = "PF{:s}AREA".format(self.xpname)
108
119
 
109
- cplguess = self.context.sequence.effective_inputs(role='Guess')
120
+ cplguess = self.context.sequence.effective_inputs(role="Guess")
110
121
  cplguess.sort(key=lambda s: s.rh.resource.term)
111
122
  guessing = bool(cplguess)
112
123
 
113
- cplsurf = self.context.sequence.effective_inputs(role=('SurfaceInitialCondition',
114
- 'SurfaceCouplingSource'))
124
+ cplsurf = self.context.sequence.effective_inputs(
125
+ role=("SurfaceInitialCondition", "SurfaceCouplingSource")
126
+ )
115
127
  cplsurf.sort(key=lambda s: s.rh.resource.term)
116
128
  surfacing = bool(cplsurf)
117
- inisurfnc = self.naming_convention('ic', rh, model='surfex')
129
+ inisurfnc = self.naming_convention("ic", rh, model="surfex")
118
130
  infilesurf = inisurfnc()
119
131
  if surfacing:
120
132
  # Link in the Surfex's PGD
121
- sclimnc = self.naming_convention(kind='targetclim', rh=rh, model='surfex')
133
+ sclimnc = self.naming_convention(
134
+ kind="targetclim", rh=rh, model="surfex"
135
+ )
122
136
  self.setlink(
123
- initrole=('ClimPGD',),
124
- initkind=('pgdfa', 'pgdlfi'),
125
- initname=sclimnc(area='AREA')
137
+ initrole=("ClimPGD",),
138
+ initkind=("pgdfa", "pgdlfi"),
139
+ initname=sclimnc(area="AREA"),
126
140
  )
127
141
 
128
142
  for sec in cplsec:
129
143
  r = sec.rh
130
- sh.subtitle('Loop on {!s}'.format(r.resource))
144
+ sh.subtitle("Loop on {!s}".format(r.resource))
131
145
 
132
146
  # First attempt to set actual date as the one of the source model
133
147
  actualdate = r.resource.date + r.resource.term
134
148
 
135
149
  # Expect the coupling source to be there...
136
- self.grab(sec, comment='coupling source')
150
+ self.grab(sec, comment="coupling source")
137
151
 
138
152
  # Set the actual init file
139
153
  if sh.path.exists(infile):
140
154
  if isMany:
141
- logger.critical('Cannot process multiple Historic files if %s exists.', infile)
155
+ logger.critical(
156
+ "Cannot process multiple Historic files if %s exists.",
157
+ infile,
158
+ )
142
159
  else:
143
- sh.cp(r.container.localpath(), infile, fmt=r.container.actualfmt, intent=intent.IN)
160
+ sh.cp(
161
+ r.container.localpath(),
162
+ infile,
163
+ fmt=r.container.actualfmt,
164
+ intent=intent.IN,
165
+ )
144
166
 
145
167
  # If the surface file is needed, set the actual initsurf file
146
168
  if cplsurf:
147
169
  # Expecting the coupling surface source to be there...
148
170
  cplsurf_in = cplsurf.pop(0)
149
- self.grab(cplsurf_in, comment='coupling surface source')
171
+ self.grab(cplsurf_in, comment="coupling surface source")
150
172
  if sh.path.exists(infilesurf):
151
173
  if isMany:
152
- logger.critical('Cannot process multiple surface historic files if %s exists.',
153
- infilesurf)
174
+ logger.critical(
175
+ "Cannot process multiple surface historic files if %s exists.",
176
+ infilesurf,
177
+ )
154
178
  else:
155
- sh.cp(cplsurf_in.rh.container.localpath(), infilesurf,
156
- fmt=cplsurf_in.rh.container.actualfmt, intent=intent.IN)
179
+ sh.cp(
180
+ cplsurf_in.rh.container.localpath(),
181
+ infilesurf,
182
+ fmt=cplsurf_in.rh.container.actualfmt,
183
+ intent=intent.IN,
184
+ )
157
185
  elif surfacing:
158
- logger.error('No more surface source to loop on for coupling')
186
+ logger.error("No more surface source to loop on for coupling")
159
187
 
160
188
  # The output could be an input as well
161
189
  if cplguess:
162
190
  cplout = cplguess.pop(0)
163
191
  cplpath = cplout.rh.container.localpath()
164
192
  if sh.path.exists(cplpath):
165
- actualdateguess = cplout.rh.resource.date + cplout.rh.resource.term
166
- if (actualdate == actualdateguess):
167
- logger.error('The guess date, %s, is different from the source date %s, !',
168
- actualdateguess.reallynice(), actualdate.reallynice())
193
+ actualdateguess = (
194
+ cplout.rh.resource.date + cplout.rh.resource.term
195
+ )
196
+ if actualdate == actualdateguess:
197
+ logger.error(
198
+ "The guess date, %s, is different from the source date %s, !",
199
+ actualdateguess.reallynice(),
200
+ actualdate.reallynice(),
201
+ )
169
202
  # Expect the coupling guess to be there...
170
- self.grab(cplout, comment='coupling guess')
171
- logger.info('Coupling with existing guess <%s>', cplpath)
172
- inoutfile = outprefix + '+0000'
203
+ self.grab(cplout, comment="coupling guess")
204
+ logger.info("Coupling with existing guess <%s>", cplpath)
205
+ inoutfile = outprefix + "+0000"
173
206
  if cplpath != inoutfile:
174
207
  sh.remove(inoutfile, fmt=cplout.rh.container.actualfmt)
175
- sh.move(cplpath, inoutfile,
176
- fmt=cplout.rh.container.actualfmt,
177
- intent=intent.INOUT)
208
+ sh.move(
209
+ cplpath,
210
+ inoutfile,
211
+ fmt=cplout.rh.container.actualfmt,
212
+ intent=intent.INOUT,
213
+ )
178
214
  else:
179
- logger.warning('Missing guess input for coupling <%s>', cplpath)
215
+ logger.warning(
216
+ "Missing guess input for coupling <%s>", cplpath
217
+ )
180
218
  elif guessing:
181
- logger.error('No more guess to loop on for coupling')
219
+ logger.error("No more guess to loop on for coupling")
182
220
 
183
221
  # Find out actual monthly climatological resource
184
222
  actualmonth = date.Month(actualdate)
185
- self.climfile_fixer(rh, convkind='modelclim', month=actualmonth,
186
- inputrole=('GlobalClim', 'InitialClim'),
187
- inputkind='clim_model')
188
- self.climfile_fixer(rh, convkind='targetclim', month=actualmonth,
189
- inputrole=('LocalClim', 'TargetClim'),
190
- inputkind='clim_model', area='AREA')
223
+ self.climfile_fixer(
224
+ rh,
225
+ convkind="modelclim",
226
+ month=actualmonth,
227
+ inputrole=("GlobalClim", "InitialClim"),
228
+ inputkind="clim_model",
229
+ )
230
+ self.climfile_fixer(
231
+ rh,
232
+ convkind="targetclim",
233
+ month=actualmonth,
234
+ inputrole=("LocalClim", "TargetClim"),
235
+ inputkind="clim_model",
236
+ area="AREA",
237
+ )
191
238
 
192
239
  # Standard execution
193
240
  super().execute(rh, opts)
194
241
 
195
242
  # Set a local appropriate file
196
- posfile = [x for x in sh.glob(outprefix + '+*')
197
- if re.match(outprefix + r'\+\d+(?:\:\d+)?(?:\.sfx)?$', x)]
198
- if (len(posfile) > 1):
199
- logger.critical('Many ' + outprefix + ' files, do not know how to adress that')
243
+ posfile = [
244
+ x
245
+ for x in sh.glob(outprefix + "+*")
246
+ if re.match(outprefix + r"\+\d+(?:\:\d+)?(?:\.sfx)?$", x)
247
+ ]
248
+ if len(posfile) > 1:
249
+ logger.critical(
250
+ "Many "
251
+ + outprefix
252
+ + " files, do not know how to adress that"
253
+ )
200
254
  posfile = posfile[0]
201
255
  if self.basedate is None:
202
256
  actualterm = r.resource.term
203
257
  else:
204
258
  actualterm = (actualdate - self.basedate).time()
205
- actualname = (re.sub(r'^.+?((?:_\d+)?)(?:\+[:\d]+)?$', r'CPLOUT\1+', r.container.localpath()) +
206
- actualterm.fmthm)
259
+ actualname = (
260
+ re.sub(
261
+ r"^.+?((?:_\d+)?)(?:\+[:\d]+)?$",
262
+ r"CPLOUT\1+",
263
+ r.container.localpath(),
264
+ )
265
+ + actualterm.fmthm
266
+ )
207
267
  if isMany:
208
- sh.move(sh.path.realpath(posfile), actualname,
209
- fmt=r.container.actualfmt)
268
+ sh.move(
269
+ sh.path.realpath(posfile),
270
+ actualname,
271
+ fmt=r.container.actualfmt,
272
+ )
210
273
  if sh.path.exists(posfile):
211
274
  sh.rm(posfile)
212
275
  else:
213
276
  # This is here because of legacy with .sfx files
214
- sh.cp(sh.path.realpath(posfile), actualname,
215
- fmt=r.container.actualfmt, intent=intent.IN)
277
+ sh.cp(
278
+ sh.path.realpath(posfile),
279
+ actualname,
280
+ fmt=r.container.actualfmt,
281
+ intent=intent.IN,
282
+ )
216
283
 
217
284
  # promises management
218
- expected = [x for x in self.promises if x.rh.container.localpath() == actualname]
285
+ expected = [
286
+ x
287
+ for x in self.promises
288
+ if x.rh.container.localpath() == actualname
289
+ ]
219
290
  if expected:
220
291
  for thispromise in expected:
221
292
  thispromise.put(incache=True)
222
293
 
223
294
  # The only one listing
224
295
  if not self.server_run:
225
- sh.cat('NODE.001_01', output='NODE.all')
296
+ sh.cat("NODE.001_01", output="NODE.all")
226
297
 
227
298
  # prepares the next execution
228
299
  if isMany:
229
300
  # Some cleaning
230
- sh.rmall('PXFPOS*', fmt=r.container.actualfmt)
301
+ sh.rmall("PXFPOS*", fmt=r.container.actualfmt)
231
302
  sh.remove(infile, fmt=r.container.actualfmt)
232
303
  if cplsurf:
233
304
  sh.remove(infilesurf, fmt=r.container.actualfmt)
234
305
  if not self.server_run:
235
- sh.rmall('ncf927', 'dirlst', 'NODE.[0123456789]*', 'std*')
306
+ sh.rmall("ncf927", "dirlst", "NODE.[0123456789]*", "std*")
236
307
 
237
308
 
238
309
  class CouplingLAM(Coupling):
@@ -242,18 +313,18 @@ class CouplingLAM(Coupling):
242
313
  """
243
314
 
244
315
  _footprint = dict(
245
- info = "Create coupling files for a Limited Area Model (useless beyond cy40).",
246
- attr = dict(
247
- kind = dict(
248
- values = ['lamcoupling'],
316
+ info="Create coupling files for a Limited Area Model (useless beyond cy40).",
317
+ attr=dict(
318
+ kind=dict(
319
+ values=["lamcoupling"],
249
320
  ),
250
- )
321
+ ),
251
322
  )
252
323
 
253
324
  def spawn_command_options(self):
254
325
  """Dictionary provided for command line factory."""
255
326
  opts = super().spawn_command_options()
256
- opts['model'] = 'aladin'
327
+ opts["model"] = "aladin"
257
328
  return opts
258
329
 
259
330
 
@@ -261,157 +332,219 @@ class CouplingLAM(Coupling):
261
332
  class PrepMixin(AlgoComponentDecoMixin):
262
333
  """Coupling/Interpolation of Surfex files."""
263
334
 
264
- _MIXIN_EXTRA_FOOTPRINTS = (footprints.Footprint(
265
- info="Coupling/Interpolation of Surfex files.",
266
- attr=dict(
267
- kind=dict(
268
- values=['prep'],
269
- ),
270
- underlyingformat=dict(
271
- info="The format of input data (as expected by the PREP executable).",
272
- values=['fa', 'lfi', 'netcdf'],
273
- optional=True,
274
- default='fa'
275
- ),
276
- underlyingoutputformat=dict(
277
- info=("The format of output data (as expected by the PREP executable)." +
278
- "If omited, *underlyingformat* is used."),
279
- values=['fa', 'lfi', 'netcdf', 'txt'],
280
- optional=True,
281
- ),
282
- outputformat=dict(
283
- info=("The format of output data (as expected by the user)." +
284
- "If omited, same as input data."),
285
- values=['fa', 'lfi', 'netcdf', 'txt'],
286
- optional=True,
335
+ _MIXIN_EXTRA_FOOTPRINTS = (
336
+ footprints.Footprint(
337
+ info="Coupling/Interpolation of Surfex files.",
338
+ attr=dict(
339
+ kind=dict(
340
+ values=["prep"],
341
+ ),
342
+ underlyingformat=dict(
343
+ info="The format of input data (as expected by the PREP executable).",
344
+ values=["fa", "lfi", "netcdf"],
345
+ optional=True,
346
+ default="fa",
347
+ ),
348
+ underlyingoutputformat=dict(
349
+ info=(
350
+ "The format of output data (as expected by the PREP executable)."
351
+ + "If omited, *underlyingformat* is used."
352
+ ),
353
+ values=["fa", "lfi", "netcdf", "txt"],
354
+ optional=True,
355
+ ),
356
+ outputformat=dict(
357
+ info=(
358
+ "The format of output data (as expected by the user)."
359
+ + "If omited, same as input data."
360
+ ),
361
+ values=["fa", "lfi", "netcdf", "txt"],
362
+ optional=True,
363
+ ),
287
364
  ),
288
- )
289
- ), )
365
+ ),
366
+ )
290
367
 
291
368
  @cached_property
292
369
  def _actual_u_output_format(self):
293
- return (self.underlyingoutputformat
294
- if self.underlyingoutputformat is not None else
295
- self.underlyingformat)
370
+ return (
371
+ self.underlyingoutputformat
372
+ if self.underlyingoutputformat is not None
373
+ else self.underlyingformat
374
+ )
296
375
 
297
376
  def _actual_output_format(self, in_format):
298
- return (self.outputformat if self.outputformat is not None
299
- else in_format)
377
+ return (
378
+ self.outputformat if self.outputformat is not None else in_format
379
+ )
300
380
 
301
381
  @staticmethod
302
382
  def _sfx_fmt_remap(fmt):
303
- return dict(netcdf='nc').get(fmt, fmt)
383
+ return dict(netcdf="nc").get(fmt, fmt)
304
384
 
305
385
  @cached_property
306
386
  def _has_sfx_lfi(self):
307
- addon_checked = ('sfx' in self.system.loaded_addons() and
308
- 'lfi' in self.system.loaded_addons())
387
+ addon_checked = (
388
+ "sfx" in self.system.loaded_addons()
389
+ and "lfi" in self.system.loaded_addons()
390
+ )
309
391
  if not addon_checked:
310
392
  raise RuntimeError("The sfx addon is needed... please load it.")
311
393
  return addon_checked
312
394
 
313
395
  def _do_input_format_change(self, section, output_name, output_fmt):
314
- (localpath, infmt) = (section.rh.container.localpath(),
315
- section.rh.container.actualfmt)
396
+ (localpath, infmt) = (
397
+ section.rh.container.localpath(),
398
+ section.rh.container.actualfmt,
399
+ )
316
400
  self.system.subtitle("Processing inputs/climatologies")
317
401
  if section.rh.container.actualfmt != output_fmt:
318
- if infmt == 'fa' and output_fmt == 'lfi' and self._has_sfx_lfi:
402
+ if infmt == "fa" and output_fmt == "lfi" and self._has_sfx_lfi:
319
403
  if self.system.path.exists(output_name):
320
- raise OSError("The file {!r} already exists.".format(output_name))
321
- logger.info("Calling sfxtools' fa2lfi from %s to %s.", localpath, output_name)
404
+ raise OSError(
405
+ "The file {!r} already exists.".format(output_name)
406
+ )
407
+ logger.info(
408
+ "Calling sfxtools' fa2lfi from %s to %s.",
409
+ localpath,
410
+ output_name,
411
+ )
322
412
  self.system.sfx_fa2lfi(localpath, output_name)
323
413
  else:
324
- raise RuntimeError("Format conversion from {!r} to {!r} is not possible".format(
325
- infmt, output_fmt))
414
+ raise RuntimeError(
415
+ "Format conversion from {!r} to {!r} is not possible".format(
416
+ infmt, output_fmt
417
+ )
418
+ )
326
419
  else:
327
420
  if not self.system.path.exists(output_name):
328
421
  logger.info("Linking %s to %s", localpath, output_name)
329
- self.system.cp(localpath, output_name, intent=intent.IN, fmt=infmt)
422
+ self.system.cp(
423
+ localpath, output_name, intent=intent.IN, fmt=infmt
424
+ )
330
425
 
331
426
  def _process_outputs(self, binrh, section, output_clim, output_name):
332
- (radical, outfmt) = (self.system.path.splitext(section.rh.container.localpath())[0],
333
- self._actual_output_format(section.rh.container.actualfmt))
334
- finaloutput = '{:s}_interpolated.{:s}'.format(radical, outfmt)
335
- finallisting = '{:s}_listing'.format(radical)
427
+ (radical, outfmt) = (
428
+ self.system.path.splitext(section.rh.container.localpath())[0],
429
+ self._actual_output_format(section.rh.container.actualfmt),
430
+ )
431
+ finaloutput = "{:s}_interpolated.{:s}".format(radical, outfmt)
432
+ finallisting = "{:s}_listing".format(radical)
336
433
  self.system.subtitle("Processing outputs")
337
434
  if outfmt != self._actual_u_output_format:
338
435
  # There is a need for a format change
339
- if outfmt == 'fa' and self._actual_u_output_format == 'lfi' and self._has_sfx_lfi:
340
- logger.info("Calling lfitools' faempty from %s to %s.", output_clim, finaloutput)
436
+ if (
437
+ outfmt == "fa"
438
+ and self._actual_u_output_format == "lfi"
439
+ and self._has_sfx_lfi
440
+ ):
441
+ logger.info(
442
+ "Calling lfitools' faempty from %s to %s.",
443
+ output_clim,
444
+ finaloutput,
445
+ )
341
446
  self.system.fa_empty(output_clim, finaloutput)
342
- logger.info("Calling sfxtools' lfi2fa from %s to %s.", output_name, finaloutput)
447
+ logger.info(
448
+ "Calling sfxtools' lfi2fa from %s to %s.",
449
+ output_name,
450
+ finaloutput,
451
+ )
343
452
  self.system.sfx_lfi2fa(output_name, finaloutput)
344
- finallfi = '{:s}_interpolated.{:s}'.format(radical, self._actual_u_output_format)
453
+ finallfi = "{:s}_interpolated.{:s}".format(
454
+ radical, self._actual_u_output_format
455
+ )
345
456
  self.system.mv(output_name, finallfi)
346
457
  else:
347
- raise RuntimeError("Format conversion from {!r} to {!r} is not possible".format(
348
- self._actual_u_output_format, outfmt))
458
+ raise RuntimeError(
459
+ "Format conversion from {!r} to {!r} is not possible".format(
460
+ self._actual_u_output_format, outfmt
461
+ )
462
+ )
349
463
  else:
350
464
  # No format change needed
351
465
  logger.info("Moving %s to %s", output_name, finaloutput)
352
466
  self.system.mv(output_name, finaloutput, fmt=outfmt)
353
467
  # Also rename the listing :-)
354
- if binrh.resource.cycle < 'cy48t1':
468
+ if binrh.resource.cycle < "cy48t1":
355
469
  try:
356
- self.system.mv('LISTING_PREP.txt', finallisting)
470
+ self.system.mv("LISTING_PREP.txt", finallisting)
357
471
  except OSError:
358
- self.system.mv('LISTING_PREP0.txt', finallisting)
472
+ self.system.mv("LISTING_PREP0.txt", finallisting)
359
473
  else:
360
- self.system.mv('LISTING_PREP0.txt', finallisting)
474
+ self.system.mv("LISTING_PREP0.txt", finallisting)
361
475
  return finaloutput
362
476
 
363
477
  def _prepare_prep_hook(self, rh, opts):
364
478
  """Default pre-link for namelist file and domain change."""
365
479
  # Convert the initial clim if needed...
366
- iniclim = self.context.sequence.effective_inputs(role=('InitialClim',))
480
+ iniclim = self.context.sequence.effective_inputs(role=("InitialClim",))
367
481
  if not (len(iniclim) == 1):
368
482
  raise AlgoComponentError("One Initial clim have to be provided")
369
- self._do_input_format_change(iniclim[0],
370
- 'PGD1.' + self._sfx_fmt_remap(self.underlyingformat),
371
- self.underlyingformat)
483
+ self._do_input_format_change(
484
+ iniclim[0],
485
+ "PGD1." + self._sfx_fmt_remap(self.underlyingformat),
486
+ self.underlyingformat,
487
+ )
372
488
  # Convert the target clim if needed...
373
- targetclim = self.context.sequence.effective_inputs(role=('TargetClim',))
489
+ targetclim = self.context.sequence.effective_inputs(
490
+ role=("TargetClim",)
491
+ )
374
492
  if not (len(targetclim) == 1):
375
493
  raise AlgoComponentError("One Target clim have to be provided")
376
- self._do_input_format_change(targetclim[0],
377
- 'PGD2.' + self._sfx_fmt_remap(self._actual_u_output_format),
378
- self._actual_u_output_format)
494
+ self._do_input_format_change(
495
+ targetclim[0],
496
+ "PGD2." + self._sfx_fmt_remap(self._actual_u_output_format),
497
+ self._actual_u_output_format,
498
+ )
379
499
 
380
- _MIXIN_PREPARE_HOOKS = (_prepare_prep_hook, )
500
+ _MIXIN_PREPARE_HOOKS = (_prepare_prep_hook,)
381
501
 
382
502
  def _spawn_hook_prep_hook(self):
383
503
  """Dump the namelists."""
384
- for namsec in self.context.sequence.effective_inputs(kind=('namelist', )):
385
- self.system.subtitle("Here is the content of the {:s} namelist"
386
- .format(namsec.rh.container.actualpath()))
504
+ for namsec in self.context.sequence.effective_inputs(
505
+ kind=("namelist",)
506
+ ):
507
+ self.system.subtitle(
508
+ "Here is the content of the {:s} namelist".format(
509
+ namsec.rh.container.actualpath()
510
+ )
511
+ )
387
512
  namsec.rh.container.cat()
388
513
 
389
- _MIXIN_SPAWN_HOOKS = (_spawn_hook_prep_hook, )
514
+ _MIXIN_SPAWN_HOOKS = (_spawn_hook_prep_hook,)
390
515
 
391
516
  def _execute_prep_common(self, rh, opts):
392
517
  """Loop on the various initial conditions provided."""
393
518
  sh = self.system
394
519
 
395
520
  cplsec = self.context.sequence.effective_inputs(
396
- role=('InitialCondition', 'CouplingSource'),
397
- kind=('historic', 'analysis')
521
+ role=("InitialCondition", "CouplingSource"),
522
+ kind=("historic", "analysis"),
398
523
  )
399
524
  cplsec.sort(key=lambda s: s.rh.resource.term)
400
- infile = 'PREP1.{:s}'.format(self._sfx_fmt_remap(self.underlyingformat))
401
- outfile = 'PREP2.{:s}'.format(self._sfx_fmt_remap(self._actual_u_output_format))
402
- targetclim = self.context.sequence.effective_inputs(role=('TargetClim',))
525
+ infile = "PREP1.{:s}".format(
526
+ self._sfx_fmt_remap(self.underlyingformat)
527
+ )
528
+ outfile = "PREP2.{:s}".format(
529
+ self._sfx_fmt_remap(self._actual_u_output_format)
530
+ )
531
+ targetclim = self.context.sequence.effective_inputs(
532
+ role=("TargetClim",)
533
+ )
403
534
  targetclim = targetclim[0].rh.container.localpath()
404
535
 
405
536
  for sec in cplsec:
406
537
  r = sec.rh
407
- sh.header('Loop on {:s}'.format(r.container.localpath()))
538
+ sh.header("Loop on {:s}".format(r.container.localpath()))
408
539
 
409
540
  # Expect the coupling source to be there...
410
- self.grab(sec, comment='coupling source')
541
+ self.grab(sec, comment="coupling source")
411
542
 
412
543
  # Set the actual init file
413
544
  if sh.path.exists(infile):
414
- logger.critical('Cannot process input files if %s exists.', infile)
545
+ logger.critical(
546
+ "Cannot process input files if %s exists.", infile
547
+ )
415
548
  self._do_input_format_change(sec, infile, self.underlyingformat)
416
549
 
417
550
  # Standard execution
@@ -423,27 +556,43 @@ class PrepMixin(AlgoComponentDecoMixin):
423
556
  actualname = self._process_outputs(rh, sec, targetclim, outfile)
424
557
 
425
558
  # promises management
426
- expected = [x for x in self.promises if x.rh.container.localpath() == actualname]
559
+ expected = [
560
+ x
561
+ for x in self.promises
562
+ if x.rh.container.localpath() == actualname
563
+ ]
427
564
  if expected:
428
565
  for thispromise in expected:
429
566
  thispromise.put(incache=True)
430
567
 
431
568
  # Some cleaning
432
- sh.rmall('*.des')
433
- sh.rmall('PREP1.*')
569
+ sh.rmall("*.des")
570
+ sh.rmall("PREP1.*")
434
571
 
435
572
  _MIXIN_EXECUTE_OVERWRITE = _execute_prep_common
436
573
 
437
574
 
438
- class Prep(BlindRun, PrepMixin, CouplingBaseDateNamMixin,
439
- DrHookDecoMixin, EcGribDecoMixin):
575
+ class Prep(
576
+ BlindRun,
577
+ PrepMixin,
578
+ CouplingBaseDateNamMixin,
579
+ DrHookDecoMixin,
580
+ EcGribDecoMixin,
581
+ ):
440
582
  """Coupling/Interpolation of Surfex files (non-MPI version)."""
583
+
441
584
  pass
442
585
 
443
586
 
444
- class ParallelPrep(Parallel, PrepMixin, CouplingBaseDateNamMixin,
445
- DrHookDecoMixin, EcGribDecoMixin):
587
+ class ParallelPrep(
588
+ Parallel,
589
+ PrepMixin,
590
+ CouplingBaseDateNamMixin,
591
+ DrHookDecoMixin,
592
+ EcGribDecoMixin,
593
+ ):
446
594
  """Coupling/Interpolation of Surfex files (MPI version)."""
595
+
447
596
  pass
448
597
 
449
598
 
@@ -451,18 +600,16 @@ class C901(IFSParallel):
451
600
  """Run of C901 configuration."""
452
601
 
453
602
  _footprint = dict(
454
- info = "Run C901 configuration",
455
- attr = dict(
456
- kind = dict(
457
- values = ["c901", ]
458
- ),
459
- clim = dict(
460
- type = bool
603
+ info="Run C901 configuration",
604
+ attr=dict(
605
+ kind=dict(
606
+ values=[
607
+ "c901",
608
+ ]
461
609
  ),
462
- xpname = dict(
463
- default = 'a001'
464
- )
465
- )
610
+ clim=dict(type=bool),
611
+ xpname=dict(default="a001"),
612
+ ),
466
613
  )
467
614
 
468
615
  SPECTRAL_FILE_SH = "ICMSH{prefix}INIT{suffix}"
@@ -470,12 +617,16 @@ class C901(IFSParallel):
470
617
  GRIDPOINT_FILE_GG = "ICMGG{prefix}INIT{suffix}"
471
618
  OUTPUT_FILE_NAME = "CN90x{}INIT"
472
619
  OUTPUT_LISTING_NAME = "NODE.001_01"
473
- LIST_INPUT_FILES = [("SpectralFileSH", SPECTRAL_FILE_SH),
474
- ("GridpointFileUA", GRIDPOINT_FILE_UA),
475
- ("GridpointFileGG", GRIDPOINT_FILE_GG)]
476
- LIST_CST_INPUT_FILES = [("ConstantSpectralFileSH", SPECTRAL_FILE_SH),
477
- ("ConstantGridpointFileUA", GRIDPOINT_FILE_UA),
478
- ("ConstantGridpointFileGG", GRIDPOINT_FILE_GG)]
620
+ LIST_INPUT_FILES = [
621
+ ("SpectralFileSH", SPECTRAL_FILE_SH),
622
+ ("GridpointFileUA", GRIDPOINT_FILE_UA),
623
+ ("GridpointFileGG", GRIDPOINT_FILE_GG),
624
+ ]
625
+ LIST_CST_INPUT_FILES = [
626
+ ("ConstantSpectralFileSH", SPECTRAL_FILE_SH),
627
+ ("ConstantGridpointFileUA", GRIDPOINT_FILE_UA),
628
+ ("ConstantGridpointFileGG", GRIDPOINT_FILE_GG),
629
+ ]
479
630
 
480
631
  @property
481
632
  def realkind(self):
@@ -484,30 +635,47 @@ class C901(IFSParallel):
484
635
  def sort_files_per_prefix(self, list_types, unique=False):
485
636
  """Function used to sort the files according to their prefix in a given type"""
486
637
  result = dict()
487
- for (file_role, file_template) in list_types:
638
+ for file_role, file_template in list_types:
488
639
  result[file_role] = dict()
489
640
  input_files = self.context.sequence.effective_inputs(
490
641
  role=file_role
491
642
  )
492
- template = file_template.format(prefix=r"(?P<prefix>\S{4})", suffix=r"(?P<suffix>\S*)")
643
+ template = file_template.format(
644
+ prefix=r"(?P<prefix>\S{4})", suffix=r"(?P<suffix>\S*)"
645
+ )
493
646
  for file_s in input_files:
494
647
  file_name = file_s.rh.container.filename
495
648
  find_elements = re.search(template, file_name)
496
649
  if find_elements is None:
497
- logger.error("The name of the file %s do not follow the template %s.",
498
- file_name, template)
499
- raise ValueError("The name of the file do not follow the template.")
650
+ logger.error(
651
+ "The name of the file %s do not follow the template %s.",
652
+ file_name,
653
+ template,
654
+ )
655
+ raise ValueError(
656
+ "The name of the file do not follow the template."
657
+ )
500
658
  else:
501
659
  if find_elements.group("prefix") not in result[file_role]:
502
- result[file_role][find_elements.group("prefix")] = list()
660
+ result[file_role][find_elements.group("prefix")] = (
661
+ list()
662
+ )
503
663
  else:
504
664
  if unique:
505
- logger.error("Only one file should be present for each type and each suffix.")
506
- raise ValueError("Only one file should be present for each suffix.")
507
- result[file_role][find_elements.group("prefix")].append(file_s)
665
+ logger.error(
666
+ "Only one file should be present for each type and each suffix."
667
+ )
668
+ raise ValueError(
669
+ "Only one file should be present for each suffix."
670
+ )
671
+ result[file_role][find_elements.group("prefix")].append(
672
+ file_s
673
+ )
508
674
  if result[file_role]:
509
675
  for file_prefix in result[file_role]:
510
- result[file_role][file_prefix].sort(key=lambda s: s.rh.resource.date + s.rh.resource.term)
676
+ result[file_role][file_prefix].sort(
677
+ key=lambda s: s.rh.resource.date + s.rh.resource.term
678
+ )
511
679
  else:
512
680
  del result[file_role]
513
681
  return result
@@ -518,13 +686,21 @@ class C901(IFSParallel):
518
686
  sh = self.system
519
687
 
520
688
  # Create the template for files to be removed at each validity date and for the outputname
521
- deleted_spectral_file_SH = self.SPECTRAL_FILE_SH.format(prefix="*", suffix="")
522
- deleted_gridpoint_file_UA = self.GRIDPOINT_FILE_UA.format(prefix="*", suffix="")
523
- deleted_gridpoint_file_GG = self.GRIDPOINT_FILE_GG.format(prefix="*", suffix="")
689
+ deleted_spectral_file_SH = self.SPECTRAL_FILE_SH.format(
690
+ prefix="*", suffix=""
691
+ )
692
+ deleted_gridpoint_file_UA = self.GRIDPOINT_FILE_UA.format(
693
+ prefix="*", suffix=""
694
+ )
695
+ deleted_gridpoint_file_GG = self.GRIDPOINT_FILE_GG.format(
696
+ prefix="*", suffix=""
697
+ )
524
698
  output_name = self.OUTPUT_FILE_NAME.format(self.xpname.upper())
525
699
 
526
700
  # Sort input files
527
- sorted_cst_input_files = self.sort_files_per_prefix(self.LIST_CST_INPUT_FILES, unique=True)
701
+ sorted_cst_input_files = self.sort_files_per_prefix(
702
+ self.LIST_CST_INPUT_FILES, unique=True
703
+ )
528
704
  sorted_input_files = self.sort_files_per_prefix(self.LIST_INPUT_FILES)
529
705
 
530
706
  # Determine the validity present for each non constant input files,
@@ -533,77 +709,123 @@ class C901(IFSParallel):
533
709
  input_validity = list()
534
710
  for file_role in sorted_input_files:
535
711
  for file_prefix in sorted_input_files[file_role]:
536
- input_validity.append([s.rh.resource.date + s.rh.resource.term
537
- for s in sorted_input_files[file_role][file_prefix]])
712
+ input_validity.append(
713
+ [
714
+ s.rh.resource.date + s.rh.resource.term
715
+ for s in sorted_input_files[file_role][file_prefix]
716
+ ]
717
+ )
538
718
  test_wrong_input_validity = True
539
719
  for i in range(1, len(input_validity)):
540
- test_wrong_input_validity = test_wrong_input_validity and (input_validity[0] == input_validity[i])
541
- self.algoassert(test_wrong_input_validity,
542
- "The files of each type must have the same validity dates.")
720
+ test_wrong_input_validity = test_wrong_input_validity and (
721
+ input_validity[0] == input_validity[i]
722
+ )
723
+ self.algoassert(
724
+ test_wrong_input_validity,
725
+ "The files of each type must have the same validity dates.",
726
+ )
543
727
 
544
728
  # Modify namelist
545
729
  input_namelist = self.context.sequence.effective_inputs(
546
- role="Namelist",
547
- kind="namelist"
730
+ role="Namelist", kind="namelist"
548
731
  )
549
732
  for namelist in input_namelist:
550
733
  namcontents = namelist.rh.contents
551
- self._set_nam_macro(namcontents, namelist.rh.container.actualpath(),
552
- 'LLCLIM', self.clim)
734
+ self._set_nam_macro(
735
+ namcontents,
736
+ namelist.rh.container.actualpath(),
737
+ "LLCLIM",
738
+ self.clim,
739
+ )
553
740
  if namcontents.dumps_needs_update:
554
741
  namcontents.rewrite(namelist.rh.container)
555
742
 
556
743
  for current_validity in input_validity[0]:
557
744
  # Deal with constant input files (gridpoint and spectral)
558
- for (file_role, file_template) in self.LIST_CST_INPUT_FILES:
745
+ for file_role, file_template in self.LIST_CST_INPUT_FILES:
559
746
  if file_role in sorted_cst_input_files:
560
747
  for file_prefix in sorted_cst_input_files[file_role]:
561
- file_name = file_template.format(prefix=file_prefix, suffix="")
562
- current_file_input = sorted_cst_input_files[file_role][file_prefix][0]
563
- self.algoassert(not sh.path.exists(file_name),
564
- "The file {} already exists. It should not.".format(file_name))
565
- sh.cp(current_file_input.rh.container.iotarget(), file_name, intent="in")
748
+ file_name = file_template.format(
749
+ prefix=file_prefix, suffix=""
750
+ )
751
+ current_file_input = sorted_cst_input_files[file_role][
752
+ file_prefix
753
+ ][0]
754
+ self.algoassert(
755
+ not sh.path.exists(file_name),
756
+ "The file {} already exists. It should not.".format(
757
+ file_name
758
+ ),
759
+ )
760
+ sh.cp(
761
+ current_file_input.rh.container.iotarget(),
762
+ file_name,
763
+ intent="in",
764
+ )
566
765
 
567
766
  # Deal with other input files (gridpoint and spectral)
568
- for (file_role, file_template) in self.LIST_INPUT_FILES:
767
+ for file_role, file_template in self.LIST_INPUT_FILES:
569
768
  if file_role in sorted_input_files:
570
769
  for file_prefix in sorted_input_files[file_role]:
571
- file_name = file_template.format(prefix=file_prefix, suffix="")
572
- current_file_input = sorted_input_files[file_role][file_prefix].pop()
573
- self.algoassert(not sh.path.exists(file_name),
574
- "The file {} already exists. It should not.".format(file_name))
575
- sh.cp(current_file_input.rh.container.iotarget(), file_name, intent="in")
770
+ file_name = file_template.format(
771
+ prefix=file_prefix, suffix=""
772
+ )
773
+ current_file_input = sorted_input_files[file_role][
774
+ file_prefix
775
+ ].pop()
776
+ self.algoassert(
777
+ not sh.path.exists(file_name),
778
+ "The file {} already exists. It should not.".format(
779
+ file_name
780
+ ),
781
+ )
782
+ sh.cp(
783
+ current_file_input.rh.container.iotarget(),
784
+ file_name,
785
+ intent="in",
786
+ )
576
787
 
577
788
  if self.clim:
578
789
  # Find the right climatology file
579
790
  current_month = date.Month(current_validity)
580
- self.climfile_fixer(rh, convkind='modelclim', month=current_month,
581
- inputrole=('GlobalClim', 'InitialClim'),
582
- inputkind='clim_model')
791
+ self.climfile_fixer(
792
+ rh,
793
+ convkind="modelclim",
794
+ month=current_month,
795
+ inputrole=("GlobalClim", "InitialClim"),
796
+ inputkind="clim_model",
797
+ )
583
798
 
584
799
  # Standard execution
585
800
  super().execute(rh, opts)
586
801
  # Move the output file
587
802
  current_term = current_file_input.rh.resource.term
588
- sh.move(output_name, output_name + "+{}".format(current_term.fmthm))
803
+ sh.move(
804
+ output_name, output_name + "+{}".format(current_term.fmthm)
805
+ )
589
806
  # Cat all the listings into a single one
590
- sh.cat(self.OUTPUT_LISTING_NAME, output='NODE.all')
807
+ sh.cat(self.OUTPUT_LISTING_NAME, output="NODE.all")
591
808
  # Remove unneeded files
592
- sh.rmall(deleted_spectral_file_SH, deleted_gridpoint_file_GG, deleted_gridpoint_file_UA,
593
- 'std*', self.OUTPUT_LISTING_NAME)
809
+ sh.rmall(
810
+ deleted_spectral_file_SH,
811
+ deleted_gridpoint_file_GG,
812
+ deleted_gridpoint_file_UA,
813
+ "std*",
814
+ self.OUTPUT_LISTING_NAME,
815
+ )
594
816
 
595
817
 
596
818
  class DomeoForcingAtmo(BlindRun, CouplingBaseDateNamMixin):
597
819
  """Correct the Domeo forcing file."""
598
820
 
599
821
  _footprint = dict(
600
- info='Domeo Forcing Atmo',
822
+ info="Domeo Forcing Atmo",
601
823
  attr=dict(
602
824
  kind=dict(
603
- values=['domeo_forcing'],
825
+ values=["domeo_forcing"],
604
826
  ),
605
827
  basedate=dict(
606
828
  optional=False,
607
829
  ),
608
- )
830
+ ),
609
831
  )