vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. vortex/__init__.py +59 -45
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +940 -614
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/serversynctools.py +34 -33
  6. vortex/config.py +19 -22
  7. vortex/data/__init__.py +9 -3
  8. vortex/data/abstractstores.py +593 -655
  9. vortex/data/containers.py +217 -162
  10. vortex/data/contents.py +65 -39
  11. vortex/data/executables.py +93 -102
  12. vortex/data/flow.py +40 -34
  13. vortex/data/geometries.py +228 -132
  14. vortex/data/handlers.py +428 -225
  15. vortex/data/outflow.py +15 -15
  16. vortex/data/providers.py +185 -163
  17. vortex/data/resources.py +48 -42
  18. vortex/data/stores.py +544 -413
  19. vortex/gloves.py +114 -87
  20. vortex/layout/__init__.py +1 -8
  21. vortex/layout/contexts.py +150 -84
  22. vortex/layout/dataflow.py +353 -202
  23. vortex/layout/monitor.py +264 -128
  24. vortex/nwp/__init__.py +5 -2
  25. vortex/nwp/algo/__init__.py +14 -5
  26. vortex/nwp/algo/assim.py +205 -151
  27. vortex/nwp/algo/clim.py +683 -517
  28. vortex/nwp/algo/coupling.py +447 -225
  29. vortex/nwp/algo/eda.py +437 -229
  30. vortex/nwp/algo/eps.py +403 -231
  31. vortex/nwp/algo/forecasts.py +420 -271
  32. vortex/nwp/algo/fpserver.py +683 -307
  33. vortex/nwp/algo/ifsnaming.py +205 -145
  34. vortex/nwp/algo/ifsroot.py +210 -122
  35. vortex/nwp/algo/monitoring.py +132 -76
  36. vortex/nwp/algo/mpitools.py +321 -191
  37. vortex/nwp/algo/odbtools.py +617 -353
  38. vortex/nwp/algo/oopsroot.py +449 -273
  39. vortex/nwp/algo/oopstests.py +90 -56
  40. vortex/nwp/algo/request.py +287 -206
  41. vortex/nwp/algo/stdpost.py +878 -522
  42. vortex/nwp/data/__init__.py +22 -4
  43. vortex/nwp/data/assim.py +125 -137
  44. vortex/nwp/data/boundaries.py +121 -68
  45. vortex/nwp/data/climfiles.py +193 -211
  46. vortex/nwp/data/configfiles.py +73 -69
  47. vortex/nwp/data/consts.py +426 -401
  48. vortex/nwp/data/ctpini.py +59 -43
  49. vortex/nwp/data/diagnostics.py +94 -66
  50. vortex/nwp/data/eda.py +50 -51
  51. vortex/nwp/data/eps.py +195 -146
  52. vortex/nwp/data/executables.py +440 -434
  53. vortex/nwp/data/fields.py +63 -48
  54. vortex/nwp/data/gridfiles.py +183 -111
  55. vortex/nwp/data/logs.py +250 -217
  56. vortex/nwp/data/modelstates.py +180 -151
  57. vortex/nwp/data/monitoring.py +72 -99
  58. vortex/nwp/data/namelists.py +254 -202
  59. vortex/nwp/data/obs.py +400 -308
  60. vortex/nwp/data/oopsexec.py +22 -20
  61. vortex/nwp/data/providers.py +90 -65
  62. vortex/nwp/data/query.py +71 -82
  63. vortex/nwp/data/stores.py +49 -36
  64. vortex/nwp/data/surfex.py +136 -137
  65. vortex/nwp/syntax/__init__.py +1 -1
  66. vortex/nwp/syntax/stdattrs.py +173 -111
  67. vortex/nwp/tools/__init__.py +2 -2
  68. vortex/nwp/tools/addons.py +22 -17
  69. vortex/nwp/tools/agt.py +24 -12
  70. vortex/nwp/tools/bdap.py +16 -5
  71. vortex/nwp/tools/bdcp.py +4 -1
  72. vortex/nwp/tools/bdm.py +3 -0
  73. vortex/nwp/tools/bdmp.py +14 -9
  74. vortex/nwp/tools/conftools.py +728 -378
  75. vortex/nwp/tools/drhook.py +12 -8
  76. vortex/nwp/tools/grib.py +65 -39
  77. vortex/nwp/tools/gribdiff.py +22 -17
  78. vortex/nwp/tools/ifstools.py +82 -42
  79. vortex/nwp/tools/igastuff.py +167 -143
  80. vortex/nwp/tools/mars.py +14 -2
  81. vortex/nwp/tools/odb.py +234 -125
  82. vortex/nwp/tools/partitioning.py +61 -37
  83. vortex/nwp/tools/satrad.py +27 -12
  84. vortex/nwp/util/async.py +83 -55
  85. vortex/nwp/util/beacon.py +10 -10
  86. vortex/nwp/util/diffpygram.py +174 -86
  87. vortex/nwp/util/ens.py +144 -63
  88. vortex/nwp/util/hooks.py +30 -19
  89. vortex/nwp/util/taskdeco.py +28 -24
  90. vortex/nwp/util/usepygram.py +278 -172
  91. vortex/nwp/util/usetnt.py +31 -17
  92. vortex/sessions.py +72 -39
  93. vortex/syntax/__init__.py +1 -1
  94. vortex/syntax/stdattrs.py +410 -171
  95. vortex/syntax/stddeco.py +31 -22
  96. vortex/toolbox.py +327 -192
  97. vortex/tools/__init__.py +11 -2
  98. vortex/tools/actions.py +125 -59
  99. vortex/tools/addons.py +111 -92
  100. vortex/tools/arm.py +42 -22
  101. vortex/tools/compression.py +72 -69
  102. vortex/tools/date.py +11 -4
  103. vortex/tools/delayedactions.py +242 -132
  104. vortex/tools/env.py +75 -47
  105. vortex/tools/folder.py +342 -171
  106. vortex/tools/grib.py +311 -149
  107. vortex/tools/lfi.py +423 -216
  108. vortex/tools/listings.py +109 -40
  109. vortex/tools/names.py +218 -156
  110. vortex/tools/net.py +632 -298
  111. vortex/tools/parallelism.py +93 -61
  112. vortex/tools/prestaging.py +55 -31
  113. vortex/tools/schedulers.py +172 -105
  114. vortex/tools/services.py +402 -333
  115. vortex/tools/storage.py +293 -358
  116. vortex/tools/surfex.py +24 -24
  117. vortex/tools/systems.py +1211 -631
  118. vortex/tools/targets.py +156 -100
  119. vortex/util/__init__.py +1 -1
  120. vortex/util/config.py +377 -327
  121. vortex/util/empty.py +2 -2
  122. vortex/util/helpers.py +56 -24
  123. vortex/util/introspection.py +18 -12
  124. vortex/util/iosponge.py +8 -4
  125. vortex/util/roles.py +4 -6
  126. vortex/util/storefunctions.py +39 -13
  127. vortex/util/structs.py +3 -3
  128. vortex/util/worker.py +29 -17
  129. vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
  130. vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
  131. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
  132. vortex/layout/appconf.py +0 -109
  133. vortex/layout/jobs.py +0 -1276
  134. vortex/layout/nodes.py +0 -1424
  135. vortex/layout/subjobs.py +0 -464
  136. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  137. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  138. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
  139. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
@@ -35,52 +35,49 @@ class Forecast(IFSParallel):
     _footprint = [
         outputid_deco,
         dict(
-            info = "Run a forecast with Arpege/IFS.",
-            attr = dict(
-                kind = dict(
-                    values = ['forecast', 'fc'],
-                    remap = dict(forecast = 'fc')
+            info="Run a forecast with Arpege/IFS.",
+            attr=dict(
+                kind=dict(
+                    values=["forecast", "fc"], remap=dict(forecast="fc")
                 ),
-                hist_terms = dict(
-                    info = "The list of terms when historical file production is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                hist_terms=dict(
+                    info="The list of terms when historical file production is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
-                surfhist_terms = dict(
-                    info ="The list of terms when surface file production is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                surfhist_terms=dict(
+                    info="The list of terms when surface file production is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
                 pos_terms=dict(
-                    info = "The list of terms when post-processed data is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                    info="The list of terms when post-processed data is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
                 s_norm_terms=dict(
-                    info = "The list of terms when spectal norms should be computed.",
-                    type = footprints.FPList,
-                    optional = True,
+                    info="The list of terms when spectal norms should be computed.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
-                flyargs = dict(
-                    default = ('ICMSH', 'PF'),
+                flyargs=dict(
+                    default=("ICMSH", "PF"),
                 ),
-                xpname = dict(
-                    default = 'FCST'
+                xpname=dict(default="FCST"),
+                ddhpack=dict(
+                    info="After run, gather the DDH output file in directories.",
+                    type=bool,
+                    optional=True,
+                    default=False,
+                    doc_zorder=-5,
                 ),
-                ddhpack = dict(
-                    info = "After run, gather the DDH output file in directories.",
-                    type = bool,
-                    optional = True,
-                    default = False,
-                    doc_zorder = -5,
-                ),
-            )
-        )
+            ),
+        ),
     ]
 
     @property
     def realkind(self):
-        return 'forecast'
+        return "forecast"
 
     def _outputs_configurator(self, bin_rh):
         return footprints.proxy.ifsoutputs_configurator(
@@ -93,10 +90,9 @@ class Forecast(IFSParallel):
         """Default pre-link for the initial condition file"""
         super().prepare(rh, opts)
 
-        ininc = self.naming_convention('ic', rh)
+        ininc = self.naming_convention("ic", rh)
         analysis = self.setlink(
-            initrole=('InitialCondition', 'Analysis'),
-            initname=ininc()
+            initrole=("InitialCondition", "Analysis"), initname=ininc()
         )
 
         if analysis:
@@ -104,10 +100,14 @@ class Forecast(IFSParallel):
             thismonth = analysis.rh.resource.date.month
 
             # Possibly fix the model clim
-            if self.do_climfile_fixer(rh, convkind='modelclim'):
-                self.climfile_fixer(rh, convkind='modelclim', month=thismonth,
-                                    inputrole=('GlobalClim', 'InitialClim'),
-                                    inputkind='clim_model')
+            if self.do_climfile_fixer(rh, convkind="modelclim"):
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=thismonth,
+                    inputrole=("GlobalClim", "InitialClim"),
+                    inputkind="clim_model",
+                )
 
             # Possibly fix post-processing clim files
             self.all_localclim_fixer(rh, thismonth)
@@ -127,10 +127,10 @@ class Forecast(IFSParallel):
         #
         # TODO: Clarify where both regexp keys are coming from
         guesses = self.context.sequence.effective_inputs(
-            role=re.compile(r'IAU_(Background|Guess)', flags=re.IGNORECASE)
+            role=re.compile(r"IAU_(Background|Guess)", flags=re.IGNORECASE)
         )
         analyses = self.context.sequence.effective_inputs(
-            role=re.compile(r'IAU_(Analysis|Ic)', flags=re.IGNORECASE)
+            role=re.compile(r"IAU_(Analysis|Ic)", flags=re.IGNORECASE)
         )
 
         def key(s: Section):
@@ -140,32 +140,41 @@ class Forecast(IFSParallel):
                 s.rh.resource.date,
                 s.rh.resource.date + s.rh.resource.term,
             )
+
         self._create_ordered_links(
-            bin_handler=rh, sections=analyses,
-            sort_key=key, nameconv_kind="iau_analysis",
+            bin_handler=rh,
+            sections=analyses,
+            sort_key=key,
+            nameconv_kind="iau_analysis",
         )
         self._create_ordered_links(
-            bin_handler=rh, sections=guesses,
-            sort_key=key, nameconv_kind="iau_background",
+            bin_handler=rh,
+            sections=guesses,
+            sort_key=key,
+            nameconv_kind="iau_background",
         )
 
         # Promises should be nicely managed by a co-proccess
         if self.promises:
             prefixes_set = set()
             for pr_res in [pr.rh.resource for pr in self.promises]:
-                if pr_res.realkind == 'historic':
-                    prefixes_set.add('ICMSH')
-                if pr_res.realkind == 'gridpoint':
-                    prefixes_set.add('{:s}PF'.format('GRIB' if pr_res.nativefmt == 'grib' else ''))
+                if pr_res.realkind == "historic":
+                    prefixes_set.add("ICMSH")
+                if pr_res.realkind == "gridpoint":
+                    prefixes_set.add(
+                        "{:s}PF".format(
+                            "GRIB" if pr_res.nativefmt == "grib" else ""
+                        )
+                    )
             self.io_poll_args = tuple(prefixes_set)
             self.flyput = len(self.io_poll_args) > 0
 
     def _create_ordered_links(
-            self,
-            bin_handler: Handler,
-            sections: Iterable[Section],
-            sort_key: Callable[[Section], Any],
-            nameconv_kind: str,
+        self,
+        bin_handler: Handler,
+        sections: Iterable[Section],
+        sort_key: Callable[[Section], Any],
+        nameconv_kind: str,
     ):
         """Create links to local files, with ordered names
 
@@ -181,7 +190,8 @@ class Forecast(IFSParallel):
         """
         for i, sec in enumerate(sorted(sections, key=sort_key)):
             nameconv = self.naming_convention(
-                nameconv_kind, bin_handler,
+                nameconv_kind,
+                bin_handler,
                 actualfmt=sec.rh.container.actualfmt,
             )
             target = nameconv(number=(i + 1))
@@ -189,7 +199,9 @@ class Forecast(IFSParallel):
             if self.system.path.exists(target):
                 logger.warning(
                     "%s should be linked to %s but %s already exists.",
-                    link_name, target, target
+                    link_name,
+                    target,
+                    target,
                 )
                 continue
             logger.info("Linking %s to %s.", link_name, target)
@@ -198,15 +210,16 @@ class Forecast(IFSParallel):
 
     def find_namelists(self, opts=None):
         """Find any namelists candidates in actual context inputs."""
-        return [x.rh
-                for x in self.context.sequence.effective_inputs(role='Namelist',
-                                                                kind='namelist')]
+        return [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="Namelist", kind="namelist"
+            )
+        ]
 
     def prepare_namelist_delta(self, rh, namcontents, namlocal):
-        nam_updated = super().prepare_namelist_delta(
-            rh, namcontents, namlocal
-        )
-        if namlocal == 'fort.4':
+        nam_updated = super().prepare_namelist_delta(rh, namcontents, namlocal)
+        if namlocal == "fort.4":
             o_conf = self._outputs_configurator(rh)
             o_conf.modelstate = self.hist_terms
             o_conf.surf_modelstate = self.surfhist_terms
@@ -222,34 +235,40 @@ class Forecast(IFSParallel):
         sh = self.system
 
         # Look up for the gridpoint files
-        gp_out = sh.ls('PF{}*'.format(self.xpname))
+        gp_out = sh.ls("PF{}*".format(self.xpname))
         gp_map = defaultdict(list)
         if gp_out:
-            re_pf = re.compile(r'^PF{}(\w+)\+(\d+(?::\d+)?)$'.format(self.xpname))
+            re_pf = re.compile(
+                r"^PF{}(\w+)\+(\d+(?::\d+)?)$".format(self.xpname)
+            )
             for fname in gp_out:
                 match_pf = re_pf.match(fname)
                 if match_pf:
-                    gp_map[match_pf.group(1).lower()].append(Time(match_pf.group(2)))
+                    gp_map[match_pf.group(1).lower()].append(
+                        Time(match_pf.group(2))
+                    )
             for k, v in gp_map.items():
                 v.sort()
-                logger.info('Gridpoint files found: domain=%s, terms=%s',
-                            k,
-                            ','.join([str(t) for t in v]))
+                logger.info(
+                    "Gridpoint files found: domain=%s, terms=%s",
+                    k,
+                    ",".join([str(t) for t in v]),
+                )
         if len(gp_map) == 0:
-            logger.info('No gridpoint file was found.')
-        sh.json_dump(gp_map, 'gridpoint_map.out', indent=4, cls=ShellEncoder)
+            logger.info("No gridpoint file was found.")
+        sh.json_dump(gp_map, "gridpoint_map.out", indent=4, cls=ShellEncoder)
 
         # Gather DDH in folders
         if self.ddhpack:
-            ddhmap = dict(DL='dlimited', GL='global', ZO='zonal')
-            for (prefix, ddhkind) in ddhmap.items():
-                flist = sh.glob('DHF{}{}+*'.format(prefix, self.xpname))
+            ddhmap = dict(DL="dlimited", GL="global", ZO="zonal")
+            for prefix, ddhkind in ddhmap.items():
+                flist = sh.glob("DHF{}{}+*".format(prefix, self.xpname))
                 if flist:
-                    dest = 'ddhpack_{}'.format(ddhkind)
-                    logger.info('Creating a DDH pack: %s', dest)
+                    dest = "ddhpack_{}".format(ddhkind)
+                    logger.info("Creating a DDH pack: %s", dest)
                     sh.mkdir(dest)
                     for lfa in flist:
-                        sh.mv(lfa, dest, fmt='lfa')
+                        sh.mv(lfa, dest, fmt="lfa")
 
         super().postfix(rh, opts)
 
@@ -258,36 +277,36 @@ class LAMForecast(Forecast):
     """Forecast for IFS-like Limited Area Models."""
 
     _footprint = dict(
-        info = "Run a forecast with an Arpege/IFS like Limited Area Model.",
-        attr = dict(
-            kind = dict(
-                values = ['lamfc', 'lamforecast'],
-                remap = dict(lamforecast = 'lamfc'),
+        info="Run a forecast with an Arpege/IFS like Limited Area Model.",
+        attr=dict(
+            kind=dict(
+                values=["lamfc", "lamforecast"],
+                remap=dict(lamforecast="lamfc"),
             ),
-            synctool = dict(
-                info = 'The name of the script called when waiting for coupling files',
-                optional = True,
-                default = 'atcp.alad',
-                doc_visibility = footprints.doc.visibility.ADVANCED,
+            synctool=dict(
+                info="The name of the script called when waiting for coupling files",
+                optional=True,
+                default="atcp.alad",
+                doc_visibility=footprints.doc.visibility.ADVANCED,
             ),
-            synctpl = dict(
-                info = 'The template used to generate the *synctool* script',
-                optional = True,
-                default = '@sync-fetch.tpl',
-                doc_visibility = footprints.doc.visibility.ADVANCED,
+            synctpl=dict(
+                info="The template used to generate the *synctool* script",
+                optional=True,
+                default="@sync-fetch.tpl",
+                doc_visibility=footprints.doc.visibility.ADVANCED,
             ),
-        )
+        ),
     )
 
     def spawn_command_options(self):
         """Dictionary provided for command line factory."""
         return dict(
-            name=(self.xpname + 'xxxx')[:4].upper(),
+            name=(self.xpname + "xxxx")[:4].upper(),
             timescheme=self.timescheme,
             timestep=self.timestep,
             fcterm=self.fcterm,
             fcunit=self.fcunit,
-            model='aladin',
+            model="aladin",
         )
 
     def prepare(self, rh, opts):
@@ -297,28 +316,32 @@ class LAMForecast(Forecast):
         sh = self.system
 
         # Check boundaries conditions
-        cplrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role='BoundaryConditions',
-            kind='boundary'
-        )]
+        cplrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="BoundaryConditions", kind="boundary"
+            )
+        ]
         cplrh.sort(key=lambda rh: rh.resource.date + rh.resource.term)
 
         # Ordered pre-linking of boundaring and building ot the synchronization tools
         firstsync = None
-        sh.header('Check boundaries...')
+        sh.header("Check boundaries...")
         if any([x.is_expected() for x in cplrh]):
-            logger.info('Some boundaries conditions are still expected')
+            logger.info("Some boundaries conditions are still expected")
             self.mksync = True
         else:
-            logger.info('All boundaries conditions available')
+            logger.info("All boundaries conditions available")
             self.mksync = False
 
         for i, bound in enumerate(cplrh):
             thisbound = bound.container.localpath()
-            lbcnc = self.naming_convention('lbc', rh, actualfmt=bound.container.actualfmt)
+            lbcnc = self.naming_convention(
+                "lbc", rh, actualfmt=bound.container.actualfmt
+            )
             sh.softlink(thisbound, lbcnc(number=i))
             if self.mksync:
-                thistool = self.synctool + '.{:03d}'.format(i)
+                thistool = self.synctool + ".{:03d}".format(i)
                 bound.mkgetpr(pr_getter=thistool, tplfetch=self.synctpl)
                 if firstsync is None:
                     firstsync = thistool
@@ -332,7 +355,7 @@ class LAMForecast(Forecast):
         sh = self.system
 
         if self.mksync:
-            synclog = self.synctool + '.log'
+            synclog = self.synctool + ".log"
             if sh.path.exists(synclog):
                 sh.subtitle(synclog)
                 sh.cat(synclog, output=False)
@@ -344,19 +367,19 @@ class DFIForecast(LAMForecast):
     """OBSOLETE CODE: do not use."""
 
     _footprint = dict(
-        info = "Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
-        attr = dict(
-            kind = dict(
-                values = ['fcdfi'],
+        info="Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
+        attr=dict(
+            kind=dict(
+                values=["fcdfi"],
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
         """Pre-link boundary conditions as special DFI files."""
         super().prepare(rh, opts)
-        ininc = self.naming_convention('ic', rh)
-        lbcnc = self.naming_convention('lbc', rh, actualfmt='fa')
+        ininc = self.naming_convention("ic", rh)
+        lbcnc = self.naming_convention("lbc", rh, actualfmt="fa")
         for pseudoterm in (999, 0, 1):
             self.system.softlink(ininc(), lbcnc(number=pseudoterm))
 
@@ -369,29 +392,27 @@ class FullPos(IFSParallel):
 
     _abstract = True
     _footprint = dict(
-        attr = dict(
-            xpname = dict(
-                default = 'FPOS'
+        attr=dict(
+            xpname=dict(default="FPOS"),
+            flyput=dict(
+                default=False,
+                values=[False],
             ),
-            flyput = dict(
-                default = False,
-                values = [False],
+            server_run=dict(
+                values=[True, False],
             ),
-            server_run = dict(
-                values = [True, False],
+            serversync_method=dict(
+                default="simple_socket",
             ),
-            serversync_method = dict(
-                default = 'simple_socket',
-            ),
-            serversync_medium = dict(
-                default = 'cnt3_wait',
+            serversync_medium=dict(
+                default="cnt3_wait",
             ),
         )
     )
 
     @property
     def realkind(self):
-        return 'fullpos'
+        return "fullpos"
 
 
 class FullPosGeo(FullPos):
@@ -401,68 +422,100 @@ class FullPosGeo(FullPos):
     """
 
     _footprint = dict(
-        info = "Run a fullpos to interpolate to a new geometry",
-        attr = dict(
-            kind = dict(
-                values = ['l2h', 'h2l'],
+        info="Run a fullpos to interpolate to a new geometry",
+        attr=dict(
+            kind=dict(
+                values=["l2h", "h2l"],
             ),
-        )
+        ),
     )
 
-    _RUNSTORE = 'RUNOUT'
+    _RUNSTORE = "RUNOUT"
 
     def _compute_target_name(self, r):
-        return ('PF' + re.sub('^(?:ICMSH)(.*?)(?:INIT)(.*)$', r'\1\2',
-                              r.container.localpath()).format(self.xpname))
+        return "PF" + re.sub(
+            "^(?:ICMSH)(.*?)(?:INIT)(.*)$", r"\1\2", r.container.localpath()
+        ).format(self.xpname)
 
     def execute(self, rh, opts):
         """Loop on the various initial conditions provided."""
 
         sh = self.system
 
-        initrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role=('Analysis', 'Guess', 'InitialCondition'),
-            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
-                  re.compile('pert'), ),
-        )]
+        initrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role=("Analysis", "Guess", "InitialCondition"),
+                kind=(
+                    "analysis",
+                    "historic",
+                    "ic",
+                    re.compile("(stp|ana)min"),
+                    re.compile("pert"),
+                ),
+            )
+        ]
 
         # is there one (deterministic forecast) or many (ensemble forecast) fullpos to perform ?
         isMany = len(initrh) > 1
-        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')
-        do_fix_output_clim = self.do_climfile_fixer(rh, convkind='targetclim', area='000')
-        ininc = self.naming_convention('ic', rh)
+        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")
+        do_fix_output_clim = self.do_climfile_fixer(
+            rh, convkind="targetclim", area="000"
+        )
+        ininc = self.naming_convention("ic", rh)
         infile = ininc()
 
         for num, r in enumerate(initrh):
-            str_subtitle = 'Fullpos execution on {}'.format(r.container.localpath())
+            str_subtitle = "Fullpos execution on {}".format(
+                r.container.localpath()
+            )
             sh.subtitle(str_subtitle)
 
             # Set the actual init file
             if sh.path.exists(infile):
                 if isMany:
-                    logger.critical('Cannot process multiple Historic files if %s exists.', infile)
+                    logger.critical(
+                        "Cannot process multiple Historic files if %s exists.",
+                        infile,
+                    )
             else:
-                sh.cp(r.container.localpath(), infile, fmt=r.container.actualfmt, intent=intent.IN)
+                sh.cp(
+                    r.container.localpath(),
+                    infile,
+                    fmt=r.container.actualfmt,
+                    intent=intent.IN,
+                )
 
             # Fix links for climatology files
             actualmonth = Month(r.resource.date + r.resource.term)
             startingclim = r.resource.geometry
 
             if do_fix_input_clim:
-                self.climfile_fixer(rh, convkind='modelclim', month=actualmonth, geo=startingclim,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model')
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=actualmonth,
+                    geo=startingclim,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                )
 
             if do_fix_output_clim:
-                self.climfile_fixer(rh, convkind='targetclim', month=actualmonth, notgeo=startingclim,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model', area='000')
+                self.climfile_fixer(
+                    rh,
+                    convkind="targetclim",
+                    month=actualmonth,
+                    notgeo=startingclim,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                    area="000",
+                )
 
             # Standard execution
             super().execute(rh, opts)
 
             # Find the output filename
-            output_file = [x for x in sh.glob('PF{:s}*+*'.format(self.xpname))]
+            output_file = [x for x in sh.glob("PF{:s}*+*".format(self.xpname))]
             if len(output_file) != 1:
                 raise AlgoComponentError("No or multiple output files found.")
             output_file = output_file[0]
@@ -472,35 +525,53 @@ class FullPosGeo(FullPos):
             # Set a local storage place
             sh.mkdir(self._RUNSTORE)
             # Freeze the current output
-            sh.move(output_file, sh.path.join(self._RUNSTORE, 'pfout_{:d}'.format(num)),
-                    fmt=r.container.actualfmt)
+            sh.move(
+                output_file,
+                sh.path.join(self._RUNSTORE, "pfout_{:d}".format(num)),
+                fmt=r.container.actualfmt,
+            )
             sh.remove(infile, fmt=r.container.actualfmt)
             # Cleaning/Log management
             if not self.server_run:
                 # The only one listing
-                sh.cat('NODE.001_01', output='NODE.all')
+                sh.cat("NODE.001_01", output="NODE.all")
                 # Some cleaning
-                sh.rmall('ncf927', 'dirlst')
+                sh.rmall("ncf927", "dirlst")
             else:
                 # Link the output files to new style names
-                sh.cp(output_file, self._compute_target_name(r),
-                      fmt=r.container.actualfmt, intent='in')
+                sh.cp(
+                    output_file,
+                    self._compute_target_name(r),
+                    fmt=r.container.actualfmt,
+                    intent="in",
+                )
                 # Link the listing to NODE.all
-                sh.cp('NODE.001_01', 'NODE.all', intent='in')
+                sh.cp("NODE.001_01", "NODE.all", intent="in")
 
     def postfix(self, rh, opts):
         """Post processing cleaning."""
         sh = self.system
 
-        initrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role=('Analysis', 'Guess', 'InitialCondition'),
-            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
-                  re.compile('pert'), ),
-        )]
+        initrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role=("Analysis", "Guess", "InitialCondition"),
+                kind=(
+                    "analysis",
+                    "historic",
+                    "ic",
+                    re.compile("(stp|ana)min"),
+                    re.compile("pert"),
+                ),
+            )
+        ]
         if len(initrh) > 1:
             for num, r in enumerate(initrh):
-                sh.move('{:s}/pfout_{:d}'.format(self._RUNSTORE, num),
-                        self._compute_target_name(r), fmt=r.container.actualfmt)
+                sh.move(
+                    "{:s}/pfout_{:d}".format(self._RUNSTORE, num),
+                    self._compute_target_name(r),
+                    fmt=r.container.actualfmt,
+                )
 
         super().postfix(rh, opts)
 
@@ -512,29 +583,32 @@ class FullPosBDAP(FullPos):
     """
 
     _footprint = dict(
-        info = "Run a fullpos to post-process raw model outputs",
-        attr = dict(
-            kind = dict(
-                values = ['fullpos', 'fp'],
-                remap = dict(fp= 'fullpos')
-            ),
-            fcterm = dict(
-                values = [0, ],
+        info="Run a fullpos to post-process raw model outputs",
+        attr=dict(
+            kind=dict(values=["fullpos", "fp"], remap=dict(fp="fullpos")),
+            fcterm=dict(
+                values=[
+                    0,
+                ],
             ),
-            outputid = dict(
-                info = "The identifier for the encoding of post-processed fields.",
-                optional = True,
+            outputid=dict(
+                info="The identifier for the encoding of post-processed fields.",
+                optional=True,
             ),
-            server_run = dict(
-                values = [False, ],
+            server_run=dict(
+                values=[
+                    False,
+                ],
             ),
         ),
     )
 
     def prepare(self, rh, opts):
         """Some additional checks."""
-        if self.system.path.exists('xxt00000000'):
-            raise AlgoComponentError('There should be no file named xxt00000000 in the working directory')
+        if self.system.path.exists("xxt00000000"):
+            raise AlgoComponentError(
+                "There should be no file named xxt00000000 in the working directory"
+            )
         super().prepare(rh, opts)
 
     def execute(self, rh, opts):
@@ -542,93 +616,137 @@ class FullPosBDAP(FullPos):
 
         sh = self.system
 
-        namrh = [x.rh for x in self.context.sequence.effective_inputs(
-            kind='namelistfp'
-        )]
+        namrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelistfp")
+        ]
 
-        namxx = [x.rh for x in self.context.sequence.effective_inputs(
-            role='FullPosSelection',
-            kind='namselect',
-        )]
+        namxx = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="FullPosSelection",
+                kind="namselect",
+            )
+        ]
 
-        initsec = [x for x in self.context.sequence.effective_inputs(
-            role=('InitialCondition', 'ModelState'),
-            kind='historic',
-        )]
+        initsec = [
+            x
+            for x in self.context.sequence.effective_inputs(
+                role=("InitialCondition", "ModelState"),
+                kind="historic",
+            )
+        ]
         initsec.sort(key=lambda sec: sec.rh.resource.term)
 
-        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')
+        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")
 
-        ininc = self.naming_convention('ic', rh)
+        ininc = self.naming_convention("ic", rh)
         infile = ininc()
 
         for sec in initsec:
             r = sec.rh
-            sh.subtitle('Loop on {:s}'.format(r.resource.term.fmthm))
+            sh.subtitle("Loop on {:s}".format(r.resource.term.fmthm))
 
             thisdate = r.resource.date + r.resource.term
             thismonth = thisdate.month
-            logger.info('Fullpos <month:%s>' % thismonth)
+            logger.info("Fullpos <month:%s>" % thismonth)
 
             if do_fix_input_clim:
-                self.climfile_fixer(rh, convkind='modelclim',
-                                    month=thismonth, geo=r.resource.geometry,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model')
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=thismonth,
+                    geo=r.resource.geometry,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                )
 
             thesenames = self.all_localclim_fixer(rh, thismonth)
 
             # Set a local storage place
-            runstore = 'RUNOUT' + r.resource.term.fmtraw
+            runstore = "RUNOUT" + r.resource.term.fmtraw
             sh.mkdir(runstore)
 
             # Define an input namelist
             try:
-                namfp = [x for x in namrh if x.resource.term == r.resource.term].pop()
+                namfp = [
+                    x for x in namrh if x.resource.term == r.resource.term
+                ].pop()
                 namfplocal = namfp.container.localpath()
                 if self.outputid is not None:
-                    self._set_nam_macro(namfp.contents, namfplocal, 'OUTPUTID', self.outputid)
+                    self._set_nam_macro(
+                        namfp.contents, namfplocal, "OUTPUTID", self.outputid
+                    )
                 namfp.contents.rewrite(namfp.container)
-                sh.remove('fort.4')
-                sh.symlink(namfplocal, 'fort.4')
+                sh.remove("fort.4")
+                sh.symlink(namfplocal, "fort.4")
             except Exception:
-                logger.critical('Could not get a fullpos namelist for term %s', r.resource.term)
+                logger.critical(
+                    "Could not get a fullpos namelist for term %s",
+                    r.resource.term,
+                )
                 raise
 
             # Define an selection namelist
             if namxx:
-                namxt = [x for x in namxx if x.resource.term == r.resource.term]
+                namxt = [
+                    x for x in namxx if x.resource.term == r.resource.term
+                ]
                 if namxt:
-                    sh.remove('xxt00000000')
-                    sh.symlink(namxt.pop().container.localpath(), 'xxt00000000')
+                    sh.remove("xxt00000000")
+                    sh.symlink(
+                        namxt.pop().container.localpath(), "xxt00000000"
+                    )
                 else:
-                    logger.critical('Could not get a selection namelist for term %s', r.resource.term)
+                    logger.critical(
+                        "Could not get a selection namelist for term %s",
+                        r.resource.term,
+                    )
                     raise AlgoComponentError()
             else:
                 logger.info("No selection namelist are provided.")
 
             # Finally set the actual init file
             sh.remove(infile)
-            self.grab(sec, comment='Fullpos source (term={:s})'.format(r.resource.term.fmthm))
+            self.grab(
+                sec,
+                comment="Fullpos source (term={:s})".format(
+                    r.resource.term.fmthm
+                ),
+            )
             sh.softlink(r.container.localpath(), infile)
 
             # Standard execution
             super().execute(rh, opts)
 
             # Freeze the current output
-            for posfile in [x for x in (sh.glob('PF{:s}*+*'.format(self.xpname)) +
-                                        sh.glob('GRIBPF{:s}*+*'.format(self.xpname)))]:
-                rootpos = re.sub('0+$', '', posfile)
-                fmtpos = 'grib' if posfile.startswith('GRIB') else 'lfi'
-                targetfile = sh.path.join(runstore, rootpos + r.resource.term.fmthm)
+            for posfile in [
+                x
+                for x in (
+                    sh.glob("PF{:s}*+*".format(self.xpname))
+                    + sh.glob("GRIBPF{:s}*+*".format(self.xpname))
+                )
+            ]:
+                rootpos = re.sub("0+$", "", posfile)
+                fmtpos = "grib" if posfile.startswith("GRIB") else "lfi"
+                targetfile = sh.path.join(
+                    runstore, rootpos + r.resource.term.fmthm
+                )
                 targetbase = sh.path.basename(targetfile)
 
                 # Deal with potential promises
-                expected = [x for x in self.promises
-                            if x.rh.container.localpath() == targetbase]
+                expected = [
+                    x
+                    for x in self.promises
+                    if x.rh.container.localpath() == targetbase
+                ]
                 if expected:
-                    logger.info("Start dealing with promises for: %s.",
-                                ", ".join([x.rh.container.localpath() for x in expected]))
+                    logger.info(
+                        "Start dealing with promises for: %s.",
+                        ", ".join(
+                            [x.rh.container.localpath() for x in expected]
+                        ),
+                    )
                     if posfile != targetbase:
                         sh.move(posfile, targetbase, fmt=fmtpos)
                         posfile = targetbase
@@ -637,12 +755,12 @@ class FullPosBDAP(FullPos):
 
                 sh.move(posfile, targetfile, fmt=fmtpos)
 
-            for logfile in sh.glob('NODE.*', 'std*'):
+            for logfile in sh.glob("NODE.*", "std*"):
                 sh.move(logfile, sh.path.join(runstore, logfile))
 
             # Some cleaning
-            sh.rmall('PX{:s}*'.format(self.xpname), fmt='lfi')
-            sh.rmall('ncf927', 'dirlst')
+            sh.rmall("PX{:s}*".format(self.xpname), fmt="lfi")
+            sh.rmall("ncf927", "dirlst")
             for clim in thesenames:
                 sh.rm(clim)
 
@@ -650,12 +768,20 @@ class FullPosBDAP(FullPos):
         """Post processing cleaning."""
         sh = self.system
 
-        for fpfile in [x for x in (sh.glob('RUNOUT*/PF{:s}*'.format(self.xpname)) +
-                                   sh.glob('RUNOUT*/GRIBPF{:s}*+*'.format(self.xpname)))
-                       if sh.path.isfile(x)]:
-            sh.move(fpfile, sh.path.basename(fpfile),
-                    fmt='grib' if 'GRIBPF' in fpfile else 'lfi')
-        sh.cat('RUNOUT*/NODE.001_01', output='NODE.all')
+        for fpfile in [
+            x
+            for x in (
+                sh.glob("RUNOUT*/PF{:s}*".format(self.xpname))
+                + sh.glob("RUNOUT*/GRIBPF{:s}*+*".format(self.xpname))
+            )
+            if sh.path.isfile(x)
+        ]:
+            sh.move(
+                fpfile,
+                sh.path.basename(fpfile),
+                fmt="grib" if "GRIBPF" in fpfile else "lfi",
+            )
+        sh.cat("RUNOUT*/NODE.001_01", output="NODE.all")
 
         super().postfix(rh, opts)
 
@@ -666,80 +792,103 @@ class OfflineSurfex(Parallel, DrHookDecoMixin):
     _footprint = [
         model,
         dict(
-            info = "Run a forecast with the SURFEX's offline binary.",
-            attr = dict(
-                kind = dict(
-                    values = ['offline_forecast', ],
+            info="Run a forecast with the SURFEX's offline binary.",
+            attr=dict(
+                kind=dict(
+                    values=[
+                        "offline_forecast",
+                    ],
                 ),
-                model = dict(
-                    values = ['surfex', ],
+                model=dict(
+                    values=[
+                        "surfex",
+                    ],
                 ),
-                model_tstep = dict(
-                    info = "The timestep of the model",
-                    type = Period,
+                model_tstep=dict(
+                    info="The timestep of the model",
+                    type=Period,
                 ),
-                diag_tstep = dict(
-                    info = "The timestep for writing diagnostics outputs",
-                    type = Period,
+                diag_tstep=dict(
+                    info="The timestep for writing diagnostics outputs",
+                    type=Period,
                 ),
-                fcterm = dict(
-                    info = "The forecast's term",
-                    type = Period,
+                fcterm=dict(
+                    info="The forecast's term",
+                    type=Period,
                 ),
-                forcing_read_interval = dict(
-                    info = "Read the forcing file every...",
-                    type = Period,
-                    default = Period('PT12H'),
-                    optional = True,
-                )
-            )
-        )
+                forcing_read_interval=dict(
+                    info="Read the forcing file every...",
+                    type=Period,
+                    default=Period("PT12H"),
+                    optional=True,
+                ),
+            ),
+        ),
     ]
 
     def valid_executable(self, rh):
         """Check the executable's resource."""
-        bmodel = getattr(rh.resource, 'model', None)
-        rc = bmodel == 'surfex' and rh.resource.realkind == 'offline'
+        bmodel = getattr(rh.resource, "model", None)
+        rc = bmodel == "surfex" and rh.resource.realkind == "offline"
         if not rc:
-            logger.error('Inapropriate binary provided')
+            logger.error("Inapropriate binary provided")
         return rc and super().valid_executable(rh)
 
     @staticmethod
     def _fix_nam_macro(sec, macro, value):
         """Set a given namelist macro and issue a log message."""
         sec.rh.contents.setmacro(macro, value)
-        logger.info('Setup %s macro to %s.', macro, str(value))
+        logger.info("Setup %s macro to %s.", macro, str(value))
 
     def prepare(self, rh, opts):
         """Setup the appropriate namelist macros."""
         self.system.subtitle("Offline SURFEX Settings.")
         # Find the run/final date
         ic = self.context.sequence.effective_inputs(
-            role=('InitialConditions', 'ModelState', 'Analysis'))
+            role=("InitialConditions", "ModelState", "Analysis")
+        )
         if ic:
             if len(ic) > 1:
-                logger.warning('Multiple initial conditions, using only the first one...')
+                logger.warning(
+                    "Multiple initial conditions, using only the first one..."
+                )
             rundate = ic[0].rh.resource.date
-            if hasattr(ic[0].rh.resource, 'term'):
+            if hasattr(ic[0].rh.resource, "term"):
                 rundate += ic[0].rh.resource.term
             finaldate = rundate + self.fcterm
-            finaldate = [finaldate.year, finaldate.month, finaldate.day,
-                         finaldate.hour * 3600 + finaldate.minute * 60 + finaldate.second]
-            logger.info('The final date is : %s', str(finaldate))
-            nbreads = int(math.ceil((finaldate - rundate).length /
-                                    self.forcing_read_interval.length))
+            finaldate = [
+                finaldate.year,
+                finaldate.month,
+                finaldate.day,
+                finaldate.hour * 3600
+                + finaldate.minute * 60
+                + finaldate.second,
+            ]
+            logger.info("The final date is : %s", str(finaldate))
+            nbreads = int(
+                math.ceil(
+                    (finaldate - rundate).length
+                    / self.forcing_read_interval.length
+                )
+            )
         else:
-            logger.warning('No initial conditions were found. Hope you know what you are doing...')
+            logger.warning(
+                "No initial conditions were found. Hope you know what you are doing..."
+            )
             finaldate = None
         # Ok, let's find the namelist
-        namsecs = self.context.sequence.effective_inputs(role=('Namelist', 'Namelistsurf'))
+        namsecs = self.context.sequence.effective_inputs(
+            role=("Namelist", "Namelistsurf")
+        )
         for namsec in namsecs:
             logger.info("Processing: %s", namsec.rh.container.localpath())
-            self._fix_nam_macro(namsec, 'TSTEP', self.model_tstep.length)
-            self._fix_nam_macro(namsec, 'TSTEP_OUTPUTS', self.diag_tstep.length)
+            self._fix_nam_macro(namsec, "TSTEP", self.model_tstep.length)
+            self._fix_nam_macro(
+                namsec, "TSTEP_OUTPUTS", self.diag_tstep.length
+            )
             if finaldate:
-                self._fix_nam_macro(namsec, 'FINAL_STOP', finaldate)
-                self._fix_nam_macro(namsec, 'NB_READS', nbreads)
+                self._fix_nam_macro(namsec, "FINAL_STOP", finaldate)
+                self._fix_nam_macro(namsec, "NB_READS", nbreads)
             if namsec.rh.contents.dumps_needs_update:
                 namsec.rh.save()
                 logger.info("Namelist dump: \n%s", namsec.rh.container.read())