vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/nwp/algo/forecasts.py
@@ -35,52 +35,49 @@ class Forecast(IFSParallel):
     _footprint = [
         outputid_deco,
         dict(
-            info = "Run a forecast with Arpege/IFS.",
-            attr = dict(
-                kind = dict(
-                    values = ['forecast', 'fc'],
-                    remap = dict(forecast = 'fc')
+            info="Run a forecast with Arpege/IFS.",
+            attr=dict(
+                kind=dict(
+                    values=["forecast", "fc"], remap=dict(forecast="fc")
                 ),
-                hist_terms = dict(
-                    info = "The list of terms when historical file production is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                hist_terms=dict(
+                    info="The list of terms when historical file production is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
-                surfhist_terms = dict(
-                    info ="The list of terms when surface file production is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                surfhist_terms=dict(
+                    info="The list of terms when surface file production is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
                 pos_terms=dict(
-                    info = "The list of terms when post-processed data is requested.",
-                    type = footprints.FPList,
-                    optional = True,
+                    info="The list of terms when post-processed data is requested.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
                 s_norm_terms=dict(
-                    info = "The list of terms when spectal norms should be computed.",
-                    type = footprints.FPList,
-                    optional = True,
+                    info="The list of terms when spectal norms should be computed.",
+                    type=footprints.FPList,
+                    optional=True,
                 ),
-                flyargs = dict(
-                    default = ('ICMSH', 'PF'),
+                flyargs=dict(
+                    default=("ICMSH", "PF"),
                 ),
-                xpname = dict(
-                    default = 'FCST'
+                xpname=dict(default="FCST"),
+                ddhpack=dict(
+                    info="After run, gather the DDH output file in directories.",
+                    type=bool,
+                    optional=True,
+                    default=False,
+                    doc_zorder=-5,
                 ),
-                ddhpack = dict(
-                    info = "After run, gather the DDH output file in directories.",
-                    type = bool,
-                    optional = True,
-                    default = False,
-                    doc_zorder = -5,
-                ),
-            )
-        )
+            ),
+        ),
     ]
 
     @property
     def realkind(self):
-        return 'forecast'
+        return "forecast"
 
     def _outputs_configurator(self, bin_rh):
         return footprints.proxy.ifsoutputs_configurator(
@@ -93,10 +90,9 @@ class Forecast(IFSParallel):
         """Default pre-link for the initial condition file"""
         super().prepare(rh, opts)
 
-        ininc = self.naming_convention('ic', rh)
+        ininc = self.naming_convention("ic", rh)
         analysis = self.setlink(
-            initrole=('InitialCondition', 'Analysis'),
-            initname=ininc()
+            initrole=("InitialCondition", "Analysis"), initname=ininc()
         )
 
         if analysis:
@@ -104,10 +100,14 @@ class Forecast(IFSParallel):
             thismonth = analysis.rh.resource.date.month
 
             # Possibly fix the model clim
-            if self.do_climfile_fixer(rh, convkind='modelclim'):
-                self.climfile_fixer(rh, convkind='modelclim', month=thismonth,
-                                    inputrole=('GlobalClim', 'InitialClim'),
-                                    inputkind='clim_model')
+            if self.do_climfile_fixer(rh, convkind="modelclim"):
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=thismonth,
+                    inputrole=("GlobalClim", "InitialClim"),
+                    inputkind="clim_model",
+                )
 
             # Possibly fix post-processing clim files
             self.all_localclim_fixer(rh, thismonth)
@@ -127,10 +127,10 @@ class Forecast(IFSParallel):
         #
         # TODO: Clarify where both regexp keys are coming from
         guesses = self.context.sequence.effective_inputs(
-            role=re.compile(r'IAU_(Background|Guess)', flags=re.IGNORECASE)
+            role=re.compile(r"IAU_(Background|Guess)", flags=re.IGNORECASE)
         )
         analyses = self.context.sequence.effective_inputs(
-            role=re.compile(r'IAU_(Analysis|Ic)', flags=re.IGNORECASE)
+            role=re.compile(r"IAU_(Analysis|Ic)", flags=re.IGNORECASE)
         )
 
         def key(s: Section):
@@ -140,32 +140,41 @@ class Forecast(IFSParallel):
                 s.rh.resource.date,
                 s.rh.resource.date + s.rh.resource.term,
             )
+
         self._create_ordered_links(
-            bin_handler=rh, sections=analyses,
-            sort_key=key, nameconv_kind="iau_analysis",
+            bin_handler=rh,
+            sections=analyses,
+            sort_key=key,
+            nameconv_kind="iau_analysis",
         )
         self._create_ordered_links(
-            bin_handler=rh, sections=guesses,
-            sort_key=key, nameconv_kind="iau_background",
+            bin_handler=rh,
+            sections=guesses,
+            sort_key=key,
+            nameconv_kind="iau_background",
         )
 
         # Promises should be nicely managed by a co-proccess
         if self.promises:
             prefixes_set = set()
             for pr_res in [pr.rh.resource for pr in self.promises]:
-                if pr_res.realkind == 'historic':
-                    prefixes_set.add('ICMSH')
-                if pr_res.realkind == 'gridpoint':
-                    prefixes_set.add('{:s}PF'.format('GRIB' if pr_res.nativefmt == 'grib' else ''))
+                if pr_res.realkind == "historic":
+                    prefixes_set.add("ICMSH")
+                if pr_res.realkind == "gridpoint":
+                    prefixes_set.add(
+                        "{:s}PF".format(
+                            "GRIB" if pr_res.nativefmt == "grib" else ""
+                        )
+                    )
             self.io_poll_args = tuple(prefixes_set)
             self.flyput = len(self.io_poll_args) > 0
 
     def _create_ordered_links(
-            self,
-            bin_handler: Handler,
-            sections: Iterable[Section],
-            sort_key: Callable[[Section], Any],
-            nameconv_kind: str,
+        self,
+        bin_handler: Handler,
+        sections: Iterable[Section],
+        sort_key: Callable[[Section], Any],
+        nameconv_kind: str,
     ):
         """Create links to local files, with ordered names
 
@@ -181,7 +190,8 @@ class Forecast(IFSParallel):
         """
         for i, sec in enumerate(sorted(sections, key=sort_key)):
             nameconv = self.naming_convention(
-                nameconv_kind, bin_handler,
+                nameconv_kind,
+                bin_handler,
                 actualfmt=sec.rh.container.actualfmt,
             )
             target = nameconv(number=(i + 1))
@@ -189,7 +199,9 @@ class Forecast(IFSParallel):
             if self.system.path.exists(target):
                 logger.warning(
                     "%s should be linked to %s but %s already exists.",
-                    link_name, target, target
+                    link_name,
+                    target,
+                    target,
                 )
                 continue
             logger.info("Linking %s to %s.", link_name, target)
@@ -198,15 +210,16 @@ class Forecast(IFSParallel):
 
     def find_namelists(self, opts=None):
         """Find any namelists candidates in actual context inputs."""
-        return [x.rh
-                for x in self.context.sequence.effective_inputs(role='Namelist',
-                                                                kind='namelist')]
+        return [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="Namelist", kind="namelist"
+            )
+        ]
 
     def prepare_namelist_delta(self, rh, namcontents, namlocal):
-        nam_updated = super().prepare_namelist_delta(
-            rh, namcontents, namlocal
-        )
-        if namlocal == 'fort.4':
+        nam_updated = super().prepare_namelist_delta(rh, namcontents, namlocal)
+        if namlocal == "fort.4":
             o_conf = self._outputs_configurator(rh)
             o_conf.modelstate = self.hist_terms
             o_conf.surf_modelstate = self.surfhist_terms
@@ -222,34 +235,40 @@ class Forecast(IFSParallel):
         sh = self.system
 
         # Look up for the gridpoint files
-        gp_out = sh.ls('PF{}*'.format(self.xpname))
+        gp_out = sh.ls("PF{}*".format(self.xpname))
         gp_map = defaultdict(list)
         if gp_out:
-            re_pf = re.compile(r'^PF{}(\w+)\+(\d+(?::\d+)?)$'.format(self.xpname))
+            re_pf = re.compile(
+                r"^PF{}(\w+)\+(\d+(?::\d+)?)$".format(self.xpname)
+            )
             for fname in gp_out:
                 match_pf = re_pf.match(fname)
                 if match_pf:
-                    gp_map[match_pf.group(1).lower()].append(Time(match_pf.group(2)))
+                    gp_map[match_pf.group(1).lower()].append(
+                        Time(match_pf.group(2))
+                    )
             for k, v in gp_map.items():
                 v.sort()
-                logger.info('Gridpoint files found: domain=%s, terms=%s',
-                            k,
-                            ','.join([str(t) for t in v]))
+                logger.info(
+                    "Gridpoint files found: domain=%s, terms=%s",
+                    k,
+                    ",".join([str(t) for t in v]),
+                )
         if len(gp_map) == 0:
-            logger.info('No gridpoint file was found.')
-        sh.json_dump(gp_map, 'gridpoint_map.out', indent=4, cls=ShellEncoder)
+            logger.info("No gridpoint file was found.")
+        sh.json_dump(gp_map, "gridpoint_map.out", indent=4, cls=ShellEncoder)
 
         # Gather DDH in folders
         if self.ddhpack:
-            ddhmap = dict(DL='dlimited', GL='global', ZO='zonal')
-            for (prefix, ddhkind) in ddhmap.items():
-                flist = sh.glob('DHF{}{}+*'.format(prefix, self.xpname))
+            ddhmap = dict(DL="dlimited", GL="global", ZO="zonal")
+            for prefix, ddhkind in ddhmap.items():
+                flist = sh.glob("DHF{}{}+*".format(prefix, self.xpname))
                 if flist:
-                    dest = 'ddhpack_{}'.format(ddhkind)
-                    logger.info('Creating a DDH pack: %s', dest)
+                    dest = "ddhpack_{}".format(ddhkind)
+                    logger.info("Creating a DDH pack: %s", dest)
                     sh.mkdir(dest)
                     for lfa in flist:
-                        sh.mv(lfa, dest, fmt='lfa')
+                        sh.mv(lfa, dest, fmt="lfa")
 
         super().postfix(rh, opts)
 
@@ -258,36 +277,27 @@ class LAMForecast(Forecast):
     """Forecast for IFS-like Limited Area Models."""
 
     _footprint = dict(
-        info = "Run a forecast with an Arpege/IFS like Limited Area Model.",
-        attr = dict(
-            kind = dict(
-                values = ['lamfc', 'lamforecast'],
-                remap = dict(lamforecast = 'lamfc'),
-            ),
-            synctool = dict(
-                info = 'The name of the script called when waiting for coupling files',
-                optional = True,
-                default = 'atcp.alad',
-                doc_visibility = footprints.doc.visibility.ADVANCED,
+        info="Run a forecast with an Arpege/IFS like Limited Area Model.",
+        attr=dict(
+            kind=dict(
+                values=["lamfc", "lamforecast"],
+                remap=dict(lamforecast="lamfc"),
             ),
-            synctpl = dict(
-                info = 'The template used to generate the *synctool* script',
-                optional = True,
-                default = '@sync-fetch.tpl',
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-            ),
-        )
+        ),
     )
 
+    synctool = "atcp.alad"
+    synctpl = "sync-fetch.tpl"
+
     def spawn_command_options(self):
         """Dictionary provided for command line factory."""
         return dict(
-            name=(self.xpname + 'xxxx')[:4].upper(),
+            name=(self.xpname + "xxxx")[:4].upper(),
             timescheme=self.timescheme,
             timestep=self.timestep,
             fcterm=self.fcterm,
             fcunit=self.fcunit,
-            model='aladin',
+            model="aladin",
         )
 
     def prepare(self, rh, opts):
@@ -297,31 +307,36 @@ class LAMForecast(Forecast):
         sh = self.system
 
         # Check boundaries conditions
-        cplrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role='BoundaryConditions',
-            kind='boundary'
-        )]
+        cplrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="BoundaryConditions", kind="boundary"
+            )
+        ]
         cplrh.sort(key=lambda rh: rh.resource.date + rh.resource.term)
 
         # Ordered pre-linking of boundaring and building ot the synchronization tools
         firstsync = None
-        sh.header('Check boundaries...')
+        sh.header("Check boundaries...")
         if any([x.is_expected() for x in cplrh]):
-            logger.info('Some boundaries conditions are still expected')
+            logger.info("Some boundaries conditions are still expected")
            self.mksync = True
         else:
-            logger.info('All boundaries conditions available')
+            logger.info("All boundaries conditions available")
            self.mksync = False
 
         for i, bound in enumerate(cplrh):
             thisbound = bound.container.localpath()
-            lbcnc = self.naming_convention('lbc', rh, actualfmt=bound.container.actualfmt)
+            lbcnc = self.naming_convention(
+                "lbc", rh, actualfmt=bound.container.actualfmt
+            )
             sh.softlink(thisbound, lbcnc(number=i))
             if self.mksync:
-                thistool = self.synctool + '.{:03d}'.format(i)
-                bound.mkgetpr(pr_getter=thistool, tplfetch=self.synctpl)
+                bound.mkgetpr(
+                    pr_getter=self.synctool + ".{:03d}".format(i),
+                )
                 if firstsync is None:
-                    firstsync = thistool
+                    firstsync = self.synctool + ".{:03d}".format(i)
 
         # Set up the first synchronization step
         if firstsync is not None:
@@ -332,7 +347,7 @@ class LAMForecast(Forecast):
         sh = self.system
 
         if self.mksync:
-            synclog = self.synctool + '.log'
+            synclog = self.synctool + ".log"
             if sh.path.exists(synclog):
                 sh.subtitle(synclog)
                 sh.cat(synclog, output=False)
@@ -344,19 +359,19 @@ class DFIForecast(LAMForecast):
     """OBSOLETE CODE: do not use."""
 
     _footprint = dict(
-        info = "Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
-        attr = dict(
-            kind = dict(
-                values = ['fcdfi'],
+        info="Run a forecast with an Arpege/IFS like Limited Area Model (with DFIs).",
+        attr=dict(
+            kind=dict(
+                values=["fcdfi"],
             ),
-        )
+        ),
     )
 
     def prepare(self, rh, opts):
         """Pre-link boundary conditions as special DFI files."""
         super().prepare(rh, opts)
-        ininc = self.naming_convention('ic', rh)
-        lbcnc = self.naming_convention('lbc', rh, actualfmt='fa')
+        ininc = self.naming_convention("ic", rh)
+        lbcnc = self.naming_convention("lbc", rh, actualfmt="fa")
         for pseudoterm in (999, 0, 1):
             self.system.softlink(ininc(), lbcnc(number=pseudoterm))
 
@@ -369,29 +384,27 @@ class FullPos(IFSParallel):
 
     _abstract = True
     _footprint = dict(
-        attr = dict(
-            xpname = dict(
-                default = 'FPOS'
-            ),
-            flyput = dict(
-                default = False,
-                values = [False],
+        attr=dict(
+            xpname=dict(default="FPOS"),
+            flyput=dict(
+                default=False,
+                values=[False],
             ),
-            server_run = dict(
-                values = [True, False],
+            server_run=dict(
+                values=[True, False],
             ),
-            serversync_method = dict(
-                default = 'simple_socket',
+            serversync_method=dict(
+                default="simple_socket",
             ),
-            serversync_medium = dict(
-                default = 'cnt3_wait',
+            serversync_medium=dict(
+                default="cnt3_wait",
             ),
         )
     )
 
     @property
     def realkind(self):
-        return 'fullpos'
+        return "fullpos"
 
 
 class FullPosGeo(FullPos):
@@ -401,68 +414,100 @@ class FullPosGeo(FullPos):
     """
 
     _footprint = dict(
-        info = "Run a fullpos to interpolate to a new geometry",
-        attr = dict(
-            kind = dict(
-                values = ['l2h', 'h2l'],
+        info="Run a fullpos to interpolate to a new geometry",
+        attr=dict(
+            kind=dict(
+                values=["l2h", "h2l"],
             ),
-        )
+        ),
     )
 
-    _RUNSTORE = 'RUNOUT'
+    _RUNSTORE = "RUNOUT"
 
     def _compute_target_name(self, r):
-        return ('PF' + re.sub('^(?:ICMSH)(.*?)(?:INIT)(.*)$', r'\1\2',
-                              r.container.localpath()).format(self.xpname))
+        return "PF" + re.sub(
+            "^(?:ICMSH)(.*?)(?:INIT)(.*)$", r"\1\2", r.container.localpath()
+        ).format(self.xpname)
 
     def execute(self, rh, opts):
         """Loop on the various initial conditions provided."""
 
         sh = self.system
 
-        initrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role=('Analysis', 'Guess', 'InitialCondition'),
-            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
-                  re.compile('pert'), ),
-        )]
+        initrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role=("Analysis", "Guess", "InitialCondition"),
+                kind=(
+                    "analysis",
+                    "historic",
+                    "ic",
+                    re.compile("(stp|ana)min"),
+                    re.compile("pert"),
+                ),
+            )
+        ]
 
         # is there one (deterministic forecast) or many (ensemble forecast) fullpos to perform ?
         isMany = len(initrh) > 1
-        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')
-        do_fix_output_clim = self.do_climfile_fixer(rh, convkind='targetclim', area='000')
-        ininc = self.naming_convention('ic', rh)
+        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")
+        do_fix_output_clim = self.do_climfile_fixer(
+            rh, convkind="targetclim", area="000"
+        )
+        ininc = self.naming_convention("ic", rh)
         infile = ininc()
 
         for num, r in enumerate(initrh):
-            str_subtitle = 'Fullpos execution on {}'.format(r.container.localpath())
+            str_subtitle = "Fullpos execution on {}".format(
+                r.container.localpath()
+            )
             sh.subtitle(str_subtitle)
 
             # Set the actual init file
             if sh.path.exists(infile):
                 if isMany:
-                    logger.critical('Cannot process multiple Historic files if %s exists.', infile)
+                    logger.critical(
+                        "Cannot process multiple Historic files if %s exists.",
+                        infile,
+                    )
             else:
-                sh.cp(r.container.localpath(), infile, fmt=r.container.actualfmt, intent=intent.IN)
+                sh.cp(
+                    r.container.localpath(),
+                    infile,
+                    fmt=r.container.actualfmt,
+                    intent=intent.IN,
+                )
 
             # Fix links for climatology files
             actualmonth = Month(r.resource.date + r.resource.term)
             startingclim = r.resource.geometry
 
             if do_fix_input_clim:
-                self.climfile_fixer(rh, convkind='modelclim', month=actualmonth, geo=startingclim,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model')
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=actualmonth,
+                    geo=startingclim,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                )
 
             if do_fix_output_clim:
-                self.climfile_fixer(rh, convkind='targetclim', month=actualmonth, notgeo=startingclim,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model', area='000')
+                self.climfile_fixer(
+                    rh,
+                    convkind="targetclim",
+                    month=actualmonth,
+                    notgeo=startingclim,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                    area="000",
+                )
 
             # Standard execution
             super().execute(rh, opts)
 
             # Find the output filename
-            output_file = [x for x in sh.glob('PF{:s}*+*'.format(self.xpname))]
+            output_file = [x for x in sh.glob("PF{:s}*+*".format(self.xpname))]
             if len(output_file) != 1:
                 raise AlgoComponentError("No or multiple output files found.")
             output_file = output_file[0]
@@ -472,35 +517,53 @@ class FullPosGeo(FullPos):
                 # Set a local storage place
                 sh.mkdir(self._RUNSTORE)
                 # Freeze the current output
-                sh.move(output_file, sh.path.join(self._RUNSTORE, 'pfout_{:d}'.format(num)),
-                        fmt=r.container.actualfmt)
+                sh.move(
+                    output_file,
+                    sh.path.join(self._RUNSTORE, "pfout_{:d}".format(num)),
+                    fmt=r.container.actualfmt,
+                )
                 sh.remove(infile, fmt=r.container.actualfmt)
                 # Cleaning/Log management
                 if not self.server_run:
                     # The only one listing
-                    sh.cat('NODE.001_01', output='NODE.all')
+                    sh.cat("NODE.001_01", output="NODE.all")
                     # Some cleaning
-                    sh.rmall('ncf927', 'dirlst')
+                    sh.rmall("ncf927", "dirlst")
             else:
                 # Link the output files to new style names
-                sh.cp(output_file, self._compute_target_name(r),
-                      fmt=r.container.actualfmt, intent='in')
+                sh.cp(
+                    output_file,
+                    self._compute_target_name(r),
+                    fmt=r.container.actualfmt,
+                    intent="in",
+                )
                 # Link the listing to NODE.all
-                sh.cp('NODE.001_01', 'NODE.all', intent='in')
+                sh.cp("NODE.001_01", "NODE.all", intent="in")
 
     def postfix(self, rh, opts):
         """Post processing cleaning."""
         sh = self.system
 
-        initrh = [x.rh for x in self.context.sequence.effective_inputs(
-            role=('Analysis', 'Guess', 'InitialCondition'),
-            kind=('analysis', 'historic', 'ic', re.compile('(stp|ana)min'),
-                  re.compile('pert'), ),
-        )]
+        initrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role=("Analysis", "Guess", "InitialCondition"),
+                kind=(
+                    "analysis",
+                    "historic",
+                    "ic",
+                    re.compile("(stp|ana)min"),
+                    re.compile("pert"),
+                ),
+            )
+        ]
         if len(initrh) > 1:
             for num, r in enumerate(initrh):
-                sh.move('{:s}/pfout_{:d}'.format(self._RUNSTORE, num),
-                        self._compute_target_name(r), fmt=r.container.actualfmt)
+                sh.move(
+                    "{:s}/pfout_{:d}".format(self._RUNSTORE, num),
+                    self._compute_target_name(r),
+                    fmt=r.container.actualfmt,
+                )
 
         super().postfix(rh, opts)
 
@@ -512,29 +575,32 @@ class FullPosBDAP(FullPos):
     """
 
     _footprint = dict(
-        info = "Run a fullpos to post-process raw model outputs",
-        attr = dict(
-            kind = dict(
-                values = ['fullpos', 'fp'],
-                remap = dict(fp= 'fullpos')
-            ),
-            fcterm = dict(
-                values = [0, ],
+        info="Run a fullpos to post-process raw model outputs",
+        attr=dict(
+            kind=dict(values=["fullpos", "fp"], remap=dict(fp="fullpos")),
+            fcterm=dict(
+                values=[
+                    0,
+                ],
             ),
-            outputid = dict(
-                info = "The identifier for the encoding of post-processed fields.",
-                optional = True,
+            outputid=dict(
+                info="The identifier for the encoding of post-processed fields.",
+                optional=True,
             ),
-            server_run = dict(
-                values = [False, ],
+            server_run=dict(
+                values=[
+                    False,
+                ],
             ),
         ),
     )
 
     def prepare(self, rh, opts):
         """Some additional checks."""
-        if self.system.path.exists('xxt00000000'):
-            raise AlgoComponentError('There should be no file named xxt00000000 in the working directory')
+        if self.system.path.exists("xxt00000000"):
+            raise AlgoComponentError(
+                "There should be no file named xxt00000000 in the working directory"
+            )
         super().prepare(rh, opts)
 
     def execute(self, rh, opts):
@@ -542,93 +608,137 @@ class FullPosBDAP(FullPos):
 
         sh = self.system
 
-        namrh = [x.rh for x in self.context.sequence.effective_inputs(
-            kind='namelistfp'
-        )]
+        namrh = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelistfp")
+        ]
 
-        namxx = [x.rh for x in self.context.sequence.effective_inputs(
-            role='FullPosSelection',
-            kind='namselect',
-        )]
+        namxx = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(
+                role="FullPosSelection",
+                kind="namselect",
+            )
+        ]
 
-        initsec = [x for x in self.context.sequence.effective_inputs(
-            role=('InitialCondition', 'ModelState'),
-            kind='historic',
-        )]
+        initsec = [
+            x
+            for x in self.context.sequence.effective_inputs(
+                role=("InitialCondition", "ModelState"),
+                kind="historic",
+            )
+        ]
         initsec.sort(key=lambda sec: sec.rh.resource.term)
 
-        do_fix_input_clim = self.do_climfile_fixer(rh, convkind='modelclim')
+        do_fix_input_clim = self.do_climfile_fixer(rh, convkind="modelclim")
 
-        ininc = self.naming_convention('ic', rh)
+        ininc = self.naming_convention("ic", rh)
         infile = ininc()
 
         for sec in initsec:
             r = sec.rh
-            sh.subtitle('Loop on {:s}'.format(r.resource.term.fmthm))
+            sh.subtitle("Loop on {:s}".format(r.resource.term.fmthm))
 
             thisdate = r.resource.date + r.resource.term
             thismonth = thisdate.month
-            logger.info('Fullpos <month:%s>' % thismonth)
+            logger.info("Fullpos <month:%s>" % thismonth)
 
             if do_fix_input_clim:
-                self.climfile_fixer(rh, convkind='modelclim',
-                                    month=thismonth, geo=r.resource.geometry,
-                                    inputrole=(re.compile('^Clim'), re.compile('Clim$')),
-                                    inputkind='clim_model')
+                self.climfile_fixer(
+                    rh,
+                    convkind="modelclim",
+                    month=thismonth,
+                    geo=r.resource.geometry,
+                    inputrole=(re.compile("^Clim"), re.compile("Clim$")),
+                    inputkind="clim_model",
+                )
 
             thesenames = self.all_localclim_fixer(rh, thismonth)
 
             # Set a local storage place
-            runstore = 'RUNOUT' + r.resource.term.fmtraw
+            runstore = "RUNOUT" + r.resource.term.fmtraw
             sh.mkdir(runstore)
 
             # Define an input namelist
             try:
-                namfp = [x for x in namrh if x.resource.term == r.resource.term].pop()
+                namfp = [
+                    x for x in namrh if x.resource.term == r.resource.term
+                ].pop()
                 namfplocal = namfp.container.localpath()
                 if self.outputid is not None:
-                    self._set_nam_macro(namfp.contents, namfplocal, 'OUTPUTID', self.outputid)
+                    self._set_nam_macro(
+                        namfp.contents, namfplocal, "OUTPUTID", self.outputid
+                    )
                 namfp.contents.rewrite(namfp.container)
-                sh.remove('fort.4')
-                sh.symlink(namfplocal, 'fort.4')
+                sh.remove("fort.4")
+                sh.symlink(namfplocal, "fort.4")
             except Exception:
-                logger.critical('Could not get a fullpos namelist for term %s', r.resource.term)
+                logger.critical(
+                    "Could not get a fullpos namelist for term %s",
+                    r.resource.term,
+                )
                 raise
 
             # Define an selection namelist
             if namxx:
-                namxt = [x for x in namxx if x.resource.term == r.resource.term]
+                namxt = [
+                    x for x in namxx if x.resource.term == r.resource.term
+                ]
                 if namxt:
-                    sh.remove('xxt00000000')
-                    sh.symlink(namxt.pop().container.localpath(), 'xxt00000000')
+                    sh.remove("xxt00000000")
+                    sh.symlink(
+                        namxt.pop().container.localpath(), "xxt00000000"
+                    )
                 else:
-                    logger.critical('Could not get a selection namelist for term %s', r.resource.term)
+                    logger.critical(
+                        "Could not get a selection namelist for term %s",
+                        r.resource.term,
+                    )
                    raise AlgoComponentError()
             else:
                 logger.info("No selection namelist are provided.")
 
             # Finally set the actual init file
             sh.remove(infile)
-            self.grab(sec, comment='Fullpos source (term={:s})'.format(r.resource.term.fmthm))
+            self.grab(
+                sec,
+                comment="Fullpos source (term={:s})".format(
+                    r.resource.term.fmthm
+                ),
+            )
             sh.softlink(r.container.localpath(), infile)
 
             # Standard execution
             super().execute(rh, opts)
 
             # Freeze the current output
-            for posfile in [x for x in (sh.glob('PF{:s}*+*'.format(self.xpname)) +
-                                        sh.glob('GRIBPF{:s}*+*'.format(self.xpname)))]:
-                rootpos = re.sub('0+$', '', posfile)
-                fmtpos = 'grib' if posfile.startswith('GRIB') else 'lfi'
-                targetfile = sh.path.join(runstore, rootpos + r.resource.term.fmthm)
+            for posfile in [
+                x
+                for x in (
+                    sh.glob("PF{:s}*+*".format(self.xpname))
+                    + sh.glob("GRIBPF{:s}*+*".format(self.xpname))
+                )
+            ]:
+                rootpos = re.sub("0+$", "", posfile)
+                fmtpos = "grib" if posfile.startswith("GRIB") else "lfi"
+                targetfile = sh.path.join(
+                    runstore, rootpos + r.resource.term.fmthm
+                )
                 targetbase = sh.path.basename(targetfile)
 
                 # Deal with potential promises
-                expected = [x for x in self.promises
-                            if x.rh.container.localpath() == targetbase]
+                expected = [
+                    x
+                    for x in self.promises
+                    if x.rh.container.localpath() == targetbase
+                ]
                 if expected:
-                    logger.info("Start dealing with promises for: %s.",
-                                ", ".join([x.rh.container.localpath() for x in expected]))
+                    logger.info(
+                        "Start dealing with promises for: %s.",
+                        ", ".join(
+                            [x.rh.container.localpath() for x in expected]
+                        ),
+                    )
                     if posfile != targetbase:
                         sh.move(posfile, targetbase, fmt=fmtpos)
                         posfile = targetbase
@@ -637,12 +747,12 @@ class FullPosBDAP(FullPos):
 
                 sh.move(posfile, targetfile, fmt=fmtpos)
 
-            for logfile in sh.glob('NODE.*', 'std*'):
+            for logfile in sh.glob("NODE.*", "std*"):
                 sh.move(logfile, sh.path.join(runstore, logfile))
 
             # Some cleaning
-            sh.rmall('PX{:s}*'.format(self.xpname), fmt='lfi')
-            sh.rmall('ncf927', 'dirlst')
+            sh.rmall("PX{:s}*".format(self.xpname), fmt="lfi")
+            sh.rmall("ncf927", "dirlst")
             for clim in thesenames:
                 sh.rm(clim)
 
@@ -650,12 +760,20 @@ class FullPosBDAP(FullPos):
         """Post processing cleaning."""
         sh = self.system
 
-        for fpfile in [x for x in (sh.glob('RUNOUT*/PF{:s}*'.format(self.xpname)) +
-                                   sh.glob('RUNOUT*/GRIBPF{:s}*+*'.format(self.xpname)))
-                       if sh.path.isfile(x)]:
-            sh.move(fpfile, sh.path.basename(fpfile),
-                    fmt='grib' if 'GRIBPF' in fpfile else 'lfi')
-        sh.cat('RUNOUT*/NODE.001_01', output='NODE.all')
+        for fpfile in [
+            x
+            for x in (
+                sh.glob("RUNOUT*/PF{:s}*".format(self.xpname))
+                + sh.glob("RUNOUT*/GRIBPF{:s}*+*".format(self.xpname))
+            )
+            if sh.path.isfile(x)
+        ]:
+            sh.move(
+                fpfile,
+                sh.path.basename(fpfile),
+                fmt="grib" if "GRIBPF" in fpfile else "lfi",
+            )
+        sh.cat("RUNOUT*/NODE.001_01", output="NODE.all")
 
         super().postfix(rh, opts)
 
@@ -666,80 +784,103 @@ class OfflineSurfex(Parallel, DrHookDecoMixin):
     _footprint = [
         model,
         dict(
-            info = "Run a forecast with the SURFEX's offline binary.",
-            attr = dict(
-                kind = dict(
-                    values = ['offline_forecast', ],
+            info="Run a forecast with the SURFEX's offline binary.",
+            attr=dict(
+                kind=dict(
+                    values=[
+                        "offline_forecast",
+                    ],
                 ),
-                model = dict(
-                    values = ['surfex', ],
+                model=dict(
+                    values=[
+                        "surfex",
+                    ],
                 ),
-                model_tstep = dict(
-                    info = "The timestep of the model",
-                    type = Period,
+                model_tstep=dict(
+                    info="The timestep of the model",
+                    type=Period,
                 ),
-                diag_tstep = dict(
-                    info = "The timestep for writing diagnostics outputs",
-                    type = Period,
+                diag_tstep=dict(
+                    info="The timestep for writing diagnostics outputs",
+                    type=Period,
                 ),
-                fcterm = dict(
-                    info = "The forecast's term",
-                    type = Period,
+                fcterm=dict(
+                    info="The forecast's term",
+                    type=Period,
                 ),
-                forcing_read_interval = dict(
-                    info = "Read the forcing file every...",
-                    type = Period,
-                    default = Period('PT12H'),
-                    optional = True,
-                )
-            )
-        )
+                forcing_read_interval=dict(
+                    info="Read the forcing file every...",
+                    type=Period,
+                    default=Period("PT12H"),
+                    optional=True,
+                ),
+            ),
+        ),
     ]
 
     def valid_executable(self, rh):
         """Check the executable's resource."""
-        bmodel = getattr(rh.resource, 'model', None)
-        rc = bmodel == 'surfex' and rh.resource.realkind == 'offline'
+        bmodel = getattr(rh.resource, "model", None)
+        rc = bmodel == "surfex" and rh.resource.realkind == "offline"
        if not rc:
-            logger.error('Inapropriate binary provided')
+            logger.error("Inapropriate binary provided")
         return rc and super().valid_executable(rh)
 
     @staticmethod
     def _fix_nam_macro(sec, macro, value):
         """Set a given namelist macro and issue a log message."""
         sec.rh.contents.setmacro(macro, value)
-        logger.info('Setup %s macro to %s.', macro, str(value))
+        logger.info("Setup %s macro to %s.", macro, str(value))
 
     def prepare(self, rh, opts):
         """Setup the appropriate namelist macros."""
         self.system.subtitle("Offline SURFEX Settings.")
         # Find the run/final date
         ic = self.context.sequence.effective_inputs(
-            role=('InitialConditions', 'ModelState', 'Analysis'))
+            role=("InitialConditions", "ModelState", "Analysis")
+        )
         if ic:
             if len(ic) > 1:
-                logger.warning('Multiple initial conditions, using only the first one...')
+                logger.warning(
+                    "Multiple initial conditions, using only the first one..."
+                )
             rundate = ic[0].rh.resource.date
-            if hasattr(ic[0].rh.resource, 'term'):
+            if hasattr(ic[0].rh.resource, "term"):
                 rundate += ic[0].rh.resource.term
             finaldate = rundate + self.fcterm
-            finaldate = [finaldate.year, finaldate.month, finaldate.day,
-                         finaldate.hour * 3600 + finaldate.minute * 60 + finaldate.second]
-            logger.info('The final date is : %s', str(finaldate))
-            nbreads = int(math.ceil((finaldate - rundate).length /
-                                    self.forcing_read_interval.length))
+            finaldate = [
+                finaldate.year,
+                finaldate.month,
+                finaldate.day,
+                finaldate.hour * 3600
+                + finaldate.minute * 60
+                + finaldate.second,
+            ]
+            logger.info("The final date is : %s", str(finaldate))
+            nbreads = int(
+                math.ceil(
+                    (finaldate - rundate).length
+                    / self.forcing_read_interval.length
+                )
+            )
         else:
-            logger.warning('No initial conditions were found. Hope you know what you are doing...')
+            logger.warning(
+                "No initial conditions were found. Hope you know what you are doing..."
+            )
             finaldate = None
         # Ok, let's find the namelist
-        namsecs = self.context.sequence.effective_inputs(role=('Namelist', 'Namelistsurf'))
+        namsecs = self.context.sequence.effective_inputs(
+            role=("Namelist", "Namelistsurf")
+        )
         for namsec in namsecs:
             logger.info("Processing: %s", namsec.rh.container.localpath())
-            self._fix_nam_macro(namsec, 'TSTEP', self.model_tstep.length)
-            self._fix_nam_macro(namsec, 'TSTEP_OUTPUTS', self.diag_tstep.length)
+            self._fix_nam_macro(namsec, "TSTEP", self.model_tstep.length)
+            self._fix_nam_macro(
+                namsec, "TSTEP_OUTPUTS", self.diag_tstep.length
+            )
             if finaldate:
-                self._fix_nam_macro(namsec, 'FINAL_STOP', finaldate)
-                self._fix_nam_macro(namsec, 'NB_READS', nbreads)
+                self._fix_nam_macro(namsec, "FINAL_STOP", finaldate)
+                self._fix_nam_macro(namsec, "NB_READS", nbreads)
             if namsec.rh.contents.dumps_needs_update:
                 namsec.rh.save()
                 logger.info("Namelist dump: \n%s", namsec.rh.container.read())