vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
@@ -31,12 +31,12 @@ class ConfTool(footprints.FootprintBase):
31
31
  """Abstract class for conftools objects."""
32
32
 
33
33
  _abstract = True
34
- _collector = ('conftool',)
34
+ _collector = ("conftool",)
35
35
  _footprint = dict(
36
- info = 'Abstract Conf/Weird Tool',
37
- attr = dict(
38
- kind = dict(),
39
- )
36
+ info="Abstract Conf/Weird Tool",
37
+ attr=dict(
38
+ kind=dict(),
39
+ ),
40
40
  )
41
41
 
42
42
 
@@ -45,12 +45,14 @@ class AbstractObjectProxyConfTool(ConfTool):
45
45
 
46
46
  _abstract = True
47
47
  _footprint = dict(
48
- info = 'Conf tool that find the appropriate begin/end date for an input resource.',
49
- attr = dict(
50
- kind = dict(
51
- values = ['objproxy', ],
48
+ info="Conf tool that find the appropriate begin/end date for an input resource.",
49
+ attr=dict(
50
+ kind=dict(
51
+ values=[
52
+ "objproxy",
53
+ ],
52
54
  ),
53
- )
55
+ ),
54
56
  )
55
57
 
56
58
  def __init__(self, *kargs, **kwargs):
@@ -72,13 +74,15 @@ class AbstractObjectProxyConfTool(ConfTool):
72
74
 
73
75
 
74
76
  #: Holds coupling's data for a particular cutoff/hour
75
- CouplingInfos = collections.namedtuple('CouplingInfos',
76
- ('base', 'dayoff', 'cutoff', 'vapp', 'vconf', 'xpid', 'model', 'steps')
77
- )
77
+ CouplingInfos = collections.namedtuple(
78
+ "CouplingInfos",
79
+ ("base", "dayoff", "cutoff", "vapp", "vconf", "xpid", "model", "steps"),
80
+ )
78
81
 
79
82
 
80
83
  class CouplingOffsetConfError(Exception):
81
84
  """Abstract exception raise by :class:`CouplingOffsetConfTool` objects."""
85
+
82
86
  pass
83
87
 
84
88
 
@@ -86,7 +90,7 @@ class CouplingOffsetConfPrepareError(CouplingOffsetConfError):
86
90
  """Exception raised when an error occurs during coupling data calculations."""
87
91
 
88
92
  def __init__(self, fmtk):
89
- msg = 'It is useless to compute coupling for: {}.'.format(fmtk)
93
+ msg = "It is useless to compute coupling for: {}.".format(fmtk)
90
94
  super().__init__(msg)
91
95
 
92
96
 
@@ -94,11 +98,11 @@ class CouplingOffsetConfRefillError(CouplingOffsetConfError):
94
98
  """Exception raised when an orror occurs during refill."""
95
99
 
96
100
  def __init__(self, fmtk, hh=None):
97
- msg = 'It is useless to compute a refill for: {}'.format(fmtk)
101
+ msg = "It is useless to compute a refill for: {}".format(fmtk)
98
102
  if hh is None:
99
- msg += '.'
103
+ msg += "."
100
104
  else:
101
- msg += ' at HH={!s}.'.format(hh)
105
+ msg += " at HH={!s}.".format(hh)
102
106
  super().__init__(msg)
103
107
 
104
108
 
@@ -106,88 +110,96 @@ class CouplingOffsetConfTool(ConfTool):
106
110
  """Conf tool that do all sorts of computations for coupling."""
107
111
 
108
112
  _footprint = dict(
109
- info = 'Conf tool that do all sorts of computations for coupling',
110
- attr = dict(
111
- kind = dict(
112
- values= ['couplingoffset', ],
113
+ info="Conf tool that do all sorts of computations for coupling",
114
+ attr=dict(
115
+ kind=dict(
116
+ values=[
117
+ "couplingoffset",
118
+ ],
113
119
  ),
114
- cplhhlist = dict(
115
- info = ('The list of cutoff and hours for this application. '
116
- 'If omitted, all entries of the **cplhhbase** attribute are used. ' +
117
- "(e.g ``{'assim':[0, 6, 12, 18], 'production':[0, ]}``)"),
118
- type = FPDict,
119
- optional = True,
120
+ cplhhlist=dict(
121
+ info=(
122
+ "The list of cutoff and hours for this application. "
123
+ "If omitted, all entries of the **cplhhbase** attribute are used. "
124
+ + "(e.g ``{'assim':[0, 6, 12, 18], 'production':[0, ]}``)"
125
+ ),
126
+ type=FPDict,
127
+ optional=True,
120
128
  ),
121
- cplhhbase = dict(
122
- info = ('For a given cutoff and hour, gives the base hour to couple to. ' +
123
- "(e.g ``{'assim':{0:0, 6:6, 12:12, 18:18}, 'production':{0:18}}``)."),
124
- type = FPDict,
129
+ cplhhbase=dict(
130
+ info=(
131
+ "For a given cutoff and hour, gives the base hour to couple to. "
132
+ + "(e.g ``{'assim':{0:0, 6:6, 12:12, 18:18}, 'production':{0:18}}``)."
133
+ ),
134
+ type=FPDict,
125
135
  ),
126
- cpldayoff = dict(
127
- info = ('For a given cutoff and hour, gives an offset in days. 0 by default. ' +
128
- "(e.g ``{'assim':{'default':0}, 'production':{'default':1}}``)."),
129
- type = FPDict,
130
- optional = True,
136
+ cpldayoff=dict(
137
+ info=(
138
+ "For a given cutoff and hour, gives an offset in days. 0 by default. "
139
+ + "(e.g ``{'assim':{'default':0}, 'production':{'default':1}}``)."
140
+ ),
141
+ type=FPDict,
142
+ optional=True,
131
143
  ),
132
- cplcutoff = dict(
133
- info = 'For a given cutoff and hour, gives the base cutoff to couple to.',
134
- type = FPDict,
144
+ cplcutoff=dict(
145
+ info="For a given cutoff and hour, gives the base cutoff to couple to.",
146
+ type=FPDict,
135
147
  ),
136
- cplvapp = dict(
137
- info = 'For a given cutoff and hour, gives the base vapp to couple to.',
138
- type = FPDict,
148
+ cplvapp=dict(
149
+ info="For a given cutoff and hour, gives the base vapp to couple to.",
150
+ type=FPDict,
139
151
  ),
140
- cplvconf = dict(
141
- info = 'For a given cutoff and hour, gives the base vconf to couple to.',
142
- type = FPDict,
152
+ cplvconf=dict(
153
+ info="For a given cutoff and hour, gives the base vconf to couple to.",
154
+ type=FPDict,
143
155
  ),
144
- cplxpid = dict(
145
- info = 'For a given cutoff and hour, gives the experiment ID to couple to.',
146
- type = FPDict,
147
- optional = True,
156
+ cplxpid=dict(
157
+ info="For a given cutoff and hour, gives the experiment ID to couple to.",
158
+ type=FPDict,
159
+ optional=True,
148
160
  ),
149
- cplmodel = dict(
150
- info = 'For a given cutoff and hour, gives the base model to couple to.',
151
- type = FPDict,
152
- optional = True,
161
+ cplmodel=dict(
162
+ info="For a given cutoff and hour, gives the base model to couple to.",
163
+ type=FPDict,
164
+ optional=True,
153
165
  ),
154
- cplsteps = dict(
155
- info = 'For a given cutoff and hour, gives then list of requested terms.',
156
- type = FPDict,
166
+ cplsteps=dict(
167
+ info="For a given cutoff and hour, gives then list of requested terms.",
168
+ type=FPDict,
157
169
  ),
158
- finalterm = dict(
159
- info = 'For a given cutoff and hour, the final term (for "finalterm" token substitution)',
160
- type = FPDict,
161
- optional = True
170
+ finalterm=dict(
171
+ info='For a given cutoff and hour, the final term (for "finalterm" token substitution)',
172
+ type=FPDict,
173
+ optional=True,
162
174
  ),
163
- refill_cutoff = dict(
164
- values = ['assim', 'production', 'all'],
165
- info = 'By default, what is the cutoff name of the refill task.',
166
- optional = True,
167
- default = 'assim',
175
+ refill_cutoff=dict(
176
+ values=["assim", "production", "all"],
177
+ info="By default, what is the cutoff name of the refill task.",
178
+ optional=True,
179
+ default="assim",
168
180
  ),
169
- compute_on_refill = dict(
170
- info = 'Is it necessary to compute coupling files for the refilling cutoff ?',
171
- optional = True,
172
- default = True,
173
- type = bool,
181
+ compute_on_refill=dict(
182
+ info="Is it necessary to compute coupling files for the refilling cutoff ?",
183
+ optional=True,
184
+ default=True,
185
+ type=bool,
174
186
  ),
175
- isolated_refill = dict(
176
- info = 'Are the refill tasks exclusive with prepare tasks ?',
177
- optional = True,
178
- default = True,
179
- type = bool,
187
+ isolated_refill=dict(
188
+ info="Are the refill tasks exclusive with prepare tasks ?",
189
+ optional=True,
190
+ default=True,
191
+ type=bool,
180
192
  ),
181
- verbose = dict(
182
- info = 'When the object is created, print a summary.',
183
- type = bool,
184
- optional = True,
185
- default = True,
193
+ verbose=dict(
194
+ info="When the object is created, print a summary.",
195
+ type=bool,
196
+ optional=True,
197
+ default=True,
186
198
  ),
187
- )
199
+ ),
188
200
  )
189
201
 
190
- _DFLT_KEY = 'default'
202
+ _DFLT_KEY = "default"
191
203
 
192
204
  def __init__(self, *kargs, **kwargs):
193
205
  super().__init__(*kargs, **kwargs)
@@ -204,7 +216,9 @@ class CouplingOffsetConfTool(ConfTool):
204
216
  else:
205
217
  for c, clist in self.cplhhlist.items():
206
218
  if not isinstance(clist, (tuple, list)):
207
- clist = [clist, ]
219
+ clist = [
220
+ clist,
221
+ ]
208
222
  self._target_hhs[c].update([Time(h) for h in clist])
209
223
  t_hhbase = self._reshape_inputs(self.cplhhbase, value_reclass=Time)
210
224
 
@@ -218,63 +232,101 @@ class CouplingOffsetConfTool(ConfTool):
218
232
  t_model = t_vapp
219
233
  else:
220
234
  t_model = self._reshape_inputs(self.cplmodel)
221
- t_xpid = self._reshape_inputs(self.cplxpid, class_default='')
235
+ t_xpid = self._reshape_inputs(self.cplxpid, class_default="")
222
236
 
223
237
  # If relevant, do "finalterm" token substitution
224
238
  if self.finalterm is not None:
225
- t_finalterm = self._reshape_inputs(self.finalterm, value_reclass=str)
239
+ t_finalterm = self._reshape_inputs(
240
+ self.finalterm, value_reclass=str
241
+ )
226
242
  for c, cv in t_hhbase.items():
227
243
  for hh in cv.keys():
228
244
  if isinstance(t_steps[c][hh], str):
229
- t_steps[c][hh] = t_steps[c][hh].replace('finalterm',
230
- t_finalterm[c][hh])
245
+ t_steps[c][hh] = t_steps[c][hh].replace(
246
+ "finalterm", t_finalterm[c][hh]
247
+ )
231
248
 
232
249
  # Build the dictionary of CouplingInfos objects
233
250
  self._cpl_data = collections.defaultdict(dict)
234
251
  for c, cv in t_hhbase.items():
235
- self._cpl_data[c] = {hh: CouplingInfos(cv[hh], int(t_dayoff[c][hh]),
236
- t_cutoff[c][hh], t_vapp[c][hh],
237
- t_vconf[c][hh], t_xpid[c][hh],
238
- t_model[c][hh],
239
- rangex(t_steps[c][hh]))
240
- for hh in cv.keys()}
252
+ self._cpl_data[c] = {
253
+ hh: CouplingInfos(
254
+ cv[hh],
255
+ int(t_dayoff[c][hh]),
256
+ t_cutoff[c][hh],
257
+ t_vapp[c][hh],
258
+ t_vconf[c][hh],
259
+ t_xpid[c][hh],
260
+ t_model[c][hh],
261
+ rangex(t_steps[c][hh]),
262
+ )
263
+ for hh in cv.keys()
264
+ }
241
265
 
242
266
  # Pre-compute the prepare terms
243
267
  self._prepare_terms_map = self._compute_prepare_terms()
244
268
  if self.verbose:
245
269
  print()
246
- print('#### Coupling configuration tool initialised ####')
247
- print('**** Coupling tasks terms map:')
248
- print('{:s} : {:s}'.format(self._cpl_fmtkey(('HH', 'VAPP', 'VCONF', 'XPID', 'MODEL', 'CUTOFF')),
249
- 'Computed Terms'))
270
+ print("#### Coupling configuration tool initialised ####")
271
+ print("**** Coupling tasks terms map:")
272
+ print(
273
+ "{:s} : {:s}".format(
274
+ self._cpl_fmtkey(
275
+ ("HH", "VAPP", "VCONF", "XPID", "MODEL", "CUTOFF")
276
+ ),
277
+ "Computed Terms",
278
+ )
279
+ )
250
280
  for k in sorted(self._prepare_terms_map.keys()):
251
- print('{:s} : {:s}'.format(self._cpl_fmtkey(k),
252
- ' '.join([str(t.hour)
253
- for t in self._prepare_terms_map[k]
254
- ])
255
- )
256
- )
281
+ print(
282
+ "{:s} : {:s}".format(
283
+ self._cpl_fmtkey(k),
284
+ " ".join(
285
+ [str(t.hour) for t in self._prepare_terms_map[k]]
286
+ ),
287
+ )
288
+ )
257
289
 
258
290
  # Pre-compute the default refill_map
259
291
  self._refill_terms_map = dict()
260
- self._refill_terms_map[self.refill_cutoff] = self._compute_refill_terms(self.refill_cutoff,
261
- self.compute_on_refill,
262
- self.isolated_refill)
292
+ self._refill_terms_map[self.refill_cutoff] = (
293
+ self._compute_refill_terms(
294
+ self.refill_cutoff,
295
+ self.compute_on_refill,
296
+ self.isolated_refill,
297
+ )
298
+ )
263
299
  if self.verbose:
264
- print('**** Refill tasks activation map (default refill_cutoff is: {:s}):'.format(self.refill_cutoff))
265
- print('{:s} : {:s}'.format(self._rtask_fmtkey(('VAPP', 'VCONF', 'XPID', 'MODEL', 'CUTOFF')),
266
- 'Active hours'))
300
+ print(
301
+ "**** Refill tasks activation map (default refill_cutoff is: {:s}):".format(
302
+ self.refill_cutoff
303
+ )
304
+ )
305
+ print(
306
+ "{:s} : {:s}".format(
307
+ self._rtask_fmtkey(
308
+ ("VAPP", "VCONF", "XPID", "MODEL", "CUTOFF")
309
+ ),
310
+ "Active hours",
311
+ )
312
+ )
267
313
  for k in sorted(self._refill_terms_map[self.refill_cutoff].keys()):
268
314
  vdict = self._refill_terms_map[self.refill_cutoff][k]
269
- print('{:s} : {:s}'.format(self._rtask_fmtkey(k),
270
- ' '.join([str(t.hour) for t in sorted(vdict.keys())])))
315
+ print(
316
+ "{:s} : {:s}".format(
317
+ self._rtask_fmtkey(k),
318
+ " ".join([str(t.hour) for t in sorted(vdict.keys())]),
319
+ )
320
+ )
271
321
  print()
272
322
 
273
323
  @property
274
324
  def target_hhs(self):
275
325
  return self._target_hhs
276
326
 
277
- def _reshape_inputs(self, input_dict, class_default=None, value_reclass=lambda x: x):
327
+ def _reshape_inputs(
328
+ self, input_dict, class_default=None, value_reclass=lambda x: x
329
+ ):
278
330
  """Deal with default values, check dictionaries and convert keys to Time objects."""
279
331
  # Convert keys to time objects
280
332
  r_dict = dict()
@@ -307,10 +359,15 @@ class CouplingOffsetConfTool(ConfTool):
307
359
  myv[h] = last_default
308
360
  else:
309
361
  if not my_c_hhs >= self.target_hhs[c]:
310
- logger.error("Inconsistent input arrays while processing: \n%s",
311
- str(input_dict))
312
- logger.error("Cutoff %s, expecting the following HH: \n%s",
313
- c, str(self.target_hhs[c]))
362
+ logger.error(
363
+ "Inconsistent input arrays while processing: \n%s",
364
+ str(input_dict),
365
+ )
366
+ logger.error(
367
+ "Cutoff %s, expecting the following HH: \n%s",
368
+ c,
369
+ str(self.target_hhs[c]),
370
+ )
314
371
  raise ValueError("Inconsistent input array.")
315
372
 
316
373
  # Filter values according to _target_hhs
@@ -331,13 +388,9 @@ class CouplingOffsetConfTool(ConfTool):
331
388
 
332
389
  @staticmethod
333
390
  def _cpl_fmtkey(k):
334
- cutoff_map = dict(production='prod')
335
- return '{:5s} {:6s} {:24s} {:s} ({:s})'.format(
336
- k[0],
337
- cutoff_map.get(k[5], k[5]),
338
- k[1] + '/' + k[2],
339
- k[3],
340
- k[4]
391
+ cutoff_map = dict(production="prod")
392
+ return "{:5s} {:6s} {:24s} {:s} ({:s})".format(
393
+ k[0], cutoff_map.get(k[5], k[5]), k[1] + "/" + k[2], k[3], k[4]
341
394
  )
342
395
 
343
396
  @staticmethod
@@ -346,13 +399,15 @@ class CouplingOffsetConfTool(ConfTool):
346
399
 
347
400
  @staticmethod
348
401
  def _rtask_fmtkey(k):
349
- cutoff_map = dict(production='prod')
350
- return '{:6s} {:24s} {:s} ({:s})'.format(cutoff_map.get(k[4], k[4]), k[0] + '/' + k[1], k[2], k[3])
402
+ cutoff_map = dict(production="prod")
403
+ return "{:6s} {:24s} {:s} ({:s})".format(
404
+ cutoff_map.get(k[4], k[4]), k[0] + "/" + k[1], k[2], k[3]
405
+ )
351
406
 
352
407
  @staticmethod
353
408
  def _process_date(date):
354
409
  mydate = Date(date)
355
- myhh = Time('{0.hour:d}:{0.minute:02d}'.format(mydate))
410
+ myhh = Time("{0.hour:d}:{0.minute:02d}".format(mydate))
356
411
  return mydate, myhh
357
412
 
358
413
  @staticmethod
@@ -366,47 +421,85 @@ class CouplingOffsetConfTool(ConfTool):
366
421
  terms_map = collections.defaultdict(set)
367
422
  for _, cv in self._cpl_data.items():
368
423
  for h, infos in cv.items():
369
- key = self._cpl_key(infos.base, infos.cutoff, infos.vapp, infos.vconf, infos.xpid, infos.model)
424
+ key = self._cpl_key(
425
+ infos.base,
426
+ infos.cutoff,
427
+ infos.vapp,
428
+ infos.vconf,
429
+ infos.xpid,
430
+ infos.model,
431
+ )
370
432
  targetoffset = self._hh_offset(h, infos.base, infos.dayoff)
371
433
  terms_map[key].update([s + targetoffset for s in infos.steps])
372
434
  terms_map = {k: sorted(terms) for k, terms in terms_map.items()}
373
435
  return terms_map
374
436
 
375
- def _compute_refill_terms(self, refill_cutoff, compute_on_refill, isolated_refill):
376
- finaldates = collections.defaultdict(functools.partial(collections.defaultdict,
377
- functools.partial(collections.defaultdict, set)))
378
- if refill_cutoff == 'all':
379
- possiblehours = sorted(functools.reduce(lambda x, y: x | y,
380
- [set(l) for l in self.target_hhs.values()]))
437
+ def _compute_refill_terms(
438
+ self, refill_cutoff, compute_on_refill, isolated_refill
439
+ ):
440
+ finaldates = collections.defaultdict(
441
+ functools.partial(
442
+ collections.defaultdict,
443
+ functools.partial(collections.defaultdict, set),
444
+ )
445
+ )
446
+ if refill_cutoff == "all":
447
+ possiblehours = sorted(
448
+ functools.reduce(
449
+ lambda x, y: x | y,
450
+ [set(l) for l in self.target_hhs.values()],
451
+ )
452
+ )
381
453
  else:
382
454
  possiblehours = self.target_hhs[refill_cutoff]
383
455
 
384
456
  # Look 24hr ahead
385
457
  for c, cv in self._cpl_data.items():
386
458
  for h, infos in cv.items():
387
- key = self._rtask_key(infos.cutoff, infos.vapp, infos.vconf, infos.xpid, infos.model)
459
+ key = self._rtask_key(
460
+ infos.cutoff,
461
+ infos.vapp,
462
+ infos.vconf,
463
+ infos.xpid,
464
+ infos.model,
465
+ )
388
466
  offset = self._hh_offset(h, infos.base, infos.dayoff)
389
467
  for possibleh in possiblehours:
390
468
  roffset = self._hh_offset(h, possibleh, 0)
391
- if ((roffset > 0 or
392
- (compute_on_refill and roffset == 0 and (refill_cutoff == 'all' or refill_cutoff == c))) and
393
- (roffset < offset or (isolated_refill and roffset == offset))):
394
- finaldates[key][possibleh][offset - roffset].update([s + offset for s in infos.steps])
469
+ if (
470
+ roffset > 0
471
+ or (
472
+ compute_on_refill
473
+ and roffset == 0
474
+ and (refill_cutoff == "all" or refill_cutoff == c)
475
+ )
476
+ ) and (
477
+ roffset < offset
478
+ or (isolated_refill and roffset == offset)
479
+ ):
480
+ finaldates[key][possibleh][offset - roffset].update(
481
+ [s + offset for s in infos.steps]
482
+ )
395
483
 
396
484
  for key, vdict in finaldates.items():
397
485
  for possibleh in vdict.keys():
398
- vdict[possibleh] = {off: sorted(terms) for off, terms in vdict[possibleh].items()}
486
+ vdict[possibleh] = {
487
+ off: sorted(terms)
488
+ for off, terms in vdict[possibleh].items()
489
+ }
399
490
 
400
491
  return finaldates
401
492
 
402
493
  def compatible_with(self, other):
403
494
  if isinstance(other, self.__class__):
404
- return (self.target_hhs == other.target_hhs and
405
- self.refill_cutoff == other.refill_cutoff)
495
+ return (
496
+ self.target_hhs == other.target_hhs
497
+ and self.refill_cutoff == other.refill_cutoff
498
+ )
406
499
  else:
407
500
  return False
408
501
 
409
- def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=''):
502
+ def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=""):
410
503
  """
411
504
  For a task computing coupling files (at **date** and **cutoff**,
412
505
  for a specific **vapp** and **vconf**), lists the terms that should be
@@ -427,8 +520,11 @@ class CouplingOffsetConfTool(ConfTool):
427
520
  time delta with the coupling model/file base date.
428
521
  """
429
522
  _, myhh = self._process_date(date)
430
- return self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
431
- self._cpl_data[cutoff][myhh].dayoff)
523
+ return self._hh_offset(
524
+ myhh,
525
+ self._cpl_data[cutoff][myhh].base,
526
+ self._cpl_data[cutoff][myhh].dayoff,
527
+ )
432
528
 
433
529
  def coupling_date(self, date, cutoff):
434
530
  """
@@ -436,8 +532,11 @@ class CouplingOffsetConfTool(ConfTool):
436
532
  base date of the coupling model/file.
437
533
  """
438
534
  mydate, myhh = self._process_date(date)
439
- return mydate - self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
440
- self._cpl_data[cutoff][myhh].dayoff)
535
+ return mydate - self._hh_offset(
536
+ myhh,
537
+ self._cpl_data[cutoff][myhh].base,
538
+ self._cpl_data[cutoff][myhh].dayoff,
539
+ )
441
540
 
442
541
  def coupling_terms(self, date, cutoff):
443
542
  """
@@ -445,8 +544,11 @@ class CouplingOffsetConfTool(ConfTool):
445
544
  list of terms that should be fetched from the coupling model/file.
446
545
  """
447
546
  _, myhh = self._process_date(date)
448
- offset = self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
449
- self._cpl_data[cutoff][myhh].dayoff)
547
+ offset = self._hh_offset(
548
+ myhh,
549
+ self._cpl_data[cutoff][myhh].base,
550
+ self._cpl_data[cutoff][myhh].dayoff,
551
+ )
450
552
  return [s + offset for s in self._cpl_data[cutoff][myhh].steps]
451
553
 
452
554
  def _coupling_stuff(self, date, cutoff, stuff):
@@ -458,105 +560,157 @@ class CouplingOffsetConfTool(ConfTool):
458
560
  For a task needing coupling (at **date** and **cutoff**), return the
459
561
  prescribed steps.
460
562
  """
461
- return self._coupling_stuff(date, cutoff, 'steps')
563
+ return self._coupling_stuff(date, cutoff, "steps")
462
564
 
463
565
  def coupling_cutoff(self, date, cutoff):
464
566
  """
465
567
  For a task needing coupling (at **date** and **cutoff**), return the
466
568
  cutoff of the coupling model/file.
467
569
  """
468
- return self._coupling_stuff(date, cutoff, 'cutoff')
570
+ return self._coupling_stuff(date, cutoff, "cutoff")
469
571
 
470
572
  def coupling_vapp(self, date, cutoff):
471
573
  """
472
574
  For a task needing coupling (at **date** and **cutoff**), return the
473
575
  vapp of the coupling model/file.
474
576
  """
475
- return self._coupling_stuff(date, cutoff, 'vapp')
577
+ return self._coupling_stuff(date, cutoff, "vapp")
476
578
 
477
579
  def coupling_vconf(self, date, cutoff):
478
580
  """
479
581
  For a task needing coupling (at **date** and **cutoff**), return the
480
582
  vconf of the coupling model/file.
481
583
  """
482
- return self._coupling_stuff(date, cutoff, 'vconf')
584
+ return self._coupling_stuff(date, cutoff, "vconf")
483
585
 
484
586
  def coupling_xpid(self, date, cutoff):
485
587
  """
486
588
  For a task needing coupling (at **date** and **cutoff**), return the
487
589
  experiment ID of the coupling model/file.
488
590
  """
489
- return self._coupling_stuff(date, cutoff, 'xpid')
591
+ return self._coupling_stuff(date, cutoff, "xpid")
490
592
 
491
593
  def coupling_model(self, date, cutoff):
492
594
  """
493
595
  For a task needing coupling (at **date** and **cutoff**), return the
494
596
  vconf of the coupling model/file.
495
597
  """
496
- return self._coupling_stuff(date, cutoff, 'model')
497
-
498
- def refill_terms(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
598
+ return self._coupling_stuff(date, cutoff, "model")
599
+
600
+ def refill_terms(
601
+ self,
602
+ date,
603
+ cutoff,
604
+ vapp,
605
+ vconf,
606
+ model=None,
607
+ refill_cutoff=None,
608
+ xpid="",
609
+ ):
499
610
  """The terms that should be computed for a given refill task."""
500
- refill_cutoff = self.refill_cutoff if refill_cutoff is None else refill_cutoff
611
+ refill_cutoff = (
612
+ self.refill_cutoff if refill_cutoff is None else refill_cutoff
613
+ )
501
614
  if refill_cutoff not in self._refill_terms_map:
502
- self._refill_terms_map[refill_cutoff] = self._compute_refill_terms(refill_cutoff,
503
- self.compute_on_refill,
504
- self.isolated_refill)
615
+ self._refill_terms_map[refill_cutoff] = self._compute_refill_terms(
616
+ refill_cutoff, self.compute_on_refill, self.isolated_refill
617
+ )
505
618
  if model is None:
506
619
  model = vapp
507
620
  mydate, myhh = self._process_date(date)
508
621
  key = self._rtask_key(cutoff, vapp, vconf, xpid, model)
509
622
  finaldates = dict()
510
- if (key not in self._refill_terms_map[refill_cutoff] or
511
- myhh not in self._refill_terms_map[refill_cutoff][key]):
623
+ if (
624
+ key not in self._refill_terms_map[refill_cutoff]
625
+ or myhh not in self._refill_terms_map[refill_cutoff][key]
626
+ ):
512
627
  raise CouplingOffsetConfRefillError(self._rtask_fmtkey(key))
513
- for off, terms in self._refill_terms_map[refill_cutoff][key][myhh].items():
628
+ for off, terms in self._refill_terms_map[refill_cutoff][key][
629
+ myhh
630
+ ].items():
514
631
  finaldates[str(mydate - off)] = terms
515
- return {'date': finaldates}
516
-
517
- def refill_dates(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
632
+ return {"date": finaldates}
633
+
634
+ def refill_dates(
635
+ self,
636
+ date,
637
+ cutoff,
638
+ vapp,
639
+ vconf,
640
+ model=None,
641
+ refill_cutoff=None,
642
+ xpid="",
643
+ ):
518
644
  """The dates that should be processed in a given refill task."""
519
- return list(self.refill_terms(date, cutoff, vapp, vconf, model=model,
520
- refill_cutoff=refill_cutoff, xpid=xpid)['date'].keys())
645
+ return list(
646
+ self.refill_terms(
647
+ date,
648
+ cutoff,
649
+ vapp,
650
+ vconf,
651
+ model=model,
652
+ refill_cutoff=refill_cutoff,
653
+ xpid=xpid,
654
+ )["date"].keys()
655
+ )
521
656
 
522
- def refill_months(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
657
+ def refill_months(
658
+ self,
659
+ date,
660
+ cutoff,
661
+ vapp,
662
+ vconf,
663
+ model=None,
664
+ refill_cutoff=None,
665
+ xpid="",
666
+ ):
523
667
  """The months that should be processed in a given refill task."""
524
- mindate = min(self.refill_dates(date, cutoff, vapp, vconf, model=model,
525
- refill_cutoff=refill_cutoff, xpid=xpid))
668
+ mindate = min(
669
+ self.refill_dates(
670
+ date,
671
+ cutoff,
672
+ vapp,
673
+ vconf,
674
+ model=model,
675
+ refill_cutoff=refill_cutoff,
676
+ xpid=xpid,
677
+ )
678
+ )
526
679
  minmonth = Month(mindate)
527
680
  return [minmonth, minmonth + 1]
528
681
 
529
682
 
530
683
  class AggregatedCouplingOffsetConfTool(ConfTool):
531
-
532
684
  _footprint = dict(
533
- info = 'Aggregate several CouplingOffsetConfTool objects into one',
534
- attr = dict(
535
- kind = dict(
536
- values= ['aggcouplingoffset', ],
685
+ info="Aggregate several CouplingOffsetConfTool objects into one",
686
+ attr=dict(
687
+ kind=dict(
688
+ values=[
689
+ "aggcouplingoffset",
690
+ ],
537
691
  ),
538
- nominal = dict(
539
- info = "A list of couplingoffset objects used in nominal cases",
540
- type = FPList,
692
+ nominal=dict(
693
+ info="A list of couplingoffset objects used in nominal cases",
694
+ type=FPList,
541
695
  ),
542
- alternate = dict(
543
- info = "A list of couplingoffset objects used in rescue modes",
544
- type = FPList,
545
- optional = True,
696
+ alternate=dict(
697
+ info="A list of couplingoffset objects used in rescue modes",
698
+ type=FPList,
699
+ optional=True,
546
700
  ),
547
- use_alternates = dict(
548
- info = 'Actually use rescue mode ?',
549
- optional = True,
550
- default = True,
551
- type = bool,
701
+ use_alternates=dict(
702
+ info="Actually use rescue mode ?",
703
+ optional=True,
704
+ default=True,
705
+ type=bool,
552
706
  ),
553
- verbose = dict(
554
- info = 'When the object is created, print a summary.',
555
- type = bool,
556
- optional = True,
557
- default = True,
707
+ verbose=dict(
708
+ info="When the object is created, print a summary.",
709
+ type=bool,
710
+ optional=True,
711
+ default=True,
558
712
  ),
559
- )
713
+ ),
560
714
  )
561
715
 
562
716
  def __init__(self, *kargs, **kwargs):
@@ -570,26 +724,52 @@ class AggregatedCouplingOffsetConfTool(ConfTool):
570
724
  # Check consistency
571
725
  for num, toolobj in enumerate(self._toolslist[1:]):
572
726
  if not self._toolslist[0].compatible_with(toolobj):
573
- print('\n', '*' * 50)
574
- print('self._toolslist[0] =', self._toolslist[0], '\n',
575
- ' target_hhs =', self._toolslist[0].target_hhs,
576
- ' refill_cutoff =', self._toolslist[0].refill_cutoff)
577
- print('is not consistent with object num', num, ':', toolobj, '\n',
578
- ' target_hhs =', toolobj.target_hhs,
579
- ' refill_cutoff =', toolobj.refill_cutoff)
727
+ print("\n", "*" * 50)
728
+ print(
729
+ "self._toolslist[0] =",
730
+ self._toolslist[0],
731
+ "\n",
732
+ " target_hhs =",
733
+ self._toolslist[0].target_hhs,
734
+ " refill_cutoff =",
735
+ self._toolslist[0].refill_cutoff,
736
+ )
737
+ print(
738
+ "is not consistent with object num",
739
+ num,
740
+ ":",
741
+ toolobj,
742
+ "\n",
743
+ " target_hhs =",
744
+ toolobj.target_hhs,
745
+ " refill_cutoff =",
746
+ toolobj.refill_cutoff,
747
+ )
580
748
  raise CouplingOffsetConfError("Inconsistent sub-objects")
581
749
 
582
750
  if self.verbose:
583
751
  print()
584
- print('#### Aggregated Coupling configuration tool initialised ####')
585
- print('It is made of {:d} nominal configuration tool(s)'.format(len(self.nominal)))
752
+ print(
753
+ "#### Aggregated Coupling configuration tool initialised ####"
754
+ )
755
+ print(
756
+ "It is made of {:d} nominal configuration tool(s)".format(
757
+ len(self.nominal)
758
+ )
759
+ )
586
760
  if self.alternate and self.use_alternates:
587
- print('+ {:d} rescue-mode configuration tool(s)'.format(len(self.alternate)))
761
+ print(
762
+ "+ {:d} rescue-mode configuration tool(s)".format(
763
+ len(self.alternate)
764
+ )
765
+ )
588
766
  else:
589
- print('No rescue-mode configuration tool is considered (deactivated)')
767
+ print(
768
+ "No rescue-mode configuration tool is considered (deactivated)"
769
+ )
590
770
  print()
591
771
 
592
- def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=''):
772
+ def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=""):
593
773
  """
594
774
  For a task computing coupling files (at **date** and **cutoff**,
595
775
  for a specific **vapp** and **vconf**), lists the terms that should be
@@ -598,7 +778,11 @@ class AggregatedCouplingOffsetConfTool(ConfTool):
598
778
  terms = set()
599
779
  for toolobj in self._toolslist:
600
780
  try:
601
- terms.update(toolobj.prepare_terms(date, cutoff, vapp, vconf, model=model, xpid=xpid))
781
+ terms.update(
782
+ toolobj.prepare_terms(
783
+ date, cutoff, vapp, vconf, model=model, xpid=xpid
784
+ )
785
+ )
602
786
  except CouplingOffsetConfPrepareError as e:
603
787
  lateste = e
604
788
  if not terms:
@@ -606,14 +790,30 @@ class AggregatedCouplingOffsetConfTool(ConfTool):
606
790
  else:
607
791
  return sorted(terms)
608
792
 
609
- def refill_terms(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
793
+ def refill_terms(
794
+ self,
795
+ date,
796
+ cutoff,
797
+ vapp,
798
+ vconf,
799
+ model=None,
800
+ refill_cutoff=None,
801
+ xpid="",
802
+ ):
610
803
  """The terms that should be computed for a given refill task."""
611
804
  finaldates = collections.defaultdict(set)
612
805
  for toolobj in self._toolslist:
613
806
  try:
614
- rt = toolobj.refill_terms(date, cutoff, vapp, vconf, model=model,
615
- refill_cutoff=refill_cutoff, xpid=xpid)
616
- for k, v in rt['date'].items():
807
+ rt = toolobj.refill_terms(
808
+ date,
809
+ cutoff,
810
+ vapp,
811
+ vconf,
812
+ model=model,
813
+ refill_cutoff=refill_cutoff,
814
+ xpid=xpid,
815
+ )
816
+ for k, v in rt["date"].items():
617
817
  finaldates[k].update(v)
618
818
  except CouplingOffsetConfRefillError as e:
619
819
  lateste = e
@@ -622,23 +822,60 @@ class AggregatedCouplingOffsetConfTool(ConfTool):
622
822
  else:
623
823
  for k, v in finaldates.items():
624
824
  finaldates[k] = sorted(v)
625
- return {'date': finaldates}
626
-
627
- def refill_dates(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
825
+ return {"date": finaldates}
826
+
827
+ def refill_dates(
828
+ self,
829
+ date,
830
+ cutoff,
831
+ vapp,
832
+ vconf,
833
+ model=None,
834
+ refill_cutoff=None,
835
+ xpid="",
836
+ ):
628
837
  """The dates that should be processed in a given refill task."""
629
- return list(self.refill_terms(date, cutoff, vapp, vconf, model=model,
630
- refill_cutoff=refill_cutoff, xpid=xpid)['date'].keys())
838
+ return list(
839
+ self.refill_terms(
840
+ date,
841
+ cutoff,
842
+ vapp,
843
+ vconf,
844
+ model=model,
845
+ refill_cutoff=refill_cutoff,
846
+ xpid=xpid,
847
+ )["date"].keys()
848
+ )
631
849
 
632
- def refill_months(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
850
+ def refill_months(
851
+ self,
852
+ date,
853
+ cutoff,
854
+ vapp,
855
+ vconf,
856
+ model=None,
857
+ refill_cutoff=None,
858
+ xpid="",
859
+ ):
633
860
  """The months that should be processed in a given refill task."""
634
- mindate = min(self.refill_dates(date, cutoff, vapp, vconf, model=model,
635
- refill_cutoff=refill_cutoff, xpid=xpid))
861
+ mindate = min(
862
+ self.refill_dates(
863
+ date,
864
+ cutoff,
865
+ vapp,
866
+ vconf,
867
+ model=model,
868
+ refill_cutoff=refill_cutoff,
869
+ xpid=xpid,
870
+ )
871
+ )
636
872
  minmonth = Month(mindate)
637
873
  return [minmonth, minmonth + 1]
638
874
 
639
875
 
640
876
  class TimeSerieInputFinderError(Exception):
641
877
  """Any exception raise by :class:`TimeSerieInputFinderConfTool` objects."""
878
+
642
879
  pass
643
880
 
644
881
 
@@ -669,31 +906,27 @@ class TimeSerieInputFinderConfTool(ConfTool):
669
906
  """
670
907
 
671
908
  _footprint = dict(
672
- info = 'Conf tool that find the appropriate begin/end date for an input resource.',
673
- attr = dict(
674
- kind = dict(
675
- values = ['timeserie', ],
909
+ info="Conf tool that find the appropriate begin/end date for an input resource.",
910
+ attr=dict(
911
+ kind=dict(
912
+ values=[
913
+ "timeserie",
914
+ ],
676
915
  ),
677
- timeserie_begin = dict(
678
- info = "The date when the time serie starts",
679
- type = Date
916
+ timeserie_begin=dict(
917
+ info="The date when the time serie starts", type=Date
680
918
  ),
681
- timeserie_step = dict(
682
- info = "The step between files of the time serie.",
683
- type = Period
919
+ timeserie_step=dict(
920
+ info="The step between files of the time serie.", type=Period
684
921
  ),
685
- upperbound_included = dict(
686
- type = bool,
687
- optional = True,
688
- default = True
922
+ upperbound_included=dict(type=bool, optional=True, default=True),
923
+ singlefile=dict(
924
+ info="The period requested by a user should be contained in a single file.",
925
+ type=bool,
926
+ optional=True,
927
+ default=False,
689
928
  ),
690
- singlefile = dict(
691
- info = "The period requested by a user should be contained in a single file.",
692
- type = bool,
693
- optional = True,
694
- default = False
695
- )
696
- )
929
+ ),
697
930
  )
698
931
 
699
932
  def __init__(self, *kargs, **kwargs):
@@ -705,10 +938,14 @@ class TimeSerieInputFinderConfTool(ConfTool):
705
938
  """Find the appropriate tiem serie's file date just before **begindate**."""
706
939
  if begindate not in self._begincache:
707
940
  if begindate < self.timeserie_begin:
708
- raise TimeSerieInputFinderError("Request begin date is too soon !")
941
+ raise TimeSerieInputFinderError(
942
+ "Request begin date is too soon !"
943
+ )
709
944
  dt = begindate - self.timeserie_begin
710
945
  nsteps = int(math.floor(dt.length / self._steplength))
711
- self._begincache[begindate] = self.timeserie_begin + nsteps * self.timeserie_step
946
+ self._begincache[begindate] = (
947
+ self.timeserie_begin + nsteps * self.timeserie_step
948
+ )
712
949
  return self._begincache[begindate]
713
950
 
714
951
  def _begindates_expansion(self, tdate, tlength):
@@ -719,7 +956,9 @@ class TimeSerieInputFinderConfTool(ConfTool):
719
956
  nfiles += 1
720
957
  if nfiles > 1:
721
958
  if self.singlefile:
722
- raise TimeSerieInputFinderError("Multiple files requested but singlefile=.T.")
959
+ raise TimeSerieInputFinderError(
960
+ "Multiple files requested but singlefile=.T."
961
+ )
723
962
  return [tdate + i * self.timeserie_step for i in range(0, nfiles)]
724
963
  else:
725
964
  return tdate
@@ -767,7 +1006,9 @@ class TimeSerieInputFinderConfTool(ConfTool):
767
1006
  def begindate(self, begindate, term):
768
1007
  """Find the file dates encompassing [**begindate**, **begindate** + **term**]."""
769
1008
  begindate, term = self._date_term_normalise(begindate, term)
770
- return self._begindates_expansion(self._begin_lookup(begindate), int(term) * 60)
1009
+ return self._begindates_expansion(
1010
+ self._begin_lookup(begindate), int(term) * 60
1011
+ )
771
1012
 
772
1013
  def enddate(self, begindate, term):
773
1014
  """Find the file enddates encompassing [**begindate**, **begindate** + **term**]."""
@@ -933,33 +1174,41 @@ class ArpIfsForecastTermConfTool(ConfTool):
933
1174
  """
934
1175
 
935
1176
  _footprint = dict(
936
- info = "Conf tool that helps setting up Arpege's forecast term and outputs",
937
- attr = dict(
938
- kind = dict(
939
- values= ['arpifs_fcterms', ],
1177
+ info="Conf tool that helps setting up Arpege's forecast term and outputs",
1178
+ attr=dict(
1179
+ kind=dict(
1180
+ values=[
1181
+ "arpifs_fcterms",
1182
+ ],
940
1183
  ),
941
- fcterm_def = dict(
942
- info = ("The forecast's term for each cutoff and base time " +
943
- "(e.g ``{'assim':{0:6, 12:6}, 'production':{0:102}}``)"),
944
- type = dict,
1184
+ fcterm_def=dict(
1185
+ info=(
1186
+ "The forecast's term for each cutoff and base time "
1187
+ + "(e.g ``{'assim':{0:6, 12:6}, 'production':{0:102}}``)"
1188
+ ),
1189
+ type=dict,
945
1190
  ),
946
1191
  fcterm_unit=dict(
947
1192
  info="The forecast's term unit (hour or timestep)",
948
- values=['hour', 'timestep'],
1193
+ values=["hour", "timestep"],
949
1194
  optional=True,
950
- default='hour',
1195
+ default="hour",
951
1196
  ),
952
1197
  hist_terms_def=dict(
953
- info=("The forecast's terms when historical files are needed " +
954
- "(for permanant storage) " +
955
- "(e.g ``{'assim':{default: '0-finalterm-3'}, " +
956
- "'production':{0:'0-23-1,24-finalterm-6}}``)"),
1198
+ info=(
1199
+ "The forecast's terms when historical files are needed "
1200
+ + "(for permanant storage) "
1201
+ + "(e.g ``{'assim':{default: '0-finalterm-3'}, "
1202
+ + "'production':{0:'0-23-1,24-finalterm-6}}``)"
1203
+ ),
957
1204
  type=dict,
958
1205
  optional=True,
959
1206
  ),
960
1207
  surf_terms_def=dict(
961
- info=("The forecast's terms when surface files are needed " +
962
- "(for permanant storage) "),
1208
+ info=(
1209
+ "The forecast's terms when surface files are needed "
1210
+ + "(for permanant storage) "
1211
+ ),
963
1212
  type=dict,
964
1213
  optional=True,
965
1214
  ),
@@ -974,63 +1223,93 @@ class ArpIfsForecastTermConfTool(ConfTool):
974
1223
  optional=True,
975
1224
  ),
976
1225
  extra_fp_terms_def=dict(
977
- info=("The forecast's terms when extra fullpos diagnostics are computed. " +
978
- "They are always computed by some offline tasks. " +
979
- "The dictionary has an additional level (describing the 'name' of the " +
980
- "extra fullpos processing"),
1226
+ info=(
1227
+ "The forecast's terms when extra fullpos diagnostics are computed. "
1228
+ + "They are always computed by some offline tasks. "
1229
+ + "The dictionary has an additional level (describing the 'name' of the "
1230
+ + "extra fullpos processing"
1231
+ ),
981
1232
  type=dict,
982
1233
  optional=True,
983
1234
  ),
984
1235
  secondary_diag_terms_def=dict(
985
- info=("The forecast's terms when secondary diagnostics are computed. " +
986
- "Secondary dignostics are based on diagnostics previously created by " +
987
- "the inline/offline diag fullpos (see diag_fp_terms_def)." +
988
- "The dictionary has an additional level (describing the 'name' of the " +
989
- "secondary diags"),
1236
+ info=(
1237
+ "The forecast's terms when secondary diagnostics are computed. "
1238
+ + "Secondary dignostics are based on diagnostics previously created by "
1239
+ + "the inline/offline diag fullpos (see diag_fp_terms_def)."
1240
+ + "The dictionary has an additional level (describing the 'name' of the "
1241
+ + "secondary diags"
1242
+ ),
990
1243
  type=dict,
991
1244
  optional=True,
992
1245
  ),
993
- use_inline_fp = dict(
994
- info = 'Use inline Fullpos to compute "core_fp_terms"',
995
- type = bool,
996
- optional = True,
997
- default = True,
1246
+ use_inline_fp=dict(
1247
+ info='Use inline Fullpos to compute "core_fp_terms"',
1248
+ type=bool,
1249
+ optional=True,
1250
+ default=True,
998
1251
  ),
999
- )
1252
+ ),
1000
1253
  )
1001
1254
 
1002
- _ACTUAL_T_RE = re.compile(r'(\w+)_terms$')
1003
- _ACTUAL_FPLIST_T_RE = re.compile(r'(\w+)_terms_fplist$')
1255
+ _ACTUAL_T_RE = re.compile(r"(\w+)_terms$")
1256
+ _ACTUAL_FPLIST_T_RE = re.compile(r"(\w+)_terms_fplist$")
1004
1257
  _UNDEFINED = object()
1005
1258
 
1006
1259
  def __init__(self, *kargs, **kwargs):
1007
1260
  super().__init__(*kargs, **kwargs)
1008
- self._x_fcterm = self._check_data_keys_and_times(self.fcterm_def, 'fcterm_def',
1009
- cast=self._cast_unique_value)
1010
- self._x_hist_terms = self._check_data_keys_and_times(self.hist_terms_def, 'hist_terms_def',
1011
- cast=self._cast_timerangex)
1012
- self._x_surf_terms = self._check_data_keys_and_times(self.surf_terms_def, 'surf_terms_def',
1013
- cast=self._cast_timerangex)
1014
- self._x_norm_terms = self._check_data_keys_and_times(self.norm_terms_def, 'norm_terms_def',
1015
- cast=self._cast_timerangex)
1016
- self._x_diag_fp_terms = self._check_data_keys_and_times(self.diag_fp_terms_def, 'diag_fp_terms_def',
1017
- cast=self._cast_timerangex)
1018
- self._x_extra_fp_terms = dict() if self.extra_fp_terms_def is None else self.extra_fp_terms_def
1019
- if not all([isinstance(v, dict) for v in self._x_extra_fp_terms.values()]):
1261
+ self._x_fcterm = self._check_data_keys_and_times(
1262
+ self.fcterm_def, "fcterm_def", cast=self._cast_unique_value
1263
+ )
1264
+ self._x_hist_terms = self._check_data_keys_and_times(
1265
+ self.hist_terms_def, "hist_terms_def", cast=self._cast_timerangex
1266
+ )
1267
+ self._x_surf_terms = self._check_data_keys_and_times(
1268
+ self.surf_terms_def, "surf_terms_def", cast=self._cast_timerangex
1269
+ )
1270
+ self._x_norm_terms = self._check_data_keys_and_times(
1271
+ self.norm_terms_def, "norm_terms_def", cast=self._cast_timerangex
1272
+ )
1273
+ self._x_diag_fp_terms = self._check_data_keys_and_times(
1274
+ self.diag_fp_terms_def,
1275
+ "diag_fp_terms_def",
1276
+ cast=self._cast_timerangex,
1277
+ )
1278
+ self._x_extra_fp_terms = (
1279
+ dict()
1280
+ if self.extra_fp_terms_def is None
1281
+ else self.extra_fp_terms_def
1282
+ )
1283
+ if not all(
1284
+ [isinstance(v, dict) for v in self._x_extra_fp_terms.values()]
1285
+ ):
1020
1286
  raise ValueError("extra_fp_terms values need to be dictionaries")
1021
- self._x_extra_fp_terms = {k: self._check_data_keys_and_times(v,
1022
- 'extra_fp_terms_def[{:s}]'.format(k),
1023
- cast=self._cast_timerangex)
1024
- for k, v in self._x_extra_fp_terms.items()}
1025
- self._x_secondary_diag_terms_def = (dict()
1026
- if self.secondary_diag_terms_def is None
1027
- else self.secondary_diag_terms_def)
1028
- if not all([isinstance(v, dict) for v in self._x_secondary_diag_terms_def.values()]):
1287
+ self._x_extra_fp_terms = {
1288
+ k: self._check_data_keys_and_times(
1289
+ v,
1290
+ "extra_fp_terms_def[{:s}]".format(k),
1291
+ cast=self._cast_timerangex,
1292
+ )
1293
+ for k, v in self._x_extra_fp_terms.items()
1294
+ }
1295
+ self._x_secondary_diag_terms_def = (
1296
+ dict()
1297
+ if self.secondary_diag_terms_def is None
1298
+ else self.secondary_diag_terms_def
1299
+ )
1300
+ if not all(
1301
+ [
1302
+ isinstance(v, dict)
1303
+ for v in self._x_secondary_diag_terms_def.values()
1304
+ ]
1305
+ ):
1029
1306
  raise ValueError("extra_fp_terms values need to be dictionaries")
1030
1307
  self._x_secondary_diag_terms_def = {
1031
- k: self._check_data_keys_and_times(v,
1032
- 'secondary_diag_terms_def[{:s}]'.format(k),
1033
- cast=self._cast_timerangex)
1308
+ k: self._check_data_keys_and_times(
1309
+ v,
1310
+ "secondary_diag_terms_def[{:s}]".format(k),
1311
+ cast=self._cast_timerangex,
1312
+ )
1034
1313
  for k, v in self._x_secondary_diag_terms_def.items()
1035
1314
  }
1036
1315
  self._lookup_cache = dict()
@@ -1040,7 +1319,7 @@ class ArpIfsForecastTermConfTool(ConfTool):
1040
1319
  def _clone(self, **kwargs):
1041
1320
  my_args = self.footprint_as_shallow_dict()
1042
1321
  my_args.update(kwargs)
1043
- return self.__class__(** my_args)
1322
+ return self.__class__(**my_args)
1044
1323
 
1045
1324
  @property
1046
1325
  def no_inline(self):
@@ -1054,7 +1333,7 @@ class ArpIfsForecastTermConfTool(ConfTool):
1054
1333
  return value
1055
1334
 
1056
1335
  def _cast_unique_value(self, value):
1057
- if self.fcterm_unit == 'hour':
1336
+ if self.fcterm_unit == "hour":
1058
1337
  return Time(value)
1059
1338
  else:
1060
1339
  return int(value)
@@ -1063,7 +1342,7 @@ class ArpIfsForecastTermConfTool(ConfTool):
1063
1342
  def _cast_timerangex(value):
1064
1343
  if not (value is None or isinstance(value, str)):
1065
1344
  if isinstance(value, collections.abc.Iterable):
1066
- value = ','.join([str(e) for e in value])
1345
+ value = ",".join([str(e) for e in value])
1067
1346
  else:
1068
1347
  value = str(value)
1069
1348
  return value
@@ -1074,8 +1353,10 @@ class ArpIfsForecastTermConfTool(ConfTool):
1074
1353
  if data is None:
1075
1354
  return dict(default=dict(default=None))
1076
1355
  else:
1077
- if not set(data.keys()) <= {'assim', 'production', 'default'}:
1078
- raise ValueError('Impoper value ({!s}) for "{:s}".'.format(data, dataname))
1356
+ if not set(data.keys()) <= {"assim", "production", "default"}:
1357
+ raise ValueError(
1358
+ 'Impoper value ({!s}) for "{:s}".'.format(data, dataname)
1359
+ )
1079
1360
  return data
1080
1361
 
1081
1362
  def _check_data_keys_and_times(self, data, dataname, cast=None):
@@ -1085,14 +1366,25 @@ class ArpIfsForecastTermConfTool(ConfTool):
1085
1366
  new_data = dict()
1086
1367
  for data_k, data_v in data.items():
1087
1368
  if not isinstance(data_v, dict):
1088
- raise ValueError('The {:s} "{:s}" entry should be a dictionary (got "{!s}")'
1089
- .format(dataname, data_k, data_v))
1369
+ raise ValueError(
1370
+ 'The {:s} "{:s}" entry should be a dictionary (got "{!s}")'.format(
1371
+ dataname, data_k, data_v
1372
+ )
1373
+ )
1090
1374
  try:
1091
- new_data[data_k] = {'default' if k == 'default' else Time(k): cast(v)
1092
- for k, v in data_v.items()}
1375
+ new_data[data_k] = {
1376
+ "default" if k == "default" else Time(k): cast(v)
1377
+ for k, v in data_v.items()
1378
+ }
1093
1379
  except ValueError as e:
1094
- raise ValueError("Error while processing {:s}'s {:s}: ".format(dataname, data_k) +
1095
- "Could not convert to Time (original message '{!s}')".format(e))
1380
+ raise ValueError(
1381
+ "Error while processing {:s}'s {:s}: ".format(
1382
+ dataname, data_k
1383
+ )
1384
+ + "Could not convert to Time (original message '{!s}')".format(
1385
+ e
1386
+ )
1387
+ )
1096
1388
  return new_data
1097
1389
 
1098
1390
  def _cutoff_hh_lookup(self, what_desc, cutoff, hh, rawdata=None):
@@ -1101,15 +1393,23 @@ class ArpIfsForecastTermConfTool(ConfTool):
1101
1393
  hh = Time(hh)
1102
1394
  if (what_desc, cutoff, hh) not in self._lookup_cache:
1103
1395
  if rawdata is None:
1104
- rawdata = getattr(self, '_x_{:s}'.format(what_desc))
1105
- cutoff_v = rawdata.get(cutoff, rawdata.get('default', self._UNDEFINED))
1396
+ rawdata = getattr(self, "_x_{:s}".format(what_desc))
1397
+ cutoff_v = rawdata.get(
1398
+ cutoff, rawdata.get("default", self._UNDEFINED)
1399
+ )
1106
1400
  if cutoff_v is self._UNDEFINED:
1107
- raise ValueError('Nothing is defined for cutoff="{:s}" in "{:s}"'
1108
- .format(cutoff, what_desc))
1109
- hh_v = cutoff_v.get(hh, cutoff_v.get('default', self._UNDEFINED))
1401
+ raise ValueError(
1402
+ 'Nothing is defined for cutoff="{:s}" in "{:s}"'.format(
1403
+ cutoff, what_desc
1404
+ )
1405
+ )
1406
+ hh_v = cutoff_v.get(hh, cutoff_v.get("default", self._UNDEFINED))
1110
1407
  if hh_v is self._UNDEFINED:
1111
- raise ValueError('Nothing is defined for cutoff="{:s}"/hh="{!s}" in "{:s}"'
1112
- .format(cutoff, hh, what_desc))
1408
+ raise ValueError(
1409
+ 'Nothing is defined for cutoff="{:s}"/hh="{!s}" in "{:s}"'.format(
1410
+ cutoff, hh, what_desc
1411
+ )
1412
+ )
1113
1413
  self._lookup_cache[(what_desc, cutoff, hh)] = hh_v
1114
1414
  return self._lookup_cache[(what_desc, cutoff, hh)]
1115
1415
 
@@ -1117,26 +1417,34 @@ class ArpIfsForecastTermConfTool(ConfTool):
1117
1417
  """Look for a particular cutoff in self._x_what_desc and resolve the rangex."""
1118
1418
  if (what_desc, cutoff, hh) not in self._lookup_rangex_cache:
1119
1419
  try:
1120
- what = self._cutoff_hh_lookup(what_desc, cutoff, hh, rawdata=rawdata)
1420
+ what = self._cutoff_hh_lookup(
1421
+ what_desc, cutoff, hh, rawdata=rawdata
1422
+ )
1121
1423
  except ValueError:
1122
1424
  what = None
1123
1425
  if what is None:
1124
1426
  self._lookup_rangex_cache[(what_desc, cutoff, hh)] = list()
1125
1427
  else:
1126
- finalterm = self._cutoff_hh_lookup('fcterm', cutoff, hh)
1127
- if 'finalterm' in what:
1128
- what = what.replace('finalterm', str(finalterm))
1428
+ finalterm = self._cutoff_hh_lookup("fcterm", cutoff, hh)
1429
+ if "finalterm" in what:
1430
+ what = what.replace("finalterm", str(finalterm))
1129
1431
  try:
1130
1432
  tir = timeintrangex(what)
1131
1433
  except (TypeError, ValueError):
1132
1434
  raise ValueError(
1133
- 'Could not process "{:s}" using timeintrangex (from "{:s}" with cutoff={:s}/hh={!s})'
1134
- .format(what, what_desc, cutoff, hh)
1435
+ 'Could not process "{:s}" using timeintrangex (from "{:s}" with cutoff={:s}/hh={!s})'.format(
1436
+ what, what_desc, cutoff, hh
1437
+ )
1438
+ )
1439
+ if self.fcterm_unit == "timestep" and not all(
1440
+ [isinstance(i, int) for i in tir]
1441
+ ):
1442
+ raise ValueError(
1443
+ 'No hours/minutes allowed when fcterm_unit is "timestep" '
1444
+ + '(from "{:s}" with cutoff={:s}/hh={!s})'.format(
1445
+ what_desc, cutoff, hh
1446
+ )
1135
1447
  )
1136
- if self.fcterm_unit == 'timestep' and not all([isinstance(i, int) for i in tir]):
1137
- raise ValueError('No hours/minutes allowed when fcterm_unit is "timestep" ' +
1138
- '(from "{:s}" with cutoff={:s}/hh={!s})'
1139
- .format(what_desc, cutoff, hh))
1140
1448
  self._lookup_rangex_cache[(what_desc, cutoff, hh)] = sorted(
1141
1449
  [t for t in tir if t <= finalterm]
1142
1450
  )
@@ -1144,7 +1452,7 @@ class ArpIfsForecastTermConfTool(ConfTool):
1144
1452
 
1145
1453
  def fcterm(self, cutoff, hh):
1146
1454
  """The forecast term for **cutoff** and **hh**."""
1147
- fcterm = self._cutoff_hh_lookup('fcterm', cutoff, hh)
1455
+ fcterm = self._cutoff_hh_lookup("fcterm", cutoff, hh)
1148
1456
  if isinstance(fcterm, Time) and fcterm.minute == 0:
1149
1457
  return fcterm.hour
1150
1458
  else:
@@ -1152,22 +1460,22 @@ class ArpIfsForecastTermConfTool(ConfTool):
1152
1460
 
1153
1461
  def hist_terms(self, cutoff, hh):
1154
1462
  """The list of terms for requested/archived historical files."""
1155
- return self._cutoff_hh_rangex_lookup('hist_terms', cutoff, hh)
1463
+ return self._cutoff_hh_rangex_lookup("hist_terms", cutoff, hh)
1156
1464
 
1157
1465
  def surf_terms(self, cutoff, hh):
1158
1466
  """The list of terms for historical surface files."""
1159
- return self._cutoff_hh_rangex_lookup('surf_terms', cutoff, hh)
1467
+ return self._cutoff_hh_rangex_lookup("surf_terms", cutoff, hh)
1160
1468
 
1161
1469
  def norm_terms(self, cutoff, hh):
1162
1470
  """The list of terms for norm calculations."""
1163
- return self._cutoff_hh_rangex_lookup('norm_terms', cutoff, hh)
1471
+ return self._cutoff_hh_rangex_lookup("norm_terms", cutoff, hh)
1164
1472
 
1165
1473
  def inline_terms(self, cutoff, hh):
1166
1474
  """The list of terms for inline diagnostics."""
1167
1475
  if self.use_inline_fp:
1168
1476
  return sorted(
1169
- set(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh)) |
1170
- self._secondary_diag_terms_set(cutoff, hh)
1477
+ set(self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh))
1478
+ | self._secondary_diag_terms_set(cutoff, hh)
1171
1479
  )
1172
1480
  else:
1173
1481
  return list()
@@ -1178,8 +1486,8 @@ class ArpIfsForecastTermConfTool(ConfTool):
1178
1486
  return list()
1179
1487
  else:
1180
1488
  return sorted(
1181
- set(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh)) |
1182
- self._secondary_diag_terms_set(cutoff, hh)
1489
+ set(self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh))
1490
+ | self._secondary_diag_terms_set(cutoff, hh)
1183
1491
  )
1184
1492
 
1185
1493
  def diag_terms_fplist(self, cutoff, hh):
@@ -1188,15 +1496,21 @@ class ArpIfsForecastTermConfTool(ConfTool):
1188
1496
  return FPList(flist) if flist else []
1189
1497
 
1190
1498
  def _extra_fp_terms_item_fplist(self, item, cutoff, hh):
1191
- flist = self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(item),
1192
- cutoff, hh,
1193
- rawdata=self._x_extra_fp_terms[item])
1499
+ flist = self._cutoff_hh_rangex_lookup(
1500
+ "extra_fp_terms[{:s}]".format(item),
1501
+ cutoff,
1502
+ hh,
1503
+ rawdata=self._x_extra_fp_terms[item],
1504
+ )
1194
1505
  return FPList(flist) if flist else []
1195
1506
 
1196
1507
  def _secondary_diag_terms_item_fplist(self, item, cutoff, hh):
1197
- flist = self._cutoff_hh_rangex_lookup('secondary_diag_terms[{:s}]'.format(item),
1198
- cutoff, hh,
1199
- rawdata=self._x_secondary_diag_terms_def[item])
1508
+ flist = self._cutoff_hh_rangex_lookup(
1509
+ "secondary_diag_terms[{:s}]".format(item),
1510
+ cutoff,
1511
+ hh,
1512
+ rawdata=self._x_secondary_diag_terms_def[item],
1513
+ )
1200
1514
  return FPList(flist) if flist else []
1201
1515
 
1202
1516
  @secure_getattr
@@ -1204,37 +1518,65 @@ class ArpIfsForecastTermConfTool(ConfTool):
1204
1518
  actual_m = self._ACTUAL_T_RE.match(item)
1205
1519
  actual_fplist_m = self._ACTUAL_FPLIST_T_RE.match(item)
1206
1520
  if actual_m and actual_m.group(1) in self._x_extra_fp_terms.keys():
1207
- return functools.partial(self._cutoff_hh_rangex_lookup,
1208
- 'extra_fp_terms[{:s}]'.format(actual_m.group(1)),
1209
- rawdata=self._x_extra_fp_terms[actual_m.group(1)])
1210
- elif actual_fplist_m and actual_fplist_m.group(1) in self._x_extra_fp_terms.keys():
1211
- return functools.partial(self._extra_fp_terms_item_fplist,
1212
- actual_fplist_m.group(1))
1213
- elif actual_m and actual_m.group(1) in self._x_secondary_diag_terms_def.keys():
1214
- return functools.partial(self._cutoff_hh_rangex_lookup,
1215
- 'secondary_diag_terms[{:s}]'.format(actual_m.group(1)),
1216
- rawdata=self._x_secondary_diag_terms_def[actual_m.group(1)])
1217
- elif actual_fplist_m and actual_fplist_m.group(1) in self._x_secondary_diag_terms_def.keys():
1218
- return functools.partial(self._secondary_diag_terms_item_fplist,
1219
- actual_fplist_m.group(1))
1521
+ return functools.partial(
1522
+ self._cutoff_hh_rangex_lookup,
1523
+ "extra_fp_terms[{:s}]".format(actual_m.group(1)),
1524
+ rawdata=self._x_extra_fp_terms[actual_m.group(1)],
1525
+ )
1526
+ elif (
1527
+ actual_fplist_m
1528
+ and actual_fplist_m.group(1) in self._x_extra_fp_terms.keys()
1529
+ ):
1530
+ return functools.partial(
1531
+ self._extra_fp_terms_item_fplist, actual_fplist_m.group(1)
1532
+ )
1533
+ elif (
1534
+ actual_m
1535
+ and actual_m.group(1) in self._x_secondary_diag_terms_def.keys()
1536
+ ):
1537
+ return functools.partial(
1538
+ self._cutoff_hh_rangex_lookup,
1539
+ "secondary_diag_terms[{:s}]".format(actual_m.group(1)),
1540
+ rawdata=self._x_secondary_diag_terms_def[actual_m.group(1)],
1541
+ )
1542
+ elif (
1543
+ actual_fplist_m
1544
+ and actual_fplist_m.group(1)
1545
+ in self._x_secondary_diag_terms_def.keys()
1546
+ ):
1547
+ return functools.partial(
1548
+ self._secondary_diag_terms_item_fplist,
1549
+ actual_fplist_m.group(1),
1550
+ )
1220
1551
  else:
1221
1552
  raise AttributeError('Attribute "{:s}" was not found'.format(item))
1222
1553
 
1223
1554
  def _fpoff_terms_set(self, cutoff, hh):
1224
1555
  fpoff_terms = set()
1225
1556
  for k, v in self._x_extra_fp_terms.items():
1226
- fpoff_terms.update(self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(k),
1227
- cutoff, hh, rawdata=v))
1557
+ fpoff_terms.update(
1558
+ self._cutoff_hh_rangex_lookup(
1559
+ "extra_fp_terms[{:s}]".format(k), cutoff, hh, rawdata=v
1560
+ )
1561
+ )
1228
1562
  if not self.use_inline_fp:
1229
- fpoff_terms.update(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh))
1563
+ fpoff_terms.update(
1564
+ self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh)
1565
+ )
1230
1566
  fpoff_terms.update(self._secondary_diag_terms_set(cutoff, hh))
1231
1567
  return fpoff_terms
1232
1568
 
1233
1569
  def _secondary_diag_terms_set(self, cutoff, hh):
1234
1570
  sec_terms = set()
1235
1571
  for k, v in self._x_secondary_diag_terms_def.items():
1236
- sec_terms.update(self._cutoff_hh_rangex_lookup('secondary_diag_terms[{:s}]'.format(k),
1237
- cutoff, hh, rawdata=v))
1572
+ sec_terms.update(
1573
+ self._cutoff_hh_rangex_lookup(
1574
+ "secondary_diag_terms[{:s}]".format(k),
1575
+ cutoff,
1576
+ hh,
1577
+ rawdata=v,
1578
+ )
1579
+ )
1238
1580
  return sec_terms
1239
1581
 
1240
1582
  def extra_hist_terms(self, cutoff, hh):
@@ -1256,14 +1598,17 @@ class ArpIfsForecastTermConfTool(ConfTool):
1256
1598
 
1257
1599
  def fpoff_items(self, cutoff, hh, discard=None, only=None):
1258
1600
  """List of active offline post-processing domains."""
1259
- items = {k
1260
- for k, v in self._x_extra_fp_terms.items()
1261
- if self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(k),
1262
- cutoff,
1263
- hh,
1264
- rawdata=v)}
1265
- if not self.use_inline_fp and self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh):
1266
- items.add('diag')
1601
+ items = {
1602
+ k
1603
+ for k, v in self._x_extra_fp_terms.items()
1604
+ if self._cutoff_hh_rangex_lookup(
1605
+ "extra_fp_terms[{:s}]".format(k), cutoff, hh, rawdata=v
1606
+ )
1607
+ }
1608
+ if not self.use_inline_fp and self._cutoff_hh_rangex_lookup(
1609
+ "diag_fp_terms", cutoff, hh
1610
+ ):
1611
+ items.add("diag")
1267
1612
  if discard:
1268
1613
  items -= set(discard)
1269
1614
  if only:
@@ -1272,13 +1617,17 @@ class ArpIfsForecastTermConfTool(ConfTool):
1272
1617
 
1273
1618
  def fpoff_terms_map(self, cutoff, hh):
1274
1619
  """The mapping dictionary between offline post-processing terms and domains."""
1275
- return {k: getattr(self, '{:s}_terms'.format(k))(cutoff, hh)
1276
- for k in self.fpoff_items(cutoff, hh)}
1620
+ return {
1621
+ k: getattr(self, "{:s}_terms".format(k))(cutoff, hh)
1622
+ for k in self.fpoff_items(cutoff, hh)
1623
+ }
1277
1624
 
1278
1625
  def fpoff_terms_fpmap(self, cutoff, hh):
1279
1626
  """The mapping dictionary between offline post-processing terms and domains (as a FPlist)."""
1280
- return {k: getattr(self, '{:s}_terms_fplist'.format(k))(cutoff, hh)
1281
- for k in self.fpoff_items(cutoff, hh)}
1627
+ return {
1628
+ k: getattr(self, "{:s}_terms_fplist".format(k))(cutoff, hh)
1629
+ for k in self.fpoff_items(cutoff, hh)
1630
+ }
1282
1631
 
1283
1632
 
1284
1633
  class TimeSlotsConfTool(AbstractObjectProxyConfTool):
@@ -1294,18 +1643,19 @@ class TimeSlotsConfTool(AbstractObjectProxyConfTool):
1294
1643
  """
1295
1644
 
1296
1645
  _footprint = dict(
1297
- info = 'Gives easy access to a Timeslots object.',
1298
- attr = dict(
1299
- timeslots_def = dict(
1300
- info = "The timeslots specification",
1646
+ info="Gives easy access to a Timeslots object.",
1647
+ attr=dict(
1648
+ timeslots_def=dict(
1649
+ info="The timeslots specification",
1301
1650
  ),
1302
- )
1651
+ ),
1303
1652
  )
1304
1653
 
1305
1654
  def _create_proxied_obj(self):
1306
1655
  return TimeSlots(self.timeslots_def)
1307
1656
 
1308
1657
 
1309
- if __name__ == '__main__':
1658
+ if __name__ == "__main__":
1310
1659
  import doctest
1660
+
1311
1661
  doctest.testmod()