vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1311 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Conftools are small objects that can be instantiated from an application's
|
|
3
|
+
configuration file.
|
|
4
|
+
|
|
5
|
+
They might be used when some complex calculations are needed to establish the
|
|
6
|
+
tasks configuration.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import collections
|
|
10
|
+
import collections.abc
|
|
11
|
+
import functools
|
|
12
|
+
import math
|
|
13
|
+
import re
|
|
14
|
+
|
|
15
|
+
from bronx.fancies import loggers
|
|
16
|
+
from bronx.stdtypes.date import Date, Time, Period, Month, timeintrangex
|
|
17
|
+
from bronx.syntax.decorators import secure_getattr
|
|
18
|
+
from footprints.stdtypes import FPDict, FPList
|
|
19
|
+
from footprints.util import rangex
|
|
20
|
+
import footprints
|
|
21
|
+
|
|
22
|
+
from ..tools.odb import TimeSlots
|
|
23
|
+
|
|
24
|
+
#: No automatic export
|
|
25
|
+
__all__ = []
|
|
26
|
+
|
|
27
|
+
logger = loggers.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ConfTool(footprints.FootprintBase):
    """Abstract base class for all conftool objects.

    Concrete conftools are collected under the ``conftool`` footprint
    collector and are selected through their ``kind`` attribute.
    """

    _abstract = True
    _collector = ('conftool',)
    _footprint = {
        'info': 'Abstract Conf/Weird Tool',
        'attr': {
            'kind': {},
        },
    }
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class AbstractObjectProxyConfTool(ConfTool):
    """Allow transparent access to any Vortex object.

    Concrete subclasses must implement :meth:`_create_proxied_obj`.
    Attribute lookups that fail on this object are then delegated to the
    proxied object (see :meth:`__getattr__`).
    """

    _abstract = True
    _footprint = dict(
        info = 'Conf tool that find the appropriate begin/end date for an input resource.',
        attr = dict(
            kind = dict(
                values = ['objproxy', ],
            ),
        )
    )

    #: Sentinel used to detect missing attributes on the proxied object
    #: (using ``None`` as the default would hide attributes whose value is
    #: legitimately ``None``).
    _MISSING = object()

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)
        # The proxied object is created once, at instantiation time.
        self._proxied_obj = self._create_proxied_obj()

    def _create_proxied_obj(self):
        """Initialise the object that will be proxied (to be overridden)."""
        raise NotImplementedError()

    @secure_getattr
    def __getattr__(self, item):
        """Pass all requests to the proxied object.

        :raises AttributeError: if the proxied object does not define **item**.
        """
        # Bugfix vs. the naive ``getattr(..., None)``: a sentinel default is
        # used so that proxied attributes set to None remain reachable.
        target = getattr(self._proxied_obj, item, self._MISSING)
        if target is self._MISSING:
            raise AttributeError('Attribute "{:s}" was not found'.format(item))
        return target
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
#: Holds coupling's data for a particular cutoff/hour
|
|
75
|
+
#: Holds coupling's data for a particular cutoff/hour
CouplingInfos = collections.namedtuple(
    'CouplingInfos',
    'base dayoff cutoff vapp vconf xpid model steps',
)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class CouplingOffsetConfError(Exception):
    """Abstract exception raised by :class:`CouplingOffsetConfTool` objects."""
    pass
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class CouplingOffsetConfPrepareError(CouplingOffsetConfError):
    """Exception raised when an error occurs during coupling data calculations.

    :param fmtk: a pre-formatted key describing the coupling target.
    """

    def __init__(self, fmtk):
        super().__init__('It is useless to compute coupling for: {}.'.format(fmtk))
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class CouplingOffsetConfRefillError(CouplingOffsetConfError):
    """Exception raised when an error occurs during refill."""

    def __init__(self, fmtk, hh=None):
        # *fmtk* is a pre-formatted key describing the refill target;
        # *hh*, when provided, narrows the message down to a specific hour.
        msg = 'It is useless to compute a refill for: {}'.format(fmtk)
        if hh is None:
            msg += '.'
        else:
            msg += ' at HH={!s}.'.format(hh)
        super().__init__(msg)
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class CouplingOffsetConfTool(ConfTool):
    """Conf tool that do all sorts of computations for coupling.

    Given the per-cutoff/per-hour description provided through the ``cpl*``
    footprint attributes, this object pre-computes:

    * the list of terms that a "prepare" (coupling-file producing) task must
      generate for each coupling source (see :meth:`prepare_terms`);
    * the dates/terms that "refill" tasks must process
      (see :meth:`refill_terms`, :meth:`refill_dates`, :meth:`refill_months`).

    Query methods such as :meth:`coupling_date`, :meth:`coupling_terms`,
    :meth:`coupling_cutoff`, ... then give, for a task running at a given
    date/cutoff, the characteristics of the model/file it is coupled to.
    """

    _footprint = dict(
        info = 'Conf tool that do all sorts of computations for coupling',
        attr = dict(
            kind = dict(
                values= ['couplingoffset', ],
            ),
            cplhhlist = dict(
                info = ('The list of cutoff and hours for this application. '
                        'If omitted, all entries of the **cplhhbase** attribute are used. ' +
                        "(e.g ``{'assim':[0, 6, 12, 18], 'production':[0, ]}``)"),
                type = FPDict,
                optional = True,
            ),
            cplhhbase = dict(
                info = ('For a given cutoff and hour, gives the base hour to couple to. ' +
                        "(e.g ``{'assim':{0:0, 6:6, 12:12, 18:18}, 'production':{0:18}}``)."),
                type = FPDict,
            ),
            cpldayoff = dict(
                info = ('For a given cutoff and hour, gives an offset in days. 0 by default. ' +
                        "(e.g ``{'assim':{'default':0}, 'production':{'default':1}}``)."),
                type = FPDict,
                optional = True,
            ),
            cplcutoff = dict(
                info = 'For a given cutoff and hour, gives the base cutoff to couple to.',
                type = FPDict,
            ),
            cplvapp = dict(
                info = 'For a given cutoff and hour, gives the base vapp to couple to.',
                type = FPDict,
            ),
            cplvconf = dict(
                info = 'For a given cutoff and hour, gives the base vconf to couple to.',
                type = FPDict,
            ),
            cplxpid = dict(
                info = 'For a given cutoff and hour, gives the experiment ID to couple to.',
                type = FPDict,
                optional = True,
            ),
            cplmodel = dict(
                info = 'For a given cutoff and hour, gives the base model to couple to.',
                type = FPDict,
                optional = True,
            ),
            cplsteps = dict(
                info = 'For a given cutoff and hour, gives then list of requested terms.',
                type = FPDict,
            ),
            finalterm = dict(
                info = 'For a given cutoff and hour, the final term (for "finalterm" token substitution)',
                type = FPDict,
                optional = True
            ),
            refill_cutoff = dict(
                values = ['assim', 'production', 'all'],
                info = 'By default, what is the cutoff name of the refill task.',
                optional = True,
                default = 'assim',
            ),
            compute_on_refill = dict(
                info = 'Is it necessary to compute coupling files for the refilling cutoff ?',
                optional = True,
                default = True,
                type = bool,
            ),
            isolated_refill = dict(
                info = 'Are the refill tasks exclusive with prepare tasks ?',
                optional = True,
                default = True,
                type = bool,
            ),
            verbose = dict(
                info = 'When the object is created, print a summary.',
                type = bool,
                optional = True,
                default = True,
            ),
        )
    )

    # Special key usable in the cpl* dictionaries to provide a fallback value.
    _DFLT_KEY = 'default'

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)

        # A dictionary summarising the base HH supported by this configuration tool
        # ex: dict(assim=set([0, 1 , 2, ...]), production=set([0, 6,...])
        self._target_hhs = collections.defaultdict(set)
        if self.cplhhlist is None:
            # No explicit hour list: the cutoffs/hours are taken from cplhhbase.
            t_hhbase = collections.defaultdict(dict)
            for c, cv in self.cplhhbase.items():
                for h, v in [(Time(lh), Time(lv)) for lh, lv in cv.items()]:
                    t_hhbase[c][h] = v
                    self._target_hhs[c].add(h)
        else:
            # Explicit hour list: cplhhbase is reshaped/checked against it.
            for c, clist in self.cplhhlist.items():
                if not isinstance(clist, (tuple, list)):
                    clist = [clist, ]
                self._target_hhs[c].update([Time(h) for h in clist])
            t_hhbase = self._reshape_inputs(self.cplhhbase, value_reclass=Time)

        # Consistency checks and array reshaping
        t_dayoff = self._reshape_inputs(self.cpldayoff, class_default=0)
        t_cutoff = self._reshape_inputs(self.cplcutoff)
        t_vapp = self._reshape_inputs(self.cplvapp)
        t_vconf = self._reshape_inputs(self.cplvconf)
        t_steps = self._reshape_inputs(self.cplsteps)
        if self.cplmodel is None:
            # By default, the coupling model is the coupling vapp.
            t_model = t_vapp
        else:
            t_model = self._reshape_inputs(self.cplmodel)
        t_xpid = self._reshape_inputs(self.cplxpid, class_default='')

        # If relevant, do "finalterm" token substitution: any literal
        # 'finalterm' token inside a string-valued cplsteps entry is replaced
        # by the matching finalterm value.
        if self.finalterm is not None:
            t_finalterm = self._reshape_inputs(self.finalterm, value_reclass=str)
            for c, cv in t_hhbase.items():
                for hh in cv.keys():
                    if isinstance(t_steps[c][hh], str):
                        t_steps[c][hh] = t_steps[c][hh].replace('finalterm',
                                                                t_finalterm[c][hh])

        # Build the dictionary of CouplingInfos objects
        # (cutoff -> hour -> CouplingInfos). Steps are expanded with rangex.
        self._cpl_data = collections.defaultdict(dict)
        for c, cv in t_hhbase.items():
            self._cpl_data[c] = {hh: CouplingInfos(cv[hh], int(t_dayoff[c][hh]),
                                                   t_cutoff[c][hh], t_vapp[c][hh],
                                                   t_vconf[c][hh], t_xpid[c][hh],
                                                   t_model[c][hh],
                                                   rangex(t_steps[c][hh]))
                                 for hh in cv.keys()}

        # Pre-compute the prepare terms
        self._prepare_terms_map = self._compute_prepare_terms()
        if self.verbose:
            print()
            print('#### Coupling configuration tool initialised ####')
            print('**** Coupling tasks terms map:')
            print('{:s} : {:s}'.format(self._cpl_fmtkey(('HH', 'VAPP', 'VCONF', 'XPID', 'MODEL', 'CUTOFF')),
                                       'Computed Terms'))
            for k in sorted(self._prepare_terms_map.keys()):
                print('{:s} : {:s}'.format(self._cpl_fmtkey(k),
                                           ' '.join([str(t.hour)
                                                     for t in self._prepare_terms_map[k]
                                                     ])
                                           )
                      )

        # Pre-compute the default refill_map (other refill_cutoff values are
        # computed lazily in refill_terms).
        self._refill_terms_map = dict()
        self._refill_terms_map[self.refill_cutoff] = self._compute_refill_terms(self.refill_cutoff,
                                                                                self.compute_on_refill,
                                                                                self.isolated_refill)
        if self.verbose:
            print('**** Refill tasks activation map (default refill_cutoff is: {:s}):'.format(self.refill_cutoff))
            print('{:s} : {:s}'.format(self._rtask_fmtkey(('VAPP', 'VCONF', 'XPID', 'MODEL', 'CUTOFF')),
                                       'Active hours'))
            for k in sorted(self._refill_terms_map[self.refill_cutoff].keys()):
                vdict = self._refill_terms_map[self.refill_cutoff][k]
                print('{:s} : {:s}'.format(self._rtask_fmtkey(k),
                                           ' '.join([str(t.hour) for t in sorted(vdict.keys())])))
            print()

    @property
    def target_hhs(self):
        """The mapping of cutoff -> set of hours handled by this tool."""
        return self._target_hhs

    def _reshape_inputs(self, input_dict, class_default=None, value_reclass=lambda x: x):
        """Deal with default values, check dictionaries and convert keys to Time objects.

        :param input_dict: the raw per-cutoff/per-hour dictionary (may be None).
        :param class_default: fallback value used when no 'default' entry exists.
        :param value_reclass: callable applied to every value (e.g. ``Time``).
        :raises ValueError: if some target hours remain uncovered and no
            default is available.
        """
        # Convert keys to time objects
        r_dict = dict()
        if input_dict is not None:
            for c, cv in input_dict.items():
                if isinstance(cv, dict):
                    r_dict[c] = dict()
                    for h, v in cv.items():
                        if h != self._DFLT_KEY:
                            r_dict[c][Time(h)] = value_reclass(v)
                        else:
                            # The 'default' key is kept as-is (not a Time).
                            r_dict[c][h] = value_reclass(v)
                else:
                    r_dict[c] = cv

        # Is there a generic default ?
        defined_topdefault = self._DFLT_KEY in r_dict
        top_default = r_dict.pop(self._DFLT_KEY, class_default)

        # Check consistency and replace missing values with defaults
        for c in self.target_hhs:
            myv = r_dict.setdefault(c, dict())
            # Is there a cutoff specific default ?
            defined_cutdefault = defined_topdefault or self._DFLT_KEY in myv
            last_default = myv.pop(self._DFLT_KEY, top_default)
            my_c_hhs = set(myv.keys())
            if defined_cutdefault or (class_default is not None):
                # Fill any missing hour with the most specific default found.
                missinghh = self.target_hhs[c] - my_c_hhs
                for h in missinghh:
                    myv[h] = last_default
            else:
                # No default available: every target hour must be given.
                if not my_c_hhs >= self.target_hhs[c]:
                    logger.error("Inconsistent input arrays while processing: \n%s",
                                 str(input_dict))
                    logger.error("Cutoff %s, expecting the following HH: \n%s",
                                 c, str(self.target_hhs[c]))
                    raise ValueError("Inconsistent input array.")

        # Filter values according to _target_hhs (drop unknown cutoffs/hours)
        for c in list(r_dict.keys()):
            if c not in self.target_hhs:
                del r_dict[c]
        for c in self.target_hhs:
            my_c_hhs = set(r_dict[c].keys())
            extra = my_c_hhs - self.target_hhs[c]
            for hh in extra:
                del r_dict[c][hh]

        return r_dict

    @staticmethod
    def _cpl_key(hh, cutoff, vapp, vconf, xpid, model):
        """Dictionary key identifying a coupling source (prepare tasks)."""
        return (str(hh), vapp, vconf, xpid, model, cutoff)

    @staticmethod
    def _cpl_fmtkey(k):
        """Human-readable rendering of a :meth:`_cpl_key` tuple."""
        cutoff_map = dict(production='prod')
        return '{:5s} {:6s} {:24s} {:s} ({:s})'.format(
            k[0],
            cutoff_map.get(k[5], k[5]),
            k[1] + '/' + k[2],
            k[3],
            k[4]
        )

    @staticmethod
    def _rtask_key(cutoff, vapp, vconf, xpid, model):
        """Dictionary key identifying a coupling source (refill tasks, no hour)."""
        return (vapp, vconf, xpid, model, cutoff)

    @staticmethod
    def _rtask_fmtkey(k):
        """Human-readable rendering of a :meth:`_rtask_key` tuple."""
        cutoff_map = dict(production='prod')
        return '{:6s} {:24s} {:s} ({:s})'.format(cutoff_map.get(k[4], k[4]), k[0] + '/' + k[1], k[2], k[3])

    @staticmethod
    def _process_date(date):
        """Return (Date, Time-of-day) for any **date**-like input."""
        mydate = Date(date)
        myhh = Time('{0.hour:d}:{0.minute:02d}'.format(mydate))
        return mydate, myhh

    @staticmethod
    def _hh_offset(hh, hhbase, dayoff):
        """Positive time offset between **hh** and **hhbase**, plus **dayoff** days."""
        offset = hh - hhbase
        if offset < 0:
            # Wrap around midnight: the base hour belongs to the previous day.
            offset += Time(24)
        return offset + Period(days=dayoff)

    def _compute_prepare_terms(self):
        """Build the map: coupling source key -> sorted list of terms to compute."""
        terms_map = collections.defaultdict(set)
        for _, cv in self._cpl_data.items():
            for h, infos in cv.items():
                key = self._cpl_key(infos.base, infos.cutoff, infos.vapp, infos.vconf, infos.xpid, infos.model)
                # Each requested step is shifted by the offset between the
                # task hour and the coupling base hour.
                targetoffset = self._hh_offset(h, infos.base, infos.dayoff)
                terms_map[key].update([s + targetoffset for s in infos.steps])
        terms_map = {k: sorted(terms) for k, terms in terms_map.items()}
        return terms_map

    def _compute_refill_terms(self, refill_cutoff, compute_on_refill, isolated_refill):
        """Build the refill map: source key -> hour -> date offset -> sorted terms.

        The refill tasks considered run at **refill_cutoff** hours (or at any
        known hour when ``'all'``).
        """
        finaldates = collections.defaultdict(functools.partial(collections.defaultdict,
                                                               functools.partial(collections.defaultdict, set)))
        if refill_cutoff == 'all':
            # Union of the hours of every cutoff.
            possiblehours = sorted(functools.reduce(lambda x, y: x | y,
                                                    [set(l) for l in self.target_hhs.values()]))
        else:
            possiblehours = self.target_hhs[refill_cutoff]

        # Look 24hr ahead
        for c, cv in self._cpl_data.items():
            for h, infos in cv.items():
                key = self._rtask_key(infos.cutoff, infos.vapp, infos.vconf, infos.xpid, infos.model)
                offset = self._hh_offset(h, infos.base, infos.dayoff)
                for possibleh in possiblehours:
                    roffset = self._hh_offset(h, possibleh, 0)
                    # A refill at *possibleh* is useful when it strictly
                    # precedes the coupled task (roffset > 0) -- or coincides
                    # with it when compute_on_refill allows -- and when it does
                    # not come before the coupling base itself (roffset <=
                    # offset; equality only for isolated refills).
                    if ((roffset > 0 or
                            (compute_on_refill and roffset == 0 and (refill_cutoff == 'all' or refill_cutoff == c))) and
                            (roffset < offset or (isolated_refill and roffset == offset))):
                        finaldates[key][possibleh][offset - roffset].update([s + offset for s in infos.steps])

        # Freeze the innermost sets into sorted lists.
        for key, vdict in finaldates.items():
            for possibleh in vdict.keys():
                vdict[possibleh] = {off: sorted(terms) for off, terms in vdict[possibleh].items()}

        return finaldates

    def compatible_with(self, other):
        """Can **other** be aggregated with this tool (same hours and refill cutoff)?"""
        if isinstance(other, self.__class__):
            return (self.target_hhs == other.target_hhs and
                    self.refill_cutoff == other.refill_cutoff)
        else:
            return False

    def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=''):
        """
        For a task computing coupling files (at **date** and **cutoff**,
        for a specific **vapp** and **vconf**), lists the terms that should be
        computed.

        :raises CouplingOffsetConfPrepareError: if no coupled task needs this
            source (i.e. the key is absent from the pre-computed map).
        """
        _, myhh = self._process_date(date)
        if model is None:
            model = vapp
        key = self._cpl_key(myhh, cutoff, vapp, vconf, xpid, model)
        try:
            return self._prepare_terms_map[key]
        except KeyError:
            raise CouplingOffsetConfPrepareError(self._cpl_fmtkey(key))

    def coupling_offset(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        time delta with the coupling model/file base date.
        """
        _, myhh = self._process_date(date)
        return self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
                               self._cpl_data[cutoff][myhh].dayoff)

    def coupling_date(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        base date of the coupling model/file.
        """
        mydate, myhh = self._process_date(date)
        return mydate - self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
                                        self._cpl_data[cutoff][myhh].dayoff)

    def coupling_terms(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        list of terms that should be fetched from the coupling model/file.
        """
        _, myhh = self._process_date(date)
        offset = self._hh_offset(myhh, self._cpl_data[cutoff][myhh].base,
                                 self._cpl_data[cutoff][myhh].dayoff)
        return [s + offset for s in self._cpl_data[cutoff][myhh].steps]

    def _coupling_stuff(self, date, cutoff, stuff):
        """Generic accessor to a :class:`CouplingInfos` field for **date**/**cutoff**."""
        _, myhh = self._process_date(date)
        return getattr(self._cpl_data[cutoff][myhh], stuff)

    def coupling_steps(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        prescribed steps.
        """
        return self._coupling_stuff(date, cutoff, 'steps')

    def coupling_cutoff(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        cutoff of the coupling model/file.
        """
        return self._coupling_stuff(date, cutoff, 'cutoff')

    def coupling_vapp(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        vapp of the coupling model/file.
        """
        return self._coupling_stuff(date, cutoff, 'vapp')

    def coupling_vconf(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        vconf of the coupling model/file.
        """
        return self._coupling_stuff(date, cutoff, 'vconf')

    def coupling_xpid(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        experiment ID of the coupling model/file.
        """
        return self._coupling_stuff(date, cutoff, 'xpid')

    def coupling_model(self, date, cutoff):
        """
        For a task needing coupling (at **date** and **cutoff**), return the
        model of the coupling model/file.
        """
        return self._coupling_stuff(date, cutoff, 'model')

    def refill_terms(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
        """The terms that should be computed for a given refill task.

        Returns a ``{'date': {date_string: [terms, ...], ...}}`` mapping.

        :raises CouplingOffsetConfRefillError: if no refill is needed for this
            source/hour combination.
        """
        refill_cutoff = self.refill_cutoff if refill_cutoff is None else refill_cutoff
        # Non-default refill cutoffs are computed lazily and cached.
        if refill_cutoff not in self._refill_terms_map:
            self._refill_terms_map[refill_cutoff] = self._compute_refill_terms(refill_cutoff,
                                                                               self.compute_on_refill,
                                                                               self.isolated_refill)
        if model is None:
            model = vapp
        mydate, myhh = self._process_date(date)
        key = self._rtask_key(cutoff, vapp, vconf, xpid, model)
        finaldates = dict()
        if (key not in self._refill_terms_map[refill_cutoff] or
                myhh not in self._refill_terms_map[refill_cutoff][key]):
            raise CouplingOffsetConfRefillError(self._rtask_fmtkey(key))
        for off, terms in self._refill_terms_map[refill_cutoff][key][myhh].items():
            finaldates[str(mydate - off)] = terms
        return {'date': finaldates}

    def refill_dates(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
        """The dates that should be processed in a given refill task."""
        return list(self.refill_terms(date, cutoff, vapp, vconf, model=model,
                                      refill_cutoff=refill_cutoff, xpid=xpid)['date'].keys())

    def refill_months(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
        """The months that should be processed in a given refill task."""
        mindate = min(self.refill_dates(date, cutoff, vapp, vconf, model=model,
                                        refill_cutoff=refill_cutoff, xpid=xpid))
        minmonth = Month(mindate)
        # The earliest month plus the following one.
        return [minmonth, minmonth + 1]
|
|
528
|
+
|
|
529
|
+
|
|
530
|
+
class AggregatedCouplingOffsetConfTool(ConfTool):
|
|
531
|
+
|
|
532
|
+
_footprint = dict(
|
|
533
|
+
info = 'Aggregate several CouplingOffsetConfTool objects into one',
|
|
534
|
+
attr = dict(
|
|
535
|
+
kind = dict(
|
|
536
|
+
values= ['aggcouplingoffset', ],
|
|
537
|
+
),
|
|
538
|
+
nominal = dict(
|
|
539
|
+
info = "A list of couplingoffset objects used in nominal cases",
|
|
540
|
+
type = FPList,
|
|
541
|
+
),
|
|
542
|
+
alternate = dict(
|
|
543
|
+
info = "A list of couplingoffset objects used in rescue modes",
|
|
544
|
+
type = FPList,
|
|
545
|
+
optional = True,
|
|
546
|
+
),
|
|
547
|
+
use_alternates = dict(
|
|
548
|
+
info = 'Actually use rescue mode ?',
|
|
549
|
+
optional = True,
|
|
550
|
+
default = True,
|
|
551
|
+
type = bool,
|
|
552
|
+
),
|
|
553
|
+
verbose = dict(
|
|
554
|
+
info = 'When the object is created, print a summary.',
|
|
555
|
+
type = bool,
|
|
556
|
+
optional = True,
|
|
557
|
+
default = True,
|
|
558
|
+
),
|
|
559
|
+
)
|
|
560
|
+
)
|
|
561
|
+
|
|
562
|
+
def __init__(self, *kargs, **kwargs):
|
|
563
|
+
super().__init__(*kargs, **kwargs)
|
|
564
|
+
self._toolslist = list(self.nominal)
|
|
565
|
+
if self.alternate and self.use_alternates:
|
|
566
|
+
self._toolslist.extend(self.alternate)
|
|
567
|
+
# At least one object is needed:
|
|
568
|
+
if not len(self._toolslist):
|
|
569
|
+
raise CouplingOffsetConfError("At least one sub-object is needed")
|
|
570
|
+
# Check consistency
|
|
571
|
+
for num, toolobj in enumerate(self._toolslist[1:]):
|
|
572
|
+
if not self._toolslist[0].compatible_with(toolobj):
|
|
573
|
+
print('\n', '*' * 50)
|
|
574
|
+
print('self._toolslist[0] =', self._toolslist[0], '\n',
|
|
575
|
+
' target_hhs =', self._toolslist[0].target_hhs,
|
|
576
|
+
' refill_cutoff =', self._toolslist[0].refill_cutoff)
|
|
577
|
+
print('is not consistent with object num', num, ':', toolobj, '\n',
|
|
578
|
+
' target_hhs =', toolobj.target_hhs,
|
|
579
|
+
' refill_cutoff =', toolobj.refill_cutoff)
|
|
580
|
+
raise CouplingOffsetConfError("Inconsistent sub-objects")
|
|
581
|
+
|
|
582
|
+
if self.verbose:
|
|
583
|
+
print()
|
|
584
|
+
print('#### Aggregated Coupling configuration tool initialised ####')
|
|
585
|
+
print('It is made of {:d} nominal configuration tool(s)'.format(len(self.nominal)))
|
|
586
|
+
if self.alternate and self.use_alternates:
|
|
587
|
+
print('+ {:d} rescue-mode configuration tool(s)'.format(len(self.alternate)))
|
|
588
|
+
else:
|
|
589
|
+
print('No rescue-mode configuration tool is considered (deactivated)')
|
|
590
|
+
print()
|
|
591
|
+
|
|
592
|
+
def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=''):
|
|
593
|
+
"""
|
|
594
|
+
For a task computing coupling files (at **date** and **cutoff**,
|
|
595
|
+
for a specific **vapp** and **vconf**), lists the terms that should be
|
|
596
|
+
computed.
|
|
597
|
+
"""
|
|
598
|
+
terms = set()
|
|
599
|
+
for toolobj in self._toolslist:
|
|
600
|
+
try:
|
|
601
|
+
terms.update(toolobj.prepare_terms(date, cutoff, vapp, vconf, model=model, xpid=xpid))
|
|
602
|
+
except CouplingOffsetConfPrepareError as e:
|
|
603
|
+
lateste = e
|
|
604
|
+
if not terms:
|
|
605
|
+
raise lateste
|
|
606
|
+
else:
|
|
607
|
+
return sorted(terms)
|
|
608
|
+
|
|
609
|
+
def refill_terms(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
|
|
610
|
+
"""The terms that should be computed for a given refill task."""
|
|
611
|
+
finaldates = collections.defaultdict(set)
|
|
612
|
+
for toolobj in self._toolslist:
|
|
613
|
+
try:
|
|
614
|
+
rt = toolobj.refill_terms(date, cutoff, vapp, vconf, model=model,
|
|
615
|
+
refill_cutoff=refill_cutoff, xpid=xpid)
|
|
616
|
+
for k, v in rt['date'].items():
|
|
617
|
+
finaldates[k].update(v)
|
|
618
|
+
except CouplingOffsetConfRefillError as e:
|
|
619
|
+
lateste = e
|
|
620
|
+
if not finaldates:
|
|
621
|
+
raise lateste
|
|
622
|
+
else:
|
|
623
|
+
for k, v in finaldates.items():
|
|
624
|
+
finaldates[k] = sorted(v)
|
|
625
|
+
return {'date': finaldates}
|
|
626
|
+
|
|
627
|
+
def refill_dates(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
    """The dates that should be processed in a given refill task."""
    refill_map = self.refill_terms(date, cutoff, vapp, vconf,
                                   model=model, refill_cutoff=refill_cutoff, xpid=xpid)
    # The keys of the 'date' entry are the dates to be processed.
    return [a_date for a_date in refill_map['date']]
|
|
631
|
+
|
|
632
|
+
def refill_months(self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=''):
    """The months that should be processed in a given refill task."""
    all_dates = self.refill_dates(date, cutoff, vapp, vconf,
                                  model=model, refill_cutoff=refill_cutoff, xpid=xpid)
    # The earliest refill date fixes the first month; the next month is
    # also returned (presumably to deal with month boundaries).
    first_month = Month(min(all_dates))
    return [first_month, first_month + 1]
|
|
638
|
+
|
|
639
|
+
|
|
640
|
+
class TimeSerieInputFinderError(Exception):
    """Any exception raised by :class:`TimeSerieInputFinderConfTool` objects."""
|
|
643
|
+
|
|
644
|
+
|
|
645
|
+
class TimeSerieInputFinderConfTool(ConfTool):
    """
    A conf tool that find the appropriate begin/end date for an input resource
    to be taken in a timeserie.

    Let's consider a serie of 3 consecutive Surfex forcing files:

    * The first file start on 2018/01/01 00UTC
    * Each file covers a two days period

    The conf tool will look like::

        >>> ct = TimeSerieInputFinderConfTool(kind="timeserie",
        ...                                   timeserie_begin="2018010100",
        ...                                   timeserie_step="P2D")

    To find the date/term of the forcing file encompassing a 6 hours forecast
    starting on 2018/01/04 12UTC, use::

        >>> ct.begindate('2018010412', 'PT6H')
        Date(2018, 1, 3, 0, 0)
        >>> ct.term('2018010312', '06:00')
        Time(48, 0)

    """

    _footprint = dict(
        info = 'Conf tool that find the appropriate begin/end date for an input resource.',
        attr = dict(
            kind = dict(
                values = ['timeserie', ],
            ),
            timeserie_begin = dict(
                info = "The date when the time serie starts",
                type = Date
            ),
            timeserie_step = dict(
                info = "The step between files of the time serie.",
                type = Period
            ),
            upperbound_included = dict(
                type = bool,
                optional = True,
                default = True
            ),
            singlefile = dict(
                info = "The period requested by a user should be contained in a single file.",
                type = bool,
                optional = True,
                default = False
            )
        )
    )

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)
        # Cache for _begin_lookup results (keyed by the requested begin date).
        self._begincache = dict()
        # Step length in seconds (Period.length).
        self._steplength = self.timeserie_step.length

    def _begin_lookup(self, begindate):
        """Find the appropriate time serie's file date just before **begindate**."""
        if begindate not in self._begincache:
            if begindate < self.timeserie_begin:
                raise TimeSerieInputFinderError("Request begin date is too soon !")
            dt = begindate - self.timeserie_begin
            # Number of whole steps elapsed since the serie's start.
            nsteps = int(math.floor(dt.length / self._steplength))
            self._begincache[begindate] = self.timeserie_begin + nsteps * self.timeserie_step
        return self._begincache[begindate]

    def _begindates_expansion(self, tdate, tlength):
        """Generate a begin date or a list of begin dates."""
        # Number of serie steps (possibly fractional) covered by tlength (seconds).
        xperiods = tlength / self._steplength
        nfiles = int(math.ceil(xperiods))
        # When the period ends exactly on a file boundary and a file does not
        # include its own upper bound, one extra file is needed.
        if xperiods == int(xperiods) and not self.upperbound_included:
            nfiles += 1
        if nfiles > 1:
            if self.singlefile:
                raise TimeSerieInputFinderError("Multiple files requested but singlefile=.T.")
            return [tdate + i * self.timeserie_step for i in range(0, nfiles)]
        else:
            # Single file: return a bare date, not a list.
            return tdate

    def _enddates_expansion(self, tdates):
        """Generate an end date or a dict of enddates."""
        if isinstance(tdates, list):
            # Several files: map each begin date to its own end date.
            return dict(begindate={d: d + self.timeserie_step for d in tdates})
        else:
            return tdates + self.timeserie_step

    @staticmethod
    def _dates_normalise(begindate, enddate):
        """Convert **begin/enddate** to a proper Date object."""
        if not isinstance(begindate, Date):
            begindate = Date(begindate)
        if not isinstance(enddate, Date):
            enddate = Date(enddate)
        return begindate, enddate

    @staticmethod
    def _date_term_normalise(begindate, term):
        """Convert **begindate** and **term** to a proper Date/Time object."""
        if not isinstance(begindate, Date):
            begindate = Date(begindate)
        if not isinstance(term, Time):
            term = Time(term)
        return begindate, term

    def begindate_i(self, begindate, enddate):
        """Find the file dates encompassing [**begindate**, **enddate**]."""
        begindate, enddate = self._dates_normalise(begindate, enddate)
        tdate = self._begin_lookup(begindate)
        tlength = (enddate - begindate).length
        return self._begindates_expansion(tdate, tlength)

    def enddate_i(self, begindate, enddate):
        """Find the file enddates encompassing [**begindate**, **enddate**]."""
        return self._enddates_expansion(self.begindate_i(begindate, enddate))

    def term_i(self, begindate, enddate):  # @UnusedVariable
        """Find the term of the time serie files."""
        return Time(self.timeserie_step)

    def begindate(self, begindate, term):
        """Find the file dates encompassing [**begindate**, **begindate** + **term**]."""
        begindate, term = self._date_term_normalise(begindate, term)
        # int(term) presumably yields minutes, hence * 60 to get seconds
        # (step lengths are expressed in seconds) — TODO confirm against bronx.
        return self._begindates_expansion(self._begin_lookup(begindate), int(term) * 60)

    def enddate(self, begindate, term):
        """Find the file enddates encompassing [**begindate**, **begindate** + **term**]."""
        return self._enddates_expansion(self.begindate(begindate, term))

    def term(self, begindate, term):  # @UnusedVariable
        """Find the term of the time serie files."""
        return Time(self.timeserie_step)
|
|
779
|
+
|
|
780
|
+
|
|
781
|
+
class ArpIfsForecastTermConfTool(ConfTool):
    """Deal with any Arpege/IFS model final term and outputs.


    The conf tool will look like::

        >>> ct = ArpIfsForecastTermConfTool(kind="arpifs_fcterms",
        ...                                 fcterm_def=dict(production={0:102, 12:24},
        ...                                                 assim={"default": 6}),
        ...                                 hist_terms_def=dict(production={"default":"0-47-6,48-finalterm-12"},
        ...                                                     assim={"default":"0,3,6"}),
        ...                                 surf_terms_def=dict(production={"default":None, 0:"3,6"},
        ...                                                     assim={"default":"3,6"}),
        ...                                 diag_fp_terms_def=dict(default={"default":"0-47-3,48-finalterm-6"}),
        ...                                 extra_fp_terms_def=dict(
        ...                                     aero=dict(production={0:"0-48-3"}),
        ...                                     foo=dict(default={"default":"2,3"})
        ...                                 ),
        ...                                 secondary_diag_terms_def=dict(
        ...                                     labo=dict(production={0: "0-12-1"})
        ...                                 ),
        ...                                 )

    The forecast term can be retrieved:

    >>> print(ct.fcterm('assim', 6))
    6
    >>> print(ct.fcterm('production', 0))
    102
    >>> print(ct.fcterm('production', 12))
    24

    If nothing is defined it crashes:

    >>> print(ct.fcterm('production', 6))
    Traceback (most recent call last):
    ...
    ValueError: Nothing is defined for cutoff="production"/hh="06:00" in "fcterm"

    The list of requested historical terms can be retrieved. It is automatically
    constrained by the forecast term:

    >>> print(','.join([str(t) for t in ct.hist_terms('assim', 6)]))
    0,3,6
    >>> print(','.join([str(t) for t in ct.hist_terms('production', 0)]))
    0,6,12,18,24,30,36,42,48,60,72,84,96
    >>> print(','.join([str(t) for t in ct.hist_terms('production', 12)]))
    0,6,12,18,24

    The list of requested Surfex files can be retrieved:

    >>> print(','.join([str(t) for t in ct.surf_terms('assim', 6)]))
    3,6

    The list of terms produced by the inline fullpos is:

    >>> print(','.join([str(t) for t in ct.inline_terms('assim', 6)]))
    0,3,6
    >>> print(','.join([str(t) for t in ct.inline_terms('production', 0)]))
    0,1,2,3,4,5,6,7,8,9,10,11,12,15,18,21,24,27,30,33,36,39,42,45,48,54,60,66,72,78,84,90,96,102
    >>> print(','.join([str(t) for t in ct.inline_terms('production', 12)]))
    0,3,6,9,12,15,18,21,24

    Note: It depends on the value of **use_inline_fp**. If ``False`` an empty
    list will be returned.

    The inline Fullpos can also be switched-off manually using the `no_inline`
    property:

    >>> print(','.join([str(t) for t in ct.no_inline.inline_terms('production', 0)]))
    <BLANKLINE>
    >>> print(','.join([str(t) for t in ct.no_inline.diag_terms('production', 0)]))
    0,1,2,3,4,5,6,7,8,9,10,11,12,15,18,21,24,27,30,33,36,39,42,45,48,54,60,66,72,78,84,90,96,102

    The list of terms when some offline fullpos job is needed (for any of the
    domains):

    >>> print(','.join([str(t) for t in ct.fpoff_terms('assim', 6)]))
    2,3
    >>> print(','.join([str(t) for t in ct.fpoff_terms('production', 0)]))
    0,2,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
    >>> print(','.join([str(t) for t in ct.fpoff_terms('production', 12)]))
    2,3

    The list of terms, in addition to requested historical terms, needed to run
    offline fullpos job:

    >>> print(','.join([str(t) for t in ct.extra_hist_terms('production', 0)]))
    2,3,9,15,21,27,33,39,45

    The list of all historical terms (both requested terms and terms required
    for offline Fullpos)

    >>> print(','.join([str(t) for t in ct.all_hist_terms('production', 0)]))
    0,2,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,60,72,84,96

    The list of involved Fullpos objects for a given cutoff/hh:

    >>> print(','.join([t for t in ct.fpoff_items('assim', 6)]))
    foo
    >>> print(','.join([t for t in ct.fpoff_items('production', 0)]))
    aero,foo
    >>> print(','.join([t for t in ct.fpoff_items('production', 0, discard=['aero'])]))
    foo
    >>> print(','.join([t for t in ct.fpoff_items('production', 0, only=['foo'])]))
    foo
    >>> print(','.join([t for t in ct.fpoff_items('production', 12)]))
    foo

    The list of terms associated to a given Fullpos object can be obtained:

    >>> print(','.join([str(t) for t in ct.foo_terms('assim', 6)]))
    2,3
    >>> print(','.join([str(t) for t in ct.aero_terms('assim', 6)]))
    <BLANKLINE>
    >>> print(','.join([str(t) for t in ct.foo_terms('production', 0)]))
    2,3
    >>> print(','.join([str(t) for t in ct.aero_terms('production', 0)]))
    0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
    >>> print(','.join([str(t) for t in ct.foo_terms('production', 12)]))
    2,3
    >>> print(','.join([str(t) for t in ct.aero_terms('production', 12)]))
    <BLANKLINE>

    It can also be obtained as a FPList objects (if empty, an empty list is returned
    instead of an FPList object):

    >>> ct.aero_terms_fplist('assim', 6)
    []
    >>> print(','.join([str(t) for t in ct.aero_terms_fplist('production', 0)]))
    0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
    >>> print(type(ct.aero_terms_fplist('production', 0)).__name__)
    FPList
    >>> ct.aero_terms_fplist('production', 12)
    []

    A mapping dictionary can also be obtained:

    >>> for k, v in sorted(ct.fpoff_terms_map('production', 0).items()):
    ...     print('{:s}: {:s}'.format(k, ','.join([str(vv) for vv in v])))
    aero: 0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
    foo: 2,3

    The list of terms associated to secondary diagnostics can be obtained
    ("secondary diagnostics" stands for diagnostics that are based on files
    pre-calculated by the inline/offline fullpos):

    >>> print(','.join([str(t) for t in ct.labo_terms('production', 0)]))
    0,1,2,3,4,5,6,7,8,9,10,11,12
    >>> print(','.join([str(t) for t in ct.labo_terms('production', 12)]))
    <BLANKLINE>

    """

    _footprint = dict(
        info = "Conf tool that helps setting up Arpege's forecast term and outputs",
        attr = dict(
            kind = dict(
                values= ['arpifs_fcterms', ],
            ),
            fcterm_def = dict(
                info = ("The forecast's term for each cutoff and base time " +
                        "(e.g ``{'assim':{0:6, 12:6}, 'production':{0:102}}``)"),
                type = dict,
            ),
            fcterm_unit=dict(
                info="The forecast's term unit (hour or timestep)",
                values=['hour', 'timestep'],
                optional=True,
                default='hour',
            ),
            hist_terms_def=dict(
                info=("The forecast's terms when historical files are needed " +
                      "(for permanant storage) " +
                      "(e.g ``{'assim':{default: '0-finalterm-3'}, " +
                      "'production':{0:'0-23-1,24-finalterm-6}}``)"),
                type=dict,
                optional=True,
            ),
            surf_terms_def=dict(
                info=("The forecast's terms when surface files are needed " +
                      "(for permanant storage) "),
                type=dict,
                optional=True,
            ),
            norm_terms_def=dict(
                info="The forecast's terms when spectral norms are computed",
                type=dict,
                optional=True,
            ),
            diag_fp_terms_def=dict(
                info="The forecast's terms when fullpos core diagnostics are computed",
                type=dict,
                optional=True,
            ),
            extra_fp_terms_def=dict(
                info=("The forecast's terms when extra fullpos diagnostics are computed. " +
                      "They are always computed by some offline tasks. " +
                      "The dictionary has an additional level (describing the 'name' of the " +
                      "extra fullpos processing"),
                type=dict,
                optional=True,
            ),
            secondary_diag_terms_def=dict(
                info=("The forecast's terms when secondary diagnostics are computed. " +
                      "Secondary dignostics are based on diagnostics previously created by " +
                      "the inline/offline diag fullpos (see diag_fp_terms_def)." +
                      "The dictionary has an additional level (describing the 'name' of the " +
                      "secondary diags"),
                type=dict,
                optional=True,
            ),
            use_inline_fp = dict(
                info = 'Use inline Fullpos to compute "core_fp_terms"',
                type = bool,
                optional = True,
                default = True,
            ),
        )
    )

    # Regexes powering the dynamic "<item>_terms" / "<item>_terms_fplist"
    # accessors implemented in __getattr__.
    _ACTUAL_T_RE = re.compile(r'(\w+)_terms$')
    _ACTUAL_FPLIST_T_RE = re.compile(r'(\w+)_terms_fplist$')
    # Sentinel used to distinguish "missing" from a legitimate None value.
    _UNDEFINED = object()

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)
        # Normalise every *_def footprint attribute: keys become Time objects
        # (or the 'default' marker) and values are cast appropriately.
        self._x_fcterm = self._check_data_keys_and_times(self.fcterm_def, 'fcterm_def',
                                                         cast=self._cast_unique_value)
        self._x_hist_terms = self._check_data_keys_and_times(self.hist_terms_def, 'hist_terms_def',
                                                             cast=self._cast_timerangex)
        self._x_surf_terms = self._check_data_keys_and_times(self.surf_terms_def, 'surf_terms_def',
                                                             cast=self._cast_timerangex)
        self._x_norm_terms = self._check_data_keys_and_times(self.norm_terms_def, 'norm_terms_def',
                                                             cast=self._cast_timerangex)
        self._x_diag_fp_terms = self._check_data_keys_and_times(self.diag_fp_terms_def, 'diag_fp_terms_def',
                                                                cast=self._cast_timerangex)
        self._x_extra_fp_terms = dict() if self.extra_fp_terms_def is None else self.extra_fp_terms_def
        if not all([isinstance(v, dict) for v in self._x_extra_fp_terms.values()]):
            raise ValueError("extra_fp_terms values need to be dictionaries")
        self._x_extra_fp_terms = {k: self._check_data_keys_and_times(v,
                                                                     'extra_fp_terms_def[{:s}]'.format(k),
                                                                     cast=self._cast_timerangex)
                                  for k, v in self._x_extra_fp_terms.items()}
        self._x_secondary_diag_terms_def = (dict()
                                            if self.secondary_diag_terms_def is None
                                            else self.secondary_diag_terms_def)
        if not all([isinstance(v, dict) for v in self._x_secondary_diag_terms_def.values()]):
            # Bug fix: this message used to read "extra_fp_terms" (copy-paste).
            raise ValueError("secondary_diag_terms values need to be dictionaries")
        self._x_secondary_diag_terms_def = {
            k: self._check_data_keys_and_times(v,
                                               'secondary_diag_terms_def[{:s}]'.format(k),
                                               cast=self._cast_timerangex)
            for k, v in self._x_secondary_diag_terms_def.items()
        }
        # Memoisation caches for the (potentially expensive) lookups below.
        self._lookup_cache = dict()
        self._lookup_rangex_cache = dict()
        self._no_inline_cache = None

    def _clone(self, **kwargs):
        """Create a copy of this conf tool, overriding some footprint attributes."""
        my_args = self.footprint_as_shallow_dict()
        my_args.update(kwargs)
        return self.__class__(** my_args)

    @property
    def no_inline(self):
        """Return a clone of this object with inline fullpos de-activated."""
        if self._no_inline_cache is None:
            # Lazily built and cached: cloning triggers a full re-init.
            self._no_inline_cache = self._clone(use_inline_fp=False)
        return self._no_inline_cache

    @staticmethod
    def _cast_void(value):
        """Identity cast (used when no conversion is wanted)."""
        return value

    def _cast_unique_value(self, value):
        """Cast a final-term value according to **fcterm_unit**."""
        if self.fcterm_unit == 'hour':
            return Time(value)
        else:
            return int(value)

    @staticmethod
    def _cast_timerangex(value):
        """Normalise a terms definition to a comma-separated rangex string (or None)."""
        if not (value is None or isinstance(value, str)):
            if isinstance(value, collections.abc.Iterable):
                value = ','.join([str(e) for e in value])
            else:
                value = str(value)
        return value

    @staticmethod
    def _check_data_keys(data, dataname):
        """Check the first level of any input dictionary."""
        if data is None:
            # Missing definition: behave as an always-empty mapping.
            return dict(default=dict(default=None))
        else:
            if not set(data.keys()) <= {'assim', 'production', 'default'}:
                # Bug fix: "Impoper" -> "Improper" in the error message.
                raise ValueError('Improper value ({!s}) for "{:s}".'.format(data, dataname))
            return data

    def _check_data_keys_and_times(self, data, dataname, cast=None):
        """Check any input dictionary and convert values."""
        data = self._check_data_keys(data, dataname)
        cast = self._cast_void if cast is None else cast
        new_data = dict()
        for data_k, data_v in data.items():
            if not isinstance(data_v, dict):
                raise ValueError('The {:s} "{:s}" entry should be a dictionary (got "{!s}")'
                                 .format(dataname, data_k, data_v))
            try:
                # Second-level keys are base times: convert them to Time
                # (the 'default' marker is kept as-is).
                new_data[data_k] = {'default' if k == 'default' else Time(k): cast(v)
                                    for k, v in data_v.items()}
            except ValueError as e:
                raise ValueError("Error while processing {:s}'s {:s}: ".format(dataname, data_k) +
                                 "Could not convert to Time (original message '{!s}')".format(e))
        return new_data

    def _cutoff_hh_lookup(self, what_desc, cutoff, hh, rawdata=None):
        """Look for a particular cutoff in self._x_what_desc.

        Two-level lookup (cutoff then base time), each level falling back
        on its 'default' entry when the exact key is missing.
        """
        if not isinstance(hh, Time):
            hh = Time(hh)
        if (what_desc, cutoff, hh) not in self._lookup_cache:
            if rawdata is None:
                rawdata = getattr(self, '_x_{:s}'.format(what_desc))
            cutoff_v = rawdata.get(cutoff, rawdata.get('default', self._UNDEFINED))
            if cutoff_v is self._UNDEFINED:
                raise ValueError('Nothing is defined for cutoff="{:s}" in "{:s}"'
                                 .format(cutoff, what_desc))
            hh_v = cutoff_v.get(hh, cutoff_v.get('default', self._UNDEFINED))
            if hh_v is self._UNDEFINED:
                raise ValueError('Nothing is defined for cutoff="{:s}"/hh="{!s}" in "{:s}"'
                                 .format(cutoff, hh, what_desc))
            self._lookup_cache[(what_desc, cutoff, hh)] = hh_v
        return self._lookup_cache[(what_desc, cutoff, hh)]

    def _cutoff_hh_rangex_lookup(self, what_desc, cutoff, hh, rawdata=None):
        """Look for a particular cutoff in self._x_what_desc and resolve the rangex."""
        if (what_desc, cutoff, hh) not in self._lookup_rangex_cache:
            try:
                what = self._cutoff_hh_lookup(what_desc, cutoff, hh, rawdata=rawdata)
            except ValueError:
                what = None
            if what is None:
                self._lookup_rangex_cache[(what_desc, cutoff, hh)] = list()
            else:
                # Substitute the 'finalterm' keyword with the actual forecast term
                # and expand the rangex expression to a list of terms.
                finalterm = self._cutoff_hh_lookup('fcterm', cutoff, hh)
                if 'finalterm' in what:
                    what = what.replace('finalterm', str(finalterm))
                try:
                    tir = timeintrangex(what)
                except (TypeError, ValueError):
                    raise ValueError(
                        'Could not process "{:s}" using timeintrangex (from "{:s}" with cutoff={:s}/hh={!s})'
                        .format(what, what_desc, cutoff, hh)
                    )
                if self.fcterm_unit == 'timestep' and not all([isinstance(i, int) for i in tir]):
                    raise ValueError('No hours/minutes allowed when fcterm_unit is "timestep" ' +
                                     '(from "{:s}" with cutoff={:s}/hh={!s})'
                                     .format(what_desc, cutoff, hh))
                # Terms beyond the forecast's final term are silently dropped.
                self._lookup_rangex_cache[(what_desc, cutoff, hh)] = sorted(
                    [t for t in tir if t <= finalterm]
                )
        return self._lookup_rangex_cache[(what_desc, cutoff, hh)]

    def fcterm(self, cutoff, hh):
        """The forecast term for **cutoff** and **hh**."""
        fcterm = self._cutoff_hh_lookup('fcterm', cutoff, hh)
        # Return a plain integer whenever possible (nicer display).
        if isinstance(fcterm, Time) and fcterm.minute == 0:
            return fcterm.hour
        else:
            return fcterm

    def hist_terms(self, cutoff, hh):
        """The list of terms for requested/archived historical files."""
        return self._cutoff_hh_rangex_lookup('hist_terms', cutoff, hh)

    def surf_terms(self, cutoff, hh):
        """The list of terms for historical surface files."""
        return self._cutoff_hh_rangex_lookup('surf_terms', cutoff, hh)

    def norm_terms(self, cutoff, hh):
        """The list of terms for norm calculations."""
        return self._cutoff_hh_rangex_lookup('norm_terms', cutoff, hh)

    def inline_terms(self, cutoff, hh):
        """The list of terms for inline diagnostics."""
        if self.use_inline_fp:
            return sorted(
                set(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh)) |
                self._secondary_diag_terms_set(cutoff, hh)
            )
        else:
            return list()

    def diag_terms(self, cutoff, hh):
        """The list of terms for offline diagnostics."""
        # Exact complement of inline_terms: the same terms, computed offline
        # when the inline fullpos is de-activated.
        if self.use_inline_fp:
            return list()
        else:
            return sorted(
                set(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh)) |
                self._secondary_diag_terms_set(cutoff, hh)
            )

    def diag_terms_fplist(self, cutoff, hh):
        """The list of terms for offline diagnostics (as a FPlist)."""
        flist = self.diag_terms(cutoff, hh)
        return FPList(flist) if flist else []

    def _extra_fp_terms_item_fplist(self, item, cutoff, hh):
        """Terms of the **item** extra-fullpos processing, as a FPList (or [])."""
        flist = self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(item),
                                              cutoff, hh,
                                              rawdata=self._x_extra_fp_terms[item])
        return FPList(flist) if flist else []

    def _secondary_diag_terms_item_fplist(self, item, cutoff, hh):
        """Terms of the **item** secondary diagnostic, as a FPList (or [])."""
        flist = self._cutoff_hh_rangex_lookup('secondary_diag_terms[{:s}]'.format(item),
                                              cutoff, hh,
                                              rawdata=self._x_secondary_diag_terms_def[item])
        return FPList(flist) if flist else []

    @secure_getattr
    def __getattr__(self, item):
        # Dynamic accessors: "<name>_terms" and "<name>_terms_fplist" for any
        # name declared in extra_fp_terms_def or secondary_diag_terms_def.
        actual_m = self._ACTUAL_T_RE.match(item)
        actual_fplist_m = self._ACTUAL_FPLIST_T_RE.match(item)
        if actual_m and actual_m.group(1) in self._x_extra_fp_terms.keys():
            return functools.partial(self._cutoff_hh_rangex_lookup,
                                     'extra_fp_terms[{:s}]'.format(actual_m.group(1)),
                                     rawdata=self._x_extra_fp_terms[actual_m.group(1)])
        elif actual_fplist_m and actual_fplist_m.group(1) in self._x_extra_fp_terms.keys():
            return functools.partial(self._extra_fp_terms_item_fplist,
                                     actual_fplist_m.group(1))
        elif actual_m and actual_m.group(1) in self._x_secondary_diag_terms_def.keys():
            return functools.partial(self._cutoff_hh_rangex_lookup,
                                     'secondary_diag_terms[{:s}]'.format(actual_m.group(1)),
                                     rawdata=self._x_secondary_diag_terms_def[actual_m.group(1)])
        elif actual_fplist_m and actual_fplist_m.group(1) in self._x_secondary_diag_terms_def.keys():
            return functools.partial(self._secondary_diag_terms_item_fplist,
                                     actual_fplist_m.group(1))
        else:
            raise AttributeError('Attribute "{:s}" was not found'.format(item))

    def _fpoff_terms_set(self, cutoff, hh):
        """Set of all the terms processed by offline fullpos tasks."""
        fpoff_terms = set()
        for k, v in self._x_extra_fp_terms.items():
            fpoff_terms.update(self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(k),
                                                             cutoff, hh, rawdata=v))
        if not self.use_inline_fp:
            # Core diags (and their secondary diags) go offline too.
            fpoff_terms.update(self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh))
            fpoff_terms.update(self._secondary_diag_terms_set(cutoff, hh))
        return fpoff_terms

    def _secondary_diag_terms_set(self, cutoff, hh):
        """Set of all the terms needed by secondary diagnostics."""
        sec_terms = set()
        for k, v in self._x_secondary_diag_terms_def.items():
            sec_terms.update(self._cutoff_hh_rangex_lookup('secondary_diag_terms[{:s}]'.format(k),
                                                           cutoff, hh, rawdata=v))
        return sec_terms

    def extra_hist_terms(self, cutoff, hh):
        """The list of terms for historical file terms solely produced for fullpos use."""
        fpoff_terms = self._fpoff_terms_set(cutoff, hh)
        fpoff_terms -= set(self.hist_terms(cutoff, hh))
        return sorted(fpoff_terms)

    def all_hist_terms(self, cutoff, hh):
        """The list of terms for all historical file."""
        all_terms = self._fpoff_terms_set(cutoff, hh)
        all_terms |= set(self.hist_terms(cutoff, hh))
        return sorted(all_terms)

    def fpoff_terms(self, cutoff, hh):
        """The list of terms for offline fullpos."""
        fpoff_terms = self._fpoff_terms_set(cutoff, hh)
        return sorted(fpoff_terms)

    def fpoff_items(self, cutoff, hh, discard=None, only=None):
        """List of active offline post-processing domains."""
        items = {k
                 for k, v in self._x_extra_fp_terms.items()
                 if self._cutoff_hh_rangex_lookup('extra_fp_terms[{:s}]'.format(k),
                                                  cutoff,
                                                  hh,
                                                  rawdata=v)}
        if not self.use_inline_fp and self._cutoff_hh_rangex_lookup('diag_fp_terms', cutoff, hh):
            items.add('diag')
        if discard:
            items -= set(discard)
        if only:
            items &= set(only)
        return sorted(items)

    def fpoff_terms_map(self, cutoff, hh):
        """The mapping dictionary between offline post-processing terms and domains."""
        return {k: getattr(self, '{:s}_terms'.format(k))(cutoff, hh)
                for k in self.fpoff_items(cutoff, hh)}

    def fpoff_terms_fpmap(self, cutoff, hh):
        """The mapping dictionary between offline post-processing terms and domains (as a FPlist)."""
        return {k: getattr(self, '{:s}_terms_fplist'.format(k))(cutoff, hh)
                for k in self.fpoff_items(cutoff, hh)}
|
|
1282
|
+
|
|
1283
|
+
|
|
1284
|
+
class TimeSlotsConfTool(AbstractObjectProxyConfTool):
    """Gives easy access to a Timeslots object.

    The conf tool will look like::

        >>> ct = TimeSlotsConfTool(kind="objproxy",
        ...                        timeslots_def="7/-PT3H/PT6H")
        >>> print(ct.start)
        -PT10800S

    """

    _footprint = dict(
        info = 'Gives easy access to a Timeslots object.',
        attr = dict(
            timeslots_def = dict(
                info = "The timeslots specification",
            ),
        )
    )

    def _create_proxied_obj(self):
        # Build the proxied TimeSlots object from the footprint specification;
        # attribute accesses on this conf tool are then forwarded to it.
        return TimeSlots(self.timeslots_def)
|
|
1307
|
+
|
|
1308
|
+
|
|
1309
|
+
if __name__ == '__main__':
    # Run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()
|