vortex_nwp-2.0.0-py3-none-any.whl

Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
--- /dev/null
+++ b/vortex/nwp/tools/conftools.py
@@ -0,0 +1,1661 @@
+"""
+Conftools are small objects that can be instantiated from an application's
+configuration file.
+
+They might be used when some complex calculations are needed to establish the
+tasks' configuration.
+"""
+
+import collections
+import collections.abc
+import functools
+import math
+import re
+
+from bronx.fancies import loggers
+from bronx.stdtypes.date import Date, Time, Period, Month, timeintrangex
+from bronx.syntax.decorators import secure_getattr
+from footprints.stdtypes import FPDict, FPList
+from footprints.util import rangex
+import footprints
+
+from ..tools.odb import TimeSlots
+
+#: No automatic export
+__all__ = []
+
+logger = loggers.getLogger(__name__)
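+
+# Conftool objects are registered in the "conftool" footprints collector, so an
+# application's configuration layer can instantiate them by kind. A minimal
+# sketch, assuming the usual footprints proxy factory (the attributes shown
+# come from the TimeSlotsConfTool doctest at the bottom of this module):
+#
+#     import footprints
+#     ct = footprints.proxy.conftool(kind="objproxy", timeslots_def="7/-PT3H/PT6H")
+#     print(ct.start)  # -> -PT10800S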
+
+
+class ConfTool(footprints.FootprintBase):
+    """Abstract class for conftools objects."""
+
+    _abstract = True
+    _collector = ("conftool",)
+    _footprint = dict(
+        info="Abstract Conf/Weird Tool",
+        attr=dict(
+            kind=dict(),
+        ),
+    )
+
+
+class AbstractObjectProxyConfTool(ConfTool):
+    """Allow transparent access to any Vortex object."""
+
+    _abstract = True
+    _footprint = dict(
+        info="Conf tool that gives transparent access to a proxied object.",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "objproxy",
+                ],
+            ),
+        ),
+    )
+
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
+        self._proxied_obj = self._create_proxied_obj()
+
+    def _create_proxied_obj(self):
+        """Initialise the object that will be proxied."""
+        raise NotImplementedError()
+
+    @secure_getattr
+    def __getattr__(self, item):
+        """Pass all requests to the proxied object."""
+        target = getattr(self._proxied_obj, item, None)
+        if target is None:
+            raise AttributeError('Attribute "{:s}" was not found'.format(item))
+        else:
+            return target
+
+
+#: Holds coupling's data for a particular cutoff/hour
+CouplingInfos = collections.namedtuple(
+    "CouplingInfos",
+    ("base", "dayoff", "cutoff", "vapp", "vconf", "xpid", "model", "steps"),
+)
+
+
+class CouplingOffsetConfError(Exception):
+    """Abstract exception raised by :class:`CouplingOffsetConfTool` objects."""
+
+    pass
+
+
+class CouplingOffsetConfPrepareError(CouplingOffsetConfError):
+    """Exception raised when an error occurs during coupling data calculations."""
+
+    def __init__(self, fmtk):
+        msg = "It is useless to compute coupling for: {}.".format(fmtk)
+        super().__init__(msg)
+
+
+class CouplingOffsetConfRefillError(CouplingOffsetConfError):
+    """Exception raised when an error occurs during refill."""
+
+    def __init__(self, fmtk, hh=None):
+        msg = "It is useless to compute a refill for: {}".format(fmtk)
+        if hh is None:
+            msg += "."
+        else:
+            msg += " at HH={!s}.".format(hh)
+        super().__init__(msg)
+
+
+class CouplingOffsetConfTool(ConfTool):
+    """Conf tool that does all sorts of computations for coupling."""
+
+    _footprint = dict(
+        info="Conf tool that does all sorts of computations for coupling",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "couplingoffset",
+                ],
+            ),
+            cplhhlist=dict(
+                info=(
+                    "The list of cutoffs and hours for this application. "
+                    "If omitted, all entries of the **cplhhbase** attribute are used. "
+                    + "(e.g. ``{'assim':[0, 6, 12, 18], 'production':[0, ]}``)"
+                ),
+                type=FPDict,
+                optional=True,
+            ),
+            cplhhbase=dict(
+                info=(
+                    "For a given cutoff and hour, gives the base hour to couple to. "
+                    + "(e.g. ``{'assim':{0:0, 6:6, 12:12, 18:18}, 'production':{0:18}}``)."
+                ),
+                type=FPDict,
+            ),
+            cpldayoff=dict(
+                info=(
+                    "For a given cutoff and hour, gives an offset in days. 0 by default. "
+                    + "(e.g. ``{'assim':{'default':0}, 'production':{'default':1}}``)."
+                ),
+                type=FPDict,
+                optional=True,
+            ),
+            cplcutoff=dict(
+                info="For a given cutoff and hour, gives the base cutoff to couple to.",
+                type=FPDict,
+            ),
+            cplvapp=dict(
+                info="For a given cutoff and hour, gives the base vapp to couple to.",
+                type=FPDict,
+            ),
+            cplvconf=dict(
+                info="For a given cutoff and hour, gives the base vconf to couple to.",
+                type=FPDict,
+            ),
+            cplxpid=dict(
+                info="For a given cutoff and hour, gives the experiment ID to couple to.",
+                type=FPDict,
+                optional=True,
+            ),
+            cplmodel=dict(
+                info="For a given cutoff and hour, gives the base model to couple to.",
+                type=FPDict,
+                optional=True,
+            ),
+            cplsteps=dict(
+                info="For a given cutoff and hour, gives the list of requested terms.",
+                type=FPDict,
+            ),
+            finalterm=dict(
+                info='For a given cutoff and hour, the final term (for "finalterm" token substitution)',
+                type=FPDict,
+                optional=True,
+            ),
+            refill_cutoff=dict(
+                values=["assim", "production", "all"],
+                info="By default, the cutoff name of the refill task.",
+                optional=True,
+                default="assim",
+            ),
+            compute_on_refill=dict(
+                info="Is it necessary to compute coupling files for the refilling cutoff?",
+                optional=True,
+                default=True,
+                type=bool,
+            ),
+            isolated_refill=dict(
+                info="Are the refill tasks exclusive with prepare tasks?",
+                optional=True,
+                default=True,
+                type=bool,
+            ),
+            verbose=dict(
+                info="When the object is created, print a summary.",
+                type=bool,
+                optional=True,
+                default=True,
+            ),
+        ),
+    )
+
+    _DFLT_KEY = "default"
+
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
+
+        # A dictionary summarising the base HH supported by this configuration tool
+        # e.g. dict(assim=set([0, 1, 2, ...]), production=set([0, 6, ...]))
+        self._target_hhs = collections.defaultdict(set)
+        if self.cplhhlist is None:
+            t_hhbase = collections.defaultdict(dict)
+            for c, cv in self.cplhhbase.items():
+                for h, v in [(Time(lh), Time(lv)) for lh, lv in cv.items()]:
+                    t_hhbase[c][h] = v
+                    self._target_hhs[c].add(h)
+        else:
+            for c, clist in self.cplhhlist.items():
+                if not isinstance(clist, (tuple, list)):
+                    clist = [
+                        clist,
+                    ]
+                self._target_hhs[c].update([Time(h) for h in clist])
+            t_hhbase = self._reshape_inputs(self.cplhhbase, value_reclass=Time)
+
+        # Consistency checks and array reshaping
+        t_dayoff = self._reshape_inputs(self.cpldayoff, class_default=0)
+        t_cutoff = self._reshape_inputs(self.cplcutoff)
+        t_vapp = self._reshape_inputs(self.cplvapp)
+        t_vconf = self._reshape_inputs(self.cplvconf)
+        t_steps = self._reshape_inputs(self.cplsteps)
+        if self.cplmodel is None:
+            t_model = t_vapp
+        else:
+            t_model = self._reshape_inputs(self.cplmodel)
+        t_xpid = self._reshape_inputs(self.cplxpid, class_default="")
+
+        # If relevant, do "finalterm" token substitution
+        if self.finalterm is not None:
+            t_finalterm = self._reshape_inputs(
+                self.finalterm, value_reclass=str
+            )
+            for c, cv in t_hhbase.items():
+                for hh in cv.keys():
+                    if isinstance(t_steps[c][hh], str):
+                        t_steps[c][hh] = t_steps[c][hh].replace(
+                            "finalterm", t_finalterm[c][hh]
+                        )
+
+        # Build the dictionary of CouplingInfos objects
+        self._cpl_data = collections.defaultdict(dict)
+        for c, cv in t_hhbase.items():
+            self._cpl_data[c] = {
+                hh: CouplingInfos(
+                    cv[hh],
+                    int(t_dayoff[c][hh]),
+                    t_cutoff[c][hh],
+                    t_vapp[c][hh],
+                    t_vconf[c][hh],
+                    t_xpid[c][hh],
+                    t_model[c][hh],
+                    rangex(t_steps[c][hh]),
+                )
+                for hh in cv.keys()
+            }
+
+        # Pre-compute the prepare terms
+        self._prepare_terms_map = self._compute_prepare_terms()
+        if self.verbose:
+            print()
+            print("#### Coupling configuration tool initialised ####")
+            print("**** Coupling tasks terms map:")
+            print(
+                "{:s} : {:s}".format(
+                    self._cpl_fmtkey(
+                        ("HH", "VAPP", "VCONF", "XPID", "MODEL", "CUTOFF")
+                    ),
+                    "Computed Terms",
+                )
+            )
+            for k in sorted(self._prepare_terms_map.keys()):
+                print(
+                    "{:s} : {:s}".format(
+                        self._cpl_fmtkey(k),
+                        " ".join(
+                            [str(t.hour) for t in self._prepare_terms_map[k]]
+                        ),
+                    )
+                )
+
+        # Pre-compute the default refill_map
+        self._refill_terms_map = dict()
+        self._refill_terms_map[self.refill_cutoff] = (
+            self._compute_refill_terms(
+                self.refill_cutoff,
+                self.compute_on_refill,
+                self.isolated_refill,
+            )
+        )
+        if self.verbose:
+            print(
+                "**** Refill tasks activation map (default refill_cutoff is: {:s}):".format(
+                    self.refill_cutoff
+                )
+            )
+            print(
+                "{:s} : {:s}".format(
+                    self._rtask_fmtkey(
+                        ("VAPP", "VCONF", "XPID", "MODEL", "CUTOFF")
+                    ),
+                    "Active hours",
+                )
+            )
+            for k in sorted(self._refill_terms_map[self.refill_cutoff].keys()):
+                vdict = self._refill_terms_map[self.refill_cutoff][k]
+                print(
+                    "{:s} : {:s}".format(
+                        self._rtask_fmtkey(k),
+                        " ".join([str(t.hour) for t in sorted(vdict.keys())]),
+                    )
+                )
+            print()
+
+    @property
+    def target_hhs(self):
+        return self._target_hhs
+
+    def _reshape_inputs(
+        self, input_dict, class_default=None, value_reclass=lambda x: x
+    ):
+        """Deal with default values, check dictionaries and convert keys to Time objects."""
+        # Convert keys to time objects
+        r_dict = dict()
+        if input_dict is not None:
+            for c, cv in input_dict.items():
+                if isinstance(cv, dict):
+                    r_dict[c] = dict()
+                    for h, v in cv.items():
+                        if h != self._DFLT_KEY:
+                            r_dict[c][Time(h)] = value_reclass(v)
+                        else:
+                            r_dict[c][h] = value_reclass(v)
+                else:
+                    r_dict[c] = cv
+
+        # Is there a generic default?
+        defined_topdefault = self._DFLT_KEY in r_dict
+        top_default = r_dict.pop(self._DFLT_KEY, class_default)
+
+        # Check consistency and replace missing values with defaults
+        for c in self.target_hhs:
+            myv = r_dict.setdefault(c, dict())
+            # Is there a cutoff-specific default?
+            defined_cutdefault = defined_topdefault or self._DFLT_KEY in myv
+            last_default = myv.pop(self._DFLT_KEY, top_default)
+            my_c_hhs = set(myv.keys())
+            if defined_cutdefault or (class_default is not None):
+                missinghh = self.target_hhs[c] - my_c_hhs
+                for h in missinghh:
+                    myv[h] = last_default
+            else:
+                if not my_c_hhs >= self.target_hhs[c]:
+                    logger.error(
+                        "Inconsistent input arrays while processing: \n%s",
+                        str(input_dict),
+                    )
+                    logger.error(
+                        "Cutoff %s, expecting the following HH: \n%s",
+                        c,
+                        str(self.target_hhs[c]),
+                    )
+                    raise ValueError("Inconsistent input array.")
+
+        # Filter values according to _target_hhs
+        for c in list(r_dict.keys()):
+            if c not in self.target_hhs:
+                del r_dict[c]
+        for c in self.target_hhs:
+            my_c_hhs = set(r_dict[c].keys())
+            extra = my_c_hhs - self.target_hhs[c]
+            for hh in extra:
+                del r_dict[c][hh]
+
+        return r_dict
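+
+    # Illustrative sketch of the default handling above (hypothetical values):
+    # with target_hhs == {'assim': {Time(0), Time(6)}}, the input
+    #     {'assim': {0: 'a'}, 'default': 'z'}
+    # is reshaped into
+    #     {'assim': {Time(0): 'a', Time(6): 'z'}}
+    # (the 'default' entry fills every hour that is not explicitly listed).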
+
+    @staticmethod
+    def _cpl_key(hh, cutoff, vapp, vconf, xpid, model):
+        return (str(hh), vapp, vconf, xpid, model, cutoff)
+
+    @staticmethod
+    def _cpl_fmtkey(k):
+        cutoff_map = dict(production="prod")
+        return "{:5s} {:6s} {:24s} {:s} ({:s})".format(
+            k[0], cutoff_map.get(k[5], k[5]), k[1] + "/" + k[2], k[3], k[4]
+        )
+
+    @staticmethod
+    def _rtask_key(cutoff, vapp, vconf, xpid, model):
+        return (vapp, vconf, xpid, model, cutoff)
+
+    @staticmethod
+    def _rtask_fmtkey(k):
+        cutoff_map = dict(production="prod")
+        return "{:6s} {:24s} {:s} ({:s})".format(
+            cutoff_map.get(k[4], k[4]), k[0] + "/" + k[1], k[2], k[3]
+        )
+
+    @staticmethod
+    def _process_date(date):
+        mydate = Date(date)
+        myhh = Time("{0.hour:d}:{0.minute:02d}".format(mydate))
+        return mydate, myhh
+
+    @staticmethod
+    def _hh_offset(hh, hhbase, dayoff):
+        offset = hh - hhbase
+        if offset < 0:
+            offset += Time(24)
+        return offset + Period(days=dayoff)
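+        # e.g. (assumed values): _hh_offset(Time(0), Time(18), 1) -> Time(30, 0),
+        # since 0 - 18 wraps around to +6 hours, plus the one-day offset.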
+
+    def _compute_prepare_terms(self):
+        terms_map = collections.defaultdict(set)
+        for _, cv in self._cpl_data.items():
+            for h, infos in cv.items():
+                key = self._cpl_key(
+                    infos.base,
+                    infos.cutoff,
+                    infos.vapp,
+                    infos.vconf,
+                    infos.xpid,
+                    infos.model,
+                )
+                targetoffset = self._hh_offset(h, infos.base, infos.dayoff)
+                terms_map[key].update([s + targetoffset for s in infos.steps])
+        terms_map = {k: sorted(terms) for k, terms in terms_map.items()}
+        return terms_map
+
+    def _compute_refill_terms(
+        self, refill_cutoff, compute_on_refill, isolated_refill
+    ):
+        finaldates = collections.defaultdict(
+            functools.partial(
+                collections.defaultdict,
+                functools.partial(collections.defaultdict, set),
+            )
+        )
+        if refill_cutoff == "all":
+            possiblehours = sorted(
+                functools.reduce(
+                    lambda x, y: x | y,
+                    [set(l) for l in self.target_hhs.values()],
+                )
+            )
+        else:
+            possiblehours = self.target_hhs[refill_cutoff]
+
+        # Look 24hr ahead
+        for c, cv in self._cpl_data.items():
+            for h, infos in cv.items():
+                key = self._rtask_key(
+                    infos.cutoff,
+                    infos.vapp,
+                    infos.vconf,
+                    infos.xpid,
+                    infos.model,
+                )
+                offset = self._hh_offset(h, infos.base, infos.dayoff)
+                for possibleh in possiblehours:
+                    roffset = self._hh_offset(h, possibleh, 0)
+                    if (
+                        roffset > 0
+                        or (
+                            compute_on_refill
+                            and roffset == 0
+                            and (refill_cutoff == "all" or refill_cutoff == c)
+                        )
+                    ) and (
+                        roffset < offset
+                        or (isolated_refill and roffset == offset)
+                    ):
+                        finaldates[key][possibleh][offset - roffset].update(
+                            [s + offset for s in infos.steps]
+                        )
+
+        for key, vdict in finaldates.items():
+            for possibleh in vdict.keys():
+                vdict[possibleh] = {
+                    off: sorted(terms)
+                    for off, terms in vdict[possibleh].items()
+                }
+
+        return finaldates
+
+    def compatible_with(self, other):
+        if isinstance(other, self.__class__):
+            return (
+                self.target_hhs == other.target_hhs
+                and self.refill_cutoff == other.refill_cutoff
+            )
+        else:
+            return False
+
+    def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=""):
+        """
+        For a task computing coupling files (at **date** and **cutoff**,
+        for a specific **vapp** and **vconf**), lists the terms that should be
+        computed.
+        """
+        _, myhh = self._process_date(date)
+        if model is None:
+            model = vapp
+        key = self._cpl_key(myhh, cutoff, vapp, vconf, xpid, model)
+        try:
+            return self._prepare_terms_map[key]
+        except KeyError:
+            raise CouplingOffsetConfPrepareError(self._cpl_fmtkey(key))
+
+    def coupling_offset(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        time delta with the coupling model/file base date.
+        """
+        _, myhh = self._process_date(date)
+        return self._hh_offset(
+            myhh,
+            self._cpl_data[cutoff][myhh].base,
+            self._cpl_data[cutoff][myhh].dayoff,
+        )
+
+    def coupling_date(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        base date of the coupling model/file.
+        """
+        mydate, myhh = self._process_date(date)
+        return mydate - self._hh_offset(
+            myhh,
+            self._cpl_data[cutoff][myhh].base,
+            self._cpl_data[cutoff][myhh].dayoff,
+        )
+
+    def coupling_terms(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        list of terms that should be fetched from the coupling model/file.
+        """
+        _, myhh = self._process_date(date)
+        offset = self._hh_offset(
+            myhh,
+            self._cpl_data[cutoff][myhh].base,
+            self._cpl_data[cutoff][myhh].dayoff,
+        )
+        return [s + offset for s in self._cpl_data[cutoff][myhh].steps]
+
+    def _coupling_stuff(self, date, cutoff, stuff):
+        _, myhh = self._process_date(date)
+        return getattr(self._cpl_data[cutoff][myhh], stuff)
+
+    def coupling_steps(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        prescribed steps.
+        """
+        return self._coupling_stuff(date, cutoff, "steps")
+
+    def coupling_cutoff(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        cutoff of the coupling model/file.
+        """
+        return self._coupling_stuff(date, cutoff, "cutoff")
+
+    def coupling_vapp(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        vapp of the coupling model/file.
+        """
+        return self._coupling_stuff(date, cutoff, "vapp")
+
+    def coupling_vconf(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        vconf of the coupling model/file.
+        """
+        return self._coupling_stuff(date, cutoff, "vconf")
+
+    def coupling_xpid(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        experiment ID of the coupling model/file.
+        """
+        return self._coupling_stuff(date, cutoff, "xpid")
+
+    def coupling_model(self, date, cutoff):
+        """
+        For a task needing coupling (at **date** and **cutoff**), return the
+        model of the coupling model/file.
+        """
+        return self._coupling_stuff(date, cutoff, "model")
+
+    def refill_terms(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The terms that should be computed for a given refill task."""
+        refill_cutoff = (
+            self.refill_cutoff if refill_cutoff is None else refill_cutoff
+        )
+        if refill_cutoff not in self._refill_terms_map:
+            self._refill_terms_map[refill_cutoff] = self._compute_refill_terms(
+                refill_cutoff, self.compute_on_refill, self.isolated_refill
+            )
+        if model is None:
+            model = vapp
+        mydate, myhh = self._process_date(date)
+        key = self._rtask_key(cutoff, vapp, vconf, xpid, model)
+        finaldates = dict()
+        if (
+            key not in self._refill_terms_map[refill_cutoff]
+            or myhh not in self._refill_terms_map[refill_cutoff][key]
+        ):
+            raise CouplingOffsetConfRefillError(self._rtask_fmtkey(key))
+        for off, terms in self._refill_terms_map[refill_cutoff][key][myhh].items():
+            finaldates[str(mydate - off)] = terms
+        return {"date": finaldates}
+
+    def refill_dates(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The dates that should be processed in a given refill task."""
+        return list(
+            self.refill_terms(
+                date, cutoff, vapp, vconf,
+                model=model, refill_cutoff=refill_cutoff, xpid=xpid,
+            )["date"].keys()
+        )
+
+    def refill_months(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The months that should be processed in a given refill task."""
+        mindate = min(
+            self.refill_dates(
+                date, cutoff, vapp, vconf,
+                model=model, refill_cutoff=refill_cutoff, xpid=xpid,
+            )
+        )
+        minmonth = Month(mindate)
+        return [minmonth, minmonth + 1]
+
+
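+# Illustrative sketch (assumed values, using the footprints proxy factory): a
+# 00UTC production run coupled to the previous day's 18UTC production run.
+#
+#     cpltool = footprints.proxy.conftool(
+#         kind="couplingoffset",
+#         cplhhbase={"production": {0: 18}},
+#         cpldayoff={"production": {"default": 1}},
+#         cplcutoff={"default": "production"},
+#         cplvapp={"default": "arpege"},
+#         cplvconf={"default": "4dvarfr"},
+#         cplsteps={"default": "0-6-1"},
+#         verbose=False,
+#     )
+#     cpltool.coupling_date("2018010100", "production")
+#     # -> 2017-12-30 18UTC (6 hours of wrap-around plus the one-day offset)
+
+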
+class AggregatedCouplingOffsetConfTool(ConfTool):
+    """Aggregate several :class:`CouplingOffsetConfTool` objects into one."""
+
+    _footprint = dict(
+        info="Aggregate several CouplingOffsetConfTool objects into one",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "aggcouplingoffset",
+                ],
+            ),
+            nominal=dict(
+                info="A list of couplingoffset objects used in nominal cases",
+                type=FPList,
+            ),
+            alternate=dict(
+                info="A list of couplingoffset objects used in rescue modes",
+                type=FPList,
+                optional=True,
+            ),
+            use_alternates=dict(
+                info="Actually use rescue mode?",
+                optional=True,
+                default=True,
+                type=bool,
+            ),
+            verbose=dict(
+                info="When the object is created, print a summary.",
+                type=bool,
+                optional=True,
+                default=True,
+            ),
+        ),
+    )
+
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
+        self._toolslist = list(self.nominal)
+        if self.alternate and self.use_alternates:
+            self._toolslist.extend(self.alternate)
+        # At least one object is needed:
+        if not len(self._toolslist):
+            raise CouplingOffsetConfError("At least one sub-object is needed")
+        # Check consistency
+        for num, toolobj in enumerate(self._toolslist[1:], start=1):
+            if not self._toolslist[0].compatible_with(toolobj):
+                print("\n", "*" * 50)
+                print(
+                    "self._toolslist[0] =",
+                    self._toolslist[0],
+                    "\n",
+                    " target_hhs =",
+                    self._toolslist[0].target_hhs,
+                    " refill_cutoff =",
+                    self._toolslist[0].refill_cutoff,
+                )
+                print(
+                    "is not consistent with object num",
+                    num,
+                    ":",
+                    toolobj,
+                    "\n",
+                    " target_hhs =",
+                    toolobj.target_hhs,
+                    " refill_cutoff =",
+                    toolobj.refill_cutoff,
+                )
+                raise CouplingOffsetConfError("Inconsistent sub-objects")
+
+        if self.verbose:
+            print()
+            print("#### Aggregated Coupling configuration tool initialised ####")
+            print(
+                "It is made of {:d} nominal configuration tool(s)".format(
+                    len(self.nominal)
+                )
+            )
+            if self.alternate and self.use_alternates:
+                print(
+                    "+ {:d} rescue-mode configuration tool(s)".format(
+                        len(self.alternate)
+                    )
+                )
+            else:
+                print("No rescue-mode configuration tool is considered (deactivated)")
+            print()
+
+    def prepare_terms(self, date, cutoff, vapp, vconf, model=None, xpid=""):
+        """
+        For a task computing coupling files (at **date** and **cutoff**,
+        for a specific **vapp** and **vconf**), lists the terms that should be
+        computed.
+        """
+        terms = set()
+        for toolobj in self._toolslist:
+            try:
+                terms.update(
+                    toolobj.prepare_terms(
+                        date, cutoff, vapp, vconf, model=model, xpid=xpid
+                    )
+                )
+            except CouplingOffsetConfPrepareError as e:
+                lateste = e
+        if not terms:
+            raise lateste
+        else:
+            return sorted(terms)
+
+    def refill_terms(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The terms that should be computed for a given refill task."""
+        finaldates = collections.defaultdict(set)
+        for toolobj in self._toolslist:
+            try:
+                rt = toolobj.refill_terms(
+                    date, cutoff, vapp, vconf,
+                    model=model, refill_cutoff=refill_cutoff, xpid=xpid,
+                )
+                for k, v in rt["date"].items():
+                    finaldates[k].update(v)
+            except CouplingOffsetConfRefillError as e:
+                lateste = e
+        if not finaldates:
+            raise lateste
+        else:
+            for k, v in finaldates.items():
+                finaldates[k] = sorted(v)
+            return {"date": finaldates}
+
+    def refill_dates(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The dates that should be processed in a given refill task."""
+        return list(
+            self.refill_terms(
+                date, cutoff, vapp, vconf,
+                model=model, refill_cutoff=refill_cutoff, xpid=xpid,
+            )["date"].keys()
+        )
+
+    def refill_months(
+        self, date, cutoff, vapp, vconf, model=None, refill_cutoff=None, xpid=""
+    ):
+        """The months that should be processed in a given refill task."""
+        mindate = min(
+            self.refill_dates(
+                date, cutoff, vapp, vconf,
+                model=model, refill_cutoff=refill_cutoff, xpid=xpid,
+            )
+        )
+        minmonth = Month(mindate)
+        return [minmonth, minmonth + 1]
+
+
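+# Illustrative sketch (hypothetical instances): aggregating a nominal coupling
+# tool with a rescue-mode alternative. "cpl_main" and "cpl_rescue" stand for
+# two compatible CouplingOffsetConfTool objects built as sketched above.
+#
+#     agg = footprints.proxy.conftool(
+#         kind="aggcouplingoffset",
+#         nominal=[cpl_main],
+#         alternate=[cpl_rescue],
+#         verbose=False,
+#     )
+#     # prepare_terms() then returns the union of the terms needed by the
+#     # nominal and rescue-mode sub-tools:
+#     agg.prepare_terms("2018010100", "production", "arpege", "4dvarfr")
+
+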
+class TimeSerieInputFinderError(Exception):
+    """Any exception raised by :class:`TimeSerieInputFinderConfTool` objects."""
+
+    pass
+
+
+class TimeSerieInputFinderConfTool(ConfTool):
+    """
+    A conf tool that finds the appropriate begin/end date for an input resource
+    to be taken from a time series.
+
+    Let's consider a series of 3 consecutive Surfex forcing files:
+
+    * The first file starts on 2018/01/01 00UTC
+    * Each file covers a two-day period
+
+    The conf tool will look like::
+
+        >>> ct = TimeSerieInputFinderConfTool(kind="timeserie",
+        ...                                   timeserie_begin="2018010100",
+        ...                                   timeserie_step="P2D")
+
+    To find the date/term of the forcing file encompassing a 6-hour forecast
+    starting on 2018/01/04 12UTC, use::
+
+        >>> ct.begindate('2018010412', 'PT6H')
+        Date(2018, 1, 3, 0, 0)
+        >>> ct.term('2018010312', '06:00')
+        Time(48, 0)
+
+    """
+
+    _footprint = dict(
+        info="Conf tool that finds the appropriate begin/end date for an input resource.",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "timeserie",
+                ],
+            ),
+            timeserie_begin=dict(
+                info="The date when the time series starts", type=Date
+            ),
+            timeserie_step=dict(
+                info="The step between files of the time series.", type=Period
+            ),
+            upperbound_included=dict(type=bool, optional=True, default=True),
+            singlefile=dict(
+                info="The period requested by a user should be contained in a single file.",
+                type=bool,
+                optional=True,
+                default=False,
+            ),
+        ),
+    )
+
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
+        self._begincache = dict()
+        self._steplength = self.timeserie_step.length
+
+    def _begin_lookup(self, begindate):
+        """Find the appropriate time series file date just before **begindate**."""
+        if begindate not in self._begincache:
+            if begindate < self.timeserie_begin:
+                raise TimeSerieInputFinderError(
+                    "Requested begin date is too soon!"
+                )
+            dt = begindate - self.timeserie_begin
+            nsteps = int(math.floor(dt.length / self._steplength))
+            self._begincache[begindate] = (
+                self.timeserie_begin + nsteps * self.timeserie_step
+            )
+        return self._begincache[begindate]
+
+    def _begindates_expansion(self, tdate, tlength):
+        """Generate a begin date or a list of begin dates."""
+        xperiods = tlength / self._steplength
+        nfiles = int(math.ceil(xperiods))
+        if xperiods == int(xperiods) and not self.upperbound_included:
+            nfiles += 1
+        if nfiles > 1:
+            if self.singlefile:
+                raise TimeSerieInputFinderError(
+                    "Multiple files requested but singlefile=.T."
+                )
+            return [tdate + i * self.timeserie_step for i in range(0, nfiles)]
+        else:
+            return tdate
+
+    def _enddates_expansion(self, tdates):
+        """Generate an end date or a dict of end dates."""
+        if isinstance(tdates, list):
+            return dict(begindate={d: d + self.timeserie_step for d in tdates})
+        else:
+            return tdates + self.timeserie_step
+
+    @staticmethod
+    def _dates_normalise(begindate, enddate):
+        """Convert **begin/enddate** to a proper Date object."""
+        if not isinstance(begindate, Date):
+            begindate = Date(begindate)
+        if not isinstance(enddate, Date):
+            enddate = Date(enddate)
+        return begindate, enddate
+
+    @staticmethod
+    def _date_term_normalise(begindate, term):
+        """Convert **begindate** and **term** to a proper Date/Time object."""
+        if not isinstance(begindate, Date):
+            begindate = Date(begindate)
+        if not isinstance(term, Time):
+            term = Time(term)
+        return begindate, term
+
+    def begindate_i(self, begindate, enddate):
+        """Find the file dates encompassing [**begindate**, **enddate**]."""
+        begindate, enddate = self._dates_normalise(begindate, enddate)
+        tdate = self._begin_lookup(begindate)
+        tlength = (enddate - begindate).length
+        return self._begindates_expansion(tdate, tlength)
+
+    def enddate_i(self, begindate, enddate):
+        """Find the file end dates encompassing [**begindate**, **enddate**]."""
+        return self._enddates_expansion(self.begindate_i(begindate, enddate))
+
+    def term_i(self, begindate, enddate):  # @UnusedVariable
+        """Find the term of the time series files."""
+        return Time(self.timeserie_step)
+
+    def begindate(self, begindate, term):
+        """Find the file dates encompassing [**begindate**, **begindate** + **term**]."""
+        begindate, term = self._date_term_normalise(begindate, term)
+        return self._begindates_expansion(
+            self._begin_lookup(begindate), int(term) * 60
+        )
+
+    def enddate(self, begindate, term):
+        """Find the file end dates encompassing [**begindate**, **begindate** + **term**]."""
+        return self._enddates_expansion(self.begindate(begindate, term))
+
+    def term(self, begindate, term):  # @UnusedVariable
+        """Find the term of the time series files."""
+        return Time(self.timeserie_step)
+
+
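+# Illustrative sketch (values assumed, following the class docstring above):
+# when the requested period spans several files, begindate_i returns the list
+# of file begin dates instead of a single date:
+#
+#     ct.begindate_i('2018010100', '2018010500')
+#     # -> [Date(2018, 1, 1, 0, 0), Date(2018, 1, 3, 0, 0)]
+
+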
+class ArpIfsForecastTermConfTool(ConfTool):
+    """Deal with any Arpege/IFS model final term and outputs.
+
+    The conf tool will look like::
+
+        >>> ct = ArpIfsForecastTermConfTool(kind="arpifs_fcterms",
+        ...     fcterm_def=dict(production={0:102, 12:24},
+        ...                     assim={"default": 6}),
+        ...     hist_terms_def=dict(production={"default":"0-47-6,48-finalterm-12"},
+        ...                         assim={"default":"0,3,6"}),
+        ...     surf_terms_def=dict(production={"default":None, 0:"3,6"},
+        ...                         assim={"default":"3,6"}),
+        ...     diag_fp_terms_def=dict(default={"default":"0-47-3,48-finalterm-6"}),
+        ...     extra_fp_terms_def=dict(
+        ...         aero=dict(production={0:"0-48-3"}),
+        ...         foo=dict(default={"default":"2,3"})
+        ...     ),
+        ...     secondary_diag_terms_def=dict(
+        ...         labo=dict(production={0: "0-12-1"})
+        ...     ),
+        ... )
+
+    The forecast term can be retrieved:
+
+    >>> print(ct.fcterm('assim', 6))
+    6
+    >>> print(ct.fcterm('production', 0))
+    102
+    >>> print(ct.fcterm('production', 12))
+    24
+
+    If nothing is defined it crashes:
+
+    >>> print(ct.fcterm('production', 6))
+    Traceback (most recent call last):
+    ...
+    ValueError: Nothing is defined for cutoff="production"/hh="06:00" in "fcterm"
+
+    The list of requested historical terms can be retrieved. It is automatically
+    constrained by the forecast term:
+
+    >>> print(','.join([str(t) for t in ct.hist_terms('assim', 6)]))
+    0,3,6
+    >>> print(','.join([str(t) for t in ct.hist_terms('production', 0)]))
+    0,6,12,18,24,30,36,42,48,60,72,84,96
+    >>> print(','.join([str(t) for t in ct.hist_terms('production', 12)]))
+    0,6,12,18,24
+
+    The list of requested Surfex files can be retrieved:
+
+    >>> print(','.join([str(t) for t in ct.surf_terms('assim', 6)]))
+    3,6
+
+    The list of terms produced by the inline fullpos is:
+
+    >>> print(','.join([str(t) for t in ct.inline_terms('assim', 6)]))
+    0,3,6
+    >>> print(','.join([str(t) for t in ct.inline_terms('production', 0)]))
+    0,1,2,3,4,5,6,7,8,9,10,11,12,15,18,21,24,27,30,33,36,39,42,45,48,54,60,66,72,78,84,90,96,102
+    >>> print(','.join([str(t) for t in ct.inline_terms('production', 12)]))
+    0,3,6,9,12,15,18,21,24
+
+    Note: it depends on the value of **use_inline_fp**. If ``False``, an empty
+    list will be returned.
+
+    The inline Fullpos can also be switched off manually using the `no_inline`
+    property:
+
+    >>> print(','.join([str(t) for t in ct.no_inline.inline_terms('production', 0)]))
+    <BLANKLINE>
+    >>> print(','.join([str(t) for t in ct.no_inline.diag_terms('production', 0)]))
+    0,1,2,3,4,5,6,7,8,9,10,11,12,15,18,21,24,27,30,33,36,39,42,45,48,54,60,66,72,78,84,90,96,102
+
+    The list of terms when some offline fullpos job is needed (for any of the
+    domains):
+
+    >>> print(','.join([str(t) for t in ct.fpoff_terms('assim', 6)]))
+    2,3
+    >>> print(','.join([str(t) for t in ct.fpoff_terms('production', 0)]))
+    0,2,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
+    >>> print(','.join([str(t) for t in ct.fpoff_terms('production', 12)]))
+    2,3
+
+    The list of terms, in addition to the requested historical terms, needed to
+    run offline fullpos jobs:
+
+    >>> print(','.join([str(t) for t in ct.extra_hist_terms('production', 0)]))
+    2,3,9,15,21,27,33,39,45
+
+    The list of all historical terms (both requested terms and terms required
+    for offline Fullpos):
+
+    >>> print(','.join([str(t) for t in ct.all_hist_terms('production', 0)]))
+    0,2,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,60,72,84,96
+
+    The list of involved Fullpos objects for a given cutoff/hh:
+
+    >>> print(','.join([t for t in ct.fpoff_items('assim', 6)]))
+    foo
+    >>> print(','.join([t for t in ct.fpoff_items('production', 0)]))
+    aero,foo
+    >>> print(','.join([t for t in ct.fpoff_items('production', 0, discard=['aero'])]))
+    foo
+    >>> print(','.join([t for t in ct.fpoff_items('production', 0, only=['foo'])]))
+    foo
+    >>> print(','.join([t for t in ct.fpoff_items('production', 12)]))
+    foo
+
+    The list of terms associated to a given Fullpos object can be obtained:
+
+    >>> print(','.join([str(t) for t in ct.foo_terms('assim', 6)]))
+    2,3
+    >>> print(','.join([str(t) for t in ct.aero_terms('assim', 6)]))
+    <BLANKLINE>
+    >>> print(','.join([str(t) for t in ct.foo_terms('production', 0)]))
+    2,3
+    >>> print(','.join([str(t) for t in ct.aero_terms('production', 0)]))
+    0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
+    >>> print(','.join([str(t) for t in ct.foo_terms('production', 12)]))
+    2,3
+    >>> print(','.join([str(t) for t in ct.aero_terms('production', 12)]))
+    <BLANKLINE>
+
+    It can also be obtained as an FPList object (if empty, a plain empty list is
+    returned instead of an FPList object):
+
+    >>> ct.aero_terms_fplist('assim', 6)
+    []
+    >>> print(','.join([str(t) for t in ct.aero_terms_fplist('production', 0)]))
+    0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
+    >>> print(type(ct.aero_terms_fplist('production', 0)).__name__)
+    FPList
+    >>> ct.aero_terms_fplist('production', 12)
+    []
+
+    A mapping dictionary can also be obtained:
+
+    >>> for k, v in sorted(ct.fpoff_terms_map('production', 0).items()):
+    ...     print('{:s}: {:s}'.format(k, ','.join([str(vv) for vv in v])))
+    aero: 0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48
+    foo: 2,3
+
+    The list of terms associated to secondary diagnostics can be obtained
+    ("secondary diagnostics" stands for diagnostics that are based on files
+    pre-calculated by the inline/offline fullpos):
+
+    >>> print(','.join([str(t) for t in ct.labo_terms('production', 0)]))
+    0,1,2,3,4,5,6,7,8,9,10,11,12
+    >>> print(','.join([str(t) for t in ct.labo_terms('production', 12)]))
+    <BLANKLINE>
+
+    """
+
+    _footprint = dict(
+        info="Conf tool that helps set up Arpege's forecast term and outputs",
+        attr=dict(
+            kind=dict(
+                values=[
+                    "arpifs_fcterms",
+                ],
+            ),
+            fcterm_def=dict(
+                info=(
+                    "The forecast's term for each cutoff and base time "
+                    + "(e.g. ``{'assim':{0:6, 12:6}, 'production':{0:102}}``)"
+                ),
+                type=dict,
+            ),
+            fcterm_unit=dict(
+                info="The forecast's term unit (hour or timestep)",
+                values=["hour", "timestep"],
+                optional=True,
+                default="hour",
+            ),
+            hist_terms_def=dict(
+                info=(
+                    "The forecast's terms when historical files are needed "
+                    + "(for permanent storage) "
+                    + "(e.g. ``{'assim':{default: '0-finalterm-3'}, "
+                    + "'production':{0:'0-23-1,24-finalterm-6'}}``)"
+                ),
+                type=dict,
+                optional=True,
+            ),
+            surf_terms_def=dict(
+                info=(
+                    "The forecast's terms when surface files are needed "
+                    + "(for permanent storage)"
+                ),
+                type=dict,
+                optional=True,
+            ),
+            norm_terms_def=dict(
+                info="The forecast's terms when spectral norms are computed",
+                type=dict,
+                optional=True,
+            ),
+            diag_fp_terms_def=dict(
+                info="The forecast's terms when fullpos core diagnostics are computed",
+                type=dict,
+                optional=True,
+            ),
+            extra_fp_terms_def=dict(
+                info=(
+                    "The forecast's terms when extra fullpos diagnostics are computed. "
+                    + "They are always computed by some offline tasks. "
+                    + "The dictionary has an additional level (describing the 'name' of the "
+                    + "extra fullpos processing)."
+                ),
+                type=dict,
+                optional=True,
+            ),
+            secondary_diag_terms_def=dict(
+                info=(
+                    "The forecast's terms when secondary diagnostics are computed. "
+                    + "Secondary diagnostics are based on diagnostics previously created by "
+                    + "the inline/offline diag fullpos (see diag_fp_terms_def). "
+                    + "The dictionary has an additional level (describing the 'name' of the "
+                    + "secondary diags)."
+                ),
+                type=dict,
+                optional=True,
+            ),
+            use_inline_fp=dict(
+                info='Use inline Fullpos to compute "core_fp_terms"',
+                type=bool,
+                optional=True,
+                default=True,
+            ),
+        ),
+    )
+
+    _ACTUAL_T_RE = re.compile(r"(\w+)_terms$")
+    _ACTUAL_FPLIST_T_RE = re.compile(r"(\w+)_terms_fplist$")
+    _UNDEFINED = object()
+
+    def __init__(self, *kargs, **kwargs):
+        super().__init__(*kargs, **kwargs)
+        self._x_fcterm = self._check_data_keys_and_times(
+            self.fcterm_def, "fcterm_def", cast=self._cast_unique_value
+        )
+        self._x_hist_terms = self._check_data_keys_and_times(
+            self.hist_terms_def, "hist_terms_def", cast=self._cast_timerangex
+        )
+        self._x_surf_terms = self._check_data_keys_and_times(
+            self.surf_terms_def, "surf_terms_def", cast=self._cast_timerangex
+        )
+        self._x_norm_terms = self._check_data_keys_and_times(
+            self.norm_terms_def, "norm_terms_def", cast=self._cast_timerangex
+        )
+        self._x_diag_fp_terms = self._check_data_keys_and_times(
+            self.diag_fp_terms_def,
+            "diag_fp_terms_def",
+            cast=self._cast_timerangex,
+        )
+        self._x_extra_fp_terms = (
+            dict()
+            if self.extra_fp_terms_def is None
+            else self.extra_fp_terms_def
+        )
+        if not all(
+            [isinstance(v, dict) for v in self._x_extra_fp_terms.values()]
+        ):
+            raise ValueError("extra_fp_terms values need to be dictionaries")
+        self._x_extra_fp_terms = {
+            k: self._check_data_keys_and_times(
+                v,
+                "extra_fp_terms_def[{:s}]".format(k),
+                cast=self._cast_timerangex,
+            )
+            for k, v in self._x_extra_fp_terms.items()
+        }
+        self._x_secondary_diag_terms_def = (
+            dict()
+            if self.secondary_diag_terms_def is None
+            else self.secondary_diag_terms_def
+        )
+        if not all(
+            [
+                isinstance(v, dict)
+                for v in self._x_secondary_diag_terms_def.values()
+            ]
+        ):
+            raise ValueError(
+                "secondary_diag_terms values need to be dictionaries"
+            )
+        self._x_secondary_diag_terms_def = {
+            k: self._check_data_keys_and_times(
+                v,
+                "secondary_diag_terms_def[{:s}]".format(k),
+                cast=self._cast_timerangex,
+            )
+            for k, v in self._x_secondary_diag_terms_def.items()
+        }
+        self._lookup_cache = dict()
+        self._lookup_rangex_cache = dict()
+        self._no_inline_cache = None
+
+    def _clone(self, **kwargs):
+        my_args = self.footprint_as_shallow_dict()
+        my_args.update(kwargs)
+        return self.__class__(**my_args)
+
+    @property
+    def no_inline(self):
+        """Return a clone of this object with inline fullpos de-activated."""
+        if self._no_inline_cache is None:
+            self._no_inline_cache = self._clone(use_inline_fp=False)
+        return self._no_inline_cache
+
+    @staticmethod
+    def _cast_void(value):
+        return value
+
+    def _cast_unique_value(self, value):
+        if self.fcterm_unit == "hour":
+            return Time(value)
+        else:
+            return int(value)
+
+    @staticmethod
+    def _cast_timerangex(value):
+        if not (value is None or isinstance(value, str)):
+            if isinstance(value, collections.abc.Iterable):
+                value = ",".join([str(e) for e in value])
+            else:
+                value = str(value)
+        return value
+
+    @staticmethod
+    def _check_data_keys(data, dataname):
+        """Check the first level of any input dictionary."""
+        if data is None:
+            return dict(default=dict(default=None))
+        else:
+            if not set(data.keys()) <= {"assim", "production", "default"}:
+                raise ValueError(
+                    'Improper value ({!s}) for "{:s}".'.format(data, dataname)
+                )
+            return data
+
+    def _check_data_keys_and_times(self, data, dataname, cast=None):
+        """Check any input dictionary and convert values."""
+        data = self._check_data_keys(data, dataname)
+        cast = self._cast_void if cast is None else cast
+        new_data = dict()
+        for data_k, data_v in data.items():
+            if not isinstance(data_v, dict):
+                raise ValueError(
+                    'The {:s} "{:s}" entry should be a dictionary (got "{!s}")'.format(
+                        dataname, data_k, data_v
+                    )
+                )
+            try:
+                new_data[data_k] = {
+                    "default" if k == "default" else Time(k): cast(v)
+                    for k, v in data_v.items()
+                }
+            except ValueError as e:
+                raise ValueError(
+                    "Error while processing {:s}'s {:s}: ".format(
+                        dataname, data_k
+                    )
+                    + "Could not convert to Time (original message '{!s}')".format(
+                        e
+                    )
+                )
+        return new_data
+
+    def _cutoff_hh_lookup(self, what_desc, cutoff, hh, rawdata=None):
+        """Look for a particular cutoff in self._x_what_desc."""
+        if not isinstance(hh, Time):
+            hh = Time(hh)
+        if (what_desc, cutoff, hh) not in self._lookup_cache:
+            if rawdata is None:
+                rawdata = getattr(self, "_x_{:s}".format(what_desc))
+            cutoff_v = rawdata.get(
+                cutoff, rawdata.get("default", self._UNDEFINED)
+            )
+            if cutoff_v is self._UNDEFINED:
+                raise ValueError(
+                    'Nothing is defined for cutoff="{:s}" in "{:s}"'.format(
+                        cutoff, what_desc
+                    )
+                )
+            hh_v = cutoff_v.get(hh, cutoff_v.get("default", self._UNDEFINED))
+            if hh_v is self._UNDEFINED:
+                raise ValueError(
+                    'Nothing is defined for cutoff="{:s}"/hh="{!s}" in "{:s}"'.format(
+                        cutoff, hh, what_desc
+                    )
+                )
+            self._lookup_cache[(what_desc, cutoff, hh)] = hh_v
+        return self._lookup_cache[(what_desc, cutoff, hh)]
+
+    def _cutoff_hh_rangex_lookup(self, what_desc, cutoff, hh, rawdata=None):
+        """Look for a particular cutoff in self._x_what_desc and resolve the rangex."""
+        if (what_desc, cutoff, hh) not in self._lookup_rangex_cache:
+            try:
+                what = self._cutoff_hh_lookup(
+                    what_desc, cutoff, hh, rawdata=rawdata
+                )
+            except ValueError:
+                what = None
+            if what is None:
+                self._lookup_rangex_cache[(what_desc, cutoff, hh)] = list()
+            else:
+                finalterm = self._cutoff_hh_lookup("fcterm", cutoff, hh)
+                if "finalterm" in what:
+                    what = what.replace("finalterm", str(finalterm))
+                try:
+                    tir = timeintrangex(what)
+                except (TypeError, ValueError):
+                    raise ValueError(
+                        'Could not process "{:s}" using timeintrangex (from "{:s}" with cutoff={:s}/hh={!s})'.format(
+                            what, what_desc, cutoff, hh
+                        )
+                    )
+                if self.fcterm_unit == "timestep" and not all(
+                    [isinstance(i, int) for i in tir]
+                ):
+                    raise ValueError(
+                        'No hours/minutes allowed when fcterm_unit is "timestep" '
+                        + '(from "{:s}" with cutoff={:s}/hh={!s})'.format(
+                            what_desc, cutoff, hh
+                        )
+                    )
+                self._lookup_rangex_cache[(what_desc, cutoff, hh)] = sorted(
+                    [t for t in tir if t <= finalterm]
+                )
+        return self._lookup_rangex_cache[(what_desc, cutoff, hh)]
+
+    def fcterm(self, cutoff, hh):
+        """The forecast term for **cutoff** and **hh**."""
+        fcterm = self._cutoff_hh_lookup("fcterm", cutoff, hh)
+        if isinstance(fcterm, Time) and fcterm.minute == 0:
+            return fcterm.hour
+        else:
+            return fcterm
+
+    def hist_terms(self, cutoff, hh):
+        """The list of terms for requested/archived historical files."""
+        return self._cutoff_hh_rangex_lookup("hist_terms", cutoff, hh)
+
+    def surf_terms(self, cutoff, hh):
+        """The list of terms for historical surface files."""
+        return self._cutoff_hh_rangex_lookup("surf_terms", cutoff, hh)
+
+    def norm_terms(self, cutoff, hh):
+        """The list of terms for norm calculations."""
+        return self._cutoff_hh_rangex_lookup("norm_terms", cutoff, hh)
+
+    def inline_terms(self, cutoff, hh):
+        """The list of terms for inline diagnostics."""
+        if self.use_inline_fp:
+            return sorted(
+                set(self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh))
+                | self._secondary_diag_terms_set(cutoff, hh)
+            )
+        else:
+            return list()
+
+    def diag_terms(self, cutoff, hh):
+        """The list of terms for offline diagnostics."""
+        if self.use_inline_fp:
+            return list()
+        else:
+            return sorted(
+                set(self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh))
+                | self._secondary_diag_terms_set(cutoff, hh)
+            )
+
+    def diag_terms_fplist(self, cutoff, hh):
+        """The list of terms for offline diagnostics (as an FPList)."""
+        flist = self.diag_terms(cutoff, hh)
+        return FPList(flist) if flist else []
+
+    def _extra_fp_terms_item_fplist(self, item, cutoff, hh):
+        flist = self._cutoff_hh_rangex_lookup(
+            "extra_fp_terms[{:s}]".format(item),
+            cutoff,
+            hh,
+            rawdata=self._x_extra_fp_terms[item],
+        )
+        return FPList(flist) if flist else []
+
+    def _secondary_diag_terms_item_fplist(self, item, cutoff, hh):
+        flist = self._cutoff_hh_rangex_lookup(
+            "secondary_diag_terms[{:s}]".format(item),
+            cutoff,
+            hh,
+            rawdata=self._x_secondary_diag_terms_def[item],
+        )
+        return FPList(flist) if flist else []
+
+    @secure_getattr
+    def __getattr__(self, item):
+        actual_m = self._ACTUAL_T_RE.match(item)
+        actual_fplist_m = self._ACTUAL_FPLIST_T_RE.match(item)
+        if actual_m and actual_m.group(1) in self._x_extra_fp_terms.keys():
+            return functools.partial(
+                self._cutoff_hh_rangex_lookup,
+                "extra_fp_terms[{:s}]".format(actual_m.group(1)),
+                rawdata=self._x_extra_fp_terms[actual_m.group(1)],
+            )
+        elif (
+            actual_fplist_m
+            and actual_fplist_m.group(1) in self._x_extra_fp_terms.keys()
+        ):
+            return functools.partial(
+                self._extra_fp_terms_item_fplist, actual_fplist_m.group(1)
+            )
+        elif (
+            actual_m
+            and actual_m.group(1) in self._x_secondary_diag_terms_def.keys()
+        ):
+            return functools.partial(
+                self._cutoff_hh_rangex_lookup,
+                "secondary_diag_terms[{:s}]".format(actual_m.group(1)),
+                rawdata=self._x_secondary_diag_terms_def[actual_m.group(1)],
+            )
+        elif (
+            actual_fplist_m
+            and actual_fplist_m.group(1)
+            in self._x_secondary_diag_terms_def.keys()
+        ):
+            return functools.partial(
+                self._secondary_diag_terms_item_fplist,
+                actual_fplist_m.group(1),
+            )
+        else:
+            raise AttributeError('Attribute "{:s}" was not found'.format(item))
+
+    def _fpoff_terms_set(self, cutoff, hh):
+        fpoff_terms = set()
+        for k, v in self._x_extra_fp_terms.items():
+            fpoff_terms.update(
+                self._cutoff_hh_rangex_lookup(
+                    "extra_fp_terms[{:s}]".format(k), cutoff, hh, rawdata=v
+                )
+            )
+        if not self.use_inline_fp:
+            fpoff_terms.update(
+                self._cutoff_hh_rangex_lookup("diag_fp_terms", cutoff, hh)
+            )
+            fpoff_terms.update(self._secondary_diag_terms_set(cutoff, hh))
+        return fpoff_terms
+
+    def _secondary_diag_terms_set(self, cutoff, hh):
+        sec_terms = set()
+        for k, v in self._x_secondary_diag_terms_def.items():
+            sec_terms.update(
+                self._cutoff_hh_rangex_lookup(
+                    "secondary_diag_terms[{:s}]".format(k),
+                    cutoff,
+                    hh,
+                    rawdata=v,
+                )
+            )
+        return sec_terms
+
+    def extra_hist_terms(self, cutoff, hh):
+        """The list of historical file terms solely produced for fullpos use."""
+        fpoff_terms = self._fpoff_terms_set(cutoff, hh)
+        fpoff_terms -= set(self.hist_terms(cutoff, hh))
+        return sorted(fpoff_terms)
+
+    def all_hist_terms(self, cutoff, hh):
+        """The list of terms for all historical files."""
+        all_terms = self._fpoff_terms_set(cutoff, hh)
+        all_terms |= set(self.hist_terms(cutoff, hh))
+        return sorted(all_terms)
+
+    def fpoff_terms(self, cutoff, hh):
+        """The list of terms for offline fullpos."""
+        fpoff_terms = self._fpoff_terms_set(cutoff, hh)
+        return sorted(fpoff_terms)
+
+    def fpoff_items(self, cutoff, hh, discard=None, only=None):
+        """List of active offline post-processing domains."""
+        items = {
+            k
+            for k, v in self._x_extra_fp_terms.items()
+            if self._cutoff_hh_rangex_lookup(
+                "extra_fp_terms[{:s}]".format(k), cutoff, hh, rawdata=v
+            )
+        }
+        if not self.use_inline_fp and self._cutoff_hh_rangex_lookup(
+            "diag_fp_terms", cutoff, hh
+        ):
+            items.add("diag")
+        if discard:
+            items -= set(discard)
+        if only:
+            items &= set(only)
+        return sorted(items)
+
+    def fpoff_terms_map(self, cutoff, hh):
+        """The mapping dictionary between offline post-processing terms and domains."""
+        return {
+            k: getattr(self, "{:s}_terms".format(k))(cutoff, hh)
+            for k in self.fpoff_items(cutoff, hh)
+        }
+
+    def fpoff_terms_fpmap(self, cutoff, hh):
+        """The mapping dictionary between offline post-processing terms and domains (as FPList objects)."""
+        return {
+            k: getattr(self, "{:s}_terms_fplist".format(k))(cutoff, hh)
+            for k in self.fpoff_items(cutoff, hh)
+        }
+
+
+class TimeSlotsConfTool(AbstractObjectProxyConfTool):
+    """Gives easy access to a :class:`TimeSlots` object.
+
+    The conf tool will look like::
+
+        >>> ct = TimeSlotsConfTool(kind="objproxy",
+        ...                        timeslots_def="7/-PT3H/PT6H")
+        >>> print(ct.start)
+        -PT10800S
+
+    """
+
+    _footprint = dict(
+        info="Gives easy access to a TimeSlots object.",
+        attr=dict(
+            timeslots_def=dict(
+                info="The timeslots specification",
+            ),
+        ),
+    )
+
+    def _create_proxied_obj(self):
+        return TimeSlots(self.timeslots_def)
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()