vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/data/obs.py ADDED
@@ -0,0 +1,840 @@
1
+ """
2
+ Resources to handle observations files in various formats.
3
+ """
4
+
5
+ import re
6
+ from collections import namedtuple
7
+
8
+
9
+ import footprints
10
+ from bronx.datagrip.varbcheaders import VarbcHeadersFile
11
+ from bronx.fancies import loggers
12
+ from bronx.syntax.decorators import nicedeco
13
+
14
+ from vortex.data.flow import GeoFlowResource, FlowResource
15
+ from vortex.data.contents import TextContent, AlmostListContent
16
+ from vortex.syntax import stdattrs, stddeco
17
+
18
+ from ..syntax.stdattrs import gvar, GenvKey
19
+
20
+ #: Automatic export of Observations class
21
+ __all__ = [
22
+ "Observations",
23
+ ]
24
+
25
+ logger = loggers.getLogger(__name__)
26
+
27
+
28
@stddeco.namebuilding_insert("style", lambda s: "obs")
@stddeco.namebuilding_insert("stage", lambda s: s.stage)
@stddeco.namebuilding_insert("part", lambda s: s.part)
class Observations(GeoFlowResource):
    """Abstract base class for observation resources (any format)."""

    _abstract = True
    _footprint = {
        "info": "Observations file",
        "attr": {
            "kind": {
                "values": ["observations", "obs"],
                # "obs" is accepted as a shorthand for "observations".
                "remap": {"obs": "observations"},
            },
            "part": {"info": "The name of this subset of observations."},
            "nativefmt": {"alias": ("format",)},
            "stage": {
                "info": "The processing stage for this subset of observations."
            },
        },
    }

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "observations"
57
+
58
+
59
class ObsProcessed(Observations):
    """Pre-processed or processed observations (ascii/netcdf/hdf5)."""

    _footprint = {
        "info": "Pre-Processed observations.",
        "attr": {
            "nativefmt": {"values": ["ascii", "netcdf", "hdf5"]},
            "stage": {"values": ["preprocessing"]},
        },
    }
75
+
76
+
77
@stddeco.namebuilding_insert("layout", lambda s: s.layout)
class ObsODB(Observations):
    """Observations in ODB format associated to a given stage."""

    _footprint = dict(
        info="Packed observations (ODB, CCMA, etc.)",
        attr=dict(
            nativefmt=dict(
                # Split/compressed variants are normalised to plain "odb".
                values=["odb", "odb/split", "odb/compressed"],
                remap={"odb/split": "odb", "odb/compressed": "odb"},
            ),
            layout=dict(
                info="The layout of the ODB database.",
                optional=True,
                default="ecma",
                values=[
                    "ccma",
                    "ecma",
                    "ecmascr",
                    "CCMA",
                    "ECMA",
                    "ECMASCR",
                    "rstbias",
                    "countryrstrhbias",
                    "sondetyperstrhbias",
                    "RSTBIAS",
                    "COUNTRYRSTRHBIAS",
                    "SONDETYPERSTRHBIAS",
                ],
                # Upper-case spellings are accepted but remapped to lower-case.
                remap=dict(
                    CCMA="ccma",
                    ECMA="ecma",
                    ECMASCR="ecmascr",
                    RSTBIAS="rstbias",
                    COUNTRYRSTRHBIAS="countryrstrhbias",
                    SONDETYPERSTRHBIAS="sondetyperstrhbias",
                ),
            ),
            stage=dict(
                values=[
                    "void",
                    "avg",
                    "average",
                    "screen",
                    "screening",
                    "split",
                    "build",
                    "traj",
                    "min",
                    "minim",
                    "complete",
                    "matchup",
                    "canari",
                    "cans",
                ],
                # Short stage aliases are normalised to their long form.
                remap=dict(
                    avg="average",
                    min="minim",
                    cans="canari",
                    split="build",
                    screen="screening",
                ),
            ),
        ),
    )

    def olive_basename(self):
        """OLIVE specific naming convention."""
        # OLIVE uses the short stage names, so invert the footprint remap.
        stage_map = dict(
            screening="screen", build="split", minim="min", canari="cans"
        )
        mystage = stage_map.get(self.stage, self.stage)
        return "_".join((self.layout, mystage, self.part)) + ".tar"

    @property
    def _archive_mapping(self):
        # Compute the (tar basename, url query) pair used by the OP archive
        # for the current (layout, stage, part, model) combination.
        # Returns (None, None) when no convention is defined.
        re_fullmix = re.compile(r"^(?:altitude|mix|full)$")
        ecma_map = dict(
            void="ecmascr.tar",
            screening="odb_screen.tar",
            matchup="odb_cpl.tar",
            complete="odb_cpl.tar",
        )
        # Per (stage, model) prefix used inside the archive tar file.
        ecma_prefix = {
            ("matchup", "arpege"): "BASE/",
            ("complete", "arpege"): "BASE/",
            ("matchup", "arome"): "BASE/",
            ("complete", "arome"): "BASE/",
            ("screening", "arome"): "./",
        }
        if self.stage in ecma_map and self.layout == "ecma":
            if re_fullmix.match(self.part):
                # Whole database: extract everything from the tar.
                return (ecma_map[self.stage], "extract=all&format=unknown")
            elif self.part == "virtual":
                return (
                    ecma_map[self.stage],
                    "extract={:s}ECMA&format=unknown".format(
                        ecma_prefix.get((self.stage, self.model), "")
                    ),
                )
            else:
                # A single named subset: extract ECMA.<part> only.
                return (
                    ecma_map[self.stage],
                    "extract={:s}ECMA.{:s}&format=unknown".format(
                        ecma_prefix.get((self.stage, self.model), ""),
                        self.part,
                    ),
                )
        elif self.stage == "screening" and self.layout == "ccma":
            return ("odb_ccma_screen.tar", "")
        elif re_fullmix.match(self.part) and self.stage == "traj":
            return ("odb_traj.tar", "")
        elif (
            re_fullmix.match(self.part)
            and self.stage == "minim"
            and self.model == "aladin"
        ):
            return ("odb_cpl.tar", "")
        elif re_fullmix.match(self.part) and self.stage == "minim":
            return ("odb_min.tar", "")
        elif self.part in ("ground", "surf") and self.stage in (
            "canari",
            "surfan",
        ):
            return ("odb_canari.tar", "")
        else:
            logger.error(
                "No archive basename defined for such observations (format=%s, part=%s, stage=%s)",
                self.nativefmt,
                self.part,
                self.stage,
            )
            return (None, None)

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return self._archive_mapping[0]

    def archive_urlquery(self):
        """OP ARCHIVE special query for odb case."""
        return self._archive_mapping[1]
218
+
219
+
220
class ObsRaw(Observations):
    """Raw (not yet ODB-packed) observation files.

    Supported native formats are obsoul, grib, bufr, ascii, netcdf and hdf5
    (see the footprint below).
    """

    _footprint = dict(
        info="Raw observations set",
        attr=dict(
            nativefmt=dict(
                values=["obsoul", "grib", "bufr", "ascii", "netcdf", "hdf5"],
                # Upper-case spellings are accepted but normalised.
                remap=dict(
                    OBSOUL="obsoul",
                    GRIB="grib",
                    BUFR="bufr",
                    ASCII="ascii",
                    NETCDF="netcdf",
                    HDF5="hdf5",
                ),
            ),
            stage=dict(values=["void", "extract", "raw", "std"]),
            olivefmt=dict(
                info="The mapping between Vortex and Olive formats names.",
                type=footprints.FPDict,
                optional=True,
                default=footprints.FPDict(
                    ascii="ascii",
                    obsoul="obsoul",
                    grib="obsgrib",
                    bufr="obsbufr",
                    netcdf="netcdf",
                    hdf5="hdf5",
                ),
                doc_visibility=footprints.doc.visibility.GURU,
            ),
        ),
    )

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return "_".join(
            (
                self.olivefmt.get(self.nativefmt, "obsfoo"),
                self.stage,
                self.part,
            )
        )

    def archive_basename(self):
        """OP ARCHIVE specific naming convention.

        :return: the archive basename, or ``None`` (an error is logged)
            when no convention exists for the current format/part/stage
            combination.
        """
        # Membership/equality tests replace the original anchored regexps
        # (strictly equivalent here: nativefmt values are constrained by
        # the footprint above).
        if self.stage == "void":
            if self.part != "full" and self.nativefmt in (
                "bufr",
                "obsoul",
                "grib",
                "netcdf",
                "hdf5",
            ):
                return ".".join((self.nativefmt, self.part))
            if self.part == "full" and self.nativefmt == "obsoul":
                return "obsoul"
        logger.error(
            "No archive basename defined for such observations (format=%s, part=%s, stage=%s)",
            self.nativefmt,
            self.part,
            self.stage,
        )
        return None
288
+
289
+
290
@stddeco.namebuilding_insert("radical", lambda s: s.kind)
@stddeco.namebuilding_insert(
    "src",
    lambda s: [s.part],
)
class ObsFlags(FlowResource):
    """Class for observations flags."""

    _footprint = {
        "info": "Observations flags",
        "attr": {
            "kind": {"values": ["obsflag"]},
            "nativefmt": {
                "values": ["ascii", "txt"],
                "default": "txt",
                "remap": {"ascii": "txt"},
            },
            "part": {},
        },
    }

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "obsflags"

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return "BDM_CQ"
322
+
323
+
324
@nicedeco
def needs_slurp(mtd):
    """Decorator: run any pending delayed slurp before calling *mtd*."""

    def wrapped(self):
        pending = self._do_delayed_slurp
        if pending is not None:
            # Read the whole container now, within its I/O context.
            with pending.iod_context():
                self._actual_slurp(pending)
        return mtd(self)

    return wrapped
335
+
336
+
337
class VarBCContent(AlmostListContent):
    # The VarBC file is too big: revert to the good old diff
    _diffable = False

    def __init__(self, **kw):
        super().__init__(**kw)
        # Lazily-built parsed view of the data (see :meth:`parsed_data`).
        self._parsed_data = None
        # Container whose full read is postponed until data/size is accessed.
        self._do_delayed_slurp = None

    @property
    @needs_slurp
    def data(self):
        """The internal data encapsulated."""
        return self._data

    @property
    @needs_slurp
    def size(self):
        """The internal data size."""
        return self._size

    @property
    def parsed_data(self):
        """The data as a :class:`VarbcFile` object."""
        if self._parsed_data is None:
            # May fail if Numpy is not installed...
            from bronx.datagrip.varbc import VarbcFile

            self._parsed_data = VarbcFile(self.data)
        return self._parsed_data

    def _actual_slurp(self, container):
        # Perform the (previously delayed) full read of the container.
        with container.preferred_decoding(byte=False):
            self._size = container.totalsize
            self._data.extend(container.readlines())
        self._do_delayed_slurp = None

    def slurp(self, container):
        """Get data from the ``container``.

        Only the three header lines are read eagerly (to build the
        metadata); reading the body is delayed until first access to
        :attr:`data` or :attr:`size` (see :func:`needs_slurp`).
        """
        self._do_delayed_slurp = container
        with container.preferred_decoding(byte=False):
            container.rewind()
            self._metadata = VarbcHeadersFile(
                [container.readline() for _ in range(3)]
            )
382
+
383
+
384
@stddeco.namebuilding_append(
    "src",
    lambda s: [s.stage],
)
class VarBC(FlowResource):
    """VarBC file resource.

    Contains all the coefficients for the VarBC bias correction scheme.
    """

    _footprint = {
        "info": "Varbc file (coefficients for the bias correction of observations).",
        "attr": {
            "kind": {"values": ["varbc"]},
            "clscontents": {"default": VarBCContent},
            "nativefmt": {
                "values": ["ascii", "txt"],
                "default": "txt",
                "remap": {"ascii": "txt"},
            },
            "stage": {
                "optional": True,
                "values": [
                    "void",
                    "merge",
                    "screen",
                    "screening",
                    "minim",
                    "traj",
                ],
                "remap": {"screen": "screening"},
                "default": "void",
            },
            "mixmodel": {
                "optional": True,
                "default": None,
                "values": stdattrs.models,
            },
        },
    }

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "varbc"

    def olive_basename(self):
        """OLIVE specific naming convention."""
        mapped_stage = {"screening": "screen"}.get(self.stage, self.stage)
        return self.realkind.upper() + "." + mapped_stage

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        if self.stage not in ("void", "traj"):
            return "VARBC." + self.stage
        bname = "VARBC.cycle"
        if self.mixmodel is not None:
            # "aladin"-like models keep a 4-letter prefix, others 3 letters.
            keep = 4 if self.mixmodel.startswith("alad") else 3
            bname += "_" + self.mixmodel[:keep]
        return bname
456
+
457
+
458
@stddeco.namebuilding_insert("src", lambda s: s.scope)
class BlackList(FlowResource):
    """Blacklist file for observations (local or global scope)."""

    _footprint = [
        gvar,
        {
            "info": "Blacklist file for observations",
            "attr": {
                "kind": {"values": ["blacklist"]},
                "gvar": {
                    "default": "blacklist_[scope]",
                    "values": [
                        "BLACKLIST_LOC",
                        "BLACKLIST_DIAP",
                        "BLACKLIST_LOCAL",
                        "BLACKLIST_GLOBAL",
                    ],
                    # Alternative spellings are normalised to the two
                    # canonical genv keys.
                    "remap": {
                        "BLACKLIST_LOCAL": "BLACKLIST_LOC",
                        "BLACKLIST_GLOBAL": "BLACKLIST_DIAP",
                        "blacklist_local": "BLACKLIST_LOC",
                        "blacklist_global": "BLACKLIST_DIAP",
                    },
                },
                "clscontents": {"default": TextContent},
                "nativefmt": {"values": ["txt"], "default": "txt"},
                "scope": {
                    "values": [
                        "loc",
                        "local",
                        "site",
                        "global",
                        "diap",
                        "diapason",
                    ],
                    "remap": {
                        "loc": "local",
                        "site": "local",
                        "diap": "global",
                        "diapason": "global",
                    },
                },
            },
        },
    ]

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "blacklist"

    def iga_pathinfo(self):
        """Standard path information for IGA inline cache."""
        return {"model": self.model}

    def archive_map(self):
        """OP ARCHIVE specific naming convention."""
        return {
            "local": "LISTE_LOC",
            "global": "LISTE_NOIRE_DIAP",
        }

    def archive_basename(self):
        """OP ARCHIVE local basename."""
        return self.archive_map().get(self.scope, "LISTE_NOIRE_X")
530
+
531
+
532
#: A namedtuple of the internal fields of an ObsRef file
ObsRefItem = namedtuple("ObsRefItem", "data fmt instr date time")
534
+
535
+
536
class ObsRefContent(TextContent):
    """Content class for refdata resources."""

    def append(self, item):
        """Append the specified ``item`` to internal data contents."""
        self.data.append(ObsRefItem(*item))

    def slurp(self, container):
        """Get data from the ``container``.

        Comment lines (starting with ``#``) and blank lines are skipped;
        only the first five whitespace-separated fields of each remaining
        line are kept. (Blank lines previously caused a ``TypeError``;
        they are now ignored, consistently with :class:`ObsMapContent`.)
        """
        with container.preferred_decoding(byte=False):
            self._data.extend(
                [
                    ObsRefItem(*x.split()[:5])
                    for x in container
                    if x.strip() and not x.startswith("#")
                ]
            )
            self._size = container.totalsize

    @classmethod
    def formatted_data(cls, item):
        """Return *item* as a formatted refdata line."""
        # NOTE: the first parameter was previously (misleadingly) named
        # "self" although the method is a classmethod.
        return "{:8s} {:8s} {:16s} {:s} {!s}".format(
            item.data, item.fmt, item.instr, str(item.date), item.time
        )
560
+
561
+
562
@stddeco.namebuilding_append(
    "src",
    lambda s: [s.part],
)
class Refdata(FlowResource):
    """Refdata file resource (see :class:`ObsRefContent` for the format)."""

    _footprint = {
        "info": "Refdata file",
        "attr": {
            "kind": {"values": ["refdata"]},
            "clscontents": {"default": ObsRefContent},
            "nativefmt": {
                "values": ["ascii", "txt"],
                "default": "txt",
                "remap": {"ascii": "txt"},
            },
            "part": {"optional": True, "default": "all"},
        },
    }

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "refdata"

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return "{:s}.{:s}".format(self.realkind, self.part)

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return self.realkind
598
+
599
+
600
#: A namedtuple of the internal fields of an ObsMap file
ObsMapItem = namedtuple("ObsMapItem", "odb data fmt instr")
602
+
603
+
604
class ObsMapContent(TextContent):
    """Content class for the *ObsMap* resources.

    The :class:`ObsMap` resource provides its *discard* and *only* attributes.
    These attributes are :class:`footprints.stdtypes.FPSet` objects that hold
    *odb:data* pairs used to filter/discard some of the lines of the local
    resource. The matching is done using regular expressions (however, when
    *:data* is omitted, ':' is automatically added at the end of the regular
    expression).

    The *only* attribute is evaluated first (if *only* is not provided or
    equals *None*, all ObsMap lines are retained).

    Here are some examples:

    * ``discard=FPSet(('sev',))`` -> The *sev* ODB database will be discarded
      (but the *seviri* database is kept).
    * ``discard=FPSet(('radar', 'radar1'))`` -> Both the *radar* and *radar1*
      ODB databases will be discarded.
    * ``discard=FPSet(('radar1?', ))`` -> Same result as above.
    * ``discard=FPSet(('conv:temp', ))`` -> Discard the *temp* data file that
      would usually be inserted in the *conv* database.
    * ``discard=FPSet(('conv:t[ea]', ))`` -> Discard the data file starting
      with *te* or *ta* that would usually be inserted in the *conv* database.
    * ``only=FPSet(('conv',))`` -> Only *conv* ODB database will be used.
    """

    def __init__(self, **kw):
        # Inject the filtering defaults so that callers unaware of the
        # discard/only mechanism still work.
        kw.setdefault("discarded", set())
        kw.setdefault("only", None)
        super().__init__(**kw)

    @property
    def discarded(self):
        """Set of *odb:data* pairs that will be discarded."""
        return self._discarded

    @property
    def only(self):
        """Set of *odb:data* pairs that will be kept (*None* means "keep everything")."""
        return self._only

    def append(self, item):
        """Append the specified ``item`` to internal data contents."""
        self._data.append(ObsMapItem(*item))

    def slurp(self, container):
        """Get data from the ``container``, applying the only/discard filters."""
        # When ":data" is omitted, anchor the expression on the database name.
        if self.only is not None:
            ofilters = [
                re.compile(d if ":" in d else d + ":") for d in self.only
            ]
        else:
            ofilters = None
        dfilters = [
            re.compile(d if ":" in d else d + ":") for d in self.discarded
        ]

        def item_filter(omline):
            # The *only* filters are evaluated first, then the *discard* ones.
            om = ":".join([omline.odb, omline.data])
            return (
                ofilters is None or any(f.match(om) for f in ofilters)
            ) and not any(f.match(om) for f in dfilters)

        with container.preferred_decoding(byte=False):
            container.rewind()
            self.extend(
                filter(
                    item_filter,
                    [
                        ObsMapItem(*x.split())
                        for x in [line.strip() for line in container]
                        if x and not x.startswith("#")
                    ],
                )
            )
            self._size = container.totalsize

    @classmethod
    def formatted_data(cls, item):
        """Return *item* as a formatted ObsMap line."""
        # NOTE: the first parameter was previously (misleadingly) named
        # "self" although the method is a classmethod.
        return "{:12s} {:12s} {:12s} {:s}".format(
            item.odb, item.data, item.fmt, item.instr
        )

    def odbset(self):
        """Return the set of odb values."""
        return {x.odb for x in self}

    def dataset(self):
        """Return the set of data values."""
        return {x.data for x in self}

    def fmtset(self):
        """Return the set of format values."""
        return {x.fmt for x in self}

    def instrset(self):
        """Return the set of instrument values."""
        return {x.instr for x in self}

    def datafmt(self, data):
        """Return the format associated to the specified ``data`` (or None)."""
        dfmt = [x.fmt for x in self if x.data == data]
        try:
            return dfmt[0]
        except IndexError:
            logger.warning('Data "%s" not found in ObsMap contents', data)

    def getfmt(self, g, x):
        """
        Return format ``part`` of data defined in ``g`` or ``x``.

        * ``g`` stands for a guess dictionary.
        * ``x`` stands for an extra dictionary.

        These naming conventions refer to the footprints resolve mechanism.
        """
        part = g.get("part", x.get("part", None))
        if part is None:
            return None
        else:
            return self.datafmt(part)
728
+
729
+
730
@stddeco.namebuilding_insert("style", lambda s: "obsmap")
@stddeco.namebuilding_insert("stage", lambda s: [s.scope, s.stage])
class ObsMap(FlowResource):
    """Observation mapping.

    Simple ascii table describing the mapping of observation sets to ODB
    bases. The native format is: odb / data / fmt / instr.

    The *discard* attribute is passed directly to the :class:`ObsMapContent`
    object in charge of accessing this resource: it is used to discard some
    of the lines of the *ObsMap* file (for more details see the
    :class:`ObsMapContent` class documentation).
    """

    _footprint = [
        gvar,
        {
            "info": "Bator mapping file",
            "attr": {
                "kind": {"values": ["obsmap"]},
                "clscontents": {"default": ObsMapContent},
                "nativefmt": {
                    "values": ["ascii", "txt"],
                    "default": "txt",
                    "remap": {"ascii": "txt"},
                },
                "stage": {"optional": True, "default": "void"},
                "scope": {
                    "optional": True,
                    "default": "full",
                    "remap": {"surf": "surface"},
                },
                "discard": {
                    "info": "Discard some lines of the mapping (see the class documentation).",
                    "type": footprints.FPSet,
                    "optional": True,
                    "default": footprints.FPSet(),
                },
                "only": {
                    "info": "Only retain some lines of the mapping (see the class documentation).",
                    "type": footprints.FPSet,
                    "optional": True,
                },
            },
        },
    ]

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "obsmap"

    def contents_args(self):
        """Returns default arguments value to class content constructor."""
        return {"discarded": set(self.discard), "only": self.only}

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return "OBSMAP_{:s}".format(self.stage)

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        if self.scope.startswith("surf"):
            return "BATOR_MAP_" + self.scope[:4].lower()
        return "BATOR_MAP"

    def genv_basename(self):
        """Genv key naming convention."""
        if self.gvar is not None:
            return self.gvar
        if self.scope == "surface":
            return GenvKey("bator_map_surf")
        # "production" cutoffs are abbreviated in the genv key.
        cutoff = {"production": "prod"}.get(self.cutoff, self.cutoff)
        return GenvKey("bator_map_" + cutoff)
812
+
813
+
814
@stddeco.namebuilding_insert("src", lambda s: s.satbias)
class Bcor(FlowResource):
    """Bias correction parameters."""

    _footprint = {
        "info": "Bias correction parameters",
        "attr": {
            "kind": {"values": ["bcor"]},
            "nativefmt": {
                "values": ["ascii", "txt"],
                "default": "txt",
                "remap": {"ascii": "txt"},
            },
            "satbias": {
                "values": ["mtop", "metop", "noaa", "ssmi"],
                "remap": {"metop": "mtop"},
            },
        },
    }

    @property
    def realkind(self):
        """The generic kind of this resource."""
        return "bcor"

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return "bcor_{:s}.dat".format(self.satbias)