vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/nwp/data/obs.py
ADDED
@@ -0,0 +1,748 @@
"""
Resources to handle observations files in various formats.
"""

import re
from collections import namedtuple


import footprints
from bronx.datagrip.varbcheaders import VarbcHeadersFile
from bronx.fancies import loggers
from bronx.syntax.decorators import nicedeco

from vortex.data.flow import GeoFlowResource, FlowResource
from vortex.data.contents import TextContent, AlmostListContent
from vortex.syntax import stdattrs, stddeco

from ..syntax.stdattrs import gvar, GenvKey

#: Automatic export of Observations class
__all__ = ['Observations', ]

logger = loggers.getLogger(__name__)


@stddeco.namebuilding_insert('style', lambda s: 'obs')
@stddeco.namebuilding_insert('stage', lambda s: s.stage)
@stddeco.namebuilding_insert('part', lambda s: s.part)
class Observations(GeoFlowResource):
    """
    Abstract observation resource.
    """

    _abstract = True
    _footprint = dict(
        info = 'Observations file',
        attr = dict(
            kind = dict(
                values = ['observations', 'obs'],
                remap = dict(obs = 'observations'),
            ),
            part = dict(
                info = 'The name of this subset of observations.'
            ),
            nativefmt = dict(
                alias = ('format',),
            ),
            stage = dict(
                info = 'The processing stage for this subset of observations.'
            ),
        )
    )

    @property
    def realkind(self):
        return 'observations'


class ObsProcessed(Observations):
    """Pre-Processed or Processed observations."""

    _footprint = dict(
        info = 'Pre-Processed observations.',
        attr = dict(
            nativefmt = dict(
                values = ['ascii', 'netcdf', 'hdf5'],
            ),
            stage = dict(
                values = ['preprocessing', ],
            ),
        )
    )


@stddeco.namebuilding_insert('layout', lambda s: s.layout)
class ObsODB(Observations):
    """Observations in ODB format associated to a given stage."""

    _footprint = dict(
        info = 'Packed observations (ODB, CCMA, etc.)',
        attr = dict(
            nativefmt = dict(
                values = ['odb', 'odb/split', 'odb/compressed'],
                remap = {
                    'odb/split': 'odb',
                    'odb/compressed': 'odb'
                },
            ),
            layout = dict(
                info = 'The layout of the ODB database.',
                optional = True,
                default = 'ecma',
                values = [
                    'ccma', 'ecma', 'ecmascr',
                    'CCMA', 'ECMA', 'ECMASCR',
                    'rstbias', 'countryrstrhbias', 'sondetyperstrhbias',
                    'RSTBIAS', 'COUNTRYRSTRHBIAS', 'SONDETYPERSTRHBIAS',
                ],
                remap = dict(
                    CCMA = 'ccma', ECMA = 'ecma', ECMASCR = 'ecmascr',
                    RSTBIAS = 'rstbias',
                    COUNTRYRSTRHBIAS = 'countryrstrhbias',
                    SONDETYPERSTRHBIAS = 'sondetyperstrhbias',
                )
            ),
            stage = dict(
                values = [
                    'void', 'avg', 'average', 'screen', 'screening', 'split', 'build',
                    'traj', 'min', 'minim', 'complete', 'matchup',
                    'canari', 'cans'
                ],
                remap = dict(
                    avg = 'average',
                    min = 'minim',
                    cans = 'canari',
                    split = 'build',
                    screen = 'screening',
                ),
            ),
        )
    )

    def olive_basename(self):
        """OLIVE specific naming convention."""
        stage_map = dict(screening='screen', build='split', minim='min', canari='cans')
        mystage = stage_map.get(self.stage, self.stage)
        return '_'.join((self.layout, mystage, self.part)) + '.tar'

    @property
    def _archive_mapping(self):
        re_fullmix = re.compile(r'^(?:altitude|mix|full)$')
        ecma_map = dict(void='ecmascr.tar',
                        screening='odb_screen.tar',
                        matchup='odb_cpl.tar', complete='odb_cpl.tar')
        ecma_prefix = {('matchup', 'arpege'): 'BASE/',
                       ('complete', 'arpege'): 'BASE/',
                       ('matchup', 'arome'): 'BASE/',
                       ('complete', 'arome'): 'BASE/',
                       ('screening', 'arome'): './'}
        if self.stage in ecma_map and self.layout == 'ecma':
            if re_fullmix.match(self.part):
                return (ecma_map[self.stage], 'extract=all&format=unknown')
            elif self.part == 'virtual':
                return (ecma_map[self.stage],
                        'extract={:s}ECMA&format=unknown'
                        .format(ecma_prefix.get((self.stage, self.model), '')))
            else:
                return (ecma_map[self.stage],
                        'extract={:s}ECMA.{:s}&format=unknown'
                        .format(ecma_prefix.get((self.stage, self.model), ''), self.part))
        elif self.stage == 'screening' and self.layout == 'ccma':
            return ('odb_ccma_screen.tar', '')
        elif re_fullmix.match(self.part) and self.stage == 'traj':
            return ('odb_traj.tar', '')
        elif re_fullmix.match(self.part) and self.stage == 'minim' and self.model == 'aladin':
            return ('odb_cpl.tar', '')
        elif re_fullmix.match(self.part) and self.stage == 'minim':
            return ('odb_min.tar', '')
        elif self.part in ('ground', 'surf') and self.stage in ('canari', 'surfan'):
            return ('odb_canari.tar', '')
        else:
            logger.error(
                'No archive basename defined for such observations (format=%s, part=%s, stage=%s)',
                self.nativefmt, self.part, self.stage
            )
            return (None, None)

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return self._archive_mapping[0]

    def archive_urlquery(self):
        """OP ARCHIVE special query for odb case."""
        return self._archive_mapping[1]


class ObsRaw(Observations):
    """
    TODO.
    """

    _footprint = dict(
        info = 'Raw observations set',
        attr = dict(
            nativefmt = dict(
                values = ['obsoul', 'grib', 'bufr', 'ascii', 'netcdf', 'hdf5'],
                remap = dict(
                    OBSOUL = 'obsoul',
                    GRIB = 'grib',
                    BUFR = 'bufr',
                    ASCII = 'ascii',
                    NETCDF = 'netcdf',
                    HDF5 = 'hdf5'
                )
            ),
            stage = dict(
                values = ['void', 'extract', 'raw', 'std']
            ),
            olivefmt = dict(
                info = 'The mapping between Vortex and Olive formats names.',
                type = footprints.FPDict,
                optional = True,
                default = footprints.FPDict(
                    ascii = 'ascii',
                    obsoul = 'obsoul',
                    grib = 'obsgrib',
                    bufr = 'obsbufr',
                    netcdf = 'netcdf',
                    hdf5 = 'hdf5',
                ),
                doc_visibility = footprints.doc.visibility.GURU,
            )
        )
    )

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return '_'.join((
            self.olivefmt.get(self.nativefmt, 'obsfoo'),
            self.stage,
            self.part
        ))

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        if (re.match(r'^(?:bufr|obsoul|grib|netcdf|hdf5)$', self.nativefmt) and
                self.part != 'full' and self.stage == 'void'):
            return '.'.join((self.nativefmt, self.part))
        elif re.match(r'^obsoul$', self.nativefmt) and self.part == 'full' and self.stage == 'void':
            return 'obsoul'
        else:
            logger.error(
                'No archive basename defined for such observations (format=%s, part=%s, stage=%s)',
                self.nativefmt, self.part, self.stage
            )


@stddeco.namebuilding_insert('radical', lambda s: s.kind)
@stddeco.namebuilding_insert('src', lambda s: [s.part, ])
class ObsFlags(FlowResource):
    """Class for observations flags."""

    _footprint = dict(
        info = 'Observations flags',
        attr = dict(
            kind = dict(
                values = ['obsflag'],
            ),
            nativefmt=dict(
                values=['ascii', 'txt'],
                default='txt',
                remap=dict(ascii='txt'),
            ),
            part = dict(),
        ),
    )

    @property
    def realkind(self):
        return 'obsflags'

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return 'BDM_CQ'


@nicedeco
def needs_slurp(mtd):
    """Call _actual_slurp before anything happens."""

    def new_stuff(self):
        if self._do_delayed_slurp is not None:
            with self._do_delayed_slurp.iod_context():
                self._actual_slurp(self._do_delayed_slurp)
        return mtd(self)

    return new_stuff


class VarBCContent(AlmostListContent):

    # The VarBC file is too big: revert to the good old diff
    _diffable = False

    def __init__(self, **kw):
        super().__init__(**kw)
        self._parsed_data = None
        self._do_delayed_slurp = None

    @property
    @needs_slurp
    def data(self):
        """The internal data encapsulated."""
        return self._data

    @property
    @needs_slurp
    def size(self):
        """The internal data size."""
        return self._size

    @property
    def parsed_data(self):
        """The data as a :class:`VarbcFile` object."""
        if self._parsed_data is None:
            # May fail if Numpy is not installed...
            from bronx.datagrip.varbc import VarbcFile
            self._parsed_data = VarbcFile(self.data)
        return self._parsed_data

    def _actual_slurp(self, container):
        with container.preferred_decoding(byte=False):
            self._size = container.totalsize
            self._data.extend(container.readlines())
        self._do_delayed_slurp = None

    def slurp(self, container):
        """Get data from the ``container``."""
        self._do_delayed_slurp = container
        with container.preferred_decoding(byte=False):
            container.rewind()
            self._metadata = VarbcHeadersFile([container.readline() for _ in range(3)])


@stddeco.namebuilding_append('src', lambda s: [s.stage, ])
class VarBC(FlowResource):
    """
    VarBC file resource. Contains all the coefficients for the VarBC bias correction scheme.
    """

    _footprint = dict(
        info = 'Varbc file (coefficients for the bias correction of observations).',
        attr = dict(
            kind = dict(
                values = ['varbc']
            ),
            clscontents = dict(
                default = VarBCContent,
            ),
            nativefmt = dict(
                values = ['ascii', 'txt'],
                default = 'txt',
                remap = dict(ascii = 'txt'),
            ),
            stage = dict(
                optional = True,
                values = ['void', 'merge', 'screen', 'screening', 'minim', 'traj'],
                remap = dict(screen = 'screening'),
                default = 'void'
            ),
            mixmodel = dict(
                optional = True,
                default = None,
                values = stdattrs.models,
            ),
        )
    )

    @property
    def realkind(self):
        return 'varbc'

    def olive_basename(self):
        """OLIVE specific naming convention."""
        olivestage_map = {'screening': 'screen', }
        return self.realkind.upper() + "." + olivestage_map.get(self.stage, self.stage)

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        if self.stage in ('void', 'traj'):
            bname = 'VARBC.cycle'
            if self.mixmodel is not None:
                bname += '_'
                if self.mixmodel.startswith('alad'):
                    bname = bname + self.mixmodel[:4]
                else:
                    bname = bname + self.mixmodel[:3]
        else:
            bname = 'VARBC.' + self.stage
        return bname


@stddeco.namebuilding_insert('src', lambda s: s.scope)
class BlackList(FlowResource):
    """
    TODO.
    """

    _footprint = [
        gvar,
        dict(
            info = 'Blacklist file for observations',
            attr = dict(
                kind = dict(
                    values = ['blacklist'],
                ),
                gvar = dict(
                    default = 'blacklist_[scope]',
                    values = ['BLACKLIST_LOC', 'BLACKLIST_DIAP', 'BLACKLIST_LOCAL', 'BLACKLIST_GLOBAL'],
                    remap = dict(
                        BLACKLIST_LOCAL = 'BLACKLIST_LOC',
                        BLACKLIST_GLOBAL = 'BLACKLIST_DIAP',
                        blacklist_local = 'BLACKLIST_LOC',
                        blacklist_global = 'BLACKLIST_DIAP',
                    )
                ),
                clscontents = dict(
                    default = TextContent,
                ),
                nativefmt = dict(
                    values = ['txt'],
                    default = 'txt'
                ),
                scope = dict(
                    values = ['loc', 'local', 'site', 'global', 'diap', 'diapason'],
                    remap = dict(
                        loc = 'local',
                        site = 'local',
                        diap = 'global',
                        diapason = 'global',
                    )
                ),
            )
        )
    ]

    @property
    def realkind(self):
        return 'blacklist'

    def iga_pathinfo(self):
        """Standard path information for IGA inline cache."""
        return dict(
            model=self.model
        )

    def archive_map(self):
        """OP ARCHIVE specific naming convention."""
        return {
            'local': 'LISTE_LOC',
            'global': 'LISTE_NOIRE_DIAP',
        }

    def archive_basename(self):
        """OP ARCHIVE local basename."""
        mapd = self.archive_map()
        return mapd.get(self.scope, 'LISTE_NOIRE_X')


#: A namedtuple of the internal fields of an ObsRef file
ObsRefItem = namedtuple('ObsRefItem', ('data', 'fmt', 'instr', 'date', 'time'))


class ObsRefContent(TextContent):
    """Content class for refdata resources."""

    def append(self, item):
        """Append the specified ``item`` to internal data contents."""
        self.data.append(ObsRefItem(*item))

    def slurp(self, container):
        with container.preferred_decoding(byte=False):
            self._data.extend([ObsRefItem(*x.split()[:5]) for x in container if not x.startswith('#')])
            self._size = container.totalsize

    @classmethod
    def formatted_data(self, item):
        """Return a formatted string."""
        return '{:8s} {:8s} {:16s} {:s} {!s}'.format(
            item.data, item.fmt, item.instr, str(item.date), item.time
        )


@stddeco.namebuilding_append('src', lambda s: [s.part, ])
class Refdata(FlowResource):
    """
    TODO.
    """

    _footprint = dict(
        info = 'Refdata file',
        attr = dict(
            kind = dict(
                values = ['refdata']
            ),
            clscontents = dict(
                default = ObsRefContent,
            ),
            nativefmt = dict(
                values = ['ascii', 'txt'],
                default = 'txt',
                remap = dict(ascii = 'txt')
            ),
            part = dict(
                optional = True,
                default = 'all'
            ),
        )
    )

    @property
    def realkind(self):
        return 'refdata'

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return self.realkind + '.' + self.part

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return self.realkind


#: A namedtuple of the internal fields of an ObsMap file
ObsMapItem = namedtuple('ObsMapItem', ('odb', 'data', 'fmt', 'instr'))


class ObsMapContent(TextContent):
    """Content class for the *ObsMap* resources.

    The :class:`ObsMap` resource provides its *discard* and *only* attributes.
    This attribute is a :class:`footprints.stdtypes.FPSet` object thats holds
    *odb:data* pairs that will be used to filter/discard some of the lines of
    the local resource. The matching is done using regular expressions (however
    when *:data* is omitted, ':' is automatically added at the end of the regular
    expression).

    The *only* attribute is evaluated first (if *only* is not provided or equals
    *None*, all ObsMap lines are retained).

    Here are some examples:

    * ``discard=FPSet(('sev',))`` -> The *sev* ODB database will be discarded
      (but the *seviri* database is kept).
    * ``discard=FPSet(('radar', 'radar1'))`` -> Both the *radar* and *radar1*
      ODB databases will be discarded.
    * ``discard=FPSet(('radar1?', ))`` -> Same result as above.
    * ``discard=FPSet(('conv:temp', ))`` -> Discard the *temp* data file that
      would usualy be inserted in the *conv* database.
    * ``discard=FPSet(('conv:temp', ))`` -> Discard the *temp* data file that
      would usualy be inserted in the *conv* database.
    * ``discard=FPSet(('conv:t[ea]', ))`` -> Discard the data file starting
      with *te* or *ta* that would usualy be inserted in the *conv* database.
    * ``only=FPSet(('conv',))`` -> Only *conv* ODB database will be used.
    """

    def __init__(self, **kw):
        kw.setdefault('discarded', set())
        kw.setdefault('only', None)
        super().__init__(**kw)

    @property
    def discarded(self):
        """Set of *odb:data* pairs that will be discarded."""
        return self._discarded

    @property
    def only(self):
        """Set of *odb:data* pairs that will be kept (*None* means "keep everything")."""
        return self._only

    def append(self, item):
        """Append the specified ``item`` to internal data contents."""
        self._data.append(ObsMapItem(*item))

    def slurp(self, container):
        """Get data from the ``container``."""
        if self.only is not None:
            ofilters = [re.compile(d if ':' in d else d + ':')
                        for d in self.only]
        else:
            ofilters = None
        dfilters = [re.compile(d if ':' in d else d + ':') for d in self.discarded]

        def item_filter(omline):
            om = ':'.join([omline.odb, omline.data])
            return ((ofilters is None or
                     any([f.match(om) for f in ofilters])) and
                    not any([f.match(om) for f in dfilters]))

        with container.preferred_decoding(byte=False):
            container.rewind()
            self.extend(filter(item_filter,
                               [ObsMapItem(* x.split())
                                for x in [line.strip() for line in container]
                                if x and not x.startswith('#')]))
            self._size = container.totalsize

    @classmethod
    def formatted_data(self, item):
        """Return a formatted string."""
        return '{:12s} {:12s} {:12s} {:s}'.format(item.odb, item.data, item.fmt, item.instr)

    def odbset(self):
        """Return set of odb values."""
        return {x.odb for x in self}

    def dataset(self):
        """Return set of data values."""
        return {x.data for x in self}

    def fmtset(self):
        """Return set of format values."""
        return {x.fmt for x in self}

    def instrset(self):
        """Return set of instrument values."""
        return {x.instr for x in self}

    def datafmt(self, data):
        """Return format associated to specified ``data``."""
        dfmt = [x.fmt for x in self if x.data == data]
        try:
            return dfmt[0]
        except IndexError:
            logger.warning('Data "%s" not found in ObsMap contents', data)

    def getfmt(self, g, x):
        """
        Return format ``part`` of data defined in ``g`` or ``x``.
        * ``g`` stands for a guess dictionary.
        * ``x`` stands for an extra dictionary.

        These naming convention refer to the footprints resolve mechanism.
        """
        part = g.get('part', x.get('part', None))
        if part is None:
            return None
        else:
            return self.datafmt(part)


@stddeco.namebuilding_insert('style', lambda s: 'obsmap')
@stddeco.namebuilding_insert('stage', lambda s: [s.scope, s.stage])
class ObsMap(FlowResource):
    """Observation mapping.

    Simple ascii table for the description of the mapping of
    observations set to ODB bases. The native format is :
    odb / data / fmt / instr.

    The *discard* attribute is passed directly to the :class:`ObsMapContent`
    object in charge of accessing this resource: It is used to discard some
    of the lines of the *ObsMap* file (for more details see the
    :class:`ObsMapContent` class documentation)
    """

    _footprint = [
        gvar,
        dict(
            info = 'Bator mapping file',
            attr = dict(
                kind = dict(
                    values = ['obsmap'],
                ),
                clscontents = dict(
                    default = ObsMapContent,
                ),
                nativefmt = dict(
                    values = ['ascii', 'txt'],
                    default = 'txt',
                    remap = dict(ascii = 'txt')
                ),
                stage = dict(
                    optional = True,
                    default = 'void'
                ),
                scope = dict(
                    optional = True,
                    default = 'full',
                    remap = dict(surf = 'surface'),
                ),
                discard = dict(
                    info = "Discard some lines of the mapping (see the class documentation).",
                    type = footprints.FPSet,
                    optional = True,
                    default = footprints.FPSet(),
                ),
                only = dict(
                    info = "Only retain some lines of the mapping (see the class documentation).",
                    type = footprints.FPSet,
                    optional = True,
                )
            )
        )
    ]

    @property
    def realkind(self):
        return 'obsmap'

    def contents_args(self):
        """Returns default arguments value to class content constructor."""
        return dict(discarded=set(self.discard), only=self.only)

    def olive_basename(self):
        """OLIVE specific naming convention."""
        return 'OBSMAP_' + self.stage

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        if self.scope.startswith('surf'):
            return 'BATOR_MAP_' + self.scope[:4].lower()
        else:
            return 'BATOR_MAP'

    def genv_basename(self):
        """Genv key naming convention."""
        cutoff_map = {'production': 'prod'}
        if self.gvar is None:
            if self.scope == 'surface':
                gkey = 'bator_map_surf'
            else:
                gkey = 'bator_map_' + cutoff_map.get(self.cutoff, self.cutoff)
            return GenvKey(gkey)
        else:
            return self.gvar


@stddeco.namebuilding_insert('src', lambda s: s.satbias)
class Bcor(FlowResource):
    """Bias correction parameters."""

    _footprint = dict(
        info = 'Bias correction parameters',
        attr = dict(
            kind = dict(
                values = ['bcor'],
            ),
            nativefmt = dict(
                values = ['ascii', 'txt'],
                default = 'txt',
                remap = dict(ascii = 'txt')
            ),
            satbias = dict(
                values = ['mtop', 'metop', 'noaa', 'ssmi'],
                remap = dict(metop = 'mtop'),
            ),
        )
    )

    @property
    def realkind(self):
        return 'bcor'

    def archive_basename(self):
        """OP ARCHIVE specific naming convention."""
        return 'bcor_' + self.satbias + '.dat'
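A minimal standalone sketch of the discard/only matching rule documented in ObsMapContent above (not part of the package): a pattern without a ':' gets one appended before it is matched against the 'odb:data' pair, so 'sev' selects the 'sev' database but not 'seviri'. The helper function and the sample ObsMap rows below are invented for illustration; only the matching logic mirrors ObsMapContent.slurp.

# Standalone illustration of the ObsMapContent discard/only matching rule.
# keep() and the sample rows are hypothetical; the logic follows ObsMapContent.slurp.
import re
from collections import namedtuple

ObsMapItem = namedtuple('ObsMapItem', ('odb', 'data', 'fmt', 'instr'))


def keep(item, only=None, discarded=()):
    """Return True if an ObsMap line passes the only/discarded filters."""
    def as_regex(d):
        # A pattern without ':' only constrains the ODB database name.
        return re.compile(d if ':' in d else d + ':')
    ofilters = [as_regex(d) for d in only] if only is not None else None
    dfilters = [as_regex(d) for d in discarded]
    om = ':'.join([item.odb, item.data])
    return ((ofilters is None or any(f.match(om) for f in ofilters)) and
            not any(f.match(om) for f in dfilters))


rows = [ObsMapItem('sev', 'seviri', 'grib', 'seviri'),
        ObsMapItem('seviri', 'seviri', 'netcdf', 'seviri'),
        ObsMapItem('conv', 'temp', 'obsoul', 'conv'),
        ObsMapItem('conv', 'synop', 'obsoul', 'conv')]

print([r.odb for r in rows if keep(r, discarded={'sev'})])
# ['seviri', 'conv', 'conv'] -- only the 'sev' database is dropped, 'seviri' is kept
print([(r.odb, r.data) for r in rows if keep(r, only={'conv'}, discarded={'conv:temp'})])
# [('conv', 'synop')] -- 'only' is applied first, then 'conv:temp' is discarded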