vortex_nwp-2.0.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/nwp/algo/assim.py
ADDED
@@ -0,0 +1,483 @@
"""
AlgoComponents dedicated to computations related to Data Assimilation systems.
"""

from bronx.fancies import loggers
from bronx.stdtypes.date import Date

from vortex.algo.components import BlindRun, Parallel
from vortex.syntax.stdattrs import a_date
from .ifsroot import IFSParallel
from ..tools import odb, drhook

#: No automatic export
__all__ = []

logger = loggers.getLogger(__name__)
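
# Orientation note (not part of the original logic; API names assumed): these
# classes are rarely instantiated directly. Footprint resolution selects one
# from the keyword attributes passed by a driver script, roughly:
#
#     from vortex import toolbox
#     tbalgo = toolbox.algo(kind='canari', engine='parallel')  # -> Canari
#     tbalgo.run(binary_rh)  # binary_rh: handler on the executable (hypothetical)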

class MergeVarBC(Parallel):
    """Merge two VarBC files.

    The VarBC file resulting from the MergeVarBC contains all the items of the
    first VarBC file plus any new item that would be present in the second file.
    """

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['mergevarbc'],
            ),
            varbcout = dict(
                optional = True,
                default = 'VARBC.cycle_out',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Find any ODB candidate in input files."""

        sh = self.system

        sh.touch(self.varbcout)

        # Let ancestors do the real stuff
        super().prepare(rh, opts)
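
# Because 'varbcout' is optional with a default, the output name can presumably
# be overridden at resolution time, e.g. (hypothetical call):
#
#     toolbox.algo(kind='mergevarbc', engine='parallel', varbcout='VARBC.merged')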

class Anamix(IFSParallel):
    """Merge the surface and atmospheric analyses into a single file"""

    _footprint = dict(
        info='Merge surface and atmospheric analyses',
        attr=dict(
            kind=dict(
                values=['anamix'],
            ),
            conf=dict(
                default=701,
            ),
            xpname=dict(
                default='CANS',
            ),
            timestep=dict(
                default=1,
            )
        )
    )

class SstAnalysis(IFSParallel):
    """SST (Sea Surface Temperature) Analysis"""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['sstana', 'sst_ana', 'sst_analysis', 'c931'],
                remap = dict(autoremap = 'first'),
            ),
            conf = dict(
                default = 931,
            ),
            xpname = dict(
                default = 'ANAL',
            ),
            timestep = dict(
                default = '1.',
            ),
        )
    )

class SeaIceAnalysis(IFSParallel):
    """Sea Ice Analysis"""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['seaiceana', 'seaice_ana', 'seaice_analysis', 'c932'],
                remap = dict(autoremap = 'first'),
            ),
            conf = dict(
                default = 932,
            ),
            xpname = dict(
                default = 'ANAL',
            ),
            timestep = dict(
                default = '1.',
            ),
            date = dict(
                type = Date,
            )
        )
    )

    def find_namelists(self, opts=None):
        namrh_list = super().find_namelists(opts)
        if not namrh_list:
            logger.critical('No namelist was found.')
            raise ValueError('No namelist was found for seaice analysis')
        return namrh_list

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        super().prepare_namelist_delta(rh, namcontents, namlocal)
        self._set_nam_macro(namcontents, namlocal, 'IDAT', int(self.date.ymd))
        return True
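
# Worked example for prepare_namelist_delta above: with date 2024-01-15, the
# IDAT macro is set to int(self.date.ymd), i.e. the integer 20240115, before
# the local namelist is written out.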

class Canari(IFSParallel, odb.OdbComponentDecoMixin):
    """Surface analysis."""

    _footprint = dict(
        info = 'Surface assimilation based on optimal interpolation',
        attr = dict(
            kind = dict(
                values = ['canari'],
            ),
            binarysingle = dict(
                default = 'basicnwpobsort',
            ),
            conf = dict(
                default = 701,
            ),
            xpname = dict(
                default = 'CANS',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Take a look at the raw observations input files."""
        super().prepare(rh, opts)

        # Looking for input observations
        obsodb = [x for x in self.lookupodb() if x.rh.resource.part.startswith('surf')]
        if not obsodb:
            raise ValueError('No surface obsdata for canari')
        self.odb_date_and_layout_from_sections(obsodb)

        # Find the unique input ODB database
        ssurf = obsodb.pop()
        if obsodb:
            logger.error('More than one surface obsdata provided')
            logger.error('Using : %s / %s', ssurf.rh.resource.layout, ssurf.rh.resource.part)
            for sobs in obsodb:
                logger.error('Skip : %s / %s', sobs.rh.resource.layout, sobs.rh.resource.part)

        # Fix paths + generate a global IOASSIGN file
        cma_path = self.system.path.abspath(ssurf.rh.container.localpath())
        self.odb.fix_db_path(self.virtualdb, cma_path)
        self.odb.ioassign_gather(cma_path)

        # Some extra settings
        self.odb.create_poolmask(self.virtualdb, cma_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=False)
        self.env.update(
            ODB_POOLMASKING=1,
            ODB_PACKING=-1,
            BASETIME=self.date.ymdh,
        )

        # Fix the input DB intent
        self.odb_rw_or_overwrite_method(ssurf)
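
# The environment updates above act as ODB runtime switches; BASETIME comes
# from self.date.ymdh, e.g. '2024011500' for 2024-01-15 00 UTC (illustrative).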

class Screening(IFSParallel, odb.OdbComponentDecoMixin):
    """Observation screening."""

    _footprint = dict(
        info = 'Observations screening.',
        attr = dict(
            kind = dict(
                values = ['screening', 'screen', 'thinning'],
                remap = dict(autoremap = 'first'),
            ),
            binarysingle = dict(
                default = 'basicnwpobsort',
            ),
            ioassign = dict(
                optional = False,
            ),
            conf = dict(
                default = 2,
            ),
            xpname = dict(
                default = 'SCRE',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Take a look at the raw observations input files."""
        super().prepare(rh, opts)

        # Looking for input observations
        allodb = self.lookupodb()
        self.odb_date_and_layout_from_sections(allodb)

        # Perform the pre-merging stuff (this will create the ECMA virtual DB)
        virtualdb_path = self.odb_merge_if_needed(allodb)
        # Prepare the CCMA DB
        ccma_path = self.odb_create_db(layout='CCMA')

        # Fix paths + generate a global IOASSIGN file
        self.odb.fix_db_path(self.virtualdb, virtualdb_path)
        self.odb.fix_db_path('CCMA', ccma_path)
        self.odb.ioassign_gather(virtualdb_path, ccma_path)

        # Some extra settings
        self.odb.create_poolmask(self.virtualdb, virtualdb_path)
        self.odb.shuffle_setup(self.slots, mergedirect=True, ccmadirect=True)

        # Look for extra raw ODB databases
        self.odb_handle_raw_dbs()

        # Fix the input databases intent
        self.odb_rw_or_overwrite_method(*allodb)

        # Look for channels namelists and set appropriate links
        self.setchannels()
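
# In ODB terms: the screening step reads the (possibly merged) ECMA virtual
# database and feeds the compressed CCMA database created above, which the
# CCMA-based components below (Minim, Trajectory) then consume.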

class IFSODBCCMA(IFSParallel, odb.OdbComponentDecoMixin):
    """Specialised IFSODB for CCMA processing"""

    _abstract = True
    _footprint = dict(
        attr = dict(
            virtualdb = dict(
                default = 'ccma',
            ),
            binarysingle = dict(
                default = 'basicnwpobsort',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Take a look at the raw observations input files."""
        super().prepare(rh, opts)

        sh = self.system

        # Looking for input observations
        allodb = self.lookupodb()
        allccma = [x for x in allodb if x.rh.resource.layout.lower() == 'ccma']
        if allccma:
            if len(allccma) > 1:
                logger.error('Multiple CCMA databases detected: only the first one is taken into account')
        else:
            raise ValueError('Missing CCMA input data for ' + self.kind)

        # Set env and IOASSIGN
        ccma = allccma.pop()
        ccma_path = sh.path.abspath(ccma.rh.container.localpath())
        self.odb_date_and_layout_from_sections([ccma, ])
        self.odb.fix_db_path(ccma.rh.resource.layout, ccma_path)
        self.odb.ioassign_gather(ccma_path)

        # Fix the input database intent
        self.odb_rw_or_overwrite_method(ccma)

        # Look for channels namelists and set appropriate links
        self.setchannels()
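
# With _abstract = True, IFSODBCCMA itself is never selected by footprint
# resolution; only its concrete subclasses (Minim, Trajectory below) are.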

class Minim(IFSODBCCMA):
    """Observation minimisation."""

    _footprint = dict(
        info='Minimisation in the assimilation process.',
        attr=dict(
            kind=dict(
                values=['minim', 'min', 'minimisation'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(
                default=131,
            ),
            xpname=dict(
                default='MINI',
            ),
        )
    )

    def prepare(self, rh, opts):
        """Find out if preconditioning eigenvectors are here."""
        super().prepare(rh, opts)

        # Check if a preconditioning EV map is here
        evmaprh = self.context.sequence.effective_inputs(role=('PreconEVMap',
                                                               'PreconditionningEVMap'),
                                                         kind='precevmap')
        if evmaprh:
            if len(evmaprh) > 1:
                logger.warning("Several preconditioning EV maps provided. Using the first one.")
            nprec_ev = evmaprh[0].rh.contents.data['evlen']
            # If there are preconditioning EV: update the namelist
            if nprec_ev > 0:
                for namrh in [x.rh for x in self.context.sequence.effective_inputs(role='Namelist',
                                                                                   kind='namelist')]:
                    namc = namrh.contents
                    try:
                        namc['NAMVAR'].NPCVECS = nprec_ev
                        namc.rewrite(namrh.container)
                    except Exception:
                        logger.critical('Could not fix NAMVAR in %s', namrh.container.actualpath())
                        raise
                logger.info("%d preconditioning EV will be used (NPCVECS=%d).", nprec_ev, nprec_ev)
            else:
                logger.warning("A preconditioning EV map was found, "
                               "but no preconditioning EV are available.")
        else:
            logger.info("No preconditioning EV were found.")

    def postfix(self, rh, opts):
        """Find out if any special resources have been produced."""
        sh = self.system

        # Look for PREConditioning Eigen Vectors
        prec = sh.ls('MEMINI*')
        if prec:
            prec_info = dict(evlen=len(prec))
            prec_info['evnum'] = [int(x[6:]) for x in prec]
            sh.json_dump(prec_info, 'precev_map.out', indent=4)

        super().postfix(rh, opts)
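
# Worked example for postfix above (hypothetical file names): with MEMINI001
# and MEMINI002 present, precev_map.out would contain
#
#     {"evlen": 2, "evnum": [1, 2]}
#
# which is exactly the map a later Minim.prepare reads back through
# rh.contents.data['evlen'].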

class Trajectory(IFSODBCCMA):
    """Observation trajectory."""

    _footprint = dict(
        info='Trajectory in the assimilation process.',
        attr=dict(
            kind=dict(
                values=['traj', 'trajectory'],
                remap=dict(autoremap='first'),
            ),
            conf=dict(
                default=2,
            ),
            xpname=dict(
                default='TRAJ',
            ),
        )
    )

class PseudoTrajectory(BlindRun, drhook.DrHookDecoMixin):
    """Copy a few fields from the Guess file into the Analysis file"""

    _footprint = dict(
        attr = dict(
            kind = dict(
                values = ['pseudotraj', 'traj', 'trajectory'],
                remap = dict(autoremap = 'first'),
            ),
        )
    )

class SstGrb2Ascii(BlindRun):
    """Transform SST GRIB files from the BDAP into ASCII files"""

    _footprint = dict(
        info = 'Binary to change the format of sst BDAP files.',
        attr = dict(
            kind = dict(
                values = ['lect_bdap'],
            ),
            date = a_date,
            nlat = dict(
                default = 0,
            ),
            nlon = dict(
                default = 0,
            )
        )
    )

    def prepare(self, rh, opts):
        """Add namelist delta, prepare the environment and build the arguments needed."""
        super().prepare(rh, opts)
        for namrh in [x.rh for x in self.context.sequence.effective_inputs(role='Namelist',
                                                                           kind='namelist')]:
            namc = namrh.contents
            try:
                namc.newblock('NAMFILE')
                namc['NAMFILE'].NBFICH = 1
                namc['NAMFILE']['CCNFICH(1)'] = 'GRIB_SST'
                namc.rewrite(namrh.container)
            except Exception:
                logger.critical('Could not fix NAMFILE in %s', namrh.container.actualpath())
                raise

    def spawn_command_options(self):
        """Build the dictionary of arguments provided to the binary."""
        return dict(
            year=self.date.year,
            month=self.date.month,
            day=self.date.day,
            hour=self.date.hour,
            lon=self.nlon,
            lat=self.nlat,
        )
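
# After the delta applied in prepare() above, each local namelist should end
# with a Fortran block along these lines (rendering approximate):
#
#     &NAMFILE
#       NBFICH=1,
#       CCNFICH(1)='GRIB_SST',
#     /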

class IceNetCDF2Ascii(BlindRun):
    """Transform ice NetCDF files from the BDPE into ASCII files"""

    _footprint = dict(
        info = 'Binary to change the format of ice BDPE files.',
        attr = dict(
            kind = dict(
                values = ['ice_nc2ascii'],
            ),
            output_file = dict(
                optional = True,
                default = "ice_concent",
            ),
            param = dict(
                optional = True,
                default = "ice_conc",
            ),
        )
    )

    def prepare(self, rh, opts):
        super().prepare(rh, opts)
        # Look for the input files
        list_netcdf = self.context.sequence.effective_inputs(role='NetCDFfiles',
                                                             kind='observations')
        hn_file = ''
        hs_file = ''
        for sect in list_netcdf:
            part = sect.rh.resource.part
            filename = sect.rh.container.filename
            if part == "ice_hn":
                if hn_file == '':
                    hn_file = filename
                    logger.info('The input file for the North hemisphere is: %s.', hn_file)
                else:
                    logger.warning('There was already one file for the North hemisphere. '
                                   'The following one, %s, is not used.', filename)
            elif part == "ice_hs":
                if hs_file == '':
                    hs_file = filename
                    logger.info('The input file for the South hemisphere is: %s.', hs_file)
                else:
                    logger.warning('There was already one file for the South hemisphere. '
                                   'The following one, %s, is not used.', filename)
            else:
                logger.warning('The following file is not used: %s.', filename)
        self.input_file_hn = hn_file
        self.input_file_hs = hs_file

    def spawn_command_options(self):
        """Build the dictionary of arguments provided to the binary."""
        return dict(
            file_in_hn=self.input_file_hn,
            file_in_hs=self.input_file_hs,
            param=self.param,
            file_out=self.output_file,
        )
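
# Worked example (hypothetical container names): with inputs whose 'part' is
# ice_hn / ice_hs stored as ice_hn.nc / ice_hs.nc, spawn_command_options()
# returns, defaults applied:
#
#     {'file_in_hn': 'ice_hn.nc', 'file_in_hs': 'ice_hs.nc',
#      'param': 'ice_conc', 'file_out': 'ice_concent'}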