vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registries.
- vortex/__init__.py +59 -45
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +940 -614
- vortex/algo/mpitools.py +802 -497
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +428 -225
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +544 -413
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +420 -271
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +210 -122
- vortex/nwp/algo/monitoring.py +132 -76
- vortex/nwp/algo/mpitools.py +321 -191
- vortex/nwp/algo/odbtools.py +617 -353
- vortex/nwp/algo/oopsroot.py +449 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +125 -59
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +311 -149
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +632 -298
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +402 -333
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1211 -631
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +377 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
- vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
vortex/nwp/algo/clim.py
CHANGED
@@ -26,12 +26,12 @@ class BuildPGD(BlindRun, DrHookDecoMixin, EcGribDecoMixin):
    """Preparation of physiographic fields for Surfex."""

    _footprint = dict(
        info="Physiographic fields for Surfex.",
        attr=dict(
            kind=dict(
                values=["buildpgd"],
            ),
        ),
    )


@@ -39,12 +39,12 @@ class BuildPGD_MPI(Parallel, DrHookDecoMixin, EcGribDecoMixin):
    """Preparation of physiographic fields for Surfex."""

    _footprint = dict(
        info="Physiographic fields for Surfex.",
        attr=dict(
            kind=dict(
                values=["buildpgd"],
            ),
        ),
    )


@@ -52,64 +52,74 @@ class C923(IFSParallel):
    """Preparation of climatologic fields."""

    _footprint = dict(
        info="Climatologic fields for Arpege/Arome.",
        attr=dict(
            kind=dict(
                values=["c923"],
            ),
            step=dict(
                info="""Step of conf 923 (NAMCLI::N923).
                Defines the kind of fields and database processed.""",
                type=int,
                values=footprints.util.rangex(1, 10),
            ),
            orog_in_pgd=dict(
                info="""Whether orography may be read in a PGD file.
                (NAMCLA::LIPGD=.TRUE.)""",
                type=bool,
                optional=True,
                default=False,
            ),
            input_orog_name=dict(
                info="Filename for input orography file (case LNORO=.T.).",
                optional=True,
                default="Neworog",
            ),
            xpname=dict(
                default="CLIM",
            ),
        ),
    )

    def prepare(self, rh, opts):
        super().prepare(rh, opts)
        # check PGD if needed
        if self.orog_in_pgd:
            pgd = self.context.sequence.effective_inputs(role=("Pgd",))
            if len(pgd) == 0:
                raise ValueError(
                    "As long as 'orog_in_pgd' attribute of this "
                    + "algo component is True, a 'Role: Pgd' "
                    + "resource must be provided."
                )
            pgd = pgd[0].rh
            if pgd.resource.nativefmt == "fa":
                self.algoassert(
                    pgd.container.basename == self.input_orog_name,
                    "Local name for resource Pgd must be '{}'".format(
                        self.input_orog_name
                    ),
                )
            elif pgd.resource.nativefmt == "lfi":
                raise NotImplementedError(
                    "CY43T2 onwards: lfi PGD should not be used."
                )

    def find_namelists(self, opts=None):
        namrh_list = [
            x.rh
            for x in self.context.sequence.effective_inputs(role=("Namelist",))
        ]
        self.algoassert(
            len(namrh_list) == 1,
            "One and only one namelist necessary as input.",
        )
        return namrh_list

    def prepare_namelist_delta(self, rh, namcontents, namlocal):
        super().prepare_namelist_delta(rh, namcontents, namlocal)
        namcontents["NAMMCC"]["N923"] = self.step
        namcontents.setmacro("LPGD", self.orog_in_pgd)
        return True


@@ -122,64 +132,87 @@ class FinalizePGD(AlgoComponent):
    """

    _footprint = dict(
        info="Finalisation of PGD.",
        attr=dict(
            kind=dict(
                values=["finalize_pgd"],
            ),
            pgd_out_name=dict(optional=True, default="PGD_final.fa"),
        ),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.2.14"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )

    def execute(self, rh, opts):  # @UnusedVariable
        """Convert SURFGEOPOTENTIEL from clim to SFX.ZS in pgd."""
        import numpy
        from ..util.usepygram import epygram, epy_env_prepare
        from bronx.meteo.constants import g0

        # Handle resources
        clim = self.context.sequence.effective_inputs(role=("Clim",))
        self.algoassert(
            len(clim) == 1, "One and only one Clim has to be provided"
        )
        pgdin = self.context.sequence.effective_inputs(role=("InputPGD",))
        self.algoassert(
            len(pgdin) == 1, "One and only one InputPGD has to be provided"
        )
        if self.system.path.exists(self.pgd_out_name):
            raise OSError(
                "The output pgd file {!r} already exists.".format(
                    self.pgd_out_name
                )
            )
        # copy fields
        with epy_env_prepare(self.ticket):
            epyclim = clim[0].rh.contents.data
            epypgd = pgdin[0].rh.contents.data
            epyclim.open()
            epypgd.open()
            pgdout = epygram.formats.resource(
                self.pgd_out_name,
                "w",
                fmt="FA",
                headername=epyclim.headername,
                geometry=epyclim.geometry,
                cdiden=epypgd.cdiden,
                validity=epypgd.validity,
                processtype=epypgd.processtype,
            )
            g = epyclim.readfield("SURFGEOPOTENTIEL")
            g.operation("/", g0)
            g.fid["FA"] = "SFX.ZS"
            for f in epypgd.listfields():
                fld = epypgd.readfield(f)
                if f == "SFX.ZS":
                    fld = g
                elif (
                    isinstance(fld, epygram.fields.H2DField)
                    and fld.geometry.grid.get("LAMzone") is not None
                ):
                    ext_data = numpy.ma.masked_equal(
                        numpy.zeros(g.data.shape), 0.0
                    )
                    ext_data[
                        : fld.geometry.dimensions["Y"],
                        : fld.geometry.dimensions["X"],
                    ] = fld.data[:, :]
                    fld = footprints.proxy.fields.almost_clone(
                        fld, geometry=g.geometry
                    )
                    fld.setdata(ext_data)
                pgdout.writefield(
                    fld, compression=epypgd.fieldscompression.get(f, None)
                )


class SetFilteredOrogInPGD(AlgoComponent):

@@ -188,48 +221,58 @@ class SetFilteredOrogInPGD(AlgoComponent):
    """

    _footprint = dict(
        info="Report spectrally optimized, filtered orography from Clim to PGD.",
        attr=dict(
            kind=dict(
                values=["set_filtered_orog_in_pgd"],
            ),
        ),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.3.2"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )

    def execute(self, rh, opts):  # @UnusedVariable
        """Convert SURFGEOPOTENTIEL from clim to SFX.ZS in pgd."""
        from ..util.usepygram import epygram_checker, epy_env_prepare
        from bronx.meteo.constants import g0

        # Handle resources
        clim = self.context.sequence.effective_inputs(role=("Clim",))
        self.algoassert(len(clim) == 1, "One and only one Clim to be provided")
        pgdin = self.context.sequence.effective_inputs(role=("InputPGD",))
        self.algoassert(
            len(pgdin) == 1, "One and only one InputPGD to be provided"
        )
        # copy fields
        with epy_env_prepare(self.ticket):
            epyclim = clim[0].rh.contents.data
            epypgd = pgdin[0].rh.contents.data
            epyclim.open()
            epypgd.open(openmode="a")
            # read spectrally fitted surface geopotential
            g = epyclim.readfield("SURFGEOPOTENTIEL")
            # convert to SURFEX orography
            g.operation("/", g0)
            g.fid["FA"] = "SFX.ZS"
            # write as orography
            if epygram_checker.is_available(version="1.3.6"):
                epypgd.fieldencoding(
                    g.fid["FA"], update_fieldscompression=True
                )
            else:
                # blank read, just to update fieldscompression
                epypgd.readfield(g.fid["FA"], getdata=False)
            epypgd.writefield(
                g, compression=epypgd.fieldscompression.get(g.fid["FA"], None)
            )
            epypgd.close()


@@ -240,70 +283,78 @@ class MakeLAMDomain(AlgoComponent):
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["make_domain", "make_lam_domain"],
            ),
            geometry=dict(
                info="The horizontal geometry to be generated.",
                type=HorizontalGeometry,
            ),
            mode=dict(
                info=(
                    "Kind of input for building geometry:"
                    + "'center_dims' to build domain given its centre and"
                    + "dimensions; 'lonlat_included' to build domain given"
                    + "an included lon/lat area."
                ),
                values=["center_dims", "lonlat_included"],
            ),
            geom_params=dict(
                info=(
                    "Set of parameters and/or options to be passed to"
                    + "epygram.geometries.domain_making.build.build_geometry()"
                    + "or"
                    + "epygram.geometries.domain_making.build.build_geometry_fromlonlat()"
                ),
                type=footprints.FPDict,
            ),
            truncation=dict(
                info=(
                    "Type of spectral truncation, among"
                    + "('linear', 'quadratic', 'cubic')."
                ),
                optional=True,
                default="linear",
            ),
            orography_truncation=dict(
                info=(
                    "Type of truncation of orography, among"
                    + "('linear', 'quadratic', 'cubic')."
                ),
                optional=True,
                default="quadratic",
            ),
            e_zone_in_pgd=dict(
                info="Add E-zone sizes in BuildPGD namelist.",
                optional=True,
                type=bool,
                default=False,
            ),
            i_width_in_pgd=dict(
                info="Add I-width size in BuildPGD namelist.",
                optional=True,
                type=bool,
                default=False,
            ),
            # plot
            illustration=dict(
                info="Create the domain illustration image.",
                type=bool,
                optional=True,
                default=True,
            ),
            illustration_fmt=dict(
                info="The format of the domain illustration image.",
                values=["png", "pdf"],
                optional=True,
                default="png",
            ),
            plot_params=dict(
                info="Plot geometry parameters.",
                type=footprints.FPDict,
                optional=True,
                default=footprints.FPDict({"background": True}),
            ),
        )
    )

@@ -311,61 +362,87 @@ class MakeLAMDomain(AlgoComponent):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.2.14"
        if self.e_zone_in_pgd:
            ev = "1.3.2"
        if self.i_width_in_pgd:
            ev = "1.3.3"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )
        self._check_geometry()

    def _check_geometry(self):
        if self.mode == "center_dims":
            params = [
                "center_lon",
                "center_lat",
                "Xpoints_CI",
                "Ypoints_CI",
                "resolution",
            ]
            params_extended = params + [
                "tilting",
                "Iwidth",
                "force_projection",
                "maximize_CI_in_E",
                "reference_lat",
            ]
        elif self.mode == "lonlat_included":
            params = ["lonmin", "lonmax", "latmin", "latmax", "resolution"]
            params_extended = params + [
                "Iwidth",
                "force_projection",
                "maximize_CI_in_E",
            ]
        self.algoassert(
            set(params).issubset(set(self.geom_params.keys())),
            "With mode=={!s}, geom_params must contain at least {!s}".format(
                self.mode, params
            ),
        )
        self.algoassert(
            set(self.geom_params.keys()).issubset(set(params_extended)),
            "With mode=={!s}, geom_params must contain at most {!s}".format(
                self.mode, params
            ),
        )

    def execute(self, rh, opts):  # @UnusedVariable
        from ..util.usepygram import epygram

        dm = epygram.geometries.domain_making
        if self.mode == "center_dims":
            build_func = dm.build.build_geometry
            lonlat_included = None
        elif self.mode == "lonlat_included":
            build_func = dm.build.build_geometry_fromlonlat
            lonlat_included = self.geom_params
        # build geometry
        geometry = build_func(interactive=False, **self.geom_params)
        # summary, plot, namelists:
        with open(self.geometry.tag + "_summary.txt", "w") as o:
            o.write(str(dm.output.summary(geometry)))
        if self.illustration:
            dm.output.plot_geometry(
                geometry,
                lonlat_included=lonlat_included,
                out=".".join([self.geometry.tag, self.illustration_fmt]),
                **self.plot_params,
            )
        dm_extra_params = dict()
        if self.e_zone_in_pgd:
            dm_extra_params["Ezone_in_pgd"] = self.e_zone_in_pgd
        if self.i_width_in_pgd:
            dm_extra_params["Iwidth_in_pgd"] = self.i_width_in_pgd
        namelists = dm.output.lam_geom2namelists(
            geometry,
            truncation=self.truncation,
            orography_subtruncation=self.orography_truncation,
            **dm_extra_params,
        )
        dm.output.write_namelists(namelists, prefix=self.geometry.tag)


@@ -376,95 +453,95 @@ class MakeGaussGeometry(Parallel):
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["make_gauss_grid"],
            ),
            geometry=dict(
                info="The vortex horizontal geometry to be generated.",
                type=HorizontalGeometry,
            ),
            truncation=dict(
                info="nominal truncation",
                type=int,
            ),
            grid=dict(
                info="type of grid with regards to truncation, among (linear, quadratic, cubic)",
                optional=True,
                default="linear",
            ),
            orography_grid=dict(
                info="orography subtruncation (linear, quadratic, cubic)",
                optional=True,
                default="quadratic",
            ),
            stretching=dict(
                info="stretching factor",
                type=float,
                optional=True,
                default=1.0,
            ),
            pole=dict(
                info="pole of stretching (lon, lat), angles in degrees",
                type=footprints.FPDict,
                optional=True,
                default={"lon": 0.0, "lat": 90.0},
            ),
            # RGRID commandline options
            latitudes=dict(
                info="number of Gaussian latitudes",
                type=int,
                optional=True,
                default=None,
            ),
            longitudes=dict(
                info="maximum (equatorial) number of longitudes",
                type=int,
                optional=True,
                default=None,
            ),
            orthogonality=dict(
                info="orthogonality precision, as Log10() value",
                type=int,
                optional=True,
                default=None,
            ),
            aliasing=dict(
                info="allowed aliasing, as a Log10() value",
                type=int,
                optional=True,
                default=None,
            ),
            oddity=dict(
                info="odd numbers allowed (1) or not (0)",
                type=int,
                optional=True,
                default=None,
            ),
            verbosity=dict(
                info="verbosity (0 or 1)",
                type=int,
                optional=True,
                default=None,
            ),
            # plot
            illustration=dict(
                info="Create the domain illustration image.",
                type=bool,
                optional=True,
                default=True,
            ),
            illustration_fmt=dict(
                info="The format of the domain illustration image.",
                values=["png", "pdf"],
                optional=True,
                default="png",
            ),
            plot_params=dict(
                info="Plot geometry parameters.",
                type=footprints.FPDict,
                optional=True,
                default=footprints.FPDict({"background": True}),
            ),
        )
    )

@@ -472,51 +549,68 @@ class MakeGaussGeometry(Parallel):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.2.14"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )
        self._complete_dimensions()
        self._unit = 4

    def _complete_dimensions(self):
        from ..util.usepygram import epygram_checker

        if epygram_checker.is_available(version="1.4.4"):
            from epygram.geometries.SpectralGeometry import (
                complete_gridpoint_dimensions,
            )

            longitudes, latitudes = complete_gridpoint_dimensions(
                self.longitudes,
                self.latitudes,
                self.truncation,
                self.grid,
                self.stretching,
            )
            self._attributes["longitudes"] = longitudes
            self._attributes["latitudes"] = latitudes
        else:
            self._old_internal_complete_dimensions()

    def _old_internal_complete_dimensions(self):
        from epygram.geometries.SpectralGeometry import (
            gridpoint_dims_from_truncation,
        )

        if self.latitudes is None and self.longitudes is None:
            dims = gridpoint_dims_from_truncation(
                {"max": self.truncation}, grid=self.grid
            )
            self._attributes["latitudes"] = dims["lat_number"]
            self._attributes["longitudes"] = dims["max_lon_number"]
        elif self.longitudes is None:
            self._attributes["longitudes"] = 2 * self.latitudes
        elif self.latitudes is None:
            if self.longitudes % 4 != 0:
                self._attributes["latitudes"] = self.longitudes // 2 + 1
            else:
                self._attributes["latitudes"] = self.longitudes // 2

    def spawn_command_options(self):
        """Prepare options for the resource's command line."""
        options = {
            "t": str(self.truncation),
            "g": str(self.latitudes),
            "l": str(self.longitudes),
            "f": str(self._unit),
        }
        options_dict = {
            "orthogonality": "o",
            "aliasing": "a",
            "oddity": "n",
            "verbosity": "v",
        }
        for k in options_dict.keys():
            if getattr(self, k) is not None:
                options[options_dict[k]] = str(getattr(self, k))

@@ -525,16 +619,22 @@ class MakeGaussGeometry(Parallel):
    def postfix(self, rh, opts):
        """Complete and write namelists."""
        from ..util.usepygram import epygram_checker

        if epygram_checker.is_available(version="1.4.4"):
            from epygram.geometries.domain_making.output import (
                gauss_rgrid2namelists,
            )

            gauss_rgrid2namelists(
                "fort.{!s}".format(self._unit),
                self.geometry.tag,
                self.latitudes,
                self.longitudes,
                self.truncation,
                self.stretching,
                self.orography_grid,
                self.pole,
            )
        else:
            self._old_internal_postfix(rh, opts)
        super().postfix(rh, opts)

@@ -542,82 +642,97 @@ class MakeGaussGeometry(Parallel):
    def _old_internal_postfix(self, rh, opts):
        """Complete and write namelists."""
        import math
        from epygram.geometries.SpectralGeometry import (
            truncation_from_gridpoint_dims,
        )

        # complete scalar parameters
        nam = namelist.NamelistSet()
        nam.add(namelist.NamelistBlock("NAM_PGD_GRID"))
        nam.add(namelist.NamelistBlock("NAMDIM"))
        nam.add(namelist.NamelistBlock("NAMGEM"))
        nam["NAM_PGD_GRID"]["CGRID"] = "GAUSS"
        nam["NAMDIM"]["NDGLG"] = self.latitudes
        nam["NAMDIM"]["NDLON"] = self.longitudes
        nam["NAMDIM"]["NSMAX"] = self.truncation
        nam["NAMGEM"]["NHTYP"] = 2
        nam["NAMGEM"]["NSTTYP"] = (
            2 if self.pole != {"lon": 0.0, "lat": 90.0} else 1
        )
        nam["NAMGEM"]["RMUCEN"] = math.sin(
            math.radians(float(self.pole["lat"]))
        )
        nam["NAMGEM"]["RLOCEN"] = math.radians(float(self.pole["lon"]))
        nam["NAMGEM"]["RSTRET"] = self.stretching
        # numbers of longitudes
        with open("fort.{!s}".format(self._unit)) as n:
            namrgri = namelist.namparse(n)
        nam.merge(namrgri)
        # PGD namelist
        nam_pgd = copy.deepcopy(nam)
        nam_pgd["NAMGEM"].delvar("NHTYP")
        nam_pgd["NAMGEM"].delvar("NSTTYP")
        nam_pgd["NAMDIM"].delvar("NSMAX")
        nam_pgd["NAMDIM"].delvar("NDLON")
        with open(
            ".".join([self.geometry.tag, "namel_buildpgd", "geoblocks"]), "w"
        ) as out:
            out.write(nam_pgd.dumps(sorting=namelist.SECOND_ORDER_SORTING))
        # C923 namelist
        del nam["NAM_PGD_GRID"]
        with open(
            ".".join([self.geometry.tag, "namel_c923", "geoblocks"]), "w"
        ) as out:
            out.write(nam.dumps(sorting=namelist.SECOND_ORDER_SORTING))
        # subtruncated grid for orography
        from ..util.usepygram import epygram_checker

        ev = "1.4.4"
        if epygram_checker.is_available(version=ev):
            trunc_nsmax = truncation_from_gridpoint_dims(
                {
                    "lat_number": self.latitudes,
                    "max_lon_number": self.longitudes,
                },
                grid=self.orography_grid,
                stretching_coef=self.stretching,
            )["max"]
        else:
            trunc_nsmax = truncation_from_gridpoint_dims(
                {
                    "lat_number": self.latitudes,
                    "max_lon_number": self.longitudes,
                },
                grid=self.orography_grid,
            )["max"]
        nam["NAMDIM"]["NSMAX"] = trunc_nsmax
        with open(
            ".".join([self.geometry.tag, "namel_c923_orography", "geoblocks"]),
            "w",
        ) as out:
            out.write(nam.dumps(sorting=namelist.SECOND_ORDER_SORTING))
        # C927 (fullpos) namelist
        nam = namelist.NamelistSet()
        nam.add(namelist.NamelistBlock("NAMFPD"))
        nam.add(namelist.NamelistBlock("NAMFPG"))
        nam["NAMFPD"]["NLAT"] = self.latitudes
        nam["NAMFPD"]["NLON"] = self.longitudes
        nam["NAMFPG"]["NFPMAX"] = self.truncation
        nam["NAMFPG"]["NFPHTYP"] = 2
        nam["NAMFPG"]["NFPTTYP"] = (
            2 if self.pole != {"lon": 0.0, "lat": 90.0} else 1
        )
        nam["NAMFPG"]["FPMUCEN"] = math.sin(
            math.radians(float(self.pole["lat"]))
        )
        nam["NAMFPG"]["FPLOCEN"] = math.radians(float(self.pole["lon"]))
        nam["NAMFPG"]["FPSTRET"] = self.stretching
        nrgri = [v for _, v in sorted(namrgri["NAMRGRI"].items())]
        for i in range(len(nrgri)):
            nam["NAMFPG"]["NFPRGRI({:>4})".format(i + 1)] = nrgri[i]
        with open(
            ".".join([self.geometry.tag, "namel_c927", "geoblocks"]), "w"
        ) as out:
            out.write(nam.dumps(sorting=namelist.SECOND_ORDER_SORTING))


@@ -628,68 +743,70 @@ class MakeBDAPDomain(AlgoComponent):
    """

    _footprint = dict(
        attr=dict(
            kind=dict(
                values=["make_domain", "make_bdap_domain"],
            ),
            geometry=dict(
                info="The horizontal geometry to be generated.",
                type=HorizontalGeometry,
            ),
            mode=dict(
                info=(
                    "Kind of input for building geometry:"
                    + "'boundaries' to build domain given its lon/lat boundaries"
                    + "(+ resolution); 'inside_model' to build domain given"
                    + "a model geometry to be included in (+ resolution)."
                ),
                values=["boundaries", "inside_model"],
            ),
            resolution=dict(
                info="Resolution in degrees.",
                type=float,
                optional=True,
                default=None,
            ),
            resolution_x=dict(
                info="X resolution in degrees (if different from Y).",
                type=float,
                optional=True,
                default=None,
            ),
            resolution_y=dict(
                info="Y resolution in degrees (if different from X).",
                type=float,
                optional=True,
                default=None,
            ),
            boundaries=dict(
                info="Lonlat boundaries of the domain, case mode='boundaries'.",
                type=footprints.FPDict,
                optional=True,
                default=None,
            ),
            model_clim=dict(
                info="Filename of the model clim, case mode='inside_model'.",
                optional=True,
                default=None,
            ),
            # plot
            illustration=dict(
                info="Create the domain illustration image.",
                type=bool,
                optional=True,
                default=True,
            ),
            illustration_fmt=dict(
                info="The format of the domain illustration image.",
                values=["png", "pdf"],
                optional=True,
                default="png",
            ),
            plot_params=dict(
                info="Plot geometry parameters.",
                type=footprints.FPDict,
                optional=True,
                default=footprints.FPDict({"background": True}),
            ),
        )
    )

@@ -697,37 +814,50 @@ class MakeBDAPDomain(AlgoComponent):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.2.14"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )
        if self.mode == "boundaries":
            params = ["lonmin", "lonmax", "latmin", "latmax"]
            self.algoassert(
                set(params) == set(self.boundaries.keys()),
                "With mode=={}, boundaries must contain at least {}".format(
                    self.mode, str(params)
                ),
            )
            if self.model_clim is not None:
                logger.info("attribute *model_clim* ignored")
        elif self.mode == "inside_model":
            self.algoassert(
                self.model_clim is not None,
                "attribute *model_clim* must be provided with "
                + "mode=='inside_model'.",
            )
            self.algoassert(self.sh.path.exists(self.model_clim))
            if self.boundaries is not None:
                logger.info("attribute *boundaries* ignored")
        if self.resolution is None:
            self.algoassert(
                None not in (self.resolution_x, self.resolution_y),
                "Must provide *resolution* OR *resolution_x/resolution_y*",
            )
        else:
            self.algoassert(
                self.resolution_x is None and self.resolution_y is None,
                "Must provide *resolution* OR *resolution_x/resolution_y*",
            )

    def execute(self, rh, opts):  # @UnusedVariable
        from ..util.usepygram import epygram

        dm = epygram.geometries.domain_making
        if self.mode == "inside_model":
            r = epygram.formats.resource(self.model_clim, "r")
            if r.format == "FA":
                g = r.readfield("SURFGEOPOTENTIEL")
            else:
                raise NotImplementedError()
            boundaries = dm.build.compute_lonlat_included(g.geometry)

@@ -735,28 +865,28 @@ class MakeBDAPDomain(AlgoComponent):
            boundaries = self.boundaries
        # build geometry
        if self.resolution is None:
            geometry = dm.build.build_lonlat_geometry(
                boundaries, resolution=(self.resolution_x, self.resolution_y)
            )
        else:
            geometry = dm.build.build_lonlat_geometry(
                boundaries, resolution=self.resolution
            )
        # summary, plot, namelists:
        if self.illustration:
            fig, _ = geometry.plotgeometry(
                color="red", title=self.geometry.tag, **self.plot_params
            )
            fig.savefig(
                ".".join([self.geometry.tag, self.illustration_fmt]),
                bbox_inches="tight",
            )
        namelists = dm.output.regll_geom2namelists(geometry)
        dm.output.write_namelists(namelists, prefix=self.geometry.tag)
        self.system.symlink(
            ".".join([self.geometry.tag, "namel_c923", "geoblocks"]),
            ".".join([self.geometry.tag, "namel_c923_orography", "geoblocks"]),
        )


class AddPolesToGLOB(TaylorRun):

@@ -765,44 +895,50 @@ class AddPolesToGLOB(TaylorRun):
    """

    _footprint = dict(
        info="Add poles to a GLOB* regular FA Lon/Lat file that do not contain them.",
        attr=dict(
            kind=dict(
                values=["add_poles"],
            ),
        ),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        from ..util.usepygram import epygram_checker

        ev = "1.3.4"
        self.algoassert(
            epygram_checker.is_available(version=ev),
            "Epygram >= " + ev + " is needed here",
        )

    def execute(self, rh, opts):  # @UnusedVariable
        """Add poles to a GLOB* regular FA Lon/Lat file that do not contain them."""
        self._default_pre_execute(rh, opts)
        common_i = self._default_common_instructions(rh, opts)
        clims = self.context.sequence.effective_inputs(role=("Clim",))
        self._add_instructions(
            common_i,
            dict(filename=[s.rh.container.localpath() for s in clims]),
        )
        self._default_post_execute(rh, opts)


class _AddPolesWorker(TaylorVortexWorker):
    _footprint = dict(
        attr=dict(
            kind=dict(values=["add_poles"]),
            filename=dict(info="The file to be processed."),
        )
    )

    def vortex_task(self, **_):
        from ..util.usepygram import (
            add_poles_to_reglonlat_file,
            epy_env_prepare,
        )

        with epy_env_prepare(self.ticket):
            add_poles_to_reglonlat_file(self.filename)


@@ -813,21 +949,23 @@ class Festat(Parallel):
    """

    _footprint = dict(
        info="Run festat",
        attr=dict(
            kind=dict(
                values=[
                    "run_festat",
                ],
            ),
            nb_digits=dict(
                info="Number of digits on which the name of the files should be written",
                type=int,
                default=3,
                optional=True,
            ),
            prefix=dict(
                info="Name of the files for the binary",
                optional=True,
                default="CNAME",
            ),
        ),
    )

@@ -836,34 +974,52 @@ class Festat(Parallel):

    def prepare(self, rh, opts):
        # Check the namelist
        input_namelist = self.context.sequence.effective_inputs(
            role="Namelist", kind="namelist"
        )
        if len(input_namelist) != 1:
            logger.error("One and only one namelist must be provided.")
            raise ValueError("One and only one namelist must be provided.")
        else:
            input_namelist = input_namelist[0].rh
        # Create links for the input files
        maxinsec = 10**self.nb_digits
        insec = self.context.sequence.effective_inputs(role="InputFiles")
        nbinsec = len(insec)
        if nbinsec > maxinsec:
            logger.error(
                "The number of input files %s exceed the maximum number of files available %s.",
                nbinsec,
                maxinsec,
            )
            raise ValueError(
                "The number of input files exceed the maximum number of files available."
            )
        else:
            logger.info("%s input files will be treated.", nbinsec)
        i = 0
        for sec in insec:
            i += 1
            self.system.symlink(
                sec.rh.container.actualpath(),
                "{prefix}{number}".format(
                    prefix=self.prefix, number=str(i).zfill(self.nb_digits)
                ),
            )
        # Put the number of sections and the prefix of the input files in the namelist
        namcontents = input_namelist.contents
        logger.info(
            "Setup macro CNAME=%s in %s",
            self.prefix,
            input_namelist.container.actualpath(),
        )
        namcontents.setmacro("CNAME", self.prefix)
        logger.info(
            "Setup macro NCASES=%s in %s",
            i,
            input_namelist.container.actualpath(),
        )
        namcontents.setmacro("NCASES", i)
        namcontents.rewrite(input_namelist.container)
        self._nb_input_files = i
        # Call the super class

@@ -873,19 +1029,27 @@ class Festat(Parallel):
        # Rename stabal files
        list_stabal = self.system.glob("stab*")
        for stabal in list_stabal:
            self.system.mv(
                stabal,
                "{stabal}.ncases_{ncases}".format(
                    stabal=stabal, ncases=self._nb_input_files
                ),
            )
        # Deal with diag files
        list_diag_stat = self.system.glob("co*y")
        if len(list_diag_stat) > 0:
            diastat_dir_name = "dia.stat.ncases_{ncases}".format(
                ncases=self._nb_input_files
            )
            self.system.mkdir(diastat_dir_name)
            for file in list_diag_stat:
                self.system.mv(file, diastat_dir_name + "/")
            self.system.tar(diastat_dir_name + ".tar", diastat_dir_name)
        list_diag_expl = self.system.glob("expl*y")
        if len(list_diag_expl) > 0:
            diaexpl_dir_name = "dia.expl.ncases_{ncases}".format(
                ncases=self._nb_input_files
            )
            self.system.mkdir(diaexpl_dir_name)
            for file in list_diag_expl:
                self.system.mv(file, diaexpl_dir_name + "/")

@@ -900,10 +1064,12 @@ class Fediacov(Parallel):
    """

    _footprint = dict(
        info="Run fediacov",
        attr=dict(
            kind=dict(
                values=[
                    "run_fediacov",
                ],
            ),
        ),
    )
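The reworked Festat.prepare names its linked input files from the two new attributes shown above (prefix, default "CNAME", and nb_digits, default 3). A minimal standalone sketch of that naming rule, with the defaults hard-coded for illustration (not part of the vortex package):

    # Sketch of the Festat input-file naming in 2.0.0b2, assuming the default
    # attribute values prefix="CNAME" and nb_digits=3 from the footprint above.
    prefix, nb_digits = "CNAME", 3

    def link_name(i):
        # Input section number i is zero-padded to nb_digits digits,
        # e.g. 1 -> "CNAME001", 42 -> "CNAME042".
        return "{prefix}{number}".format(
            prefix=prefix, number=str(i).zfill(nb_digits)
        )

    maxinsec = 10**nb_digits  # at most 1000 input files fit this naming scheme
    assert link_name(1) == "CNAME001"
    assert link_name(42) == "CNAME042"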