vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +59 -45
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +940 -614
- vortex/algo/mpitools.py +802 -497
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +428 -225
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +544 -413
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +420 -271
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +210 -122
- vortex/nwp/algo/monitoring.py +132 -76
- vortex/nwp/algo/mpitools.py +321 -191
- vortex/nwp/algo/odbtools.py +617 -353
- vortex/nwp/algo/oopsroot.py +449 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +125 -59
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +311 -149
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +632 -298
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +402 -333
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1211 -631
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +377 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
- vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
vortex/nwp/algo/mpitools.py
CHANGED
@@ -25,93 +25,114 @@ class MpiAuto(mpitools.MpiTool):
     """MpiTools that uses mpiauto as a proxy to several MPI implementations"""
 
     _footprint = dict(
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto",
+                ],
             ),
+            mpiopts=dict(default=None),
+            optprefix=dict(default="--"),
+            optmap=dict(
+                default=footprints.FPDict(
+                    nn="nn",
+                    nnp="nnp",
+                    openmp="openmp",
+                    np="np",
+                    prefixcommand="prefix-command",
+                    allowodddist="mpi-allow-odd-dist",
+                )
             ),
+            timeoutrestart=dict(
+                info="The number of attempts made by mpiauto",
+                optional=True,
+                default=DelayedEnvValue("MPI_INIT_TIMEOUT_RESTART", 2),
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
+            sublauncher=dict(
+                info="How to actualy launch the MPI program",
+                values=["srun", "libspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mpiwrapstd=dict(
+                values=[
+                    False,
+                ],
             ),
-            ),
-            mpiwrapstd = dict(
-                values = [False, ],
-            ),
-            bindingmethod = dict(
-                info = 'How to bind the MPI processes',
-                values = ['arch', 'launcherspecific', 'vortex'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            bindingmethod=dict(
+                info="How to bind the MPI processes",
+                values=["arch", "launcherspecific", "vortex"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
         )
     )
 
+    _envelope_wrapper_tpl = "@envelope_wrapper_mpiauto.tpl"
+    _envelope_rank_var = "MPIAUTORANK"
     _needs_mpilib_specific_mpienv = False
 
     def _reshaped_mpiopts(self):
         """Raw list of mpi tool command line options."""
         options = super()._reshaped_mpiopts()
+        options["init-timeout-restart"] = [(self.timeoutrestart,)]
+        if self.sublauncher == "srun":
+            options["use-slurm-mpi"] = [()]
+        elif self.sublauncher == "libspecific":
+            options["no-use-slurm-mpi"] = [()]
         if self.bindingmethod:
+            for k in [
+                "{:s}use-{:s}-bind".format(p, t)
+                for p in ("", "no-")
+                for t in ("arch", "slurm", "intelmpi", "openmpi")
+            ]:
                 options.pop(k, None)
+            if self.bindingmethod == "arch":
+                options["use-arch-bind"] = [()]
+            elif (
+                self.bindingmethod == "launcherspecific"
+                and self.sublauncher == "srun"
+            ):
+                options["no-use-arch-bind"] = [()]
+                options["use-slurm-bind"] = [()]
+            elif self.bindingmethod == "launcherspecific":
+                options["no-use-arch-bind"] = [()]
+                for k in [
+                    "use-{:s}-bind".format(t)
+                    for t in ("slurm", "intelmpi", "openmpi")
+                ]:
                     options[k] = [()]
+            elif self.bindingmethod == "vortex":
+                options["no-use-arch-bind"] = [()]
         return options
 
     def _envelope_fix_envelope_bit(self, e_bit, e_desc):
         """Set the envelope fake binary options."""
+        e_bit.options = {
+            k: v for k, v in e_desc.items() if k not in ("openmp",)
+        }
+        e_bit.options["prefixcommand"] = self._envelope_wrapper_name
         if self.binaries:
             e_bit.master = self.binaries[0].master
 
     def _set_binaries_hack(self, binaries):
         """Set the list of :class:`MpiBinaryDescription` objects associated with this instance."""
+        if len(binaries) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with multiple binaries.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
+            self.bindingmethod = "vortex"
 
     def _set_binaries_envelope_hack(self, binaries):
         """Tweak the envelope after binaries were setup."""
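Note on the footprint reformatted above: `optmap` maps vortex's generic MPI option names to mpiauto's flag names, and `optprefix` ("--") is prepended when the launcher command line is assembled (see `_envelope_mkcmdline_extra` below, which emits `--openmp <n>` through exactly this table). A minimal sketch of that translation, under the assumption that options arrive as a plain name-to-value dict (`build_cmdline` is an illustrative helper, not part of the package):

OPTPREFIX = "--"
OPTMAP = {
    "nn": "nn",
    "nnp": "nnp",
    "openmp": "openmp",
    "np": "np",
    "prefixcommand": "prefix-command",
    "allowodddist": "mpi-allow-odd-dist",
}


def build_cmdline(opts):
    """Translate generic option names into mpiauto command-line flags.

    A None value stands for a bare flag (e.g. --mpi-allow-odd-dist).
    """
    cmdl = []
    for name, value in opts.items():
        cmdl.append(OPTPREFIX + OPTMAP[name])
        if value is not None:
            cmdl.append(str(value))
    return cmdl


# build_cmdline({"nn": 2, "nnp": 64, "openmp": 4})
# -> ["--nn", "2", "--nnp", "64", "--openmp", "4"]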
@@ -122,28 +143,38 @@ class MpiAuto(mpitools.MpiTool):
     def _set_envelope(self, value):
         """Set the envelope description."""
         super()._set_envelope(value)
+        if len(self._envelope) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with complex envelopes.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
+            self.bindingmethod = "vortex"
 
     envelope = property(mpitools.MpiTool._get_envelope, _set_envelope)
 
     def _hook_binary_mpiopts(self, binary, options):
         tuned = options.copy()
         # Regular MPI tasks count (the usual...)
+        if "nnp" in options and "nn" in options:
+            if options["nn"] * options["nnp"] == options["np"]:
                 # Remove harmful options
+                del tuned["np"]
+                tuned.pop("allowodddist", None)
             # that's the strange MPI distribution...
             else:
+                tuned["allowodddist"] = (
+                    None  # With this, let mpiauto determine its own partitioning
+                )
         else:
+            msg = "The provided mpiopts are insufficient to build the command line: {!s}".format(
+                options
+            )
             raise mpitools.MpiException(msg)
         return tuned
@@ -153,43 +184,50 @@ class MpiAuto(mpitools.MpiTool):
         for bin_obj in self.binaries:
             if bin_obj.options:
                 for mpirank in range(ranksidx, ranksidx + bin_obj.nprocs):
+                    prefix_c = bin_obj.options.get("prefixcommand", None)
                     if prefix_c:
+                        todostack[mpirank] = (
+                            prefix_c,
+                            [
+                                todostack[mpirank][0],
+                            ]
+                            + todostack[mpirank][1],
+                            todostack[mpirank][2],
+                        )
             ranksidx += bin_obj.nprocs
         return todostack, ranks_bsize
 
     def _envelope_mkcmdline_extra(self, cmdl):
         """If possible, add an openmp option when the arch binding method is used."""
 
+        if self.bindingmethod != "vortex":
+            openmps = {b.options.get("openmp", None) for b in self.binaries}
             if len(openmps) > 1:
                 if self.bindingmethod is not None:
+                    logger.warning(
+                        "Non-uniform OpenMP threads number... Not specifying anything."
+                    )
                 else:
                     openmp = openmps.pop() or 1
+                    cmdl.append(self.optprefix + self.optmap["openmp"])
                     cmdl.append(str(openmp))
 
     def setup_environment(self, opts):
         """Last minute fixups."""
         super().setup_environment(opts)
+        if self.bindingmethod in ("arch", "vortex"):
             # Make sure srun does nothing !
+            self._logged_env_set("SLURM_CPU_BIND", "none")
 
     def setup(self, opts=None):
         """Ensure that the prefixcommand has the execution rights."""
         for bin_obj in self.binaries:
+            prefix_c = bin_obj.options.get("prefixcommand", None)
             if prefix_c is not None:
                 if self.system.path.exists(prefix_c):
                     self.system.xperm(prefix_c, force=True)
                 else:
+                    raise OSError("The prefixcommand do not exists.")
         super().setup(opts)
 
 
@@ -200,88 +238,106 @@ class MpiAutoDDT(MpiAuto):
     """
 
     _footprint = dict(
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto-ddt",
+                ],
             ),
         )
     )
 
+    _conf_suffix = "-ddt"
 
     def _reshaped_mpiopts(self):
         options = super()._reshaped_mpiopts()
+        if "prefix-mpirun" in options:
+            raise mpitools.MpiException(
+                "It is not allowed to start DDT with another "
+                + 'prefix_mpirun command defined: "{:s}"'.format(options)
+            )
         armtool = ArmForgeTool(self.ticket)
+        options["prefix-mpirun"] = [
+            (
+                " ".join(
+                    armtool.ddt_prefix_cmd(
+                        sources=self.sources,
+                        workdir=self.system.path.dirname(
+                            self.binaries[0].master
+                        ),
+                    )
+                ),
+            )
+        ]
         return options
 
 
 # Some IFS/Arpege specific things :
 
+
 def arpifs_obsort_nprocab_binarydeco(cls):
     """Handle usual IFS/Arpege environment tweaking for OBSORT (nproca & nprocb).
 
     Note: This is a class decorator for class somehow based on MpiBinaryDescription
     """
+    orig_setup_env = getattr(cls, "setup_environment")
 
     def setup_environment(self, opts):
         orig_setup_env(self, opts)
+        self.env.NPROCA = int(self.env.NPROCA or self.nprocs)
+        self.env.NPROCB = int(
+            self.env.NPROCB or self.nprocs // self.env.NPROCA
+        )
+        logger.info(
+            "MPI Setup NPROCA=%d and NPROCB=%d",
+            self.env.NPROCA,
+            self.env.NPROCB,
+        )
 
+    if hasattr(orig_setup_env, "__doc__"):
         setup_environment.__doc__ = orig_setup_env.__doc__
 
+    setattr(cls, "setup_environment", setup_environment)
     return cls
 
 
 class _NWPIoServerMixin:
+    _NWP_IOSERV_PATTERNS = ("io_serv.*.d",)
 
+    def _nwp_ioserv_setup_namelist(
+        self, namcontents, namlocal, total_iotasks, computed_iodist_value=None
+    ):
         """Applying IO Server profile on local namelist ``namlocal`` with contents namcontents."""
+        if "NAMIO_SERV" in namcontents:
+            namio = namcontents["NAMIO_SERV"]
         else:
+            namio = namcontents.newblock("NAMIO_SERV")
 
         namio.nproc_io = total_iotasks
         if computed_iodist_value is not None:
             namio.idistio = computed_iodist_value
 
+        if "VORTEX_IOSERVER_METHOD" in self.env:
             namio.nio_serv_method = self.env.VORTEX_IOSERVER_METHOD
 
+        if "VORTEX_IOSERVER_BUFMAX" in self.env:
             namio.nio_serv_buf_maxsize = self.env.VORTEX_IOSERVER_BUFMAX
 
+        if "VORTEX_IOSERVER_MLSERVER" in self.env:
             namio.nmsg_level_server = self.env.VORTEX_IOSERVER_MLSERVER
 
+        if "VORTEX_IOSERVER_MLCLIENT" in self.env:
             namio.nmsg_level_client = self.env.VORTEX_IOSERVER_MLCLIENT
 
+        if "VORTEX_IOSERVER_PROCESS" in self.env:
             namio.nprocess_level = self.env.VORTEX_IOSERVER_PROCESS
 
+        if "VORTEX_IOSERVER_PIOMODEL" in self.env:
             namio.pioprocr_MDL = self.env.VORTEX_IOSERVER_PIOMODEL
 
+        self.system.highlight(
+            "Parallel io server namelist for {:s}".format(namlocal)
+        )
         print(namio.dumps())
 
         return True
@@ -297,38 +353,46 @@ class _NWPIoServerMixin:
         """Post-execution cleaning for io server."""
 
         # Old fashion way to make clear that some polling is needed.
+        self.system.touch("io_poll.todo")
 
         # Get a look inside io server output directories according to its own pattern
         ioserv_filelist = set()
         ioserv_prefixes = set()
+        iofile_re = re.compile(
+            r"((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$"
+        )
+        self.system.highlight("Dealing with IO directories")
         iodirs = self._nwp_ioserv_iodirs()
         if iodirs:
+            logger.info("List of IO directories: %s", ",".join(iodirs))
+            f_summary = collections.defaultdict(lambda: [" "] * len(iodirs))
             for i, iodir in enumerate(iodirs):
                 for iofile in self.system.listdir(iodir):
                     zf = iofile_re.match(iofile)
                     if zf:
+                        f_summary[zf.group(1)][i] = "+"
                         ioserv_filelist.add((zf.group(1), zf.group(2)))
                         ioserv_prefixes.add(zf.group(2))
                     else:
+                        f_summary[iofile][i] = "?"
             max_names_len = max([len(iofile) for iofile in f_summary.keys()])
+            fmt_names = "{:" + str(max_names_len) + "s}"
+            logger.info(
+                "Data location accross the various IOserver directories:\n%s",
+                "\n".join(
+                    [
+                        (fmt_names + " |{:s}|").format(iofile, "".join(where))
+                        for iofile, where in sorted(f_summary.items())
+                    ]
+                ),
+            )
         else:
+            logger.info("No IO directories were found")
 
+        if "GRIBPF" in ioserv_prefixes:
             # If GRIB are requested, do not bother with old FA PF files
+            ioserv_prefixes.discard("PF")
+            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != "PF"}
 
         # Touch the output files
         for tgfile, _ in ioserv_filelist:
@@ -336,7 +400,7 @@ class _NWPIoServerMixin:
 
         # Touch the io_poll.todo.PREFIX
         for prefix in ioserv_prefixes:
+            self.system.touch("io_poll.todo.{:s}".format(prefix))
 
 
 class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
@@ -344,7 +408,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
 
     _abstract = True
 
+    def __init__(self, *kargs, **kwargs):
         super().__init__(*kargs, **kwargs)
         self._incore_iotasks = None
         self._effective_incore_iotasks = None
@@ -359,7 +423,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iotasks.setter
     def incore_iotasks(self, value):
         """The number of tasks dedicated to the IO server."""
+        if isinstance(value, str) and value.endswith("%"):
            value = math.ceil(self.nprocs * float(value[:-1]) / 100)
         self._incore_iotasks = int(value)
         self._effective_incore_iotasks = None
@@ -373,10 +437,12 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     def incore_iotasks_fixer(self, value):
         """Tweak the number of iotasks in order to respect a given constraints."""
         if not isinstance(value, str):
+            raise ValueError("A string is expected")
+        if value.startswith("nproc_multiple_of_"):
+            self._incore_iotasks_fixer = (
+                "nproc_multiple_of",
+                [int(i) for i in value[18:].split(",")],
+            )
         else:
             raise ValueError('The "{:s}" value is incorrect'.format(value))
 
@@ -391,22 +457,36 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.incore_iotasks is not None:
             if self._effective_incore_iotasks is None:
                 if self.incore_iotasks_fixer is not None:
+                    if self.incore_iotasks_fixer[0] == "nproc_multiple_of":
                         # Allow for 5% less, or add some tasks
+                        for candidate in interleave(
+                            range(self.incore_iotasks, self.nprocs + 1),
+                            range(
+                                self.incore_iotasks - 1,
+                                int(math.ceil(0.95 * self.incore_iotasks)) - 1,
+                                -1,
+                            ),
+                        ):
+                            if any(
+                                [
+                                    (self.nprocs - candidate) % multiple == 0
+                                    for multiple in self.incore_iotasks_fixer[
+                                        1
+                                    ]
+                                ]
+                            ):
                                 self._effective_incore_iotasks = candidate
                                 break
                     else:
+                        raise RuntimeError("Unsupported fixer")
                     if self._effective_incore_iotasks != self.incore_iotasks:
+                        logger.info(
+                            "The number of IO tasks was updated form %d to %d "
+                            + 'because of the "%s" fixer',
+                            self.incore_iotasks,
+                            self._effective_incore_iotasks,
+                            self.incore_iotasks_fixer[0],
+                        )
                 else:
                     self._effective_incore_iotasks = self.incore_iotasks
         return self._effective_incore_iotasks
@@ -421,17 +501,23 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iodist.setter
     def incore_iodist(self, value):
         """How to distribute IO server tasks within model tasks."""
+        allowed = (
+            "begining",
+            "end",
+            "scattered",
+        )
+        if not (isinstance(value, str) and value in allowed):
+            raise ValueError(
+                "'{!s}' is not an allowed value ('{:s}')".format(
+                    value, ", ".join(allowed)
+                )
+            )
         self._incore_iodist = value
 
     def _set_nam_macro(self, namcontents, namlocal, macro, value):
         """Set a namelist macro and log it!"""
         namcontents.setmacro(macro, value)
+        logger.info("Setup macro %s=%s in %s", macro, str(value), namlocal)
 
     def setup_namelist_delta(self, namcontents, namlocal):
         """Applying MPI profile on local namelist ``namlocal`` with contents namcontents."""
@@ -445,48 +531,74 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.effective_incore_iotasks is not None:
             effective_nprocs -= self.effective_incore_iotasks
         # Set up the effective_nprocs related macros
+        nprocs_macros = ("NPROC", "NBPROC", "NTASKS")
         if any([n in nam_macros for n in nprocs_macros]):
             for n in nprocs_macros:
                 self._set_nam_macro(namcontents, namlocal, n, effective_nprocs)
             namw = True
+        if any([n in nam_macros for n in ("NCPROC", "NDPROC")]):
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NCPROC",
+                int(self.env.VORTEX_NPRGPNS or effective_nprocs),
+            )
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NDPROC",
+                int(self.env.VORTEX_NPRGPEW or 1),
+            )
             namw = True
+        if "NAMPAR1" in namcontents:
+            np1 = namcontents["NAMPAR1"]
+            for nstr in [x for x in ("NSTRIN", "NSTROUT") if x in np1]:
+                if (
+                    isinstance(np1[nstr], (int, float))
+                    and np1[nstr] > effective_nprocs
+                ):
+                    logger.info(
+                        "Setup %s=%s in NAMPAR1 %s",
+                        nstr,
+                        effective_nprocs,
+                        namlocal,
+                    )
                     np1[nstr] = effective_nprocs
                     namw = True
         # Deal with partitioning macros
+        namw_p = setup_partitioning_in_namelist(
+            namcontents,
+            effective_nprocs,
+            self.options.get("openmp", 1),
+            namlocal,
+        )
         namw = namw or namw_p
         # Incore IO tasks
         if self.effective_incore_iotasks is not None:
             c_iodist = None
             if self.incore_iodist is not None:
+                if self.incore_iodist == "begining":
                     c_iodist = -1
+                elif self.incore_iodist == "end":
                     c_iodist = 0
+                elif self.incore_iodist == "scattered":
                     # Ensure that there is at least one task on the first node
+                    c_iodist = min(
+                        self.nprocs // self.effective_incore_iotasks,
+                        self.options.get("nnp", self.nprocs),
+                    )
                 else:
+                    raise RuntimeError(
+                        "incore_iodist '{!s}' is not supported: check your code".format(
+                            self.incore_iodist
+                        )
+                    )
+            namw_io = self._nwp_ioserv_setup_namelist(
+                namcontents,
+                namlocal,
+                self.effective_incore_iotasks,
+                computed_iodist_value=c_iodist,
+            )
             namw = namw or namw_io
         return namw
@@ -501,8 +613,12 @@ class MpiNWP(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege."""
 
     _footprint = dict(
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwp",
+                ]
+            ),
         ),
     )
 
@@ -512,8 +628,12 @@ class MpiNWPObsort(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwpobsort",
+                ]
+            ),
         ),
     )
 
@@ -523,8 +643,12 @@ class MpiObsort(mpitools.MpiBinaryBasic):
     """The kind of binaries used when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicobsort",
+                ]
+            ),
         ),
     )
 
@@ -533,9 +657,15 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
     """Standard IFS/Arpege NWP IO server."""
 
     _footprint = dict(
+        attr=dict(
+            kind=dict(
+                values=[
+                    "nwpioserv",
+                ]
+            ),
+            iolocation=dict(
+                values=[-1, 0], default=0, optional=True, type=int
+            ),
         )
     )
 
@@ -545,7 +675,7 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
             namcontents,
             namlocal,
             self.nprocs,
-            computed_iodist_value=(-1 if self.iolocation == 0 else None)
+            computed_iodist_value=(-1 if self.iolocation == 0 else None),
         )
 
     def clean(self, opts=None):
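Note on the IO-server arithmetic above: the `nproc_multiple_of` fixer searches for an IO-task count close to the requested one such that the remaining model tasks divide evenly by one of the given multiples, preferring to add tasks but tolerating up to 5% fewer, as the inline comment says. A self-contained sketch of that search, assuming `interleave` behaves like a round-robin helper (this is an illustration, not the package's code):

import math
from itertools import zip_longest


def interleave(*iterables):
    # Round-robin stand-in: alternate elements from each iterable
    # (assumes the iterables never yield None).
    return (
        x for pair in zip_longest(*iterables) for x in pair if x is not None
    )


def effective_iotasks(nprocs, iotasks, multiples):
    """Adjust ``iotasks`` so that (nprocs - n) is a multiple of one of
    ``multiples``: prefer adding tasks, accept up to 5% fewer."""
    for candidate in interleave(
        range(iotasks, nprocs + 1),
        range(iotasks - 1, int(math.ceil(0.95 * iotasks)) - 1, -1),
    ):
        if any((nprocs - candidate) % m == 0 for m in multiples):
            return candidate
    raise RuntimeError("no suitable IO-task count found")


# With 128 tasks overall, 10 requested IO tasks and 8 tasks per node,
# effective_iotasks(128, 10, [8]) returns 16: the 112 model tasks fill
# exactly 14 nodes.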