vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/nwp/algo/mpitools.py
CHANGED
@@ -25,93 +25,124 @@ class MpiAuto(mpitools.MpiTool):
     """MpiTools that uses mpiauto as a proxy to several MPI implementations"""
 
     _footprint = dict(
-        attr
-            mpiname
-                values
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto",
+                ],
             ),
-            mpiopts
-
+            mpiopts=dict(default=None),
+            optprefix=dict(default="--"),
+            optmap=dict(
+                default=footprints.FPDict(
+                    nn="nn",
+                    nnp="nnp",
+                    openmp="openmp",
+                    np="np",
+                    prefixcommand="prefix-command",
+                    allowodddist="mpi-allow-odd-dist",
+                )
             ),
-
-
+            timeoutrestart=dict(
+                info="The number of attempts made by mpiauto",
+                optional=True,
+                default=DelayedEnvValue("MPI_INIT_TIMEOUT_RESTART", 2),
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-
-
-
-
+            sublauncher=dict(
+                info="How to actualy launch the MPI program",
+                values=["srun", "libspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-
-
-
-
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mpiwrapstd=dict(
+                values=[
+                    False,
+                ],
             ),
-
-                info
-                values
-                optional
-                doc_visibility
-                doc_zorder
+            bindingmethod=dict(
+                info="How to bind the MPI processes",
+                values=["vortex", "arch", "launcherspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-
-
-
-
-
-                values = ['arch', 'launcherspecific', 'vortex'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mplbased=dict(
+                info="Is the executable based on MPL?",
+                type=bool,
+                optional=True,
+                default=False,
             ),
         )
     )
 
-    _envelope_wrapper_tpl =
-    _envelope_rank_var =
+    _envelope_wrapper_tpl = "envelope_wrapper_mpiauto.tpl"
+    _envelope_rank_var = "MPIAUTORANK"
     _needs_mpilib_specific_mpienv = False
 
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.bindingmethod = "arch" if self.mplbased else "vortex"
+
     def _reshaped_mpiopts(self):
         """Raw list of mpi tool command line options."""
         options = super()._reshaped_mpiopts()
-        options[
-        if self.sublauncher ==
-            options[
-        elif self.sublauncher ==
-            options[
+        options["init-timeout-restart"] = [(self.timeoutrestart,)]
+        if self.sublauncher == "srun":
+            options["use-slurm-mpi"] = [()]
+        elif self.sublauncher == "libspecific":
+            options["no-use-slurm-mpi"] = [()]
         if self.bindingmethod:
-            for k in [
-
+            for k in [
+                "{:s}use-{:s}-bind".format(p, t)
+                for p in ("", "no-")
+                for t in ("arch", "slurm", "intelmpi", "openmpi")
+            ]:
                 options.pop(k, None)
-            if self.bindingmethod ==
-                options[
-            elif
-
-
-
-                options[
-
-
+            if self.bindingmethod == "arch":
+                options["use-arch-bind"] = [()]
+            elif (
+                self.bindingmethod == "launcherspecific"
+                and self.sublauncher == "srun"
+            ):
+                options["no-use-arch-bind"] = [()]
+                options["use-slurm-bind"] = [()]
+            elif self.bindingmethod == "launcherspecific":
+                options["no-use-arch-bind"] = [()]
+                for k in [
+                    "use-{:s}-bind".format(t)
+                    for t in ("slurm", "intelmpi", "openmpi")
+                ]:
                     options[k] = [()]
-            elif self.bindingmethod ==
-                options[
+            elif self.bindingmethod == "vortex":
+                options["no-use-arch-bind"] = [()]
         return options
 
     def _envelope_fix_envelope_bit(self, e_bit, e_desc):
         """Set the envelope fake binary options."""
-        e_bit.options = {
-
-
+        e_bit.options = {
+            k: v for k, v in e_desc.items() if k not in ("openmp",)
+        }
+        e_bit.options["prefixcommand"] = self._envelope_wrapper_name
         if self.binaries:
             e_bit.master = self.binaries[0].master
 
     def _set_binaries_hack(self, binaries):
         """Set the list of :class:`MpiBinaryDescription` objects associated with this instance."""
-        if len(binaries) > 1 and self.bindingmethod not in (
-
-
+        if len(binaries) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with multiple binaries.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod =
+            self.bindingmethod = "vortex"
 
     def _set_binaries_envelope_hack(self, binaries):
         """Tweak the envelope after binaries were setup."""
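Note: the rewritten _reshaped_mpiopts above derives mpiauto's use-*-bind flag family from the bindingmethod/sublauncher pair. A minimal standalone sketch of that selection, using the diff's convention that a bare flag is encoded as [()]; the function name is hypothetical and not part of vortex:

def binding_flags(bindingmethod, sublauncher=None):
    # Sketch of the flag selection in MpiAuto._reshaped_mpiopts;
    # a value of [()] stands for a flag passed without arguments.
    options = {}
    if bindingmethod == "arch":
        options["use-arch-bind"] = [()]
    elif bindingmethod == "launcherspecific" and sublauncher == "srun":
        options["no-use-arch-bind"] = [()]
        options["use-slurm-bind"] = [()]
    elif bindingmethod == "launcherspecific":
        options["no-use-arch-bind"] = [()]
        for t in ("slurm", "intelmpi", "openmpi"):
            options["use-{:s}-bind".format(t)] = [()]
    elif bindingmethod == "vortex":
        options["no-use-arch-bind"] = [()]
    return options

print(binding_flags("launcherspecific", "srun"))
# -> {'no-use-arch-bind': [()], 'use-slurm-bind': [()]}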
@@ -122,28 +153,38 @@ class MpiAuto(mpitools.MpiTool):
     def _set_envelope(self, value):
         """Set the envelope description."""
         super()._set_envelope(value)
-        if len(self._envelope) > 1 and self.bindingmethod not in (
-
-
+        if len(self._envelope) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with complex envelopes.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod =
+            self.bindingmethod = "vortex"
 
     envelope = property(mpitools.MpiTool._get_envelope, _set_envelope)
 
     def _hook_binary_mpiopts(self, binary, options):
         tuned = options.copy()
         # Regular MPI tasks count (the usual...)
-        if
-            if options[
+        if "nnp" in options and "nn" in options:
+            if options["nn"] * options["nnp"] == options["np"]:
                 # Remove harmful options
-                del tuned[
-                tuned.pop(
+                del tuned["np"]
+                tuned.pop("allowodddist", None)
             # that's the strange MPI distribution...
             else:
-                tuned[
+                tuned["allowodddist"] = (
+                    None  # With this, let mpiauto determine its own partitioning
+                )
         else:
-            msg =
-
+            msg = "The provided mpiopts are insufficient to build the command line: {!s}".format(
+                options
+            )
             raise mpitools.MpiException(msg)
         return tuned
 
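Note: the _hook_binary_mpiopts hunk above makes the nn/nnp/np consistency rule explicit: when nn * nnp equals np, the redundant np entry is dropped; otherwise allowodddist is set so mpiauto chooses its own partitioning. A minimal sketch of that rule on plain dictionaries (hypothetical function name; ValueError stands in for mpitools.MpiException; assumes np accompanies nn/nnp, as in the diff):

def tune_mpiopts(options):
    # Sketch of MpiAuto._hook_binary_mpiopts applied to a plain dict.
    tuned = dict(options)
    if "nnp" in options and "nn" in options:
        if options["nn"] * options["nnp"] == options["np"]:
            # np is implied by nn * nnp: drop it and the odd-dist marker
            del tuned["np"]
            tuned.pop("allowodddist", None)
        else:
            # uneven layout: let mpiauto determine its own partitioning
            tuned["allowodddist"] = None
    else:
        raise ValueError(
            "The provided mpiopts are insufficient to build the "
            "command line: {!s}".format(options)
        )
    return tuned

print(tune_mpiopts({"nn": 2, "nnp": 8, "np": 16}))
# -> {'nn': 2, 'nnp': 8}
print(tune_mpiopts({"nn": 2, "nnp": 8, "np": 12}))
# -> {'nn': 2, 'nnp': 8, 'np': 12, 'allowodddist': None}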
@@ -153,43 +194,50 @@ class MpiAuto(mpitools.MpiTool):
         for bin_obj in self.binaries:
             if bin_obj.options:
                 for mpirank in range(ranksidx, ranksidx + bin_obj.nprocs):
-                    prefix_c = bin_obj.options.get(
+                    prefix_c = bin_obj.options.get("prefixcommand", None)
                     if prefix_c:
-                        todostack[mpirank] = (
-
-
+                        todostack[mpirank] = (
+                            prefix_c,
+                            [
+                                todostack[mpirank][0],
+                            ]
+                            + todostack[mpirank][1],
+                            todostack[mpirank][2],
+                        )
             ranksidx += bin_obj.nprocs
         return todostack, ranks_bsize
 
     def _envelope_mkcmdline_extra(self, cmdl):
         """If possible, add an openmp option when the arch binding method is used."""
 
-        if self.bindingmethod !=
-            openmps = {b.options.get(
+        if self.bindingmethod != "vortex":
+            openmps = {b.options.get("openmp", None) for b in self.binaries}
             if len(openmps) > 1:
                 if self.bindingmethod is not None:
-                    logger.warning(
+                    logger.warning(
+                        "Non-uniform OpenMP threads number... Not specifying anything."
+                    )
             else:
                 openmp = openmps.pop() or 1
-                cmdl.append(self.optprefix + self.optmap[
+                cmdl.append(self.optprefix + self.optmap["openmp"])
                 cmdl.append(str(openmp))
 
     def setup_environment(self, opts):
         """Last minute fixups."""
         super().setup_environment(opts)
-        if self.bindingmethod in (
+        if self.bindingmethod in ("arch", "vortex"):
             # Make sure srun does nothing !
-            self._logged_env_set(
+            self._logged_env_set("SLURM_CPU_BIND", "none")
 
     def setup(self, opts=None):
         """Ensure that the prefixcommand has the execution rights."""
         for bin_obj in self.binaries:
-            prefix_c = bin_obj.options.get(
+            prefix_c = bin_obj.options.get("prefixcommand", None)
             if prefix_c is not None:
                 if self.system.path.exists(prefix_c):
                     self.system.xperm(prefix_c, force=True)
                 else:
-                    raise OSError(
+                    raise OSError("The prefixcommand do not exists.")
         super().setup(opts)
 
 
@@ -200,88 +248,106 @@ class MpiAutoDDT(MpiAuto):
     """
 
     _footprint = dict(
-        attr
-            mpiname
-                values
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto-ddt",
+                ],
             ),
         )
     )
 
-    _conf_suffix =
+    _conf_suffix = "-ddt"
 
     def _reshaped_mpiopts(self):
         options = super()._reshaped_mpiopts()
-        if
-            raise mpitools.MpiException(
-
-
+        if "prefix-mpirun" in options:
+            raise mpitools.MpiException(
+                "It is not allowed to start DDT with another "
+                + 'prefix_mpirun command defined: "{:s}"'.format(options)
+            )
         armtool = ArmForgeTool(self.ticket)
-        options[
-
-
-
+        options["prefix-mpirun"] = [
+            (
+                " ".join(
+                    armtool.ddt_prefix_cmd(
+                        sources=self.sources,
+                        workdir=self.system.path.dirname(
+                            self.binaries[0].master
+                        ),
+                    )
+                ),
+            )
+        ]
         return options
 
 
 # Some IFS/Arpege specific things :
 
+
 def arpifs_obsort_nprocab_binarydeco(cls):
     """Handle usual IFS/Arpege environment tweaking for OBSORT (nproca & nprocb).
 
     Note: This is a class decorator for class somehow based on MpiBinaryDescription
     """
-    orig_setup_env = getattr(cls,
+    orig_setup_env = getattr(cls, "setup_environment")
 
     def setup_environment(self, opts):
         orig_setup_env(self, opts)
-        self.env.NPROCA = int(self.env.NPROCA or
-
-
-
-        logger.info(
+        self.env.NPROCA = int(self.env.NPROCA or self.nprocs)
+        self.env.NPROCB = int(
+            self.env.NPROCB or self.nprocs // self.env.NPROCA
+        )
+        logger.info(
+            "MPI Setup NPROCA=%d and NPROCB=%d",
+            self.env.NPROCA,
+            self.env.NPROCB,
+        )
 
-    if hasattr(orig_setup_env,
+    if hasattr(orig_setup_env, "__doc__"):
         setup_environment.__doc__ = orig_setup_env.__doc__
 
-    setattr(cls,
+    setattr(cls, "setup_environment", setup_environment)
     return cls
 
 
 class _NWPIoServerMixin:
+    _NWP_IOSERV_PATTERNS = ("io_serv.*.d",)
 
-
-
-
-        total_iotasks, computed_iodist_value=None):
+    def _nwp_ioserv_setup_namelist(
+        self, namcontents, namlocal, total_iotasks, computed_iodist_value=None
+    ):
         """Applying IO Server profile on local namelist ``namlocal`` with contents namcontents."""
-        if
-            namio = namcontents[
+        if "NAMIO_SERV" in namcontents:
+            namio = namcontents["NAMIO_SERV"]
         else:
-            namio = namcontents.newblock(
+            namio = namcontents.newblock("NAMIO_SERV")
 
         namio.nproc_io = total_iotasks
         if computed_iodist_value is not None:
             namio.idistio = computed_iodist_value
 
-        if
+        if "VORTEX_IOSERVER_METHOD" in self.env:
             namio.nio_serv_method = self.env.VORTEX_IOSERVER_METHOD
 
-        if
+        if "VORTEX_IOSERVER_BUFMAX" in self.env:
             namio.nio_serv_buf_maxsize = self.env.VORTEX_IOSERVER_BUFMAX
 
-        if
+        if "VORTEX_IOSERVER_MLSERVER" in self.env:
             namio.nmsg_level_server = self.env.VORTEX_IOSERVER_MLSERVER
 
-        if
+        if "VORTEX_IOSERVER_MLCLIENT" in self.env:
             namio.nmsg_level_client = self.env.VORTEX_IOSERVER_MLCLIENT
 
-        if
+        if "VORTEX_IOSERVER_PROCESS" in self.env:
             namio.nprocess_level = self.env.VORTEX_IOSERVER_PROCESS
 
-        if
+        if "VORTEX_IOSERVER_PIOMODEL" in self.env:
             namio.pioprocr_MDL = self.env.VORTEX_IOSERVER_PIOMODEL
 
-        self.system.highlight(
+        self.system.highlight(
+            "Parallel io server namelist for {:s}".format(namlocal)
+        )
         print(namio.dumps())
 
         return True
@@ -297,38 +363,46 @@ class _NWPIoServerMixin:
         """Post-execution cleaning for io server."""
 
         # Old fashion way to make clear that some polling is needed.
-        self.system.touch(
+        self.system.touch("io_poll.todo")
 
         # Get a look inside io server output directories according to its own pattern
         ioserv_filelist = set()
         ioserv_prefixes = set()
-        iofile_re = re.compile(
-
+        iofile_re = re.compile(
+            r"((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$"
+        )
+        self.system.highlight("Dealing with IO directories")
         iodirs = self._nwp_ioserv_iodirs()
         if iodirs:
-            logger.info(
-            f_summary = collections.defaultdict(lambda: [
+            logger.info("List of IO directories: %s", ",".join(iodirs))
+            f_summary = collections.defaultdict(lambda: [" "] * len(iodirs))
             for i, iodir in enumerate(iodirs):
                 for iofile in self.system.listdir(iodir):
                     zf = iofile_re.match(iofile)
                     if zf:
-                        f_summary[zf.group(1)][i] =
+                        f_summary[zf.group(1)][i] = "+"
                         ioserv_filelist.add((zf.group(1), zf.group(2)))
                         ioserv_prefixes.add(zf.group(2))
                     else:
-                        f_summary[iofile][i] =
+                        f_summary[iofile][i] = "?"
             max_names_len = max([len(iofile) for iofile in f_summary.keys()])
-            fmt_names =
-            logger.info(
-
-
+            fmt_names = "{:" + str(max_names_len) + "s}"
+            logger.info(
+                "Data location accross the various IOserver directories:\n%s",
+                "\n".join(
+                    [
+                        (fmt_names + " |{:s}|").format(iofile, "".join(where))
+                        for iofile, where in sorted(f_summary.items())
+                    ]
+                ),
+            )
         else:
-            logger.info(
+            logger.info("No IO directories were found")
 
-        if
+        if "GRIBPF" in ioserv_prefixes:
             # If GRIB are requested, do not bother with old FA PF files
-            ioserv_prefixes.discard(
-            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p !=
+            ioserv_prefixes.discard("PF")
+            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != "PF"}
 
         # Touch the output files
         for tgfile, _ in ioserv_filelist:
@@ -336,7 +410,7 @@ class _NWPIoServerMixin:
 
         # Touch the io_poll.todo.PREFIX
         for prefix in ioserv_prefixes:
-            self.system.touch(
+            self.system.touch("io_poll.todo.{:s}".format(prefix))
 
 
 class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
@@ -344,7 +418,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
 
     _abstract = True
 
-    def __init__(self, *
+    def __init__(self, *kargs, **kwargs):
         super().__init__(*kargs, **kwargs)
         self._incore_iotasks = None
         self._effective_incore_iotasks = None
@@ -359,7 +433,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iotasks.setter
     def incore_iotasks(self, value):
         """The number of tasks dedicated to the IO server."""
-        if isinstance(value, str) and value.endswith(
+        if isinstance(value, str) and value.endswith("%"):
             value = math.ceil(self.nprocs * float(value[:-1]) / 100)
         self._incore_iotasks = int(value)
         self._effective_incore_iotasks = None
@@ -373,10 +447,12 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     def incore_iotasks_fixer(self, value):
         """Tweak the number of iotasks in order to respect a given constraints."""
         if not isinstance(value, str):
-            raise ValueError(
-        if value.startswith(
-            self._incore_iotasks_fixer = (
-
+            raise ValueError("A string is expected")
+        if value.startswith("nproc_multiple_of_"):
+            self._incore_iotasks_fixer = (
+                "nproc_multiple_of",
+                [int(i) for i in value[18:].split(",")],
+            )
         else:
             raise ValueError('The "{:s}" value is incorrect'.format(value))
 
@@ -391,22 +467,36 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.incore_iotasks is not None:
             if self._effective_incore_iotasks is None:
                 if self.incore_iotasks_fixer is not None:
-                    if self.incore_iotasks_fixer[0] ==
+                    if self.incore_iotasks_fixer[0] == "nproc_multiple_of":
                         # Allow for 5% less, or add some tasks
-                        for candidate in interleave(
-
-
-
-
-
+                        for candidate in interleave(
+                            range(self.incore_iotasks, self.nprocs + 1),
+                            range(
+                                self.incore_iotasks - 1,
+                                int(math.ceil(0.95 * self.incore_iotasks)) - 1,
+                                -1,
+                            ),
+                        ):
+                            if any(
+                                [
+                                    (self.nprocs - candidate) % multiple == 0
+                                    for multiple in self.incore_iotasks_fixer[
+                                        1
+                                    ]
+                                ]
+                            ):
                                 self._effective_incore_iotasks = candidate
                                 break
                     else:
-                        raise RuntimeError(
+                        raise RuntimeError("Unsupported fixer")
                 if self._effective_incore_iotasks != self.incore_iotasks:
-                    logger.info(
-
-
+                    logger.info(
+                        "The number of IO tasks was updated form %d to %d "
+                        + 'because of the "%s" fixer',
+                        self.incore_iotasks,
+                        self._effective_incore_iotasks,
+                        self.incore_iotasks_fixer[0],
+                    )
                 else:
                     self._effective_incore_iotasks = self.incore_iotasks
         return self._effective_incore_iotasks
@@ -421,17 +511,23 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iodist.setter
     def incore_iodist(self, value):
         """How to distribute IO server tasks within model tasks."""
-        allowed = (
-
-
-
+        allowed = (
+            "begining",
+            "end",
+            "scattered",
+        )
+        if not (isinstance(value, str) and value in allowed):
+            raise ValueError(
+                "'{!s}' is not an allowed value ('{:s}')".format(
+                    value, ", ".join(allowed)
+                )
+            )
         self._incore_iodist = value
 
     def _set_nam_macro(self, namcontents, namlocal, macro, value):
         """Set a namelist macro and log it!"""
         namcontents.setmacro(macro, value)
-        logger.info(
+        logger.info("Setup macro %s=%s in %s", macro, str(value), namlocal)
 
     def setup_namelist_delta(self, namcontents, namlocal):
         """Applying MPI profile on local namelist ``namlocal`` with contents namcontents."""
@@ -445,48 +541,74 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.effective_incore_iotasks is not None:
             effective_nprocs -= self.effective_incore_iotasks
         # Set up the effective_nprocs related macros
-        nprocs_macros = (
+        nprocs_macros = ("NPROC", "NBPROC", "NTASKS")
         if any([n in nam_macros for n in nprocs_macros]):
             for n in nprocs_macros:
                 self._set_nam_macro(namcontents, namlocal, n, effective_nprocs)
             namw = True
-        if any([n in nam_macros for n in (
-            self._set_nam_macro(
-
-
-
+        if any([n in nam_macros for n in ("NCPROC", "NDPROC")]):
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NCPROC",
+                int(self.env.VORTEX_NPRGPNS or effective_nprocs),
+            )
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NDPROC",
+                int(self.env.VORTEX_NPRGPEW or 1),
+            )
             namw = True
-        if
-            np1 = namcontents[
-            for nstr in [x for x in (
-                if
-
+        if "NAMPAR1" in namcontents:
+            np1 = namcontents["NAMPAR1"]
+            for nstr in [x for x in ("NSTRIN", "NSTROUT") if x in np1]:
+                if (
+                    isinstance(np1[nstr], (int, float))
+                    and np1[nstr] > effective_nprocs
+                ):
+                    logger.info(
+                        "Setup %s=%s in NAMPAR1 %s",
+                        nstr,
+                        effective_nprocs,
+                        namlocal,
+                    )
                     np1[nstr] = effective_nprocs
                     namw = True
         # Deal with partitioning macros
-        namw_p = setup_partitioning_in_namelist(
-
-
-
+        namw_p = setup_partitioning_in_namelist(
+            namcontents,
+            effective_nprocs,
+            self.options.get("openmp", 1),
+            namlocal,
+        )
         namw = namw or namw_p
         # Incore IO tasks
         if self.effective_incore_iotasks is not None:
             c_iodist = None
             if self.incore_iodist is not None:
-                if self.incore_iodist ==
+                if self.incore_iodist == "begining":
                     c_iodist = -1
-                elif self.incore_iodist ==
+                elif self.incore_iodist == "end":
                     c_iodist = 0
-                elif self.incore_iodist ==
+                elif self.incore_iodist == "scattered":
                     # Ensure that there is at least one task on the first node
-                    c_iodist = min(
-
+                    c_iodist = min(
+                        self.nprocs // self.effective_incore_iotasks,
+                        self.options.get("nnp", self.nprocs),
+                    )
                 else:
-                    raise RuntimeError(
-
-
-
-
+                    raise RuntimeError(
+                        "incore_iodist '{!s}' is not supported: check your code".format(
+                            self.incore_iodist
+                        )
+                    )
+            namw_io = self._nwp_ioserv_setup_namelist(
+                namcontents,
+                namlocal,
+                self.effective_incore_iotasks,
+                computed_iodist_value=c_iodist,
+            )
             namw = namw or namw_io
         return namw
 
@@ -501,8 +623,12 @@ class MpiNWP(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege."""
 
     _footprint = dict(
-        attr
-            kind
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwp",
+                ]
+            ),
         ),
     )
 
@@ -512,8 +638,12 @@ class MpiNWPObsort(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr
-            kind
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwpobsort",
+                ]
+            ),
        ),
    )
 
@@ -523,8 +653,12 @@ class MpiObsort(mpitools.MpiBinaryBasic):
     """The kind of binaries used when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr
-            kind
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicobsort",
+                ]
+            ),
        ),
    )
 
@@ -533,9 +667,15 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
     """Standard IFS/Arpege NWP IO server."""
 
     _footprint = dict(
-        attr
-            kind
-
+        attr=dict(
+            kind=dict(
+                values=[
+                    "nwpioserv",
+                ]
+            ),
+            iolocation=dict(
+                values=[-1, 0], default=0, optional=True, type=int
+            ),
        )
    )
 
@@ -545,7 +685,7 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
             namcontents,
             namlocal,
             self.nprocs,
-            computed_iodist_value=(-1 if self.iolocation == 0 else None)
+            computed_iodist_value=(-1 if self.iolocation == 0 else None),
         )
 
     def clean(self, opts=None):
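Note: the incore_iotasks setter and the "nproc_multiple_of_" fixer in the _AbstractMpiNWP hunks above let a binary request its IO-server share as a percentage of nprocs and then nudge that count so the remaining compute tasks divide evenly. A self-contained sketch of the arithmetic; the interleave helper is an assumed round-robin stand-in, since the diff does not show where vortex imports it from:

import itertools
import math


def interleave(*iterables):
    # Assumed stand-in: yield items from each iterable in round-robin
    # order, skipping iterables that are already exhausted.
    sentinel = object()
    for bundle in itertools.zip_longest(*iterables, fillvalue=sentinel):
        yield from (item for item in bundle if item is not sentinel)


def iotasks_from_spec(nprocs, value):
    # Mirrors the incore_iotasks setter: "10%" -> ceil(nprocs * 0.10).
    if isinstance(value, str) and value.endswith("%"):
        value = math.ceil(nprocs * float(value[:-1]) / 100)
    return int(value)


def fix_iotasks(nprocs, iotasks, multiples):
    # Mirrors the "nproc_multiple_of" fixer: walk upward from the
    # requested count and, interleaved, downward to at most 5% below it,
    # until the remaining compute tasks divide by one of the multiples.
    for candidate in interleave(
        range(iotasks, nprocs + 1),
        range(iotasks - 1, int(math.ceil(0.95 * iotasks)) - 1, -1),
    ):
        if any((nprocs - candidate) % m == 0 for m in multiples):
            return candidate
    raise RuntimeError("No suitable IO tasks count found")


print(iotasks_from_spec(128, "10%"))  # -> 13
print(fix_iotasks(128, 13, [8]))      # -> 16 (128 - 16 == 112 == 14 * 8)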
|