vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/nwp/algo/mpitools.py
@@ -0,0 +1,554 @@
+ """
+ General interest and NWP specific MPI launchers.
+ """
+
+ import collections
+ import re
+ import math
+
+ from bronx.fancies import loggers
+ from bronx.syntax.iterators import interleave
+ import footprints
+
+ from vortex.algo import mpitools
+ from vortex.syntax.stdattrs import DelayedEnvValue
+ from vortex.tools.arm import ArmForgeTool
+ from ..tools.partitioning import setup_partitioning_in_namelist
+
+ #: No automatic export
+ __all__ = []
+
+ logger = loggers.getLogger(__name__)
+
+
+ class MpiAuto(mpitools.MpiTool):
+     """MpiTool that uses mpiauto as a proxy to several MPI implementations."""
+
+     _footprint = dict(
+         attr = dict(
+             mpiname = dict(
+                 values = ['mpiauto', ],
+             ),
+             mpiopts = dict(
+                 default = None
+             ),
+             optprefix = dict(
+                 default = '--'
+             ),
+             optmap = dict(
+                 default = footprints.FPDict(nn='nn', nnp='nnp', openmp='openmp',
+                                             np='np', prefixcommand='prefix-command',
+                                             allowodddist='mpi-allow-odd-dist')
+             ),
+             timeoutrestart = dict(
+                 info = 'The number of attempts made by mpiauto',
+                 optional = True,
+                 default = DelayedEnvValue('MPI_INIT_TIMEOUT_RESTART', 2),
+                 doc_visibility = footprints.doc.visibility.ADVANCED,
+                 doc_zorder = -90,
+             ),
+             sublauncher = dict(
+                 info = 'How to actually launch the MPI program',
+                 values = ['srun', 'libspecific'],
+                 optional = True,
+                 doc_visibility = footprints.doc.visibility.ADVANCED,
+                 doc_zorder = -90,
+             ),
+             mpiwrapstd = dict(
+                 values = [False, ],
+             ),
+             bindingmethod = dict(
+                 info = 'How to bind the MPI processes',
+                 values = ['arch', 'launcherspecific', 'vortex'],
+                 optional = True,
+                 doc_visibility = footprints.doc.visibility.ADVANCED,
+                 doc_zorder = -90,
+             ),
+         )
+     )
+
+     _envelope_wrapper_tpl = '@envelope_wrapper_mpiauto.tpl'
+     _envelope_rank_var = 'MPIAUTORANK'
+     _needs_mpilib_specific_mpienv = False
+
+     def _reshaped_mpiopts(self):
+         """Raw list of mpi tool command line options."""
+         options = super()._reshaped_mpiopts()
+         options['init-timeout-restart'] = [(self.timeoutrestart, )]
+         if self.sublauncher == 'srun':
+             options['use-slurm-mpi'] = [()]
+         elif self.sublauncher == 'libspecific':
+             options['no-use-slurm-mpi'] = [()]
+         if self.bindingmethod:
+             for k in ['{:s}use-{:s}-bind'.format(p, t) for p in ('', 'no-')
+                       for t in ('arch', 'slurm', 'intelmpi', 'openmpi')]:
+                 options.pop(k, None)
+             if self.bindingmethod == 'arch':
+                 options['use-arch-bind'] = [()]
+             elif self.bindingmethod == 'launcherspecific' and self.sublauncher == 'srun':
+                 options['no-use-arch-bind'] = [()]
+                 options['use-slurm-bind'] = [()]
+             elif self.bindingmethod == 'launcherspecific':
+                 options['no-use-arch-bind'] = [()]
+                 for k in ['use-{:s}-bind'.format(t)
+                           for t in ('slurm', 'intelmpi', 'openmpi')]:
+                     options[k] = [()]
+             elif self.bindingmethod == 'vortex':
+                 options['no-use-arch-bind'] = [()]
+         return options
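# A minimal standalone sketch (editorial, not part of the diff above) of how
# the `sublauncher` / `bindingmethod` attributes translate into mpiauto flags.
# The flag names mirror the ones set by _reshaped_mpiopts with the default
# '--' optprefix; the helper function itself is hypothetical.
def mpiauto_binding_flags(bindingmethod, sublauncher):
    flags = []
    if sublauncher == 'srun':
        flags.append('--use-slurm-mpi')
    elif sublauncher == 'libspecific':
        flags.append('--no-use-slurm-mpi')
    if bindingmethod == 'arch':
        flags.append('--use-arch-bind')
    elif bindingmethod == 'launcherspecific' and sublauncher == 'srun':
        flags.extend(['--no-use-arch-bind', '--use-slurm-bind'])
    elif bindingmethod == 'launcherspecific':
        flags.append('--no-use-arch-bind')
        flags.extend('--use-{:s}-bind'.format(t)
                     for t in ('slurm', 'intelmpi', 'openmpi'))
    elif bindingmethod == 'vortex':
        flags.append('--no-use-arch-bind')
    return flags

assert mpiauto_binding_flags('arch', 'srun') == ['--use-slurm-mpi', '--use-arch-bind']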
+
+     def _envelope_fix_envelope_bit(self, e_bit, e_desc):
+         """Set the envelope fake binary options."""
+         e_bit.options = {k: v for k, v in e_desc.items()
+                          if k not in ('openmp', )}
+         e_bit.options['prefixcommand'] = self._envelope_wrapper_name
+         if self.binaries:
+             e_bit.master = self.binaries[0].master
+
+     def _set_binaries_hack(self, binaries):
+         """Set the list of :class:`MpiBinaryDescription` objects associated with this instance."""
+         if len(binaries) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
+             logger.info("The '{:s}' binding method is not working properly with multiple binaries."
+                         .format(self.bindingmethod))
+             logger.warning("Resetting the binding method to 'vortex'.")
+             self.bindingmethod = 'vortex'
+
+     def _set_binaries_envelope_hack(self, binaries):
+         """Tweak the envelope after binaries were setup."""
+         super()._set_binaries_envelope_hack(binaries)
+         for e_bit in self.envelope:
+             e_bit.master = binaries[0].master
+
+     def _set_envelope(self, value):
+         """Set the envelope description."""
+         super()._set_envelope(value)
+         if len(self._envelope) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
+             logger.info("The '{:s}' binding method is not working properly with complex envelopes."
+                         .format(self.bindingmethod))
+             logger.warning("Resetting the binding method to 'vortex'.")
+             self.bindingmethod = 'vortex'
+
+     envelope = property(mpitools.MpiTool._get_envelope, _set_envelope)
+
+     def _hook_binary_mpiopts(self, binary, options):
+         tuned = options.copy()
+         # Regular MPI tasks count (the usual...)
+         if 'nnp' in options and 'nn' in options:
+             if options['nn'] * options['nnp'] == options['np']:
+                 # Remove harmful options
+                 del tuned['np']
+                 tuned.pop('allowodddist', None)
+             # that's the strange MPI distribution...
+             else:
+                 tuned['allowodddist'] = None  # With this, let mpiauto determine its own partitioning
+         else:
+             msg = ("The provided mpiopts are insufficient to build the command line: {!s}"
+                    .format(options))
+             raise mpitools.MpiException(msg)
+         return tuned
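# Worked illustration (editorial sketch, hypothetical numbers) of the
# consistency rule enforced above: with a "regular" layout, nn * nnp == np
# and the redundant 'np' option is dropped; otherwise 'allowodddist' is set
# so that mpiauto works out its own partitioning.
def tune(options):
    tuned = dict(options)
    if options['nn'] * options['nnp'] == options['np']:
        del tuned['np']
        tuned.pop('allowodddist', None)
    else:
        tuned['allowodddist'] = None
    return tuned

assert 'np' not in tune({'nn': 4, 'nnp': 32, 'np': 128})         # 4 x 32 == 128
assert 'allowodddist' in tune({'nn': 4, 'nnp': 32, 'np': 120})   # odd distribution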
+
+     def _envelope_mkwrapper_todostack(self):
+         ranksidx = 0
+         todostack, ranks_bsize = super()._envelope_mkwrapper_todostack()
+         for bin_obj in self.binaries:
+             if bin_obj.options:
+                 for mpirank in range(ranksidx, ranksidx + bin_obj.nprocs):
+                     prefix_c = bin_obj.options.get('prefixcommand', None)
+                     if prefix_c:
+                         todostack[mpirank] = (prefix_c,
+                                               [todostack[mpirank][0], ] + todostack[mpirank][1],
+                                               todostack[mpirank][2])
+             ranksidx += bin_obj.nprocs
+         return todostack, ranks_bsize
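# Editorial sketch of the per-rank rewrite performed above when a binary
# declares a 'prefixcommand': each (binary, args, environment) entry of the
# todo-stack is shifted so the prefix becomes the executable (paths are
# hypothetical).
todostack = {0: ('/path/master.x', ['-v'], {})}
prefix_c = '/path/wrapper.sh'
todostack[0] = (prefix_c,
                [todostack[0][0], ] + todostack[0][1],
                todostack[0][2])
assert todostack[0] == ('/path/wrapper.sh', ['/path/master.x', '-v'], {})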
+
+     def _envelope_mkcmdline_extra(self, cmdl):
+         """If possible, add an openmp option when the arch binding method is used."""
+
+         if self.bindingmethod != 'vortex':
+             openmps = {b.options.get('openmp', None) for b in self.binaries}
+             if len(openmps) > 1:
+                 if self.bindingmethod is not None:
+                     logger.warning("Non-uniform OpenMP threads number... Not specifying anything.")
+             else:
+                 openmp = openmps.pop() or 1
+                 cmdl.append(self.optprefix + self.optmap['openmp'])
+                 cmdl.append(str(openmp))
+
+     def setup_environment(self, opts):
+         """Last minute fixups."""
+         super().setup_environment(opts)
+         if self.bindingmethod in ('arch', 'vortex'):
+             # Make sure srun does nothing !
+             self._logged_env_set('SLURM_CPU_BIND', 'none')
+
+     def setup(self, opts=None):
+         """Ensure that the prefixcommand has the execution rights."""
+         for bin_obj in self.binaries:
+             prefix_c = bin_obj.options.get('prefixcommand', None)
+             if prefix_c is not None:
+                 if self.system.path.exists(prefix_c):
+                     self.system.xperm(prefix_c, force=True)
+                 else:
+                     raise OSError('The prefixcommand does not exist.')
+         super().setup(opts)
+
+
+ class MpiAutoDDT(MpiAuto):
+     """
+     MpiTool that uses mpiauto as a proxy to several MPI implementations,
+     with DDT support.
+     """
+
+     _footprint = dict(
+         attr = dict(
+             mpiname = dict(
+                 values = ['mpiauto-ddt', ],
+             ),
+         )
+     )
+
+     _conf_suffix = '-ddt'
+
+     def _reshaped_mpiopts(self):
+         options = super()._reshaped_mpiopts()
+         if 'prefix-mpirun' in options:
+             raise mpitools.MpiException('It is not allowed to start DDT with another ' +
+                                         'prefix_mpirun command defined: "{!s}"'
+                                         .format(options))
+         armtool = ArmForgeTool(self.ticket)
+         options['prefix-mpirun'] = [(' '.join(armtool.ddt_prefix_cmd(
+             sources=self.sources,
+             workdir=self.system.path.dirname(self.binaries[0].master)
+         )), )]
+         return options
+
+
+ # Some IFS/Arpege specific things:
+
+ def arpifs_obsort_nprocab_binarydeco(cls):
+     """Handle usual IFS/Arpege environment tweaking for OBSORT (nproca & nprocb).
+
+     Note: this is a class decorator for classes based (directly or not) on
+     :class:`MpiBinaryDescription`.
+     """
+     orig_setup_env = getattr(cls, 'setup_environment')
+
+     def setup_environment(self, opts):
+         orig_setup_env(self, opts)
+         self.env.NPROCA = int(self.env.NPROCA or
+                               self.nprocs)
+         self.env.NPROCB = int(self.env.NPROCB or
+                               self.nprocs // self.env.NPROCA)
+         logger.info("MPI Setup NPROCA=%d and NPROCB=%d", self.env.NPROCA, self.env.NPROCB)
+
+     if hasattr(orig_setup_env, '__doc__'):
+         setup_environment.__doc__ = orig_setup_env.__doc__
+
+     setattr(cls, 'setup_environment', setup_environment)
+     return cls
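# Editorial sketch (hypothetical numbers) of the defaulting rule installed by
# the decorator above: with 64 MPI tasks and neither variable pre-set in the
# environment, NPROCA falls back to nprocs and NPROCB to nprocs // NPROCA.
nprocs = 64
nproca = int(None or nprocs)            # NPROCA unset -> 64
nprocb = int(None or nprocs // nproca)  # NPROCB unset -> 64 // 64 = 1
assert (nproca, nprocb) == (64, 1)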
+
+
+ class _NWPIoServerMixin:
+
+     _NWP_IOSERV_PATTERNS = ('io_serv.*.d', )
+
+     def _nwp_ioserv_setup_namelist(self, namcontents, namlocal,
+                                    total_iotasks, computed_iodist_value=None):
+         """Apply the IO server profile to the local namelist ``namlocal`` with contents ``namcontents``."""
+         if 'NAMIO_SERV' in namcontents:
+             namio = namcontents['NAMIO_SERV']
+         else:
+             namio = namcontents.newblock('NAMIO_SERV')
+
+         namio.nproc_io = total_iotasks
+         if computed_iodist_value is not None:
+             namio.idistio = computed_iodist_value
+
+         if 'VORTEX_IOSERVER_METHOD' in self.env:
+             namio.nio_serv_method = self.env.VORTEX_IOSERVER_METHOD
+
+         if 'VORTEX_IOSERVER_BUFMAX' in self.env:
+             namio.nio_serv_buf_maxsize = self.env.VORTEX_IOSERVER_BUFMAX
+
+         if 'VORTEX_IOSERVER_MLSERVER' in self.env:
+             namio.nmsg_level_server = self.env.VORTEX_IOSERVER_MLSERVER
+
+         if 'VORTEX_IOSERVER_MLCLIENT' in self.env:
+             namio.nmsg_level_client = self.env.VORTEX_IOSERVER_MLCLIENT
+
+         if 'VORTEX_IOSERVER_PROCESS' in self.env:
+             namio.nprocess_level = self.env.VORTEX_IOSERVER_PROCESS
+
+         if 'VORTEX_IOSERVER_PIOMODEL' in self.env:
+             namio.pioprocr_MDL = self.env.VORTEX_IOSERVER_PIOMODEL
+
+         self.system.highlight('Parallel io server namelist for {:s}'.format(namlocal))
+         print(namio.dumps())
+
+         return True
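# Compact recap (editorial, mirroring the method above; not itself part of
# the package) of the environment-variable -> NAMIO_SERV key pairs honoured
# when tuning the IO server namelist block:
ENV_TO_NAMIO_SERV = {
    'VORTEX_IOSERVER_METHOD': 'nio_serv_method',
    'VORTEX_IOSERVER_BUFMAX': 'nio_serv_buf_maxsize',
    'VORTEX_IOSERVER_MLSERVER': 'nmsg_level_server',
    'VORTEX_IOSERVER_MLCLIENT': 'nmsg_level_client',
    'VORTEX_IOSERVER_PROCESS': 'nprocess_level',
    'VORTEX_IOSERVER_PIOMODEL': 'pioprocr_MDL',
}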
+
+     def _nwp_ioserv_iodirs(self):
+         """Return an ordered list of directories matching the ``_NWP_IOSERV_PATTERNS`` attribute."""
+         found = []
+         for pattern in self._NWP_IOSERV_PATTERNS:
+             found.extend(self.system.glob(pattern))
+         return sorted(found)
+
+     def _nwp_ioserv_clean(self):
+         """Post-execution cleaning for the IO server."""
+
+         # Old-fashioned way to make clear that some polling is needed.
+         self.system.touch('io_poll.todo')
+
+         # Take a look inside the IO server output directories, according to their own pattern
+         ioserv_filelist = set()
+         ioserv_prefixes = set()
+         iofile_re = re.compile(r'((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$')
+         self.system.highlight('Dealing with IO directories')
+         iodirs = self._nwp_ioserv_iodirs()
+         if iodirs:
+             logger.info('List of IO directories: %s', ','.join(iodirs))
+             f_summary = collections.defaultdict(lambda: [' '] * len(iodirs))
+             for i, iodir in enumerate(iodirs):
+                 for iofile in self.system.listdir(iodir):
+                     zf = iofile_re.match(iofile)
+                     if zf:
+                         f_summary[zf.group(1)][i] = '+'
+                         ioserv_filelist.add((zf.group(1), zf.group(2)))
+                         ioserv_prefixes.add(zf.group(2))
+                     else:
+                         f_summary[iofile][i] = '?'
+             max_names_len = max([len(iofile) for iofile in f_summary.keys()])
+             fmt_names = '{:' + str(max_names_len) + 's}'
+             logger.info('Data location across the various IO server directories:\n%s',
+                         '\n'.join([(fmt_names + ' |{:s}|').format(iofile, ''.join(where))
+                                    for iofile, where in sorted(f_summary.items())]))
+         else:
+             logger.info('No IO directories were found')
+
+         if 'GRIBPF' in ioserv_prefixes:
+             # If GRIBs are requested, do not bother with old FA PF files
+             ioserv_prefixes.discard('PF')
+             ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != 'PF'}
+
+         # Touch the output files
+         for tgfile, _ in ioserv_filelist:
+             self.system.touch(tgfile)
+
+         # Touch the io_poll.todo.PREFIX files
+         for prefix in ioserv_prefixes:
+             self.system.touch('io_poll.todo.{:s}'.format(prefix))
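# Editorial sketch of what the iofile_re pattern above recognises; the file
# names are hypothetical examples of IFS/Arpege IO server output. Group 1 is
# the target file to touch, group 2 the prefix (ICMSH, PF or GRIBPF).
import re

iofile_re = re.compile(r'((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$')
m = iofile_re.match('PFARPE+0012.part42')
assert (m.group(1), m.group(2)) == ('PFARPE+0012', 'PF')
m = iofile_re.match('ICMSHFCST+0006:30.sfx')
assert (m.group(1), m.group(2)) == ('ICMSHFCST+0006:30.sfx', 'ICMSH')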
+
+
+ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
+     """The kind of binaries used in IFS/Arpege."""
+
+     _abstract = True
+
+     def __init__(self, *kargs, **kwargs):
+         super().__init__(*kargs, **kwargs)
+         self._incore_iotasks = None
+         self._effective_incore_iotasks = None
+         self._incore_iotasks_fixer = None
+         self._incore_iodist = None
+
+     @property
+     def incore_iotasks(self):
+         """The number of tasks dedicated to the IO server."""
+         return self._incore_iotasks
+
+     @incore_iotasks.setter
+     def incore_iotasks(self, value):
+         """The number of tasks dedicated to the IO server."""
+         if isinstance(value, str) and value.endswith('%'):
+             value = math.ceil(self.nprocs * float(value[:-1]) / 100)
+         self._incore_iotasks = int(value)
+         self._effective_incore_iotasks = None
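# Editorial sketch of the '%' convention accepted by the setter above: a
# string ending in '%' is turned into a task count, rounding up from the
# total number of MPI tasks (numbers are hypothetical).
import math

nprocs = 128
value = '10%'
iotasks = int(math.ceil(nprocs * float(value[:-1]) / 100))
assert iotasks == 13   # ceil(12.8)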
+
+     @property
+     def incore_iotasks_fixer(self):
+         """Tweak the number of IO tasks in order to respect a given constraint."""
+         return self._incore_iotasks_fixer
+
+     @incore_iotasks_fixer.setter
+     def incore_iotasks_fixer(self, value):
+         """Tweak the number of IO tasks in order to respect a given constraint."""
+         if not isinstance(value, str):
+             raise ValueError('A string is expected')
+         if value.startswith('nproc_multiple_of_'):
+             self._incore_iotasks_fixer = ('nproc_multiple_of',
+                                           [int(i) for i in value[18:].split(',')])
+         else:
+             raise ValueError('The "{:s}" value is incorrect'.format(value))
+
+     @property
+     def effective_incore_iotasks(self):
+         """Apply fixers to incore_iotasks and return this value.
+
+         e.g. "nproc_multiple_of_15,16,17" ensures that the number of processes
+         dedicated to computations (i.e. the total number of processes minus the
+         IO processes) is a multiple of 15, 16 or 17.
+         """
+         if self.incore_iotasks is not None:
+             if self._effective_incore_iotasks is None:
+                 if self.incore_iotasks_fixer is not None:
+                     if self.incore_iotasks_fixer[0] == 'nproc_multiple_of':
+                         # Allow for 5% less, or add some tasks
+                         for candidate in interleave(range(self.incore_iotasks, self.nprocs + 1),
+                                                     range(self.incore_iotasks - 1,
+                                                           int(math.ceil(0.95 * self.incore_iotasks)) - 1,
+                                                           -1)):
+                             if any([(self.nprocs - candidate) % multiple == 0
+                                     for multiple in self.incore_iotasks_fixer[1]]):
+                                 self._effective_incore_iotasks = candidate
+                                 break
+                     else:
+                         raise RuntimeError('Unsupported fixer')
+                     if self._effective_incore_iotasks != self.incore_iotasks:
+                         logger.info('The number of IO tasks was updated from %d to %d ' +
+                                     'because of the "%s" fixer', self.incore_iotasks,
+                                     self._effective_incore_iotasks, self.incore_iotasks_fixer[0])
+                 else:
+                     self._effective_incore_iotasks = self.incore_iotasks
+             return self._effective_incore_iotasks
+         else:
+             return None
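# Editorial sketch of the 'nproc_multiple_of' search above, with hypothetical
# numbers: 128 MPI tasks, 20 IO tasks requested, and compute tasks required
# to be a multiple of 15. It relies, as the property does, on bronx's
# interleave continuing with the remaining iterator once the shorter one
# (downward, at most 5% below the request) is exhausted.
import math
from bronx.syntax.iterators import interleave

nprocs, iotasks, multiples = 128, 20, [15]
for candidate in interleave(range(iotasks, nprocs + 1),
                            range(iotasks - 1,
                                  int(math.ceil(0.95 * iotasks)) - 1, -1)):
    if any((nprocs - candidate) % m == 0 for m in multiples):
        break
assert candidate == 23   # 128 - 23 = 105 = 7 * 15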
+
+     @property
+     def incore_iodist(self):
+         """How to distribute IO server tasks within model tasks."""
+         return self._incore_iodist
+
+     @incore_iodist.setter
+     def incore_iodist(self, value):
+         """How to distribute IO server tasks within model tasks."""
+         allowed = ('begining', 'end', 'scattered', )
+         if not (isinstance(value, str) and
+                 value in allowed):
+             raise ValueError("'{!s}' is not an allowed value ('{:s}')"
+                              .format(value, ', '.join(allowed)))
+         self._incore_iodist = value
+
+     def _set_nam_macro(self, namcontents, namlocal, macro, value):
+         """Set a namelist macro and log it!"""
+         namcontents.setmacro(macro, value)
+         logger.info('Setup macro %s=%s in %s', macro, str(value), namlocal)
+
+     def setup_namelist_delta(self, namcontents, namlocal):
+         """Apply the MPI profile to the local namelist ``namlocal`` with contents ``namcontents``."""
+         namw = False
+         # List of macros actually used in the namelist
+         nam_macros = set()
+         for nam_block in namcontents.values():
+             nam_macros.update(nam_block.macros())
+         # The actual number of tasks involved in computations
+         effective_nprocs = self.nprocs
+         if self.effective_incore_iotasks is not None:
+             effective_nprocs -= self.effective_incore_iotasks
+         # Set up the effective_nprocs related macros
+         nprocs_macros = ('NPROC', 'NBPROC', 'NTASKS')
+         if any([n in nam_macros for n in nprocs_macros]):
+             for n in nprocs_macros:
+                 self._set_nam_macro(namcontents, namlocal, n, effective_nprocs)
+             namw = True
+         if any([n in nam_macros for n in ('NCPROC', 'NDPROC')]):
+             self._set_nam_macro(namcontents, namlocal, 'NCPROC',
+                                 int(self.env.VORTEX_NPRGPNS or effective_nprocs))
+             self._set_nam_macro(namcontents, namlocal, 'NDPROC',
+                                 int(self.env.VORTEX_NPRGPEW or 1))
+             namw = True
+         if 'NAMPAR1' in namcontents:
+             np1 = namcontents['NAMPAR1']
+             for nstr in [x for x in ('NSTRIN', 'NSTROUT') if x in np1]:
+                 if isinstance(np1[nstr], (int, float)) and np1[nstr] > effective_nprocs:
+                     logger.info('Setup %s=%s in NAMPAR1 %s', nstr, effective_nprocs, namlocal)
+                     np1[nstr] = effective_nprocs
+                     namw = True
+         # Deal with partitioning macros
+         namw_p = setup_partitioning_in_namelist(namcontents,
+                                                 effective_nprocs,
+                                                 self.options.get('openmp', 1),
+                                                 namlocal)
+         namw = namw or namw_p
+         # Incore IO tasks
+         if self.effective_incore_iotasks is not None:
+             c_iodist = None
+             if self.incore_iodist is not None:
+                 if self.incore_iodist == 'begining':
+                     c_iodist = -1
+                 elif self.incore_iodist == 'end':
+                     c_iodist = 0
+                 elif self.incore_iodist == 'scattered':
+                     # Ensure that there is at least one task on the first node
+                     c_iodist = min(self.nprocs // self.effective_incore_iotasks,
+                                    self.options.get('nnp', self.nprocs))
+                 else:
+                     raise RuntimeError("incore_iodist '{!s}' is not supported: check your code"
+                                        .format(self.incore_iodist))
+             namw_io = self._nwp_ioserv_setup_namelist(namcontents, namlocal,
+                                                       self.effective_incore_iotasks,
+                                                       computed_iodist_value=c_iodist)
+             namw = namw or namw_io
+         return namw
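# Editorial sketch (hypothetical sizing) of the 'scattered' value computed
# above: with 128 tasks in total, 16 of them IO tasks and 32 tasks per node,
# one IO task is inserted every 8 model tasks, capped at one per node.
nprocs, iotasks, nnp = 128, 16, 32
c_iodist = min(nprocs // iotasks, nnp)
assert c_iodist == 8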
+
+     def clean(self, opts=None):
+         """Finalise the IO server run."""
+         super().clean(opts=opts)
+         if self.incore_iotasks:
+             self._nwp_ioserv_clean()
+
+
+ class MpiNWP(_AbstractMpiNWP):
+     """The kind of binaries used in IFS/Arpege."""
+
+     _footprint = dict(
+         attr = dict(
+             kind = dict(values = ['basicnwp', ]),
+         ),
+     )
+
+
+ @arpifs_obsort_nprocab_binarydeco
+ class MpiNWPObsort(_AbstractMpiNWP):
+     """The kind of binaries used in IFS/Arpege when the ODB OBSSORT code needs to be run."""
+
+     _footprint = dict(
+         attr = dict(
+             kind = dict(values = ['basicnwpobsort', ]),
+         ),
+     )
+
+
+ @arpifs_obsort_nprocab_binarydeco
+ class MpiObsort(mpitools.MpiBinaryBasic):
+     """The kind of binaries used when the ODB OBSSORT code needs to be run."""
+
+     _footprint = dict(
+         attr = dict(
+             kind = dict(values = ['basicobsort', ]),
+         ),
+     )
+
+
+ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
+     """Standard IFS/Arpege NWP IO server."""
+
+     _footprint = dict(
+         attr = dict(
+             kind = dict(values = ['nwpioserv', ]),
+             iolocation = dict(values = [-1, 0], default = 0, optional = True, type = int),
+         )
+     )
+
+     def setup_namelist_delta(self, namcontents, namlocal):
+         """Set up the IO server."""
+         self._nwp_ioserv_setup_namelist(
+             namcontents,
+             namlocal,
+             self.nprocs,
+             computed_iodist_value=(-1 if self.iolocation == 0 else None)
+         )
+
+     def clean(self, opts=None):
+         """Finalise the IO server run."""
+         super().clean(opts=opts)
+         self._nwp_ioserv_clean()