vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
@@ -25,93 +25,124 @@ class MpiAuto(mpitools.MpiTool):
     """MpiTools that uses mpiauto as a proxy to several MPI implementations"""
 
     _footprint = dict(
-        attr = dict(
-            mpiname = dict(
-                values = ['mpiauto', ],
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto",
+                ],
             ),
-            mpiopts = dict(
-                default = None
+            mpiopts=dict(default=None),
+            optprefix=dict(default="--"),
+            optmap=dict(
+                default=footprints.FPDict(
+                    nn="nn",
+                    nnp="nnp",
+                    openmp="openmp",
+                    np="np",
+                    prefixcommand="prefix-command",
+                    allowodddist="mpi-allow-odd-dist",
+                )
             ),
-            optprefix = dict(
-                default = '--'
+            timeoutrestart=dict(
+                info="The number of attempts made by mpiauto",
+                optional=True,
+                default=DelayedEnvValue("MPI_INIT_TIMEOUT_RESTART", 2),
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-            optmap = dict(
-                default = footprints.FPDict(nn='nn', nnp='nnp', openmp='openmp',
-                                            np='np', prefixcommand='prefix-command',
-                                            allowodddist='mpi-allow-odd-dist')
+            sublauncher=dict(
+                info="How to actualy launch the MPI program",
+                values=["srun", "libspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-            timeoutrestart = dict(
-                info = 'The number of attempts made by mpiauto',
-                optional = True,
-                default = DelayedEnvValue('MPI_INIT_TIMEOUT_RESTART', 2),
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mpiwrapstd=dict(
+                values=[
+                    False,
+                ],
             ),
-            sublauncher = dict(
-                info = 'How to actualy launch the MPI program',
-                values = ['srun', 'libspecific'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            bindingmethod=dict(
+                info="How to bind the MPI processes",
+                values=["vortex", "arch", "launcherspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-            mpiwrapstd = dict(
-                values = [False, ],
-            ),
-            bindingmethod = dict(
-                info = 'How to bind the MPI processes',
-                values = ['arch', 'launcherspecific', 'vortex'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mplbased=dict(
+                info="Is the executable based on MPL?",
+                type=bool,
+                optional=True,
+                default=False,
             ),
         )
     )
 
-    _envelope_wrapper_tpl = '@envelope_wrapper_mpiauto.tpl'
-    _envelope_rank_var = 'MPIAUTORANK'
+    _envelope_wrapper_tpl = "envelope_wrapper_mpiauto.tpl"
+    _envelope_rank_var = "MPIAUTORANK"
     _needs_mpilib_specific_mpienv = False
 
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.bindingmethod = "arch" if self.mplbased else "vortex"
+
     def _reshaped_mpiopts(self):
         """Raw list of mpi tool command line options."""
         options = super()._reshaped_mpiopts()
-        options['init-timeout-restart'] = [(self.timeoutrestart, )]
-        if self.sublauncher == 'srun':
-            options['use-slurm-mpi'] = [()]
-        elif self.sublauncher == 'libspecific':
-            options['no-use-slurm-mpi'] = [()]
+        options["init-timeout-restart"] = [(self.timeoutrestart,)]
+        if self.sublauncher == "srun":
+            options["use-slurm-mpi"] = [()]
+        elif self.sublauncher == "libspecific":
+            options["no-use-slurm-mpi"] = [()]
         if self.bindingmethod:
-            for k in ['{:s}use-{:s}-bind'.format(p, t) for p in ('', 'no-')
-                      for t in ('arch', 'slurm', 'intelmpi', 'openmpi')]:
+            for k in [
+                "{:s}use-{:s}-bind".format(p, t)
+                for p in ("", "no-")
+                for t in ("arch", "slurm", "intelmpi", "openmpi")
+            ]:
                 options.pop(k, None)
-            if self.bindingmethod == 'arch':
-                options['use-arch-bind'] = [()]
-            elif self.bindingmethod == 'launcherspecific' and self.sublauncher == 'srun':
-                options['no-use-arch-bind'] = [()]
-                options['use-slurm-bind'] = [()]
-            elif self.bindingmethod == 'launcherspecific':
-                options['no-use-arch-bind'] = [()]
-                for k in ['use-{:s}-bind'.format(t)
-                          for t in ('slurm', 'intelmpi', 'openmpi')]:
+            if self.bindingmethod == "arch":
+                options["use-arch-bind"] = [()]
+            elif (
+                self.bindingmethod == "launcherspecific"
+                and self.sublauncher == "srun"
+            ):
+                options["no-use-arch-bind"] = [()]
+                options["use-slurm-bind"] = [()]
+            elif self.bindingmethod == "launcherspecific":
+                options["no-use-arch-bind"] = [()]
+                for k in [
+                    "use-{:s}-bind".format(t)
+                    for t in ("slurm", "intelmpi", "openmpi")
+                ]:
                     options[k] = [()]
-            elif self.bindingmethod == 'vortex':
-                options['no-use-arch-bind'] = [()]
+            elif self.bindingmethod == "vortex":
+                options["no-use-arch-bind"] = [()]
         return options
 
     def _envelope_fix_envelope_bit(self, e_bit, e_desc):
         """Set the envelope fake binary options."""
-        e_bit.options = {k: v for k, v in e_desc.items()
-                         if k not in ('openmp', )}
-        e_bit.options['prefixcommand'] = self._envelope_wrapper_name
+        e_bit.options = {
+            k: v for k, v in e_desc.items() if k not in ("openmp",)
+        }
+        e_bit.options["prefixcommand"] = self._envelope_wrapper_name
         if self.binaries:
             e_bit.master = self.binaries[0].master
 
     def _set_binaries_hack(self, binaries):
         """Set the list of :class:`MpiBinaryDescription` objects associated with this instance."""
-        if len(binaries) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
-            logger.info("The '{:s}' binding method is not working properly with multiple binaries."
-                        .format(self.bindingmethod))
+        if len(binaries) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with multiple binaries.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod = 'vortex'
+            self.bindingmethod = "vortex"
 
     def _set_binaries_envelope_hack(self, binaries):
         """Tweak the envelope after binaries were setup."""
@@ -122,28 +153,38 @@ class MpiAuto(mpitools.MpiTool):
     def _set_envelope(self, value):
         """Set the envelope description."""
         super()._set_envelope(value)
-        if len(self._envelope) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
-            logger.info("The '{:s}' binding method is not working properly with complex envelopes."
-                        .format(self.bindingmethod))
+        if len(self._envelope) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with complex envelopes.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod = 'vortex'
+            self.bindingmethod = "vortex"
 
     envelope = property(mpitools.MpiTool._get_envelope, _set_envelope)
 
     def _hook_binary_mpiopts(self, binary, options):
         tuned = options.copy()
         # Regular MPI tasks count (the usual...)
-        if 'nnp' in options and 'nn' in options:
-            if options['nn'] * options['nnp'] == options['np']:
+        if "nnp" in options and "nn" in options:
+            if options["nn"] * options["nnp"] == options["np"]:
                 # Remove harmful options
-                del tuned['np']
-                tuned.pop('allowodddist', None)
+                del tuned["np"]
+                tuned.pop("allowodddist", None)
             # that's the strange MPI distribution...
             else:
-                tuned['allowodddist'] = None  # With this, let mpiauto determine its own partitioning
+                tuned["allowodddist"] = (
+                    None  # With this, let mpiauto determine its own partitioning
+                )
         else:
-            msg = ("The provided mpiopts are insufficient to build the command line: {!s}"
-                   .format(options))
+            msg = "The provided mpiopts are insufficient to build the command line: {!s}".format(
+                options
+            )
             raise mpitools.MpiException(msg)
         return tuned
 
@@ -153,43 +194,50 @@ class MpiAuto(mpitools.MpiTool):
         for bin_obj in self.binaries:
             if bin_obj.options:
                 for mpirank in range(ranksidx, ranksidx + bin_obj.nprocs):
-                    prefix_c = bin_obj.options.get('prefixcommand', None)
+                    prefix_c = bin_obj.options.get("prefixcommand", None)
                     if prefix_c:
-                        todostack[mpirank] = (prefix_c,
-                                              [todostack[mpirank][0], ] + todostack[mpirank][1],
-                                              todostack[mpirank][2])
+                        todostack[mpirank] = (
+                            prefix_c,
+                            [
+                                todostack[mpirank][0],
+                            ]
+                            + todostack[mpirank][1],
+                            todostack[mpirank][2],
+                        )
             ranksidx += bin_obj.nprocs
         return todostack, ranks_bsize
 
     def _envelope_mkcmdline_extra(self, cmdl):
         """If possible, add an openmp option when the arch binding method is used."""
 
-        if self.bindingmethod != 'vortex':
-            openmps = {b.options.get('openmp', None) for b in self.binaries}
+        if self.bindingmethod != "vortex":
+            openmps = {b.options.get("openmp", None) for b in self.binaries}
             if len(openmps) > 1:
                 if self.bindingmethod is not None:
-                    logger.warning("Non-uniform OpenMP threads number... Not specifying anything.")
+                    logger.warning(
+                        "Non-uniform OpenMP threads number... Not specifying anything."
+                    )
             else:
                 openmp = openmps.pop() or 1
-                cmdl.append(self.optprefix + self.optmap['openmp'])
+                cmdl.append(self.optprefix + self.optmap["openmp"])
                 cmdl.append(str(openmp))
 
     def setup_environment(self, opts):
         """Last minute fixups."""
         super().setup_environment(opts)
-        if self.bindingmethod in ('arch', 'vortex'):
+        if self.bindingmethod in ("arch", "vortex"):
             # Make sure srun does nothing !
-            self._logged_env_set('SLURM_CPU_BIND', 'none')
+            self._logged_env_set("SLURM_CPU_BIND", "none")
 
     def setup(self, opts=None):
         """Ensure that the prefixcommand has the execution rights."""
         for bin_obj in self.binaries:
-            prefix_c = bin_obj.options.get('prefixcommand', None)
+            prefix_c = bin_obj.options.get("prefixcommand", None)
             if prefix_c is not None:
                 if self.system.path.exists(prefix_c):
                     self.system.xperm(prefix_c, force=True)
                 else:
-                    raise OSError('The prefixcommand do not exists.')
+                    raise OSError("The prefixcommand do not exists.")
         super().setup(opts)
 
 
@@ -200,88 +248,106 @@ class MpiAutoDDT(MpiAuto):
     """
 
     _footprint = dict(
-        attr = dict(
-            mpiname = dict(
-                values = ['mpiauto-ddt', ],
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto-ddt",
+                ],
             ),
         )
     )
 
-    _conf_suffix = '-ddt'
+    _conf_suffix = "-ddt"
 
     def _reshaped_mpiopts(self):
         options = super()._reshaped_mpiopts()
-        if 'prefix-mpirun' in options:
-            raise mpitools.MpiException('It is not allowed to start DDT with another ' +
-                                        'prefix_mpirun command defined: "{:s}"'
-                                        .format(options))
+        if "prefix-mpirun" in options:
+            raise mpitools.MpiException(
+                "It is not allowed to start DDT with another "
+                + 'prefix_mpirun command defined: "{:s}"'.format(options)
+            )
         armtool = ArmForgeTool(self.ticket)
-        options['prefix-mpirun'] = [(' '.join(armtool.ddt_prefix_cmd(
-            sources=self.sources,
-            workdir=self.system.path.dirname(self.binaries[0].master)
-        )), )]
+        options["prefix-mpirun"] = [
+            (
+                " ".join(
+                    armtool.ddt_prefix_cmd(
+                        sources=self.sources,
+                        workdir=self.system.path.dirname(
+                            self.binaries[0].master
+                        ),
+                    )
+                ),
+            )
+        ]
         return options
 
 
 # Some IFS/Arpege specific things :
 
+
 def arpifs_obsort_nprocab_binarydeco(cls):
     """Handle usual IFS/Arpege environment tweaking for OBSORT (nproca & nprocb).
 
     Note: This is a class decorator for class somehow based on MpiBinaryDescription
     """
-    orig_setup_env = getattr(cls, 'setup_environment')
+    orig_setup_env = getattr(cls, "setup_environment")
 
     def setup_environment(self, opts):
         orig_setup_env(self, opts)
-        self.env.NPROCA = int(self.env.NPROCA or
-                              self.nprocs)
-        self.env.NPROCB = int(self.env.NPROCB or
-                              self.nprocs // self.env.NPROCA)
-        logger.info("MPI Setup NPROCA=%d and NPROCB=%d", self.env.NPROCA, self.env.NPROCB)
+        self.env.NPROCA = int(self.env.NPROCA or self.nprocs)
+        self.env.NPROCB = int(
+            self.env.NPROCB or self.nprocs // self.env.NPROCA
+        )
+        logger.info(
+            "MPI Setup NPROCA=%d and NPROCB=%d",
+            self.env.NPROCA,
+            self.env.NPROCB,
+        )
 
-    if hasattr(orig_setup_env, '__doc__'):
+    if hasattr(orig_setup_env, "__doc__"):
         setup_environment.__doc__ = orig_setup_env.__doc__
 
-    setattr(cls, 'setup_environment', setup_environment)
+    setattr(cls, "setup_environment", setup_environment)
     return cls
 
 
 class _NWPIoServerMixin:
+    _NWP_IOSERV_PATTERNS = ("io_serv.*.d",)
 
-    _NWP_IOSERV_PATTERNS = ('io_serv.*.d', )
-
-    def _nwp_ioserv_setup_namelist(self, namcontents, namlocal,
-                                   total_iotasks, computed_iodist_value=None):
+    def _nwp_ioserv_setup_namelist(
+        self, namcontents, namlocal, total_iotasks, computed_iodist_value=None
+    ):
         """Applying IO Server profile on local namelist ``namlocal`` with contents namcontents."""
-        if 'NAMIO_SERV' in namcontents:
-            namio = namcontents['NAMIO_SERV']
+        if "NAMIO_SERV" in namcontents:
+            namio = namcontents["NAMIO_SERV"]
         else:
-            namio = namcontents.newblock('NAMIO_SERV')
+            namio = namcontents.newblock("NAMIO_SERV")
 
         namio.nproc_io = total_iotasks
         if computed_iodist_value is not None:
             namio.idistio = computed_iodist_value
 
-        if 'VORTEX_IOSERVER_METHOD' in self.env:
+        if "VORTEX_IOSERVER_METHOD" in self.env:
             namio.nio_serv_method = self.env.VORTEX_IOSERVER_METHOD
 
-        if 'VORTEX_IOSERVER_BUFMAX' in self.env:
+        if "VORTEX_IOSERVER_BUFMAX" in self.env:
             namio.nio_serv_buf_maxsize = self.env.VORTEX_IOSERVER_BUFMAX
 
-        if 'VORTEX_IOSERVER_MLSERVER' in self.env:
+        if "VORTEX_IOSERVER_MLSERVER" in self.env:
             namio.nmsg_level_server = self.env.VORTEX_IOSERVER_MLSERVER
 
-        if 'VORTEX_IOSERVER_MLCLIENT' in self.env:
+        if "VORTEX_IOSERVER_MLCLIENT" in self.env:
             namio.nmsg_level_client = self.env.VORTEX_IOSERVER_MLCLIENT
 
-        if 'VORTEX_IOSERVER_PROCESS' in self.env:
+        if "VORTEX_IOSERVER_PROCESS" in self.env:
             namio.nprocess_level = self.env.VORTEX_IOSERVER_PROCESS
 
-        if 'VORTEX_IOSERVER_PIOMODEL' in self.env:
+        if "VORTEX_IOSERVER_PIOMODEL" in self.env:
             namio.pioprocr_MDL = self.env.VORTEX_IOSERVER_PIOMODEL
 
-        self.system.highlight('Parallel io server namelist for {:s}'.format(namlocal))
+        self.system.highlight(
+            "Parallel io server namelist for {:s}".format(namlocal)
+        )
         print(namio.dumps())
 
         return True
@@ -297,38 +363,46 @@ class _NWPIoServerMixin:
         """Post-execution cleaning for io server."""
 
         # Old fashion way to make clear that some polling is needed.
-        self.system.touch('io_poll.todo')
+        self.system.touch("io_poll.todo")
 
         # Get a look inside io server output directories according to its own pattern
         ioserv_filelist = set()
         ioserv_prefixes = set()
-        iofile_re = re.compile(r'((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$')
-        self.system.highlight('Dealing with IO directories')
+        iofile_re = re.compile(
+            r"((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$"
+        )
+        self.system.highlight("Dealing with IO directories")
         iodirs = self._nwp_ioserv_iodirs()
         if iodirs:
-            logger.info('List of IO directories: %s', ','.join(iodirs))
-            f_summary = collections.defaultdict(lambda: [' '] * len(iodirs))
+            logger.info("List of IO directories: %s", ",".join(iodirs))
+            f_summary = collections.defaultdict(lambda: [" "] * len(iodirs))
             for i, iodir in enumerate(iodirs):
                 for iofile in self.system.listdir(iodir):
                     zf = iofile_re.match(iofile)
                     if zf:
-                        f_summary[zf.group(1)][i] = '+'
+                        f_summary[zf.group(1)][i] = "+"
                         ioserv_filelist.add((zf.group(1), zf.group(2)))
                         ioserv_prefixes.add(zf.group(2))
                     else:
-                        f_summary[iofile][i] = '?'
+                        f_summary[iofile][i] = "?"
             max_names_len = max([len(iofile) for iofile in f_summary.keys()])
-            fmt_names = '{:' + str(max_names_len) + 's}'
-            logger.info('Data location accross the various IOserver directories:\n%s',
-                        '\n'.join([(fmt_names + ' |{:s}|').format(iofile, ''.join(where))
-                                   for iofile, where in sorted(f_summary.items())]))
+            fmt_names = "{:" + str(max_names_len) + "s}"
+            logger.info(
+                "Data location accross the various IOserver directories:\n%s",
+                "\n".join(
+                    [
+                        (fmt_names + " |{:s}|").format(iofile, "".join(where))
+                        for iofile, where in sorted(f_summary.items())
+                    ]
+                ),
+            )
         else:
-            logger.info('No IO directories were found')
+            logger.info("No IO directories were found")
 
-        if 'GRIBPF' in ioserv_prefixes:
+        if "GRIBPF" in ioserv_prefixes:
             # If GRIB are requested, do not bother with old FA PF files
-            ioserv_prefixes.discard('PF')
-            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != 'PF'}
+            ioserv_prefixes.discard("PF")
+            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != "PF"}
 
         # Touch the output files
         for tgfile, _ in ioserv_filelist:
@@ -336,7 +410,7 @@ class _NWPIoServerMixin:
 
         # Touch the io_poll.todo.PREFIX
         for prefix in ioserv_prefixes:
-            self.system.touch('io_poll.todo.{:s}'.format(prefix))
+            self.system.touch("io_poll.todo.{:s}".format(prefix))
 
 
 class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
@@ -344,7 +418,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
 
     _abstract = True
 
-    def __init__(self, * kargs, **kwargs):
+    def __init__(self, *kargs, **kwargs):
         super().__init__(*kargs, **kwargs)
         self._incore_iotasks = None
         self._effective_incore_iotasks = None
@@ -359,7 +433,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iotasks.setter
     def incore_iotasks(self, value):
         """The number of tasks dedicated to the IO server."""
-        if isinstance(value, str) and value.endswith('%'):
+        if isinstance(value, str) and value.endswith("%"):
             value = math.ceil(self.nprocs * float(value[:-1]) / 100)
         self._incore_iotasks = int(value)
         self._effective_incore_iotasks = None
@@ -373,10 +447,12 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     def incore_iotasks_fixer(self, value):
         """Tweak the number of iotasks in order to respect a given constraints."""
         if not isinstance(value, str):
-            raise ValueError('A string is expected')
-        if value.startswith('nproc_multiple_of_'):
-            self._incore_iotasks_fixer = ('nproc_multiple_of',
-                                          [int(i) for i in value[18:].split(',')])
+            raise ValueError("A string is expected")
+        if value.startswith("nproc_multiple_of_"):
+            self._incore_iotasks_fixer = (
+                "nproc_multiple_of",
+                [int(i) for i in value[18:].split(",")],
+            )
         else:
             raise ValueError('The "{:s}" value is incorrect'.format(value))
 
@@ -391,22 +467,36 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.incore_iotasks is not None:
             if self._effective_incore_iotasks is None:
                 if self.incore_iotasks_fixer is not None:
-                    if self.incore_iotasks_fixer[0] == 'nproc_multiple_of':
+                    if self.incore_iotasks_fixer[0] == "nproc_multiple_of":
                         # Allow for 5% less, or add some tasks
-                        for candidate in interleave(range(self.incore_iotasks, self.nprocs + 1),
-                                                    range(self.incore_iotasks - 1,
-                                                          int(math.ceil(0.95 * self.incore_iotasks)) - 1,
-                                                          -1)):
-                            if any([(self.nprocs - candidate) % multiple == 0
-                                    for multiple in self.incore_iotasks_fixer[1]]):
+                        for candidate in interleave(
+                            range(self.incore_iotasks, self.nprocs + 1),
+                            range(
+                                self.incore_iotasks - 1,
+                                int(math.ceil(0.95 * self.incore_iotasks)) - 1,
+                                -1,
+                            ),
+                        ):
+                            if any(
+                                [
+                                    (self.nprocs - candidate) % multiple == 0
+                                    for multiple in self.incore_iotasks_fixer[
+                                        1
+                                    ]
+                                ]
+                            ):
                                 self._effective_incore_iotasks = candidate
                                 break
                     else:
-                        raise RuntimeError('Unsupported fixer')
+                        raise RuntimeError("Unsupported fixer")
                     if self._effective_incore_iotasks != self.incore_iotasks:
-                        logger.info('The number of IO tasks was updated form %d to %d ' +
-                                    'because of the "%s" fixer', self.incore_iotasks,
-                                    self._effective_incore_iotasks, self.incore_iotasks_fixer[0])
+                        logger.info(
+                            "The number of IO tasks was updated form %d to %d "
+                            + 'because of the "%s" fixer',
+                            self.incore_iotasks,
+                            self._effective_incore_iotasks,
+                            self.incore_iotasks_fixer[0],
+                        )
                 else:
                     self._effective_incore_iotasks = self.incore_iotasks
         return self._effective_incore_iotasks
@@ -421,17 +511,23 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iodist.setter
     def incore_iodist(self, value):
         """How to distribute IO server tasks within model tasks."""
-        allowed = ('begining', 'end', 'scattered',)
-        if not (isinstance(value, str) and
-                value in allowed):
-            raise ValueError("'{!s}' is not an allowed value ('{:s}')"
-                             .format(value, ', '.join(allowed)))
+        allowed = (
+            "begining",
+            "end",
+            "scattered",
+        )
+        if not (isinstance(value, str) and value in allowed):
+            raise ValueError(
+                "'{!s}' is not an allowed value ('{:s}')".format(
+                    value, ", ".join(allowed)
+                )
+            )
         self._incore_iodist = value
 
     def _set_nam_macro(self, namcontents, namlocal, macro, value):
         """Set a namelist macro and log it!"""
         namcontents.setmacro(macro, value)
-        logger.info('Setup macro %s=%s in %s', macro, str(value), namlocal)
+        logger.info("Setup macro %s=%s in %s", macro, str(value), namlocal)
 
     def setup_namelist_delta(self, namcontents, namlocal):
         """Applying MPI profile on local namelist ``namlocal`` with contents namcontents."""
@@ -445,48 +541,74 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.effective_incore_iotasks is not None:
             effective_nprocs -= self.effective_incore_iotasks
         # Set up the effective_nprocs related macros
-        nprocs_macros = ('NPROC', 'NBPROC', 'NTASKS')
+        nprocs_macros = ("NPROC", "NBPROC", "NTASKS")
         if any([n in nam_macros for n in nprocs_macros]):
             for n in nprocs_macros:
                 self._set_nam_macro(namcontents, namlocal, n, effective_nprocs)
             namw = True
-        if any([n in nam_macros for n in ('NCPROC', 'NDPROC')]):
-            self._set_nam_macro(namcontents, namlocal, 'NCPROC',
-                                int(self.env.VORTEX_NPRGPNS or effective_nprocs))
-            self._set_nam_macro(namcontents, namlocal, 'NDPROC',
-                                int(self.env.VORTEX_NPRGPEW or 1))
+        if any([n in nam_macros for n in ("NCPROC", "NDPROC")]):
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NCPROC",
+                int(self.env.VORTEX_NPRGPNS or effective_nprocs),
+            )
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NDPROC",
+                int(self.env.VORTEX_NPRGPEW or 1),
+            )
             namw = True
-        if 'NAMPAR1' in namcontents:
-            np1 = namcontents['NAMPAR1']
-            for nstr in [x for x in ('NSTRIN', 'NSTROUT') if x in np1]:
-                if isinstance(np1[nstr], (int, float)) and np1[nstr] > effective_nprocs:
-                    logger.info('Setup %s=%s in NAMPAR1 %s', nstr, effective_nprocs, namlocal)
+        if "NAMPAR1" in namcontents:
+            np1 = namcontents["NAMPAR1"]
+            for nstr in [x for x in ("NSTRIN", "NSTROUT") if x in np1]:
+                if (
+                    isinstance(np1[nstr], (int, float))
+                    and np1[nstr] > effective_nprocs
+                ):
+                    logger.info(
+                        "Setup %s=%s in NAMPAR1 %s",
+                        nstr,
+                        effective_nprocs,
+                        namlocal,
+                    )
                     np1[nstr] = effective_nprocs
                     namw = True
         # Deal with partitioning macros
-        namw_p = setup_partitioning_in_namelist(namcontents,
-                                                effective_nprocs,
-                                                self.options.get('openmp', 1),
-                                                namlocal)
+        namw_p = setup_partitioning_in_namelist(
+            namcontents,
+            effective_nprocs,
+            self.options.get("openmp", 1),
+            namlocal,
+        )
         namw = namw or namw_p
         # Incore IO tasks
         if self.effective_incore_iotasks is not None:
             c_iodist = None
             if self.incore_iodist is not None:
-                if self.incore_iodist == 'begining':
+                if self.incore_iodist == "begining":
                     c_iodist = -1
-                elif self.incore_iodist == 'end':
+                elif self.incore_iodist == "end":
                     c_iodist = 0
-                elif self.incore_iodist == 'scattered':
+                elif self.incore_iodist == "scattered":
                     # Ensure that there is at least one task on the first node
-                    c_iodist = min(self.nprocs // self.effective_incore_iotasks,
-                                   self.options.get('nnp', self.nprocs))
+                    c_iodist = min(
+                        self.nprocs // self.effective_incore_iotasks,
+                        self.options.get("nnp", self.nprocs),
+                    )
                 else:
-                    raise RuntimeError("incore_iodist '{!s}' is not supported: check your code"
-                                       .format(self.incore_iodist))
-            namw_io = self._nwp_ioserv_setup_namelist(namcontents, namlocal,
-                                                      self.effective_incore_iotasks,
-                                                      computed_iodist_value=c_iodist)
+                    raise RuntimeError(
+                        "incore_iodist '{!s}' is not supported: check your code".format(
+                            self.incore_iodist
+                        )
+                    )
+            namw_io = self._nwp_ioserv_setup_namelist(
+                namcontents,
+                namlocal,
+                self.effective_incore_iotasks,
+                computed_iodist_value=c_iodist,
+            )
             namw = namw or namw_io
         return namw
 
@@ -501,8 +623,12 @@ class MpiNWP(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicnwp', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwp",
+                ]
+            ),
         ),
     )
 
@@ -512,8 +638,12 @@ class MpiNWPObsort(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicnwpobsort', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwpobsort",
+                ]
+            ),
         ),
     )
 
@@ -523,8 +653,12 @@ class MpiObsort(mpitools.MpiBinaryBasic):
     """The kind of binaries used when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicobsort', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicobsort",
+                ]
+            ),
         ),
     )
 
@@ -533,9 +667,15 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
     """Standard IFS/Arpege NWP IO server."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['nwpioserv', ]),
-            iolocation = dict(values = [-1, 0], default = 0, optional = True, type = int),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "nwpioserv",
+                ]
+            ),
+            iolocation=dict(
+                values=[-1, 0], default=0, optional=True, type=int
+            ),
         )
     )
 
@@ -545,7 +685,7 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
             namcontents,
             namlocal,
             self.nprocs,
-            computed_iodist_value=(-1 if self.iolocation == 0 else None)
+            computed_iodist_value=(-1 if self.iolocation == 0 else None),
         )
 
     def clean(self, opts=None):