vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. vortex/__init__.py +59 -45
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +940 -614
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/serversynctools.py +34 -33
  6. vortex/config.py +19 -22
  7. vortex/data/__init__.py +9 -3
  8. vortex/data/abstractstores.py +593 -655
  9. vortex/data/containers.py +217 -162
  10. vortex/data/contents.py +65 -39
  11. vortex/data/executables.py +93 -102
  12. vortex/data/flow.py +40 -34
  13. vortex/data/geometries.py +228 -132
  14. vortex/data/handlers.py +428 -225
  15. vortex/data/outflow.py +15 -15
  16. vortex/data/providers.py +185 -163
  17. vortex/data/resources.py +48 -42
  18. vortex/data/stores.py +544 -413
  19. vortex/gloves.py +114 -87
  20. vortex/layout/__init__.py +1 -8
  21. vortex/layout/contexts.py +150 -84
  22. vortex/layout/dataflow.py +353 -202
  23. vortex/layout/monitor.py +264 -128
  24. vortex/nwp/__init__.py +5 -2
  25. vortex/nwp/algo/__init__.py +14 -5
  26. vortex/nwp/algo/assim.py +205 -151
  27. vortex/nwp/algo/clim.py +683 -517
  28. vortex/nwp/algo/coupling.py +447 -225
  29. vortex/nwp/algo/eda.py +437 -229
  30. vortex/nwp/algo/eps.py +403 -231
  31. vortex/nwp/algo/forecasts.py +420 -271
  32. vortex/nwp/algo/fpserver.py +683 -307
  33. vortex/nwp/algo/ifsnaming.py +205 -145
  34. vortex/nwp/algo/ifsroot.py +210 -122
  35. vortex/nwp/algo/monitoring.py +132 -76
  36. vortex/nwp/algo/mpitools.py +321 -191
  37. vortex/nwp/algo/odbtools.py +617 -353
  38. vortex/nwp/algo/oopsroot.py +449 -273
  39. vortex/nwp/algo/oopstests.py +90 -56
  40. vortex/nwp/algo/request.py +287 -206
  41. vortex/nwp/algo/stdpost.py +878 -522
  42. vortex/nwp/data/__init__.py +22 -4
  43. vortex/nwp/data/assim.py +125 -137
  44. vortex/nwp/data/boundaries.py +121 -68
  45. vortex/nwp/data/climfiles.py +193 -211
  46. vortex/nwp/data/configfiles.py +73 -69
  47. vortex/nwp/data/consts.py +426 -401
  48. vortex/nwp/data/ctpini.py +59 -43
  49. vortex/nwp/data/diagnostics.py +94 -66
  50. vortex/nwp/data/eda.py +50 -51
  51. vortex/nwp/data/eps.py +195 -146
  52. vortex/nwp/data/executables.py +440 -434
  53. vortex/nwp/data/fields.py +63 -48
  54. vortex/nwp/data/gridfiles.py +183 -111
  55. vortex/nwp/data/logs.py +250 -217
  56. vortex/nwp/data/modelstates.py +180 -151
  57. vortex/nwp/data/monitoring.py +72 -99
  58. vortex/nwp/data/namelists.py +254 -202
  59. vortex/nwp/data/obs.py +400 -308
  60. vortex/nwp/data/oopsexec.py +22 -20
  61. vortex/nwp/data/providers.py +90 -65
  62. vortex/nwp/data/query.py +71 -82
  63. vortex/nwp/data/stores.py +49 -36
  64. vortex/nwp/data/surfex.py +136 -137
  65. vortex/nwp/syntax/__init__.py +1 -1
  66. vortex/nwp/syntax/stdattrs.py +173 -111
  67. vortex/nwp/tools/__init__.py +2 -2
  68. vortex/nwp/tools/addons.py +22 -17
  69. vortex/nwp/tools/agt.py +24 -12
  70. vortex/nwp/tools/bdap.py +16 -5
  71. vortex/nwp/tools/bdcp.py +4 -1
  72. vortex/nwp/tools/bdm.py +3 -0
  73. vortex/nwp/tools/bdmp.py +14 -9
  74. vortex/nwp/tools/conftools.py +728 -378
  75. vortex/nwp/tools/drhook.py +12 -8
  76. vortex/nwp/tools/grib.py +65 -39
  77. vortex/nwp/tools/gribdiff.py +22 -17
  78. vortex/nwp/tools/ifstools.py +82 -42
  79. vortex/nwp/tools/igastuff.py +167 -143
  80. vortex/nwp/tools/mars.py +14 -2
  81. vortex/nwp/tools/odb.py +234 -125
  82. vortex/nwp/tools/partitioning.py +61 -37
  83. vortex/nwp/tools/satrad.py +27 -12
  84. vortex/nwp/util/async.py +83 -55
  85. vortex/nwp/util/beacon.py +10 -10
  86. vortex/nwp/util/diffpygram.py +174 -86
  87. vortex/nwp/util/ens.py +144 -63
  88. vortex/nwp/util/hooks.py +30 -19
  89. vortex/nwp/util/taskdeco.py +28 -24
  90. vortex/nwp/util/usepygram.py +278 -172
  91. vortex/nwp/util/usetnt.py +31 -17
  92. vortex/sessions.py +72 -39
  93. vortex/syntax/__init__.py +1 -1
  94. vortex/syntax/stdattrs.py +410 -171
  95. vortex/syntax/stddeco.py +31 -22
  96. vortex/toolbox.py +327 -192
  97. vortex/tools/__init__.py +11 -2
  98. vortex/tools/actions.py +125 -59
  99. vortex/tools/addons.py +111 -92
  100. vortex/tools/arm.py +42 -22
  101. vortex/tools/compression.py +72 -69
  102. vortex/tools/date.py +11 -4
  103. vortex/tools/delayedactions.py +242 -132
  104. vortex/tools/env.py +75 -47
  105. vortex/tools/folder.py +342 -171
  106. vortex/tools/grib.py +311 -149
  107. vortex/tools/lfi.py +423 -216
  108. vortex/tools/listings.py +109 -40
  109. vortex/tools/names.py +218 -156
  110. vortex/tools/net.py +632 -298
  111. vortex/tools/parallelism.py +93 -61
  112. vortex/tools/prestaging.py +55 -31
  113. vortex/tools/schedulers.py +172 -105
  114. vortex/tools/services.py +402 -333
  115. vortex/tools/storage.py +293 -358
  116. vortex/tools/surfex.py +24 -24
  117. vortex/tools/systems.py +1211 -631
  118. vortex/tools/targets.py +156 -100
  119. vortex/util/__init__.py +1 -1
  120. vortex/util/config.py +377 -327
  121. vortex/util/empty.py +2 -2
  122. vortex/util/helpers.py +56 -24
  123. vortex/util/introspection.py +18 -12
  124. vortex/util/iosponge.py +8 -4
  125. vortex/util/roles.py +4 -6
  126. vortex/util/storefunctions.py +39 -13
  127. vortex/util/structs.py +3 -3
  128. vortex/util/worker.py +29 -17
  129. vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
  130. vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
  131. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
  132. vortex/layout/appconf.py +0 -109
  133. vortex/layout/jobs.py +0 -1276
  134. vortex/layout/nodes.py +0 -1424
  135. vortex/layout/subjobs.py +0 -464
  136. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  137. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  138. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
  139. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
@@ -25,93 +25,114 @@ class MpiAuto(mpitools.MpiTool):
     """MpiTools that uses mpiauto as a proxy to several MPI implementations"""
 
     _footprint = dict(
-        attr = dict(
-            mpiname = dict(
-                values = ['mpiauto', ],
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto",
+                ],
             ),
-            mpiopts = dict(
-                default = None
+            mpiopts=dict(default=None),
+            optprefix=dict(default="--"),
+            optmap=dict(
+                default=footprints.FPDict(
+                    nn="nn",
+                    nnp="nnp",
+                    openmp="openmp",
+                    np="np",
+                    prefixcommand="prefix-command",
+                    allowodddist="mpi-allow-odd-dist",
+                )
             ),
-            optprefix = dict(
-                default = '--'
+            timeoutrestart=dict(
+                info="The number of attempts made by mpiauto",
+                optional=True,
+                default=DelayedEnvValue("MPI_INIT_TIMEOUT_RESTART", 2),
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-            optmap = dict(
-                default = footprints.FPDict(nn='nn', nnp='nnp', openmp='openmp',
-                                            np='np', prefixcommand='prefix-command',
-                                            allowodddist='mpi-allow-odd-dist')
+            sublauncher=dict(
+                info="How to actualy launch the MPI program",
+                values=["srun", "libspecific"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
-            timeoutrestart = dict(
-                info = 'The number of attempts made by mpiauto',
-                optional = True,
-                default = DelayedEnvValue('MPI_INIT_TIMEOUT_RESTART', 2),
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            mpiwrapstd=dict(
+                values=[
+                    False,
+                ],
             ),
-            sublauncher = dict(
-                info = 'How to actualy launch the MPI program',
-                values = ['srun', 'libspecific'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
-            ),
-            mpiwrapstd = dict(
-                values = [False, ],
-            ),
-            bindingmethod = dict(
-                info = 'How to bind the MPI processes',
-                values = ['arch', 'launcherspecific', 'vortex'],
-                optional = True,
-                doc_visibility = footprints.doc.visibility.ADVANCED,
-                doc_zorder = -90,
+            bindingmethod=dict(
+                info="How to bind the MPI processes",
+                values=["arch", "launcherspecific", "vortex"],
+                optional=True,
+                doc_visibility=footprints.doc.visibility.ADVANCED,
+                doc_zorder=-90,
             ),
         )
     )
 
-    _envelope_wrapper_tpl = '@envelope_wrapper_mpiauto.tpl'
-    _envelope_rank_var = 'MPIAUTORANK'
+    _envelope_wrapper_tpl = "@envelope_wrapper_mpiauto.tpl"
+    _envelope_rank_var = "MPIAUTORANK"
     _needs_mpilib_specific_mpienv = False
 
     def _reshaped_mpiopts(self):
         """Raw list of mpi tool command line options."""
         options = super()._reshaped_mpiopts()
-        options['init-timeout-restart'] = [(self.timeoutrestart, )]
-        if self.sublauncher == 'srun':
-            options['use-slurm-mpi'] = [()]
-        elif self.sublauncher == 'libspecific':
-            options['no-use-slurm-mpi'] = [()]
+        options["init-timeout-restart"] = [(self.timeoutrestart,)]
+        if self.sublauncher == "srun":
+            options["use-slurm-mpi"] = [()]
+        elif self.sublauncher == "libspecific":
+            options["no-use-slurm-mpi"] = [()]
         if self.bindingmethod:
-            for k in ['{:s}use-{:s}-bind'.format(p, t) for p in ('', 'no-')
-                      for t in ('arch', 'slurm', 'intelmpi', 'openmpi')]:
+            for k in [
+                "{:s}use-{:s}-bind".format(p, t)
+                for p in ("", "no-")
+                for t in ("arch", "slurm", "intelmpi", "openmpi")
+            ]:
                 options.pop(k, None)
-            if self.bindingmethod == 'arch':
-                options['use-arch-bind'] = [()]
-            elif self.bindingmethod == 'launcherspecific' and self.sublauncher == 'srun':
-                options['no-use-arch-bind'] = [()]
-                options['use-slurm-bind'] = [()]
-            elif self.bindingmethod == 'launcherspecific':
-                options['no-use-arch-bind'] = [()]
-                for k in ['use-{:s}-bind'.format(t)
-                          for t in ('slurm', 'intelmpi', 'openmpi')]:
+            if self.bindingmethod == "arch":
+                options["use-arch-bind"] = [()]
+            elif (
+                self.bindingmethod == "launcherspecific"
+                and self.sublauncher == "srun"
+            ):
+                options["no-use-arch-bind"] = [()]
+                options["use-slurm-bind"] = [()]
+            elif self.bindingmethod == "launcherspecific":
+                options["no-use-arch-bind"] = [()]
+                for k in [
+                    "use-{:s}-bind".format(t)
+                    for t in ("slurm", "intelmpi", "openmpi")
+                ]:
                     options[k] = [()]
-            elif self.bindingmethod == 'vortex':
-                options['no-use-arch-bind'] = [()]
+            elif self.bindingmethod == "vortex":
+                options["no-use-arch-bind"] = [()]
         return options
 
     def _envelope_fix_envelope_bit(self, e_bit, e_desc):
         """Set the envelope fake binary options."""
-        e_bit.options = {k: v for k, v in e_desc.items()
-                         if k not in ('openmp', )}
-        e_bit.options['prefixcommand'] = self._envelope_wrapper_name
+        e_bit.options = {
+            k: v for k, v in e_desc.items() if k not in ("openmp",)
+        }
+        e_bit.options["prefixcommand"] = self._envelope_wrapper_name
         if self.binaries:
             e_bit.master = self.binaries[0].master
 
     def _set_binaries_hack(self, binaries):
         """Set the list of :class:`MpiBinaryDescription` objects associated with this instance."""
-        if len(binaries) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
-            logger.info("The '{:s}' binding method is not working properly with multiple binaries."
-                        .format(self.bindingmethod))
+        if len(binaries) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with multiple binaries.".format(
+                    self.bindingmethod
+                )
+            )
            logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod = 'vortex'
+            self.bindingmethod = "vortex"
 
     def _set_binaries_envelope_hack(self, binaries):
         """Tweak the envelope after binaries were setup."""
@@ -122,28 +143,38 @@ class MpiAuto(mpitools.MpiTool):
     def _set_envelope(self, value):
         """Set the envelope description."""
         super()._set_envelope(value)
-        if len(self._envelope) > 1 and self.bindingmethod not in (None, 'arch', 'vortex'):
-            logger.info("The '{:s}' binding method is not working properly with complex envelopes."
-                        .format(self.bindingmethod))
+        if len(self._envelope) > 1 and self.bindingmethod not in (
+            None,
+            "arch",
+            "vortex",
+        ):
+            logger.info(
+                "The '{:s}' binding method is not working properly with complex envelopes.".format(
+                    self.bindingmethod
+                )
+            )
             logger.warning("Resetting the binding method to 'vortex'.")
-            self.bindingmethod = 'vortex'
+            self.bindingmethod = "vortex"
 
     envelope = property(mpitools.MpiTool._get_envelope, _set_envelope)
 
     def _hook_binary_mpiopts(self, binary, options):
         tuned = options.copy()
         # Regular MPI tasks count (the usual...)
-        if 'nnp' in options and 'nn' in options:
-            if options['nn'] * options['nnp'] == options['np']:
+        if "nnp" in options and "nn" in options:
+            if options["nn"] * options["nnp"] == options["np"]:
                 # Remove harmful options
-                del tuned['np']
-                tuned.pop('allowodddist', None)
+                del tuned["np"]
+                tuned.pop("allowodddist", None)
             # that's the strange MPI distribution...
             else:
-                tuned['allowodddist'] = None  # With this, let mpiauto determine its own partitioning
+                tuned["allowodddist"] = (
+                    None  # With this, let mpiauto determine its own partitioning
+                )
         else:
-            msg = ("The provided mpiopts are insufficient to build the command line: {!s}"
-                   .format(options))
+            msg = "The provided mpiopts are insufficient to build the command line: {!s}".format(
+                options
+            )
             raise mpitools.MpiException(msg)
         return tuned
 
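The _hook_binary_mpiopts change above is pure reformatting; the rule itself is unchanged: when nn (nodes) and nnp (tasks per node) multiply out to np, the redundant np is dropped, otherwise the odd distribution is delegated to mpiauto. A minimal sketch of that rule on a plain dict (tune_mpiopts is a hypothetical name, and ValueError stands in for mpitools.MpiException):

def tune_mpiopts(options):
    # Mirror of the nn/nnp/np consistency check shown in the hunk above.
    tuned = dict(options)
    if "nnp" in options and "nn" in options:
        if options["nn"] * options["nnp"] == options["np"]:
            del tuned["np"]  # regular distribution: np is redundant
            tuned.pop("allowodddist", None)
        else:
            tuned["allowodddist"] = None  # odd distribution: let mpiauto decide
    else:
        raise ValueError(
            "The provided mpiopts are insufficient to build the command line: {!s}".format(options)
        )
    return tuned

assert tune_mpiopts({"nn": 4, "nnp": 32, "np": 128}) == {"nn": 4, "nnp": 32}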
@@ -153,43 +184,50 @@ class MpiAuto(mpitools.MpiTool):
         for bin_obj in self.binaries:
             if bin_obj.options:
                 for mpirank in range(ranksidx, ranksidx + bin_obj.nprocs):
-                    prefix_c = bin_obj.options.get('prefixcommand', None)
+                    prefix_c = bin_obj.options.get("prefixcommand", None)
                     if prefix_c:
-                        todostack[mpirank] = (prefix_c,
-                                              [todostack[mpirank][0], ] + todostack[mpirank][1],
-                                              todostack[mpirank][2])
+                        todostack[mpirank] = (
+                            prefix_c,
+                            [
+                                todostack[mpirank][0],
+                            ]
+                            + todostack[mpirank][1],
+                            todostack[mpirank][2],
+                        )
             ranksidx += bin_obj.nprocs
         return todostack, ranks_bsize
 
     def _envelope_mkcmdline_extra(self, cmdl):
         """If possible, add an openmp option when the arch binding method is used."""
 
-        if self.bindingmethod != 'vortex':
-            openmps = {b.options.get('openmp', None) for b in self.binaries}
+        if self.bindingmethod != "vortex":
+            openmps = {b.options.get("openmp", None) for b in self.binaries}
             if len(openmps) > 1:
                 if self.bindingmethod is not None:
-                    logger.warning("Non-uniform OpenMP threads number... Not specifying anything.")
+                    logger.warning(
+                        "Non-uniform OpenMP threads number... Not specifying anything."
+                    )
             else:
                 openmp = openmps.pop() or 1
-                cmdl.append(self.optprefix + self.optmap['openmp'])
+                cmdl.append(self.optprefix + self.optmap["openmp"])
                 cmdl.append(str(openmp))
 
     def setup_environment(self, opts):
         """Last minute fixups."""
         super().setup_environment(opts)
-        if self.bindingmethod in ('arch', 'vortex'):
+        if self.bindingmethod in ("arch", "vortex"):
             # Make sure srun does nothing !
-            self._logged_env_set('SLURM_CPU_BIND', 'none')
+            self._logged_env_set("SLURM_CPU_BIND", "none")
 
     def setup(self, opts=None):
         """Ensure that the prefixcommand has the execution rights."""
         for bin_obj in self.binaries:
-            prefix_c = bin_obj.options.get('prefixcommand', None)
+            prefix_c = bin_obj.options.get("prefixcommand", None)
             if prefix_c is not None:
                 if self.system.path.exists(prefix_c):
                     self.system.xperm(prefix_c, force=True)
                 else:
-                    raise OSError('The prefixcommand do not exists.')
+                    raise OSError("The prefixcommand do not exists.")
         super().setup(opts)
 
 
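In the hunk above, only the layout of the todostack rewrite changed: when a binary declares a prefixcommand, the prefix becomes the executable of that rank's entry and the original binary is pushed onto its argument list, with the third element kept as-is. A minimal sketch, assuming plain (binary, args, extras) tuples and a hypothetical helper name:

def apply_prefixcommand(entry, prefix_c):
    # The per-rank rewrite shown at the top of the hunk above.
    binary, args, extras = entry
    return (prefix_c, [binary] + args, extras)

assert apply_prefixcommand(("./MASTER", ["-v"], None), "./wrapper.sh") == (
    "./wrapper.sh",
    ["./MASTER", "-v"],
    None,
)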
@@ -200,88 +238,106 @@ class MpiAutoDDT(MpiAuto):
     """
 
     _footprint = dict(
-        attr = dict(
-            mpiname = dict(
-                values = ['mpiauto-ddt', ],
+        attr=dict(
+            mpiname=dict(
+                values=[
+                    "mpiauto-ddt",
+                ],
             ),
         )
     )
 
-    _conf_suffix = '-ddt'
+    _conf_suffix = "-ddt"
 
     def _reshaped_mpiopts(self):
         options = super()._reshaped_mpiopts()
-        if 'prefix-mpirun' in options:
-            raise mpitools.MpiException('It is not allowed to start DDT with another ' +
-                                        'prefix_mpirun command defined: "{:s}"'
-                                        .format(options))
+        if "prefix-mpirun" in options:
+            raise mpitools.MpiException(
+                "It is not allowed to start DDT with another "
+                + 'prefix_mpirun command defined: "{:s}"'.format(options)
+            )
         armtool = ArmForgeTool(self.ticket)
-        options['prefix-mpirun'] = [(' '.join(armtool.ddt_prefix_cmd(
-            sources=self.sources,
-            workdir=self.system.path.dirname(self.binaries[0].master)
-        )), )]
+        options["prefix-mpirun"] = [
+            (
+                " ".join(
+                    armtool.ddt_prefix_cmd(
+                        sources=self.sources,
+                        workdir=self.system.path.dirname(
+                            self.binaries[0].master
+                        ),
+                    )
+                ),
+            )
+        ]
         return options
 
 
 # Some IFS/Arpege specific things :
 
+
 def arpifs_obsort_nprocab_binarydeco(cls):
     """Handle usual IFS/Arpege environment tweaking for OBSORT (nproca & nprocb).
 
     Note: This is a class decorator for class somehow based on MpiBinaryDescription
     """
-    orig_setup_env = getattr(cls, 'setup_environment')
+    orig_setup_env = getattr(cls, "setup_environment")
 
     def setup_environment(self, opts):
         orig_setup_env(self, opts)
-        self.env.NPROCA = int(self.env.NPROCA or
-                              self.nprocs)
-        self.env.NPROCB = int(self.env.NPROCB or
-                              self.nprocs // self.env.NPROCA)
-        logger.info("MPI Setup NPROCA=%d and NPROCB=%d", self.env.NPROCA, self.env.NPROCB)
+        self.env.NPROCA = int(self.env.NPROCA or self.nprocs)
+        self.env.NPROCB = int(
+            self.env.NPROCB or self.nprocs // self.env.NPROCA
+        )
+        logger.info(
+            "MPI Setup NPROCA=%d and NPROCB=%d",
+            self.env.NPROCA,
+            self.env.NPROCB,
+        )
 
-    if hasattr(orig_setup_env, '__doc__'):
+    if hasattr(orig_setup_env, "__doc__"):
         setup_environment.__doc__ = orig_setup_env.__doc__
 
-    setattr(cls, 'setup_environment', setup_environment)
+    setattr(cls, "setup_environment", setup_environment)
     return cls
 
 
 class _NWPIoServerMixin:
+    _NWP_IOSERV_PATTERNS = ("io_serv.*.d",)
 
-    _NWP_IOSERV_PATTERNS = ('io_serv.*.d', )
-
-    def _nwp_ioserv_setup_namelist(self, namcontents, namlocal,
-                                   total_iotasks, computed_iodist_value=None):
+    def _nwp_ioserv_setup_namelist(
+        self, namcontents, namlocal, total_iotasks, computed_iodist_value=None
+    ):
         """Applying IO Server profile on local namelist ``namlocal`` with contents namcontents."""
-        if 'NAMIO_SERV' in namcontents:
-            namio = namcontents['NAMIO_SERV']
+        if "NAMIO_SERV" in namcontents:
+            namio = namcontents["NAMIO_SERV"]
         else:
-            namio = namcontents.newblock('NAMIO_SERV')
+            namio = namcontents.newblock("NAMIO_SERV")
 
         namio.nproc_io = total_iotasks
         if computed_iodist_value is not None:
             namio.idistio = computed_iodist_value
 
-        if 'VORTEX_IOSERVER_METHOD' in self.env:
+        if "VORTEX_IOSERVER_METHOD" in self.env:
             namio.nio_serv_method = self.env.VORTEX_IOSERVER_METHOD
 
-        if 'VORTEX_IOSERVER_BUFMAX' in self.env:
+        if "VORTEX_IOSERVER_BUFMAX" in self.env:
             namio.nio_serv_buf_maxsize = self.env.VORTEX_IOSERVER_BUFMAX
 
-        if 'VORTEX_IOSERVER_MLSERVER' in self.env:
+        if "VORTEX_IOSERVER_MLSERVER" in self.env:
             namio.nmsg_level_server = self.env.VORTEX_IOSERVER_MLSERVER
 
-        if 'VORTEX_IOSERVER_MLCLIENT' in self.env:
+        if "VORTEX_IOSERVER_MLCLIENT" in self.env:
             namio.nmsg_level_client = self.env.VORTEX_IOSERVER_MLCLIENT
 
-        if 'VORTEX_IOSERVER_PROCESS' in self.env:
+        if "VORTEX_IOSERVER_PROCESS" in self.env:
             namio.nprocess_level = self.env.VORTEX_IOSERVER_PROCESS
 
-        if 'VORTEX_IOSERVER_PIOMODEL' in self.env:
+        if "VORTEX_IOSERVER_PIOMODEL" in self.env:
             namio.pioprocr_MDL = self.env.VORTEX_IOSERVER_PIOMODEL
 
-        self.system.highlight('Parallel io server namelist for {:s}'.format(namlocal))
+        self.system.highlight(
+            "Parallel io server namelist for {:s}".format(namlocal)
+        )
         print(namio.dumps())
 
         return True
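The decorator arpifs_obsort_nprocab_binarydeco above is reformatted, not changed: NPROCA still defaults to the binary's full task count and NPROCB to nprocs // NPROCA. A minimal sketch of that defaulting rule (the helper name is the editor's, and plain arguments stand in for the environment variables):

def obsort_proc_layout(nprocs, nproca=None, nprocb=None):
    # NPROCA defaults to the full MPI task count, NPROCB to the quotient.
    nproca = int(nproca or nprocs)
    nprocb = int(nprocb or nprocs // nproca)
    return nproca, nprocb

assert obsort_proc_layout(64) == (64, 1)
assert obsort_proc_layout(64, nproca=8) == (8, 8)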
@@ -297,38 +353,46 @@ class _NWPIoServerMixin:
         """Post-execution cleaning for io server."""
 
         # Old fashion way to make clear that some polling is needed.
-        self.system.touch('io_poll.todo')
+        self.system.touch("io_poll.todo")
 
         # Get a look inside io server output directories according to its own pattern
         ioserv_filelist = set()
         ioserv_prefixes = set()
-        iofile_re = re.compile(r'((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$')
-        self.system.highlight('Dealing with IO directories')
+        iofile_re = re.compile(
+            r"((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$"
+        )
+        self.system.highlight("Dealing with IO directories")
         iodirs = self._nwp_ioserv_iodirs()
         if iodirs:
-            logger.info('List of IO directories: %s', ','.join(iodirs))
-            f_summary = collections.defaultdict(lambda: [' '] * len(iodirs))
+            logger.info("List of IO directories: %s", ",".join(iodirs))
+            f_summary = collections.defaultdict(lambda: [" "] * len(iodirs))
             for i, iodir in enumerate(iodirs):
                 for iofile in self.system.listdir(iodir):
                     zf = iofile_re.match(iofile)
                     if zf:
-                        f_summary[zf.group(1)][i] = '+'
+                        f_summary[zf.group(1)][i] = "+"
                         ioserv_filelist.add((zf.group(1), zf.group(2)))
                         ioserv_prefixes.add(zf.group(2))
                     else:
-                        f_summary[iofile][i] = '?'
+                        f_summary[iofile][i] = "?"
             max_names_len = max([len(iofile) for iofile in f_summary.keys()])
-            fmt_names = '{:' + str(max_names_len) + 's}'
-            logger.info('Data location accross the various IOserver directories:\n%s',
-                        '\n'.join([(fmt_names + ' |{:s}|').format(iofile, ''.join(where))
-                                   for iofile, where in sorted(f_summary.items())]))
+            fmt_names = "{:" + str(max_names_len) + "s}"
+            logger.info(
+                "Data location accross the various IOserver directories:\n%s",
+                "\n".join(
+                    [
+                        (fmt_names + " |{:s}|").format(iofile, "".join(where))
+                        for iofile, where in sorted(f_summary.items())
+                    ]
+                ),
+            )
         else:
-            logger.info('No IO directories were found')
+            logger.info("No IO directories were found")
 
-        if 'GRIBPF' in ioserv_prefixes:
+        if "GRIBPF" in ioserv_prefixes:
             # If GRIB are requested, do not bother with old FA PF files
-            ioserv_prefixes.discard('PF')
-            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != 'PF'}
+            ioserv_prefixes.discard("PF")
+            ioserv_filelist = {(f, p) for f, p in ioserv_filelist if p != "PF"}
 
         # Touch the output files
         for tgfile, _ in ioserv_filelist:
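For reference, the io-server filename pattern compiled in the hunk above matches names such as ICMSHARPE+0006.sfx.part3: group 1 is the canonical output file to touch and group 2 is the prefix later used for the io_poll.todo.PREFIX markers. A small self-contained check (the sample filename is the editor's assumption):

import re

iofile_re = re.compile(r"((ICMSH|PF|GRIBPF).*\+\d+(?::\d+)?(?:\.sfx)?)(?:\..+)?$")

m = iofile_re.match("ICMSHARPE+0006.sfx.part3")
assert m.group(1) == "ICMSHARPE+0006.sfx"  # canonical output name
assert m.group(2) == "ICMSH"               # io_poll prefix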
@@ -336,7 +400,7 @@ class _NWPIoServerMixin:
 
         # Touch the io_poll.todo.PREFIX
         for prefix in ioserv_prefixes:
-            self.system.touch('io_poll.todo.{:s}'.format(prefix))
+            self.system.touch("io_poll.todo.{:s}".format(prefix))
 
 
 class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
@@ -344,7 +408,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     _abstract = True
 
 
-    def __init__(self, * kargs, **kwargs):
+    def __init__(self, *kargs, **kwargs):
         super().__init__(*kargs, **kwargs)
         self._incore_iotasks = None
         self._effective_incore_iotasks = None
@@ -359,7 +423,7 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iotasks.setter
     def incore_iotasks(self, value):
         """The number of tasks dedicated to the IO server."""
-        if isinstance(value, str) and value.endswith('%'):
+        if isinstance(value, str) and value.endswith("%"):
             value = math.ceil(self.nprocs * float(value[:-1]) / 100)
         self._incore_iotasks = int(value)
         self._effective_incore_iotasks = None
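The one-line change above keeps the setter's behaviour: a percentage string such as "10%" is converted to an absolute IO-task count with math.ceil over the share of nprocs, and any other value is coerced with int(). A minimal sketch (the function name is the editor's):

import math

def iotasks_from_spec(nprocs, value):
    # "10%" -> ceil(10% of nprocs); plain numbers pass through int().
    if isinstance(value, str) and value.endswith("%"):
        value = math.ceil(nprocs * float(value[:-1]) / 100)
    return int(value)

assert iotasks_from_spec(128, "10%") == 13  # ceil(12.8)
assert iotasks_from_spec(128, 8) == 8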
@@ -373,10 +437,12 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     def incore_iotasks_fixer(self, value):
         """Tweak the number of iotasks in order to respect a given constraints."""
         if not isinstance(value, str):
-            raise ValueError('A string is expected')
-        if value.startswith('nproc_multiple_of_'):
-            self._incore_iotasks_fixer = ('nproc_multiple_of',
-                                          [int(i) for i in value[18:].split(',')])
+            raise ValueError("A string is expected")
+        if value.startswith("nproc_multiple_of_"):
+            self._incore_iotasks_fixer = (
+                "nproc_multiple_of",
+                [int(i) for i in value[18:].split(",")],
+            )
         else:
             raise ValueError('The "{:s}" value is incorrect'.format(value))
 
@@ -391,22 +457,36 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.incore_iotasks is not None:
             if self._effective_incore_iotasks is None:
                 if self.incore_iotasks_fixer is not None:
-                    if self.incore_iotasks_fixer[0] == 'nproc_multiple_of':
+                    if self.incore_iotasks_fixer[0] == "nproc_multiple_of":
                         # Allow for 5% less, or add some tasks
-                        for candidate in interleave(range(self.incore_iotasks, self.nprocs + 1),
-                                                    range(self.incore_iotasks - 1,
-                                                          int(math.ceil(0.95 * self.incore_iotasks)) - 1,
-                                                          -1)):
-                            if any([(self.nprocs - candidate) % multiple == 0
-                                    for multiple in self.incore_iotasks_fixer[1]]):
+                        for candidate in interleave(
+                            range(self.incore_iotasks, self.nprocs + 1),
+                            range(
+                                self.incore_iotasks - 1,
+                                int(math.ceil(0.95 * self.incore_iotasks)) - 1,
+                                -1,
+                            ),
+                        ):
+                            if any(
+                                [
+                                    (self.nprocs - candidate) % multiple == 0
+                                    for multiple in self.incore_iotasks_fixer[
+                                        1
+                                    ]
+                                ]
+                            ):
                                 self._effective_incore_iotasks = candidate
                                 break
                     else:
-                        raise RuntimeError('Unsupported fixer')
+                        raise RuntimeError("Unsupported fixer")
                 if self._effective_incore_iotasks != self.incore_iotasks:
-                    logger.info('The number of IO tasks was updated form %d to %d ' +
-                                'because of the "%s" fixer', self.incore_iotasks,
-                                self._effective_incore_iotasks, self.incore_iotasks_fixer[0])
+                    logger.info(
+                        "The number of IO tasks was updated form %d to %d "
+                        + 'because of the "%s" fixer',
+                        self.incore_iotasks,
+                        self._effective_incore_iotasks,
+                        self.incore_iotasks_fixer[0],
+                    )
             else:
                 self._effective_incore_iotasks = self.incore_iotasks
         return self._effective_incore_iotasks
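The reformatted search above still implements the "nproc_multiple_of_N" fixer: candidates are scanned upwards from the requested IO-task count and, interleaved, downwards to 95% of it, until nprocs minus the candidate is divisible by one of the configured multiples. A minimal sketch, with a stand-in interleave() for the helper the real code imports:

import math
from itertools import zip_longest

_SENTINEL = object()

def interleave(*iterables):
    # Alternate items from the given iterables, skipping exhausted ones.
    for bundle in zip_longest(*iterables, fillvalue=_SENTINEL):
        for item in bundle:
            if item is not _SENTINEL:
                yield item

def fix_iotasks(nprocs, iotasks, multiples):
    # First candidate (up, then down to 95% of iotasks, interleaved) such
    # that nprocs - candidate is a multiple of one of the given values.
    for candidate in interleave(
        range(iotasks, nprocs + 1),
        range(iotasks - 1, int(math.ceil(0.95 * iotasks)) - 1, -1),
    ):
        if any((nprocs - candidate) % m == 0 for m in multiples):
            return candidate
    return None

assert fix_iotasks(130, 10, [8]) == 10  # 130 - 10 = 120 is a multiple of 8
assert fix_iotasks(129, 10, [8]) == 17  # first c with 129 - c divisible by 8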
@@ -421,17 +501,23 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
     @incore_iodist.setter
     def incore_iodist(self, value):
         """How to distribute IO server tasks within model tasks."""
-        allowed = ('begining', 'end', 'scattered',)
-        if not (isinstance(value, str) and
-                value in allowed):
-            raise ValueError("'{!s}' is not an allowed value ('{:s}')"
-                             .format(value, ', '.join(allowed)))
+        allowed = (
+            "begining",
+            "end",
+            "scattered",
+        )
+        if not (isinstance(value, str) and value in allowed):
+            raise ValueError(
+                "'{!s}' is not an allowed value ('{:s}')".format(
+                    value, ", ".join(allowed)
+                )
+            )
         self._incore_iodist = value
 
     def _set_nam_macro(self, namcontents, namlocal, macro, value):
         """Set a namelist macro and log it!"""
         namcontents.setmacro(macro, value)
-        logger.info('Setup macro %s=%s in %s', macro, str(value), namlocal)
+        logger.info("Setup macro %s=%s in %s", macro, str(value), namlocal)
 
     def setup_namelist_delta(self, namcontents, namlocal):
         """Applying MPI profile on local namelist ``namlocal`` with contents namcontents."""
@@ -445,48 +531,74 @@ class _AbstractMpiNWP(mpitools.MpiBinaryBasic, _NWPIoServerMixin):
         if self.effective_incore_iotasks is not None:
             effective_nprocs -= self.effective_incore_iotasks
         # Set up the effective_nprocs related macros
-        nprocs_macros = ('NPROC', 'NBPROC', 'NTASKS')
+        nprocs_macros = ("NPROC", "NBPROC", "NTASKS")
         if any([n in nam_macros for n in nprocs_macros]):
             for n in nprocs_macros:
                 self._set_nam_macro(namcontents, namlocal, n, effective_nprocs)
             namw = True
-        if any([n in nam_macros for n in ('NCPROC', 'NDPROC')]):
-            self._set_nam_macro(namcontents, namlocal, 'NCPROC',
-                                int(self.env.VORTEX_NPRGPNS or effective_nprocs))
-            self._set_nam_macro(namcontents, namlocal, 'NDPROC',
-                                int(self.env.VORTEX_NPRGPEW or 1))
+        if any([n in nam_macros for n in ("NCPROC", "NDPROC")]):
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NCPROC",
+                int(self.env.VORTEX_NPRGPNS or effective_nprocs),
+            )
+            self._set_nam_macro(
+                namcontents,
+                namlocal,
+                "NDPROC",
+                int(self.env.VORTEX_NPRGPEW or 1),
+            )
             namw = True
-        if 'NAMPAR1' in namcontents:
-            np1 = namcontents['NAMPAR1']
-            for nstr in [x for x in ('NSTRIN', 'NSTROUT') if x in np1]:
-                if isinstance(np1[nstr], (int, float)) and np1[nstr] > effective_nprocs:
-                    logger.info('Setup %s=%s in NAMPAR1 %s', nstr, effective_nprocs, namlocal)
+        if "NAMPAR1" in namcontents:
+            np1 = namcontents["NAMPAR1"]
+            for nstr in [x for x in ("NSTRIN", "NSTROUT") if x in np1]:
+                if (
+                    isinstance(np1[nstr], (int, float))
+                    and np1[nstr] > effective_nprocs
+                ):
+                    logger.info(
+                        "Setup %s=%s in NAMPAR1 %s",
+                        nstr,
+                        effective_nprocs,
+                        namlocal,
+                    )
                     np1[nstr] = effective_nprocs
                     namw = True
         # Deal with partitioning macros
-        namw_p = setup_partitioning_in_namelist(namcontents,
-                                                effective_nprocs,
-                                                self.options.get('openmp', 1),
-                                                namlocal)
+        namw_p = setup_partitioning_in_namelist(
+            namcontents,
+            effective_nprocs,
+            self.options.get("openmp", 1),
+            namlocal,
+        )
         namw = namw or namw_p
         # Incore IO tasks
         if self.effective_incore_iotasks is not None:
             c_iodist = None
             if self.incore_iodist is not None:
-                if self.incore_iodist == 'begining':
+                if self.incore_iodist == "begining":
                     c_iodist = -1
-                elif self.incore_iodist == 'end':
+                elif self.incore_iodist == "end":
                     c_iodist = 0
-                elif self.incore_iodist == 'scattered':
+                elif self.incore_iodist == "scattered":
                     # Ensure that there is at least one task on the first node
-                    c_iodist = min(self.nprocs // self.effective_incore_iotasks,
-                                   self.options.get('nnp', self.nprocs))
+                    c_iodist = min(
+                        self.nprocs // self.effective_incore_iotasks,
+                        self.options.get("nnp", self.nprocs),
+                    )
                 else:
-                    raise RuntimeError("incore_iodist '{!s}' is not supported: check your code"
-                                       .format(self.incore_iodist))
-            namw_io = self._nwp_ioserv_setup_namelist(namcontents, namlocal,
-                                                      self.effective_incore_iotasks,
-                                                      computed_iodist_value=c_iodist)
+                    raise RuntimeError(
+                        "incore_iodist '{!s}' is not supported: check your code".format(
+                            self.incore_iodist
+                        )
+                    )
+            namw_io = self._nwp_ioserv_setup_namelist(
+                namcontents,
+                namlocal,
+                self.effective_incore_iotasks,
+                computed_iodist_value=c_iodist,
+            )
             namw = namw or namw_io
         return namw
 
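The incore_iodist mapping in the hunk above is unchanged by the reformatting: "begining" (spelled as in the source) yields -1, "end" yields 0, and "scattered" spreads the IO tasks while keeping at least one on the first node. A minimal sketch of the computed value (hypothetical helper name; nnp defaults to nprocs when absent):

def iodist_value(incore_iodist, nprocs, iotasks, nnp=None):
    # Value handed to _nwp_ioserv_setup_namelist as computed_iodist_value.
    if incore_iodist == "begining":
        return -1
    if incore_iodist == "end":
        return 0
    if incore_iodist == "scattered":
        # keep at least one IO task on the first node
        return min(nprocs // iotasks, nnp if nnp is not None else nprocs)
    raise RuntimeError(
        "incore_iodist '{!s}' is not supported: check your code".format(incore_iodist)
    )

assert iodist_value("scattered", nprocs=128, iotasks=8, nnp=32) == 16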
@@ -501,8 +613,12 @@ class MpiNWP(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicnwp', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwp",
+                ]
+            ),
         ),
     )
 
@@ -512,8 +628,12 @@ class MpiNWPObsort(_AbstractMpiNWP):
     """The kind of binaries used in IFS/Arpege when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicnwpobsort', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicnwpobsort",
+                ]
+            ),
         ),
     )
 
@@ -523,8 +643,12 @@ class MpiObsort(mpitools.MpiBinaryBasic):
     """The kind of binaries used when the ODB OBSSORT code needs to be run."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['basicobsort', ]),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "basicobsort",
+                ]
+            ),
         ),
     )
 
@@ -533,9 +657,15 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
     """Standard IFS/Arpege NWP IO server."""
 
     _footprint = dict(
-        attr = dict(
-            kind = dict(values = ['nwpioserv', ]),
-            iolocation = dict(values = [-1, 0], default = 0, optional = True, type = int),
+        attr=dict(
+            kind=dict(
+                values=[
+                    "nwpioserv",
+                ]
+            ),
+            iolocation=dict(
+                values=[-1, 0], default=0, optional=True, type=int
+            ),
         )
     )
 
@@ -545,7 +675,7 @@ class MpiNWPIO(mpitools.MpiBinaryIOServer, _NWPIoServerMixin):
             namcontents,
             namlocal,
             self.nprocs,
-            computed_iodist_value=(-1 if self.iolocation == 0 else None)
+            computed_iodist_value=(-1 if self.iolocation == 0 else None),
         )
 
     def clean(self, opts=None):