vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/tools/parallelism.py
@@ -34,13 +34,13 @@ class TaylorVortexWorker(taylorism.Worker):
 
     _abstract = True
     _footprint = dict(
-        attr = dict(
-            kind = dict(),
-            taskdebug = dict(
-                info = 'Dump all stdout/stderr to a file (in real live !)',
-                type = bool,
-                default = False,
-                optional = True,
+        attr=dict(
+            kind=dict(),
+            taskdebug=dict(
+                info="Dump all stdout/stderr to a file (in real live !)",
+                type=bool,
+                default=False,
+                optional=True,
             ),
         )
     )
@@ -62,7 +62,9 @@ class TaylorVortexWorker(taylorism.Worker):
     def _task(self, **kwargs):
         """Should not be overridden anymore: see :meth:`vortex_task`."""
         self._vortex_shortcuts()
-        with ParallelSilencer(self.context, self.name, debug=self.taskdebug) as psi:
+        with ParallelSilencer(
+            self.context, self.name, debug=self.taskdebug
+        ) as psi:
             rc = self.vortex_task(**kwargs)
             psi_rc = psi.export_result()
             return self._vortex_rc_wrapup(rc, psi_rc)
@@ -77,30 +79,29 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
 
     _abstract = True
     _footprint = dict(
-        attr = dict(
-            progname = dict(
+        attr=dict(
+            progname=dict(),
+            progargs=dict(
+                type=footprints.FPList,
+                default=footprints.FPList(),
+                optional=True,
             ),
-            progargs = dict(
-                type = footprints.FPList,
-                default = footprints.FPList(),
-                optional = True,
+            progtaskset=dict(
+                info="Topology/Method to set up the CPU affinity of the child task.",
+                default=None,
+                optional=True,
             ),
-            progtaskset = dict(
-                info = "Topology/Method to set up the CPU affinity of the child task.",
-                default = None,
-                optional = True,
+            progtaskset_bsize=dict(
+                info="The number of threads used by one task",
+                type=int,
+                default=1,
+                optional=True,
             ),
-            progtaskset_bsize = dict(
-                info = 'The number of threads used by one task',
-                type = int,
-                default = 1,
-                optional = True
-            ),
-            progenvdelta = dict(
-                info = 'Any alteration to environment variables',
-                type = footprints.FPDict,
-                default = footprints.FPDict({}),
-                optional = True
+            progenvdelta=dict(
+                info="Any alteration to environment variables",
+                type=footprints.FPDict,
+                default=footprints.FPDict({}),
+                optional=True,
             ),
         )
     )
@@ -115,22 +116,37 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
         :param stdoutfile: Path to the file where the standard/error output will
             be saved.
         """
-        tmpio = open(stdoutfile, 'wb')
+        tmpio = open(stdoutfile, "wb")
         try:
-            self.system.softlink('/dev/null', 'core')
+            self.system.softlink("/dev/null", "core")
         except FileExistsError:
             pass
         self.local_spawn_hook()
         self.system.default_target.spawn_hook(self.system)
         logger.info("The program stdout/err will be saved to %s", stdoutfile)
-        logger.info("Starting the following command: %s (taskset=%s, id=%d)",
-                    " ".join([self.progname, ] + self.progargs),
-                    str(self.progtaskset), self.scheduler_ticket)
-        with self.system.env.delta_context(** self.progenvdelta):
-            self.system.spawn([self.progname, ] + self.progargs, output=tmpio,
-                              fatal=True, taskset=self.progtaskset,
-                              taskset_id=self.scheduler_ticket,
-                              taskset_bsize=self.progtaskset_bsize)
+        logger.info(
+            "Starting the following command: %s (taskset=%s, id=%d)",
+            " ".join(
+                [
+                    self.progname,
+                ]
+                + self.progargs
+            ),
+            str(self.progtaskset),
+            self.scheduler_ticket,
+        )
+        with self.system.env.delta_context(**self.progenvdelta):
+            self.system.spawn(
+                [
+                    self.progname,
+                ]
+                + self.progargs,
+                output=tmpio,
+                fatal=True,
+                taskset=self.progtaskset,
+                taskset_id=self.scheduler_ticket,
+                taskset_bsize=self.progtaskset_bsize,
+            )
 
     def delayed_error_local_spawn(self, stdoutfile, rcdict):
         """local_spawn wrapped in a try/except in order to trigger delayed exceptions."""
@@ -138,13 +154,16 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
             self.local_spawn(stdoutfile)
         except ExecutionError as e:
             logger.error("The execution failed.")
-            rcdict['rc'] = e
+            rcdict["rc"] = e
         return rcdict
 
     def find_namelists(self, opts=None): # @UnusedVariable
         """Find any namelists candidates in actual context inputs."""
-        namcandidates = [x.rh for x in self.context.sequence.effective_inputs(kind='namelist')]
-        self.system.subtitle('Namelist candidates')
+        namcandidates = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelist")
+        ]
+        self.system.subtitle("Namelist candidates")
         for nam in namcandidates:
             nam.quickview()
 
@@ -158,9 +177,11 @@ class TeeLikeStringIO(io.StringIO):
         super().__init__()
         self._tees = set()
 
-    def record_teefile(self, filename, mode='w', line_buffering=True):
+    def record_teefile(self, filename, mode="w", line_buffering=True):
         """Add **filename** to the set of extra logfiles."""
-        self._tees.add(open(filename, mode=mode, buffering=int(line_buffering)))
+        self._tees.add(
+            open(filename, mode=mode, buffering=int(line_buffering))
+        )
 
     def discard_tees(self):
         """Dismiss all of the extra logfiles."""
@@ -174,7 +195,7 @@ class TeeLikeStringIO(io.StringIO):
             teeio.write(t)
         super().write(t)
 
-    def filedump(self, filename, mode='w'):
+    def filedump(self, filename, mode="w"):
         """Dump all of the captured data to **filename**."""
         with open(filename, mode=mode) as fhdump:
             self.seek(0)
@@ -207,8 +228,9 @@ class ParallelSilencer:
         """
         self._ctx = context
         self._taskdebug = debug
-        self._debugfile = '{:s}_{:s}_stdeo.txt'.format(taskname,
-                                                       date.now().ymdhms)
+        self._debugfile = "{:s}_{:s}_stdeo.txt".format(
+            taskname, date.now().ymdhms
+        )
         self._ctx_r = None
         self._io_r = io.StringIO()
         # Other temporary stuff
@@ -240,7 +262,9 @@ class ParallelSilencer:
             r_logger.addHandler(self._stream_h)
             for a_handler in self._removed_h[r_logger]:
                 r_logger.removeHandler(a_handler)
-        for a_logger in [logging.getLogger(x) for x in loggers.lognames | loggers.roots]:
+        for a_logger in [
+            logging.getLogger(x) for x in loggers.lognames | loggers.roots
+        ]:
             self._removed_h[a_logger] = list(a_logger.handlers)
             for a_handler in self._removed_h[a_logger]:
                 a_logger.removeHandler(a_handler)
@@ -254,8 +278,11 @@ class ParallelSilencer:
     def __exit__(self, exctype, excvalue, exctb): # @UnusedVariable
         """The end of a context."""
         self._stop_recording()
-        if (exctype is not None and
-                not self._taskdebug and self._io_r is not None):
+        if (
+            exctype is not None
+            and not self._taskdebug
+            and self._io_r is not None
+        ):
             # Emergency dump of the outputs (even with debug=False) !
             self._io_r.filedump(self._debugfile)
 
@@ -269,7 +296,9 @@ class ParallelSilencer:
             for a_handler in self._removed_h[r_logger]:
                 r_logger.addHandler(a_handler)
             r_logger.removeHandler(self._stream_h)
-        for a_logger in [logging.getLogger(x) for x in loggers.roots | loggers.lognames]:
+        for a_logger in [
+            logging.getLogger(x) for x in loggers.roots | loggers.lognames
+        ]:
             for a_handler in self._removed_h.get(a_logger, ()):
                 a_logger.addHandler(a_handler)
         # flush
@@ -289,8 +318,9 @@ class ParallelSilencer:
         """
         self._stop_recording()
         self._io_r.seek(0)
-        return dict(context_record=self._ctx_r,
-                    stdoe_record=self._io_r.readlines())
+        return dict(
+            context_record=self._ctx_r, stdoe_record=self._io_r.readlines()
+        )
 
 
 class ParallelResultParser:
@@ -319,18 +349,20 @@ class ParallelResultParser:
             raise res
         else:
             sys.stdout.flush()
-            logger.info('Parallel processing results for %s', res['name'])
+            logger.info("Parallel processing results for %s", res["name"])
             # Update the context
-            logger.info('... Updating the current context ...')
-            res['report']['context_record'].replay_in(self.context)
+            logger.info("... Updating the current context ...")
+            res["report"]["context_record"].replay_in(self.context)
             # Display the stdout
-            if res['report']['stdoe_record']:
-                logger.info('... Dump of the mixed standard/error output generated by the subprocess ...')
-                for l in res['report']['stdoe_record']:
+            if res["report"]["stdoe_record"]:
+                logger.info(
+                    "... Dump of the mixed standard/error output generated by the subprocess ..."
+                )
+                for l in res["report"]["stdoe_record"]:
                     sys.stdout.write(l)
-            logger.info("... That's all for all for %s ...", res['name'])
+            logger.info("... That's all for all for %s ...", res["name"])
 
-        return res['report'].get('rc', True)
+        return res["report"].get("rc", True)
 
     def __call__(self, res):
         return self.slurp(res)
vortex/tools/prestaging.py
@@ -20,7 +20,9 @@ __all__ = []
 logger = loggers.getLogger(__name__)
 
 #: Definition of a named tuple PrestagingPriorityTuple
-PrestagingPriorityTuple = namedtuple('PrestagingPriorityTuple', ['urgent', 'normal', 'low'])
+PrestagingPriorityTuple = namedtuple(
+    "PrestagingPriorityTuple", ["urgent", "normal", "low"]
+)
 
 #: Predefined PrestagingPriorities values for urgent, normal and low
 prestaging_p = PrestagingPriorityTuple(urgent=99, normal=50, low=0)
@@ -36,23 +38,20 @@ class PrestagingTool(footprints.FootprintBase, Catalog):
     """Abstract class that deal with pre-staging for a given storage target."""
 
     _abstract = True
-    _collector = ('prestagingtool',)
+    _collector = ("prestagingtool",)
     _footprint = dict(
-        info = "Abstract class that deal with pre-staging for a given storage target.",
-        attr = dict(
-            system = dict(
-                info = "The current system object",
-                type = OSExtended
+        info="Abstract class that deal with pre-staging for a given storage target.",
+        attr=dict(
+            system=dict(info="The current system object", type=OSExtended),
+            issuerkind=dict(
+                info="The kind of store issuing the prestaging request"
             ),
-            issuerkind = dict(
-                info = 'The kind of store issuing the prestaging request'
+            priority=dict(
+                info="The prestaging request priority",
+                type=int,
+                values=list(prestaging_p),
             ),
-            priority = dict(
-                info = 'The prestaging request priority',
-                type = int,
-                values = list(prestaging_p)
-            )
-        )
+        ),
     )
 
     def __init__(self, *kargs, **kwargs):
@@ -66,12 +65,14 @@ class PrestagingTool(footprints.FootprintBase, Catalog):
 
     def describe(self, fulldump=False):
         """Print the object's characteristics and content."""
-        res = 'PrestagingTool object of class: {!s}\n'.format(self.__class__)
+        res = "PrestagingTool object of class: {!s}\n".format(self.__class__)
         for k, v in self.footprint_as_shallow_dict().items():
-            res += ' * {:s}: {!s}\n'.format(k, v)
+            res += " * {:s}: {!s}\n".format(k, v)
         if fulldump:
-            res += '\n * Todo list:\n'
-            res += '\n'.join([' - {:s}'.format(item) for item in sorted(self.items())])
+            res += "\n * Todo list:\n"
+            res += "\n".join(
+                [" - {:s}".format(item) for item in sorted(self.items())]
+            )
         return res
 
     def flush(self, email=None):
@@ -113,24 +114,35 @@ class PrivatePrestagingHub:
         # Prestaging tool descriptions
         myptool_desc = self.prestagingtools_default_opts.copy()
         myptool_desc.update(kwargs)
-        myptool_desc['priority'] = priority
-        myptool_desc['system'] = self._sh
+        myptool_desc["priority"] = priority
+        myptool_desc["system"] = self._sh
         myptool = None
         # Scan pre-existing prestaging tools to find a suitable one
         for ptool in self._prestagingtools:
-            if ptool.footprint_reusable() and ptool.footprint_compatible(myptool_desc):
-                logger.debug("Re-usable prestaging tool found: %s", lightdump(myptool_desc))
+            if ptool.footprint_reusable() and ptool.footprint_compatible(
+                myptool_desc
+            ):
+                logger.debug(
+                    "Re-usable prestaging tool found: %s",
+                    lightdump(myptool_desc),
+                )
                 myptool = ptool
                 break
         # If necessary, create a new one
         if myptool is None:
             myptool = fpx.prestagingtool(_emptywarning=False, **myptool_desc)
             if myptool is not None:
-                logger.debug("Fresh prestaging tool created: %s", lightdump(myptool_desc))
+                logger.debug(
+                    "Fresh prestaging tool created: %s",
+                    lightdump(myptool_desc),
+                )
                 self._prestagingtools.add(myptool)
         # Let's role
         if myptool is None:
-            logger.debug("Unable to perform prestaging with: %s", lightdump(myptool_desc))
+            logger.debug(
+                "Unable to perform prestaging with: %s",
+                lightdump(myptool_desc),
+            )
         else:
             logger.debug("Prestaging requested accepted for: %s", location)
             myptool.add(location)
@@ -143,13 +155,21 @@ class PrivatePrestagingHub:
         return todo
 
     def __repr__(self, *args, **kwargs):
-        return ('{:s} | n_prestagingtools={:d}>'
-                .format(super().__repr__().rstrip('>'),
-                        len(self._prestagingtools)))
+        return "{:s} | n_prestagingtools={:d}>".format(
+            super().__repr__().rstrip(">"), len(self._prestagingtools)
+        )
 
     def __str__(self):
-        return (repr(self) + "\n\n" +
-                "\n\n".join([ptool.describe(fulldump=True) for ptool in self._prestagingtools]))
+        return (
+            repr(self)
+            + "\n\n"
+            + "\n\n".join(
+                [
+                    ptool.describe(fulldump=True)
+                    for ptool in self._prestagingtools
+                ]
+            )
+        )
 
     def flush(self, priority_threshold=prestaging_p.low):
         """Actually send the pre-staging request to the appropriate location.
@@ -163,7 +183,10 @@ class PrivatePrestagingHub:
             if rc:
                 self._prestagingtools.discard(ptool)
             else:
-                logger.error("Something went wrong when flushing the %s prestaging tool", ptool)
+                logger.error(
+                    "Something went wrong when flushing the %s prestaging tool",
+                    ptool,
+                )
 
     def clear(self, priority_threshold=prestaging_p.low):
         """Erase the pre-staging requests list.
@@ -183,4 +206,5 @@ class PrestagingHub(PrivatePrestagingHub, getbytag.GetByTag):
     Therefore, a *tag* attribute needs to be specified when building/retrieving
     an object of this class.
     """
+
     pass