vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (139)
  1. vortex/__init__.py +59 -45
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +940 -614
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/serversynctools.py +34 -33
  6. vortex/config.py +19 -22
  7. vortex/data/__init__.py +9 -3
  8. vortex/data/abstractstores.py +593 -655
  9. vortex/data/containers.py +217 -162
  10. vortex/data/contents.py +65 -39
  11. vortex/data/executables.py +93 -102
  12. vortex/data/flow.py +40 -34
  13. vortex/data/geometries.py +228 -132
  14. vortex/data/handlers.py +428 -225
  15. vortex/data/outflow.py +15 -15
  16. vortex/data/providers.py +185 -163
  17. vortex/data/resources.py +48 -42
  18. vortex/data/stores.py +544 -413
  19. vortex/gloves.py +114 -87
  20. vortex/layout/__init__.py +1 -8
  21. vortex/layout/contexts.py +150 -84
  22. vortex/layout/dataflow.py +353 -202
  23. vortex/layout/monitor.py +264 -128
  24. vortex/nwp/__init__.py +5 -2
  25. vortex/nwp/algo/__init__.py +14 -5
  26. vortex/nwp/algo/assim.py +205 -151
  27. vortex/nwp/algo/clim.py +683 -517
  28. vortex/nwp/algo/coupling.py +447 -225
  29. vortex/nwp/algo/eda.py +437 -229
  30. vortex/nwp/algo/eps.py +403 -231
  31. vortex/nwp/algo/forecasts.py +420 -271
  32. vortex/nwp/algo/fpserver.py +683 -307
  33. vortex/nwp/algo/ifsnaming.py +205 -145
  34. vortex/nwp/algo/ifsroot.py +210 -122
  35. vortex/nwp/algo/monitoring.py +132 -76
  36. vortex/nwp/algo/mpitools.py +321 -191
  37. vortex/nwp/algo/odbtools.py +617 -353
  38. vortex/nwp/algo/oopsroot.py +449 -273
  39. vortex/nwp/algo/oopstests.py +90 -56
  40. vortex/nwp/algo/request.py +287 -206
  41. vortex/nwp/algo/stdpost.py +878 -522
  42. vortex/nwp/data/__init__.py +22 -4
  43. vortex/nwp/data/assim.py +125 -137
  44. vortex/nwp/data/boundaries.py +121 -68
  45. vortex/nwp/data/climfiles.py +193 -211
  46. vortex/nwp/data/configfiles.py +73 -69
  47. vortex/nwp/data/consts.py +426 -401
  48. vortex/nwp/data/ctpini.py +59 -43
  49. vortex/nwp/data/diagnostics.py +94 -66
  50. vortex/nwp/data/eda.py +50 -51
  51. vortex/nwp/data/eps.py +195 -146
  52. vortex/nwp/data/executables.py +440 -434
  53. vortex/nwp/data/fields.py +63 -48
  54. vortex/nwp/data/gridfiles.py +183 -111
  55. vortex/nwp/data/logs.py +250 -217
  56. vortex/nwp/data/modelstates.py +180 -151
  57. vortex/nwp/data/monitoring.py +72 -99
  58. vortex/nwp/data/namelists.py +254 -202
  59. vortex/nwp/data/obs.py +400 -308
  60. vortex/nwp/data/oopsexec.py +22 -20
  61. vortex/nwp/data/providers.py +90 -65
  62. vortex/nwp/data/query.py +71 -82
  63. vortex/nwp/data/stores.py +49 -36
  64. vortex/nwp/data/surfex.py +136 -137
  65. vortex/nwp/syntax/__init__.py +1 -1
  66. vortex/nwp/syntax/stdattrs.py +173 -111
  67. vortex/nwp/tools/__init__.py +2 -2
  68. vortex/nwp/tools/addons.py +22 -17
  69. vortex/nwp/tools/agt.py +24 -12
  70. vortex/nwp/tools/bdap.py +16 -5
  71. vortex/nwp/tools/bdcp.py +4 -1
  72. vortex/nwp/tools/bdm.py +3 -0
  73. vortex/nwp/tools/bdmp.py +14 -9
  74. vortex/nwp/tools/conftools.py +728 -378
  75. vortex/nwp/tools/drhook.py +12 -8
  76. vortex/nwp/tools/grib.py +65 -39
  77. vortex/nwp/tools/gribdiff.py +22 -17
  78. vortex/nwp/tools/ifstools.py +82 -42
  79. vortex/nwp/tools/igastuff.py +167 -143
  80. vortex/nwp/tools/mars.py +14 -2
  81. vortex/nwp/tools/odb.py +234 -125
  82. vortex/nwp/tools/partitioning.py +61 -37
  83. vortex/nwp/tools/satrad.py +27 -12
  84. vortex/nwp/util/async.py +83 -55
  85. vortex/nwp/util/beacon.py +10 -10
  86. vortex/nwp/util/diffpygram.py +174 -86
  87. vortex/nwp/util/ens.py +144 -63
  88. vortex/nwp/util/hooks.py +30 -19
  89. vortex/nwp/util/taskdeco.py +28 -24
  90. vortex/nwp/util/usepygram.py +278 -172
  91. vortex/nwp/util/usetnt.py +31 -17
  92. vortex/sessions.py +72 -39
  93. vortex/syntax/__init__.py +1 -1
  94. vortex/syntax/stdattrs.py +410 -171
  95. vortex/syntax/stddeco.py +31 -22
  96. vortex/toolbox.py +327 -192
  97. vortex/tools/__init__.py +11 -2
  98. vortex/tools/actions.py +125 -59
  99. vortex/tools/addons.py +111 -92
  100. vortex/tools/arm.py +42 -22
  101. vortex/tools/compression.py +72 -69
  102. vortex/tools/date.py +11 -4
  103. vortex/tools/delayedactions.py +242 -132
  104. vortex/tools/env.py +75 -47
  105. vortex/tools/folder.py +342 -171
  106. vortex/tools/grib.py +311 -149
  107. vortex/tools/lfi.py +423 -216
  108. vortex/tools/listings.py +109 -40
  109. vortex/tools/names.py +218 -156
  110. vortex/tools/net.py +632 -298
  111. vortex/tools/parallelism.py +93 -61
  112. vortex/tools/prestaging.py +55 -31
  113. vortex/tools/schedulers.py +172 -105
  114. vortex/tools/services.py +402 -333
  115. vortex/tools/storage.py +293 -358
  116. vortex/tools/surfex.py +24 -24
  117. vortex/tools/systems.py +1211 -631
  118. vortex/tools/targets.py +156 -100
  119. vortex/util/__init__.py +1 -1
  120. vortex/util/config.py +377 -327
  121. vortex/util/empty.py +2 -2
  122. vortex/util/helpers.py +56 -24
  123. vortex/util/introspection.py +18 -12
  124. vortex/util/iosponge.py +8 -4
  125. vortex/util/roles.py +4 -6
  126. vortex/util/storefunctions.py +39 -13
  127. vortex/util/structs.py +3 -3
  128. vortex/util/worker.py +29 -17
  129. vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
  130. vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
  131. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
  132. vortex/layout/appconf.py +0 -109
  133. vortex/layout/jobs.py +0 -1276
  134. vortex/layout/nodes.py +0 -1424
  135. vortex/layout/subjobs.py +0 -464
  136. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  137. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  138. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
  139. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
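Items 132-135 above show that the vortex/layout/appconf.py, jobs.py, nodes.py and subjobs.py modules are deleted outright in 2.0.0b2, while vortex/layout/__init__.py itself survives (item 20). A minimal, hypothetical sketch of an upgrade check follows: the module names are taken from the file list above, the helper name is invented for illustration, and it assumes vortex-nwp is already installed in the environment being checked.

import importlib.util

# Modules deleted between vortex-nwp 2.0.0b1 and 2.0.0b2 (items 132-135 above).
REMOVED_IN_2_0_0B2 = [
    "vortex.layout.appconf",
    "vortex.layout.jobs",
    "vortex.layout.nodes",
    "vortex.layout.subjobs",
]

def missing_layout_modules():
    """Return the removed modules that the installed vortex no longer provides."""
    # find_spec() returns None when the submodule is absent but its parent
    # package (vortex.layout) is still importable, which remains the case in
    # 2.0.0b2 according to the file list above.
    return [name for name in REMOVED_IN_2_0_0B2
            if importlib.util.find_spec(name) is None]

if __name__ == "__main__":
    for name in missing_layout_modules():
        print("warning: {} is gone in vortex-nwp 2.0.0b2".format(name))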
vortex/tools/systems.py CHANGED
@@ -67,6 +67,7 @@ from vortex.tools.compression import CompressionPipeline
67
67
  from vortex.tools.env import Environment
68
68
  from vortex.tools.net import AssistedSsh, AutoRetriesFtp, DEFAULT_FTP_PORT
69
69
  from vortex.tools.net import FtpConnectionPool, LinuxNetstats, StdFtp
70
+ import vortex.tools.storage
70
71
 
71
72
  #: No automatic export
72
73
  __all__ = []
@@ -74,13 +75,13 @@ __all__ = []
74
75
  logger = loggers.getLogger(__name__)
75
76
 
76
77
  #: Pre-compiled regex to check a none str value
77
- isnonedef = re.compile(r'\s*none\s*$', re.IGNORECASE)
78
+ isnonedef = re.compile(r"\s*none\s*$", re.IGNORECASE)
78
79
 
79
80
  #: Pre-compiled regex to check a boolean true str value
80
- istruedef = re.compile(r'\s*(on|true|ok)\s*$', re.IGNORECASE)
81
+ istruedef = re.compile(r"\s*(on|true|ok)\s*$", re.IGNORECASE)
81
82
 
82
83
  #: Pre-compiled regex to check a boolean false str value
83
- isfalsedef = re.compile(r'\s*(off|false|ko)\s*$', re.IGNORECASE)
84
+ isfalsedef = re.compile(r"\s*(off|false|ko)\s*$", re.IGNORECASE)
84
85
 
85
86
  #: Global lock to protect temporary locale changes
86
87
  LOCALE_LOCK = threading.Lock()
@@ -97,7 +98,9 @@ _fmtshcmd_docbonus = """
97
98
  # Constant items
98
99
 
99
100
  #: Definition of a named tuple ftpflavour
100
- FtpFlavourTuple = namedtuple('FtpFlavourTuple', ['STD', 'RETRIES', 'CONNECTION_POOLS'])
101
+ FtpFlavourTuple = namedtuple(
102
+ "FtpFlavourTuple", ["STD", "RETRIES", "CONNECTION_POOLS"]
103
+ )
101
104
 
102
105
  #: Predefined FTP_FLAVOUR values IN, OUT and INOUT.
103
106
  FTP_FLAVOUR = FtpFlavourTuple(STD=0, RETRIES=1, CONNECTION_POOLS=2)
@@ -114,10 +117,12 @@ def fmtshcmd(func):
114
117
  """
115
118
 
116
119
  def formatted_method(self, *args, **kw):
117
- fmt = kw.pop('fmt', None)
120
+ fmt = kw.pop("fmt", None)
118
121
  shtarget = self if isinstance(self, System) else self.sh
119
- fmtcall = getattr(shtarget, str(fmt).lower() + '_' + func.__name__, func)
120
- if getattr(fmtcall, 'func_extern', False):
122
+ fmtcall = getattr(
123
+ shtarget, str(fmt).lower() + "_" + func.__name__, func
124
+ )
125
+ if getattr(fmtcall, "func_extern", False):
121
126
  return fmtcall(*args, **kw)
122
127
  else:
123
128
  return fmtcall(self, *args, **kw)
@@ -144,11 +149,13 @@ def _kw2spawn(func):
144
149
 
145
150
  class ExecutionError(RuntimeError):
146
151
  """Go through exception for internal :meth:`OSExtended.spawn` errors."""
152
+
147
153
  pass
148
154
 
149
155
 
150
156
  class CopyTreeError(OSError):
151
157
  """An error raised during the recursive copy of a directory."""
158
+
152
159
  pass
153
160
 
154
161
 
@@ -171,12 +178,12 @@ class CdContext:
171
178
  self.newpath = self.sh.path.expanduser(newpath)
172
179
 
173
180
  def __enter__(self):
174
- if self.newpath not in ('', '.'):
181
+ if self.newpath not in ("", "."):
175
182
  self.oldpath = self.sh.getcwd()
176
183
  self.sh.cd(self.newpath, create=self.create)
177
184
 
178
185
  def __exit__(self, etype, value, traceback): # @UnusedVariable
179
- if self.newpath not in ('', '.'):
186
+ if self.newpath not in ("", "."):
180
187
  self.sh.cd(self.oldpath)
181
188
  if self.clean_onexit:
182
189
  self.sh.rm(self.newpath)
@@ -220,14 +227,14 @@ class PythonSimplifiedVersion:
220
227
  It can be used in a footprint specification.
221
228
  """
222
229
 
223
- _VERSION_RE = re.compile(r'(\d+)\.(\d+)\.(\d+)')
230
+ _VERSION_RE = re.compile(r"(\d+)\.(\d+)\.(\d+)")
224
231
 
225
232
  def __init__(self, versionstr):
226
233
  v_match = self._VERSION_RE.match(versionstr)
227
234
  if v_match:
228
235
  self._version = tuple([int(d) for d in v_match.groups()])
229
236
  else:
230
- raise ValueError('Malformed version string: {}'.format(versionstr))
237
+ raise ValueError("Malformed version string: {}".format(versionstr))
231
238
 
232
239
  @property
233
240
  def version(self):
@@ -250,10 +257,12 @@ class PythonSimplifiedVersion:
250
257
  return self.version > other.version
251
258
 
252
259
  def __str__(self):
253
- return '.'.join([str(d) for d in self.version])
260
+ return ".".join([str(d) for d in self.version])
254
261
 
255
262
  def __repr__(self):
256
- return '<{} | {!s}>'.format(object.__repr__(self).lstrip('<').rstrip('>'), self)
263
+ return "<{} | {!s}>".format(
264
+ object.__repr__(self).lstrip("<").rstrip(">"), self
265
+ )
257
266
 
258
267
  def export_dict(self):
259
268
  """The pure dict/json output is the raw integer"""
@@ -269,50 +278,50 @@ class System(footprints.FootprintBase):
269
278
 
270
279
  _abstract = True
271
280
  _explicit = False
272
- _collector = ('system',)
281
+ _collector = ("system",)
273
282
 
274
283
  _footprint = dict(
275
- info = 'Default system interface',
276
- attr = dict(
277
- hostname = dict(
278
- info = "The computer's network name",
279
- optional = True,
280
- default = platform.node(),
281
- alias = ('nodename',)
284
+ info="Default system interface",
285
+ attr=dict(
286
+ hostname=dict(
287
+ info="The computer's network name",
288
+ optional=True,
289
+ default=platform.node(),
290
+ alias=("nodename",),
282
291
  ),
283
- sysname = dict(
284
- info = "The underlying system/OS name (e.g. Linux, Darwin, ...)",
285
- optional = True,
286
- default = platform.system(),
292
+ sysname=dict(
293
+ info="The underlying system/OS name (e.g. Linux, Darwin, ...)",
294
+ optional=True,
295
+ default=platform.system(),
287
296
  ),
288
- arch = dict(
289
- info = "The underlying machine type (e.g. i386, x86_64, ...)",
290
- optional = True,
291
- default = platform.machine(),
292
- alias = ('machine',)
297
+ arch=dict(
298
+ info="The underlying machine type (e.g. i386, x86_64, ...)",
299
+ optional=True,
300
+ default=platform.machine(),
301
+ alias=("machine",),
293
302
  ),
294
- release = dict(
295
- info = "The underlying system's release, (e.g. 2.2.0, NT, ...)",
296
- optional = True,
297
- default = platform.release()
303
+ release=dict(
304
+ info="The underlying system's release, (e.g. 2.2.0, NT, ...)",
305
+ optional=True,
306
+ default=platform.release(),
298
307
  ),
299
- version = dict(
300
- info = "The underlying system's release version",
301
- optional = True,
302
- default = platform.version()
308
+ version=dict(
309
+ info="The underlying system's release version",
310
+ optional=True,
311
+ default=platform.version(),
303
312
  ),
304
- python = dict(
305
- info = "The Python's version (e.g 2.7.5)",
306
- type = PythonSimplifiedVersion,
307
- optional = True,
308
- default = platform.python_version(),
313
+ python=dict(
314
+ info="The Python's version (e.g 2.7.5)",
315
+ type=PythonSimplifiedVersion,
316
+ optional=True,
317
+ default=platform.python_version(),
309
318
  ),
310
- glove = dict(
311
- info = "The session's Glove object",
312
- optional = True,
313
- type = Glove,
314
- )
315
- )
319
+ glove=dict(
320
+ info="The session's Glove object",
321
+ optional=True,
322
+ type=Glove,
323
+ ),
324
+ ),
316
325
  )
317
326
 
318
327
  def __init__(self, *args, **kw):
@@ -364,25 +373,29 @@ class System(footprints.FootprintBase):
364
373
  user will be able to call ``sh.greatstuff``).
365
374
 
366
375
  """
367
- logger.debug('Abstract System init %s', self.__class__)
368
- self.__dict__['_os'] = kw.pop('os', os)
369
- self.__dict__['_rl'] = kw.pop('rlimit', resource)
370
- self.__dict__['_sh'] = kw.pop('shutil', kw.pop('sh', shutil))
371
- self.__dict__['_search'] = [self.__dict__['_os'], self.__dict__['_sh'], self.__dict__['_rl']]
372
- self.__dict__['_xtrack'] = dict()
373
- self.__dict__['_history'] = History(tag='shell')
374
- self.__dict__['_rclast'] = 0
375
- self.__dict__['prompt'] = str(kw.pop('prompt', ''))
376
- for flag in ('trace', 'timer'):
376
+ logger.debug("Abstract System init %s", self.__class__)
377
+ self.__dict__["_os"] = kw.pop("os", os)
378
+ self.__dict__["_rl"] = kw.pop("rlimit", resource)
379
+ self.__dict__["_sh"] = kw.pop("shutil", kw.pop("sh", shutil))
380
+ self.__dict__["_search"] = [
381
+ self.__dict__["_os"],
382
+ self.__dict__["_sh"],
383
+ self.__dict__["_rl"],
384
+ ]
385
+ self.__dict__["_xtrack"] = dict()
386
+ self.__dict__["_history"] = History(tag="shell")
387
+ self.__dict__["_rclast"] = 0
388
+ self.__dict__["prompt"] = str(kw.pop("prompt", ""))
389
+ for flag in ("trace", "timer"):
377
390
  self.__dict__[flag] = kw.pop(flag, False)
378
- for flag in ('output',):
391
+ for flag in ("output",):
379
392
  self.__dict__[flag] = kw.pop(flag, True)
380
393
  super().__init__(*args, **kw)
381
394
 
382
395
  @property
383
396
  def realkind(self):
384
397
  """The object/class realkind."""
385
- return 'system'
398
+ return "system"
386
399
 
387
400
  @property
388
401
  def history(self):
@@ -407,18 +420,18 @@ class System(footprints.FootprintBase):
407
420
  @property
408
421
  def default_syslog(self):
409
422
  """Address to use in logging.handler.SysLogHandler()."""
410
- return '/dev/log'
423
+ return "/dev/log"
411
424
 
412
425
  def extend(self, obj=None):
413
426
  """Extend the current external attribute resolution to **obj** (module or object)."""
414
427
  if obj is not None:
415
- if hasattr(obj, 'kind'):
428
+ if hasattr(obj, "kind"):
416
429
  for k, v in self._xtrack.items():
417
- if hasattr(v, 'kind'):
430
+ if hasattr(v, "kind"):
418
431
  if hasattr(self, k):
419
432
  delattr(self, k)
420
433
  for addon in self.search:
421
- if hasattr(addon, 'kind') and addon.kind == obj.kind:
434
+ if hasattr(addon, "kind") and addon.kind == obj.kind:
422
435
  self.search.remove(addon)
423
436
  self.search.append(obj)
424
437
  return len(self.search)
@@ -429,7 +442,7 @@ class System(footprints.FootprintBase):
429
442
  (*i.e.* :class:`~vortex.tools.addons.Addon objects previously
430
443
  loaded with the :meth:`extend` method).
431
444
  """
432
- return [addon.kind for addon in self.search if hasattr(addon, 'kind')]
445
+ return [addon.kind for addon in self.search if hasattr(addon, "kind")]
433
446
 
434
447
  def external(self, key):
435
448
  """Return effective module object reference if any, or *None*."""
@@ -446,12 +459,14 @@ class System(footprints.FootprintBase):
446
459
  This is the place where the ``self.search`` list is looked for...
447
460
  """
448
461
  actualattr = None
449
- if key.startswith('_'):
462
+ if key.startswith("_"):
450
463
  # Do not attempt to look for hidden attributes
451
- raise AttributeError('Method or attribute ' + key + ' not found')
464
+ raise AttributeError("Method or attribute " + key + " not found")
452
465
  for shxobj in self.search:
453
466
  if hasattr(shxobj, key):
454
- if isinstance(shxobj, footprints.FootprintBase) and shxobj.footprint_has_attribute(key):
467
+ if isinstance(
468
+ shxobj, footprints.FootprintBase
469
+ ) and shxobj.footprint_has_attribute(key):
455
470
  # Ignore footprint attributes
456
471
  continue
457
472
  if actualattr is None:
@@ -459,17 +474,24 @@ class System(footprints.FootprintBase):
459
474
  self._xtrack[key] = shxobj
460
475
  else:
461
476
  # Do not warn for a restricted list of keys
462
- if key not in ('stat',):
463
- logger.warning('System: duplicate entry while looking for key="%s". ' +
464
- 'First result in %s but also available in %s.',
465
- key, self._xtrack[key], shxobj)
477
+ if key not in ("stat",):
478
+ logger.warning(
479
+ 'System: duplicate entry while looking for key="%s". '
480
+ + "First result in %s but also available in %s.",
481
+ key,
482
+ self._xtrack[key],
483
+ shxobj,
484
+ )
466
485
  if actualattr is None:
467
- raise AttributeError('Method or attribute ' + key + ' not found')
486
+ raise AttributeError("Method or attribute " + key + " not found")
468
487
  if callable(actualattr):
488
+
469
489
  def osproxy(*args, **kw):
470
490
  cmd = [key]
471
491
  cmd.extend(args)
472
- cmd.extend(['{:s}={:s}'.format(x, str(kw[x])) for x in kw.keys()])
492
+ cmd.extend(
493
+ ["{:s}={:s}".format(x, str(kw[x])) for x in kw.keys()]
494
+ )
473
495
  self.stderr(*cmd)
474
496
  return actualattr(*args, **kw)
475
497
 
@@ -484,15 +506,21 @@ class System(footprints.FootprintBase):
484
506
 
485
507
  def stderr(self, *args):
486
508
  """Write a formatted message to standard error (if ``self.trace == True``)."""
487
- count, justnow, = self.history.append(*args)
509
+ (
510
+ count,
511
+ justnow,
512
+ ) = self.history.append(*args)
488
513
  if self.trace:
489
- if self.trace == 'log':
490
- logger.info('[sh:#%d] %s', count, ' '.join([str(x) for x in args]))
514
+ if self.trace == "log":
515
+ logger.info(
516
+ "[sh:#%d] %s", count, " ".join([str(x) for x in args])
517
+ )
491
518
  else:
492
519
  sys.stderr.write(
493
520
  "* [{:s}][{:d}] {:s}\n".format(
494
- justnow.strftime('%Y/%m/%d-%H:%M:%S'), count,
495
- ' '.join([str(x) for x in args])
521
+ justnow.strftime("%Y/%m/%d-%H:%M:%S"),
522
+ count,
523
+ " ".join([str(x) for x in args]),
496
524
  )
497
525
  )
498
526
 
@@ -512,9 +540,9 @@ class System(footprints.FootprintBase):
512
540
 
513
541
  def echo(self, *args):
514
542
  """Joined **args** are echoed."""
515
- print('>>>', ' '.join([str(arg) for arg in args]))
543
+ print(">>>", " ".join([str(arg) for arg in args]))
516
544
 
517
- def title(self, textlist, tchar='=', autolen=96):
545
+ def title(self, textlist, tchar="=", autolen=96):
518
546
  """Formated title output.
519
547
 
520
548
  :param list|str textlist: A list of strings that contains the title's text
@@ -530,12 +558,16 @@ class System(footprints.FootprintBase):
530
558
  print()
531
559
  print(tchar * (nbc + 4))
532
560
  for text in textlist:
533
- print('{0:s} {1:^{size}s} {0:s}'.format(tchar, text.upper(), size=nbc))
561
+ print(
562
+ "{0:s} {1:^{size}s} {0:s}".format(
563
+ tchar, text.upper(), size=nbc
564
+ )
565
+ )
534
566
  print(tchar * (nbc + 4))
535
567
  print()
536
568
  self.flush_stdall()
537
569
 
538
- def subtitle(self, text='', tchar='-', autolen=96):
570
+ def subtitle(self, text="", tchar="-", autolen=96):
539
571
  """Formatted subtitle output.
540
572
 
541
573
  :param str text: The subtitle's text
@@ -549,11 +581,13 @@ class System(footprints.FootprintBase):
549
581
  print()
550
582
  print(tchar * (nbc + 4))
551
583
  if text:
552
- print('# {0:{size}s} #'.format(text, size=nbc))
584
+ print("# {0:{size}s} #".format(text, size=nbc))
553
585
  print(tchar * (nbc + 4))
554
586
  self.flush_stdall()
555
587
 
556
- def header(self, text='', tchar='-', autolen=False, xline=True, prompt=None):
588
+ def header(
589
+ self, text="", tchar="-", autolen=False, xline=True, prompt=None
590
+ ):
557
591
  """Formatted header output.
558
592
 
559
593
  :param str text: The subtitle's text
@@ -572,15 +606,17 @@ class System(footprints.FootprintBase):
572
606
  if not prompt:
573
607
  prompt = self.prompt
574
608
  if prompt:
575
- prompt = str(prompt) + ' '
609
+ prompt = str(prompt) + " "
576
610
  else:
577
- prompt = ''
611
+ prompt = ""
578
612
  print(prompt + str(text))
579
613
  if xline:
580
614
  print(tchar * nbc)
581
615
  self.flush_stdall()
582
616
 
583
- def highlight(self, text='', hchar='----', bchar='#', bline=False, bline0=True):
617
+ def highlight(
618
+ self, text="", hchar="----", bchar="#", bline=False, bline0=True
619
+ ):
584
620
  """Highlight some text.
585
621
 
586
622
  :param str text: The text to be highlighted
@@ -591,8 +627,11 @@ class System(footprints.FootprintBase):
591
627
  """
592
628
  if bline0:
593
629
  print()
594
- print('{0:s} {1:s} {2:s} {1:s} {3:s}'
595
- .format(bchar.rstrip(), hchar, text, bchar.lstrip()))
630
+ print(
631
+ "{0:s} {1:s} {2:s} {1:s} {3:s}".format(
632
+ bchar.rstrip(), hchar, text, bchar.lstrip()
633
+ )
634
+ )
596
635
  if bline:
597
636
  print()
598
637
  self.flush_stdall()
@@ -600,18 +639,18 @@ class System(footprints.FootprintBase):
600
639
  @property
601
640
  def executable(self):
602
641
  """Return the actual ``sys.executable``."""
603
- self.stderr('executable')
642
+ self.stderr("executable")
604
643
  return sys.executable
605
644
 
606
645
  def pythonpath(self, output=None):
607
646
  """Return or print actual ``sys.path``."""
608
647
  if output is None:
609
648
  output = self.output
610
- self.stderr('pythonpath')
649
+ self.stderr("pythonpath")
611
650
  if output:
612
651
  return sys.path[:]
613
652
  else:
614
- self.subtitle('Python PATH')
653
+ self.subtitle("Python PATH")
615
654
  for pypath in sys.path:
616
655
  print(pypath)
617
656
  return True
@@ -625,12 +664,12 @@ class System(footprints.FootprintBase):
625
664
  """Try to determine an identification string for the current script."""
626
665
  # PBS scheduler SLURM scheduler Good-old PID
627
666
  env = self.env
628
- label = env.PBS_JOBID or env.SLURM_JOB_ID or 'localpid'
629
- if label == 'localpid':
667
+ label = env.PBS_JOBID or env.SLURM_JOB_ID or "localpid"
668
+ if label == "localpid":
630
669
  label = str(self.getpid())
631
670
  return label
632
671
 
633
- def vortex_modules(self, only='.'):
672
+ def vortex_modules(self, only="."):
634
673
  """Return a filtered list of modules in the vortex package.
635
674
 
636
675
  :param str only: The regex used to filter the modules list.
@@ -638,22 +677,26 @@ class System(footprints.FootprintBase):
638
677
  if self.glove is not None:
639
678
  g = self.glove
640
679
  mfiles = [
641
- re.sub(r'^' + mroot + r'/', '', x)
642
- for mroot in (g.siteroot + '/src', g.siteroot + '/site')
680
+ re.sub(r"^" + mroot + r"/", "", x)
681
+ for mroot in (g.siteroot + "/src", g.siteroot + "/site")
643
682
  for x in self.ffind(mroot)
644
- if self.path.isfile(self.path.join(self.path.dirname(x), '__init__.py'))
683
+ if self.path.isfile(
684
+ self.path.join(self.path.dirname(x), "__init__.py")
685
+ )
645
686
  ]
646
687
  return [
647
- re.sub(r'(?:/__init__)?\.py$', '', x).replace('/', '.')
688
+ re.sub(r"(?:/__init__)?\.py$", "", x).replace("/", ".")
648
689
  for x in mfiles
649
- if (not x.startswith('.') and
650
- re.search(only, x, re.IGNORECASE) and
651
- x.endswith('.py'))
690
+ if (
691
+ not x.startswith(".")
692
+ and re.search(only, x, re.IGNORECASE)
693
+ and x.endswith(".py")
694
+ )
652
695
  ]
653
696
  else:
654
697
  raise RuntimeError("A glove must be defined")
655
698
 
656
- def vortex_loaded_modules(self, only='.', output=None):
699
+ def vortex_loaded_modules(self, only=".", output=None):
657
700
  """Check loaded modules, producing either a dump or a list of tuple (status, modulename).
658
701
 
659
702
  :param str only: The regex used to filter the modules list.
@@ -666,7 +709,7 @@ class System(footprints.FootprintBase):
666
709
  if not output:
667
710
  for m, s in checklist:
668
711
  print(str(s).ljust(8), m)
669
- print('--')
712
+ print("--")
670
713
  return True
671
714
  else:
672
715
  return checklist
@@ -674,13 +717,17 @@ class System(footprints.FootprintBase):
674
717
  def systems_reload(self):
675
718
  """Load extra systems modules not yet loaded."""
676
719
  extras = list()
677
- for modname in self.vortex_modules(only='systems'):
720
+ for modname in self.vortex_modules(only="systems"):
678
721
  if modname not in sys.modules:
679
722
  try:
680
723
  self.import_module(modname)
681
724
  extras.append(modname)
682
725
  except ValueError as err:
683
- logger.critical('systems_reload: cannot import module %s (%s)', modname, str(err))
726
+ logger.critical(
727
+ "systems_reload: cannot import module %s (%s)",
728
+ modname,
729
+ str(err),
730
+ )
684
731
  return extras
685
732
 
686
733
  # Redefinition of methods of the resource package...
@@ -692,16 +739,16 @@ class System(footprints.FootprintBase):
692
739
  """
693
740
  if type(r_id) is not int:
694
741
  r_id = r_id.upper()
695
- if not r_id.startswith('RLIMIT_'):
696
- r_id = 'RLIMIT_' + r_id
742
+ if not r_id.startswith("RLIMIT_"):
743
+ r_id = "RLIMIT_" + r_id
697
744
  r_id = getattr(self._rl, r_id, None)
698
745
  if r_id is None:
699
- raise ValueError('Invalid resource specified')
746
+ raise ValueError("Invalid resource specified")
700
747
  return r_id
701
748
 
702
749
  def setrlimit(self, r_id, r_limits):
703
750
  """Proxy to :mod:`resource` function of the same name."""
704
- self.stderr('setrlimit', r_id, r_limits)
751
+ self.stderr("setrlimit", r_id, r_limits)
705
752
  try:
706
753
  r_limits = tuple(r_limits)
707
754
  except TypeError:
@@ -710,14 +757,14 @@ class System(footprints.FootprintBase):
710
757
 
711
758
  def getrlimit(self, r_id):
712
759
  """Proxy to :mod:`resource` function of the same name."""
713
- self.stderr('getrlimit', r_id)
760
+ self.stderr("getrlimit", r_id)
714
761
  return self._rl.getrlimit(self.numrlimit(r_id))
715
762
 
716
763
  def getrusage(self, pid=None):
717
764
  """Proxy to :mod:`resource` function of the same name with current process as defaut."""
718
765
  if pid is None:
719
766
  pid = self._rl.RUSAGE_SELF
720
- self.stderr('getrusage', pid)
767
+ self.stderr("getrusage", pid)
721
768
  return self._rl.getrusage(pid)
722
769
 
723
770
  def import_module(self, modname):
@@ -728,12 +775,12 @@ class System(footprints.FootprintBase):
728
775
  def import_function(self, funcname):
729
776
  """Import the function named **funcname** qualified by a proper module name package."""
730
777
  thisfunc = None
731
- if '.' in funcname:
732
- thismod = self.import_module('.'.join(funcname.split('.')[:-1]))
778
+ if "." in funcname:
779
+ thismod = self.import_module(".".join(funcname.split(".")[:-1]))
733
780
  if thismod:
734
- thisfunc = getattr(thismod, funcname.split('.')[-1], None)
781
+ thisfunc = getattr(thismod, funcname.split(".")[-1], None)
735
782
  else:
736
- logger.error('Bad function path name <%s>' % funcname)
783
+ logger.error("Bad function path name <%s>" % funcname)
737
784
  return thisfunc
738
785
 
739
786
 
@@ -744,9 +791,7 @@ class OSExtended(System):
744
791
  """
745
792
 
746
793
  _abstract = True
747
- _footprint = dict(
748
- info = 'Abstract extended base system'
749
- )
794
+ _footprint = dict(info="Abstract extended base system")
750
795
 
751
796
  def __init__(self, *args, **kw):
752
797
  """
@@ -766,15 +811,15 @@ class OSExtended(System):
766
811
  (default: `FTP_FLAVOUR.CONNECTION_POOLS`). See the :meth:`ftp` method
767
812
  for more details.
768
813
  """
769
- logger.debug('Abstract System init %s', self.__class__)
770
- self._rmtreemin = kw.pop('rmtreemin', 3)
771
- self._cmpaftercp = kw.pop('cmpaftercp', True)
814
+ logger.debug("Abstract System init %s", self.__class__)
815
+ self._rmtreemin = kw.pop("rmtreemin", 3)
816
+ self._cmpaftercp = kw.pop("cmpaftercp", True)
772
817
  # Switches for rawft* methods
773
- self._ftraw = kw.pop('ftraw', None)
774
- self.ftputcmd = kw.pop('ftputcmd', None)
775
- self.ftgetcmd = kw.pop('ftgetcmd', None)
818
+ self._ftraw = kw.pop("ftraw", None)
819
+ self.ftputcmd = kw.pop("ftputcmd", None)
820
+ self.ftgetcmd = kw.pop("ftgetcmd", None)
776
821
  # FTP stuff again
777
- self.ftpflavour = kw.pop('ftpflavour', FTP_FLAVOUR.CONNECTION_POOLS)
822
+ self.ftpflavour = kw.pop("ftpflavour", FTP_FLAVOUR.CONNECTION_POOLS)
778
823
  self._current_ftppool = None
779
824
  # Some internal variables used by particular methods
780
825
  self._ftspool_cache = None
@@ -784,10 +829,10 @@ class OSExtended(System):
784
829
  # Go for the superclass' constructor
785
830
  super().__init__(*args, **kw)
786
831
  # Initialise possibly missing objects
787
- self.__dict__['_cpusinfo'] = None
788
- self.__dict__['_numainfo'] = None
789
- self.__dict__['_memoryinfo'] = None
790
- self.__dict__['_netstatsinfo'] = None
832
+ self.__dict__["_cpusinfo"] = None
833
+ self.__dict__["_numainfo"] = None
834
+ self.__dict__["_memoryinfo"] = None
835
+ self.__dict__["_netstatsinfo"] = None
791
836
 
792
837
  # Initialise the signal handler object
793
838
  self._signal_intercept_init()
@@ -819,10 +864,7 @@ class OSExtended(System):
819
864
  * The object returned by this method will be used in subsequent calls
820
865
  to ::attr:`default_target` (this is the concept of frozen target).
821
866
  """
822
- desc = dict(
823
- hostname=self.hostname,
824
- sysname=self.sysname
825
- )
867
+ desc = dict(hostname=self.hostname, sysname=self.sysname)
826
868
  desc.update(kw)
827
869
  self._frozen_target = footprints.proxy.targets.default(**desc)
828
870
  return self._frozen_target
@@ -834,10 +876,18 @@ class OSExtended(System):
834
876
 
835
877
  def fmtspecific_mtd(self, method, fmt):
836
878
  """Check if a format specific implementation is available for a given format."""
837
- return hasattr(self, '{:s}_{:s}'.format(fmt, method))
838
-
839
- def popen(self, args, stdin=None, stdout=None, stderr=None, shell=False,
840
- output=False, bufsize=0): # @UnusedVariable
879
+ return hasattr(self, "{:s}_{:s}".format(fmt, method))
880
+
881
+ def popen(
882
+ self,
883
+ args,
884
+ stdin=None,
885
+ stdout=None,
886
+ stderr=None,
887
+ shell=False,
888
+ output=False,
889
+ bufsize=0,
890
+ ): # @UnusedVariable
841
891
  """Return an open pipe for the **args** command.
842
892
 
843
893
  :param str|list args: The command (+ its command-line arguments) to be
@@ -882,7 +932,14 @@ class OSExtended(System):
882
932
  stdin = subprocess.PIPE
883
933
  if stderr is True:
884
934
  stderr = subprocess.PIPE
885
- return subprocess.Popen(args, bufsize=bufsize, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell)
935
+ return subprocess.Popen(
936
+ args,
937
+ bufsize=bufsize,
938
+ stdin=stdin,
939
+ stdout=stdout,
940
+ stderr=stderr,
941
+ shell=shell,
942
+ )
886
943
 
887
944
  def pclose(self, p, ok=None):
888
945
  """Do its best to nicely shutdown the process started by **p**.
@@ -905,7 +962,7 @@ class OSExtended(System):
905
962
  p.terminate()
906
963
  except OSError as e:
907
964
  if e.errno == 3:
908
- logger.debug('Processus %s alreaded terminated.' % str(p))
965
+ logger.debug("Processus %s alreaded terminated." % str(p))
909
966
  else:
910
967
  raise
911
968
 
@@ -917,9 +974,21 @@ class OSExtended(System):
917
974
  else:
918
975
  return False
919
976
 
920
- def spawn(self, args, ok=None, shell=False, stdin=None, output=None,
921
- outmode='a+b', outsplit=True, silent=False, fatal=True,
922
- taskset=None, taskset_id=0, taskset_bsize=1):
977
+ def spawn(
978
+ self,
979
+ args,
980
+ ok=None,
981
+ shell=False,
982
+ stdin=None,
983
+ output=None,
984
+ outmode="a+b",
985
+ outsplit=True,
986
+ silent=False,
987
+ fatal=True,
988
+ taskset=None,
989
+ taskset_id=0,
990
+ taskset_bsize=1,
991
+ ):
923
992
  """Subprocess call of **args**.
924
993
 
925
994
  :param str|list[str] args: The command (+ its command-line arguments) to be
@@ -986,25 +1055,25 @@ class OSExtended(System):
986
1055
  stdin = subprocess.PIPE
987
1056
  localenv = self._os.environ.copy()
988
1057
  if taskset is not None:
989
- taskset_def = taskset.split('_')
990
- taskset, taskset_cmd, taskset_env = self.cpus_affinity_get(taskset_id,
991
- taskset_bsize,
992
- *taskset_def)
1058
+ taskset_def = taskset.split("_")
1059
+ taskset, taskset_cmd, taskset_env = self.cpus_affinity_get(
1060
+ taskset_id, taskset_bsize, *taskset_def
1061
+ )
993
1062
  if taskset:
994
1063
  localenv.update(taskset_env)
995
1064
  else:
996
1065
  logger.warning("CPU binding is not available on this platform")
997
1066
  if isinstance(args, str):
998
1067
  if taskset:
999
- args = taskset_cmd + ' ' + args
1068
+ args = taskset_cmd + " " + args
1000
1069
  if self.timer:
1001
- args = 'time ' + args
1070
+ args = "time " + args
1002
1071
  self.stderr(args)
1003
1072
  else:
1004
1073
  if taskset:
1005
1074
  args[:0] = taskset_cmd
1006
1075
  if self.timer:
1007
- args[:0] = ['time']
1076
+ args[:0] = ["time"]
1008
1077
  self.stderr(*args)
1009
1078
  if isinstance(output, bool):
1010
1079
  if output:
@@ -1017,36 +1086,47 @@ class OSExtended(System):
1017
1086
  cmdout, cmderr = output, output
1018
1087
  p = None
1019
1088
  try:
1020
- p = subprocess.Popen(args, stdin=stdin, stdout=cmdout, stderr=cmderr,
1021
- shell=shell, env=localenv)
1089
+ p = subprocess.Popen(
1090
+ args,
1091
+ stdin=stdin,
1092
+ stdout=cmdout,
1093
+ stderr=cmderr,
1094
+ shell=shell,
1095
+ env=localenv,
1096
+ )
1022
1097
  p_out, p_err = p.communicate()
1023
1098
  except ValueError as e:
1024
1099
  logger.critical(
1025
- 'Weird arguments to Popen ({!s}, stdout={!s}, stderr={!s}, shell={!s})'.format(
1100
+ "Weird arguments to Popen ({!s}, stdout={!s}, stderr={!s}, shell={!s})".format(
1026
1101
  args, cmdout, cmderr, shell
1027
1102
  )
1028
1103
  )
1029
- logger.critical('Caught exception: %s', e)
1104
+ logger.critical("Caught exception: %s", e)
1030
1105
  if fatal:
1031
1106
  raise
1032
1107
  else:
1033
- logger.warning('Carry on because fatal is off')
1108
+ logger.warning("Carry on because fatal is off")
1034
1109
  except OSError:
1035
- logger.critical('Could not call %s', str(args))
1110
+ logger.critical("Could not call %s", str(args))
1036
1111
  if fatal:
1037
1112
  raise
1038
1113
  else:
1039
- logger.warning('Carry on because fatal is off')
1114
+ logger.warning("Carry on because fatal is off")
1040
1115
  except Exception as perr:
1041
- logger.critical('System returns %s', str(perr))
1116
+ logger.critical("System returns %s", str(perr))
1042
1117
  if fatal:
1043
- raise RuntimeError('System {!s} spawned {!s} got [{!s}]: {!s}'
1044
- .format(self, args, p.returncode, perr))
1118
+ raise RuntimeError(
1119
+ "System {!s} spawned {!s} got [{!s}]: {!s}".format(
1120
+ self, args, p.returncode, perr
1121
+ )
1122
+ )
1045
1123
  else:
1046
- logger.warning('Carry on because fatal is off')
1124
+ logger.warning("Carry on because fatal is off")
1047
1125
  except (SignalInterruptError, KeyboardInterrupt) as perr:
1048
- logger.critical('The python process was killed: %s. Trying to terminate the subprocess.',
1049
- str(perr))
1126
+ logger.critical(
1127
+ "The python process was killed: %s. Trying to terminate the subprocess.",
1128
+ str(perr),
1129
+ )
1050
1130
  if p:
1051
1131
  if shell:
1052
1132
  # Kill the process group: apparently it's the only way when shell=T
@@ -1056,24 +1136,26 @@ class OSExtended(System):
1056
1136
  p.wait()
1057
1137
  raise # Fatal has no effect on that !
1058
1138
  else:
1059
- plocale = locale.getlocale()[1] or 'ascii'
1139
+ plocale = locale.getlocale()[1] or "ascii"
1060
1140
  if p.returncode in ok:
1061
1141
  if isinstance(output, bool) and output:
1062
- rc = p_out.decode(plocale, 'replace')
1142
+ rc = p_out.decode(plocale, "replace")
1063
1143
  if outsplit:
1064
- rc = rc.rstrip('\n').split('\n')
1144
+ rc = rc.rstrip("\n").split("\n")
1065
1145
  p.stdout.close()
1066
1146
  else:
1067
1147
  rc = not bool(p.returncode)
1068
1148
  else:
1069
1149
  if not silent:
1070
- logger.warning('Bad return code [%d] for %s', p.returncode, str(args))
1150
+ logger.warning(
1151
+ "Bad return code [%d] for %s", p.returncode, str(args)
1152
+ )
1071
1153
  if isinstance(output, bool) and output:
1072
- sys.stderr.write(p_err.decode(plocale, 'replace'))
1154
+ sys.stderr.write(p_err.decode(plocale, "replace"))
1073
1155
  if fatal:
1074
1156
  raise ExecutionError()
1075
1157
  else:
1076
- logger.warning('Carry on because fatal is off')
1158
+ logger.warning("Carry on because fatal is off")
1077
1159
  finally:
1078
1160
  self._rclast = p.returncode if p else 1
1079
1161
  if isinstance(output, bool) and p:
@@ -1107,11 +1189,11 @@ class OSExtended(System):
1107
1189
  """Current working directory."""
1108
1190
  if output is None:
1109
1191
  output = self.output
1110
- self.stderr('pwd')
1192
+ self.stderr("pwd")
1111
1193
  try:
1112
1194
  realpwd = self._os.getcwd()
1113
1195
  except OSError as e:
1114
- logger.error('getcwdu failed: %s.', str(e))
1196
+ logger.error("getcwdu failed: %s.", str(e))
1115
1197
  return None
1116
1198
  if output:
1117
1199
  return realpwd
@@ -1122,7 +1204,7 @@ class OSExtended(System):
1122
1204
  def cd(self, pathtogo, create=False):
1123
1205
  """Change the current working directory to **pathtogo**."""
1124
1206
  pathtogo = self.path.expanduser(pathtogo)
1125
- self.stderr('cd', pathtogo, create)
1207
+ self.stderr("cd", pathtogo, create)
1126
1208
  if create:
1127
1209
  self.mkdir(pathtogo)
1128
1210
  self._os.chdir(pathtogo)
@@ -1143,11 +1225,13 @@ class OSExtended(System):
1143
1225
  :param prefix: The temporary directory name will start with that suffix
1144
1226
  :param dir: The temporary directory will be created in that directory
1145
1227
  """
1146
- self.stderr('temporary_dir_context starts', suffix)
1147
- self.stderr('tempfile.TemporaryDirectory', suffix, prefix, dir)
1148
- with tempfile.TemporaryDirectory(suffix=suffix, prefix=prefix, dir=dir) as tmp_dir:
1228
+ self.stderr("temporary_dir_context starts", suffix)
1229
+ self.stderr("tempfile.TemporaryDirectory", suffix, prefix, dir)
1230
+ with tempfile.TemporaryDirectory(
1231
+ suffix=suffix, prefix=prefix, dir=dir
1232
+ ) as tmp_dir:
1149
1233
  yield tmp_dir
1150
- self.stderr('tempfile.TemporaryDirectory cleanup', tmp_dir)
1234
+ self.stderr("tempfile.TemporaryDirectory cleanup", tmp_dir)
1151
1235
 
1152
1236
  @contextlib.contextmanager
1153
1237
  def temporary_dir_cdcontext(self, suffix=None, prefix=None, dir=None):
@@ -1155,23 +1239,27 @@ class OSExtended(System):
1155
1239
 
1156
1240
  For a description of the context's arguments, see :func:`temporary_dir_context`.
1157
1241
  """
1158
- with self.temporary_dir_context(suffix=suffix, prefix=prefix, dir=dir) as tmp_dir:
1242
+ with self.temporary_dir_context(
1243
+ suffix=suffix, prefix=prefix, dir=dir
1244
+ ) as tmp_dir:
1159
1245
  with self.cdcontext(tmp_dir, create=False, clean_onexit=False):
1160
1246
  yield tmp_dir
1161
1247
 
1162
1248
  def ffind(self, *args):
1163
1249
  """Recursive file find. Arguments are starting paths."""
1164
1250
  if not args:
1165
- args = ['*']
1251
+ args = ["*"]
1166
1252
  else:
1167
1253
  args = [self.path.expanduser(x) for x in args]
1168
1254
  files = []
1169
- self.stderr('ffind', *args)
1255
+ self.stderr("ffind", *args)
1170
1256
  for pathtogo in self.glob(*args):
1171
1257
  if self.path.isfile(pathtogo):
1172
1258
  files.append(pathtogo)
1173
1259
  else:
1174
- for root, u_dirs, filenames in self._os.walk(pathtogo): # @UnusedVariable
1260
+ for root, u_dirs, filenames in self._os.walk(
1261
+ pathtogo
1262
+ ): # @UnusedVariable
1175
1263
  files.extend([self.path.join(root, f) for f in filenames])
1176
1264
  return sorted(files)
1177
1265
 
@@ -1184,8 +1272,13 @@ class OSExtended(System):
1184
1272
  if self._os.path.exists(filename):
1185
1273
  is_x = bool(self._os.stat(filename).st_mode & 1)
1186
1274
  if not is_x and force:
1187
- self.chmod(filename,
1188
- self._os.stat(filename).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
1275
+ self.chmod(
1276
+ filename,
1277
+ self._os.stat(filename).st_mode
1278
+ | stat.S_IXUSR
1279
+ | stat.S_IXGRP
1280
+ | stat.S_IXOTH,
1281
+ )
1189
1282
  is_x = True
1190
1283
  return is_x
1191
1284
  else:
@@ -1199,9 +1292,16 @@ class OSExtended(System):
1199
1292
  """
1200
1293
  if self._os.path.exists(filename):
1201
1294
  mode = self._os.stat(filename).st_mode
1202
- is_r = all([bool(mode & i) for i in [stat.S_IRUSR, stat.S_IRGRP, stat.S_IROTH]])
1295
+ is_r = all(
1296
+ [
1297
+ bool(mode & i)
1298
+ for i in [stat.S_IRUSR, stat.S_IRGRP, stat.S_IROTH]
1299
+ ]
1300
+ )
1203
1301
  if not is_r and force:
1204
- self.chmod(filename, mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
1302
+ self.chmod(
1303
+ filename, mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
1304
+ )
1205
1305
  is_r = True
1206
1306
  return is_r
1207
1307
  else:
@@ -1238,7 +1338,7 @@ class OSExtended(System):
1238
1338
  def readonly(self, inodename):
1239
1339
  """Set permissions of the ``inodename`` object to read-only."""
1240
1340
  inodename = self.path.expanduser(inodename)
1241
- self.stderr('readonly', inodename)
1341
+ self.stderr("readonly", inodename)
1242
1342
  rc = None
1243
1343
  if self._os.path.exists(inodename):
1244
1344
  if self._os.path.isdir(inodename):
@@ -1246,7 +1346,10 @@ class OSExtended(System):
1246
1346
  else:
1247
1347
  st = self.stat(inodename).st_mode
1248
1348
  if st & stat.S_IWUSR or st & stat.S_IWGRP or st & stat.S_IWOTH:
1249
- rc = self.chmod(inodename, st & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH))
1349
+ rc = self.chmod(
1350
+ inodename,
1351
+ st & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH),
1352
+ )
1250
1353
  else:
1251
1354
  rc = True
1252
1355
  return rc
@@ -1266,7 +1369,7 @@ class OSExtended(System):
1266
1369
  def touch(self, filename):
1267
1370
  """Clone of the eponymous unix command."""
1268
1371
  filename = self.path.expanduser(filename)
1269
- self.stderr('touch', filename)
1372
+ self.stderr("touch", filename)
1270
1373
  rc = True
1271
1374
  if self.path.exists(filename):
1272
1375
  # Note: "filename" might as well be a directory...
@@ -1275,7 +1378,7 @@ class OSExtended(System):
1275
1378
  except Exception:
1276
1379
  rc = False
1277
1380
  else:
1278
- fh = open(filename, 'a')
1381
+ fh = open(filename, "a")
1279
1382
  fh.close()
1280
1383
  return rc
1281
1384
 
@@ -1287,13 +1390,13 @@ class OSExtended(System):
1287
1390
  """
1288
1391
  objpath = self.path.expanduser(objpath)
1289
1392
  if self._os.path.exists(objpath):
1290
- self.stderr('remove', objpath)
1393
+ self.stderr("remove", objpath)
1291
1394
  if self._os.path.isdir(objpath):
1292
1395
  self.rmtree(objpath)
1293
1396
  else:
1294
1397
  self.unlink(objpath)
1295
1398
  else:
1296
- self.stderr('clear', objpath)
1399
+ self.stderr("clear", objpath)
1297
1400
  return not self._os.path.exists(objpath)
1298
1401
 
1299
1402
  @fmtshcmd
@@ -1311,35 +1414,48 @@ class OSExtended(System):
1311
1414
  expressions are used).
1312
1415
  """
1313
1416
  if not pscmd:
1314
- pscmd = ['ps']
1417
+ pscmd = ["ps"]
1315
1418
  if opts is None:
1316
1419
  opts = []
1317
1420
  pscmd.extend(self._psopts)
1318
1421
  pscmd.extend(opts)
1319
1422
  self.stderr(*pscmd)
1320
- psall = subprocess.Popen(pscmd, stdout=subprocess.PIPE).communicate()[0].split('\n')
1423
+ psall = (
1424
+ subprocess.Popen(pscmd, stdout=subprocess.PIPE)
1425
+ .communicate()[0]
1426
+ .split("\n")
1427
+ )
1321
1428
  if search:
1322
1429
  psall = filter(lambda x: re.search(search, x), psall)
1323
1430
  return [x.strip() for x in psall]
1324
1431
 
1325
1432
  def sleep(self, nbsecs):
1326
1433
  """Clone of the unix eponymous command."""
1327
- self.stderr('sleep', nbsecs)
1434
+ self.stderr("sleep", nbsecs)
1328
1435
  time.sleep(nbsecs)
1329
1436
 
1330
1437
  def setulimit(self, r_id):
1331
1438
  """Set an unlimited value to the specified resource (**r_id**)."""
1332
- self.stderr('setulimit', r_id)
1439
+ self.stderr("setulimit", r_id)
1333
1440
  u_soft, hard = self.getrlimit(r_id) # @UnusedVariable
1334
1441
  if hard != self._rl.RLIM_INFINITY:
1335
- logger.info('Unable to raise the %s soft limit to "unlimited", ' +
1336
- 'using the hard limit instead (%s).', str(r_id), str(hard))
1442
+ logger.info(
1443
+ 'Unable to raise the %s soft limit to "unlimited", '
1444
+ + "using the hard limit instead (%s).",
1445
+ str(r_id),
1446
+ str(hard),
1447
+ )
1337
1448
  return self.setrlimit(r_id, (hard, hard))
1338
1449
 
1339
1450
  def ulimit(self):
1340
1451
  """Dump the user limits currently defined."""
1341
- for limit in [r for r in dir(self._rl) if r.startswith('RLIMIT_')]:
1342
- print(' ', limit.ljust(16), ':', self._rl.getrlimit(getattr(self._rl, limit)))
1452
+ for limit in [r for r in dir(self._rl) if r.startswith("RLIMIT_")]:
1453
+ print(
1454
+ " ",
1455
+ limit.ljust(16),
1456
+ ":",
1457
+ self._rl.getrlimit(getattr(self._rl, limit)),
1458
+ )
1343
1459
 
1344
1460
  @property
1345
1461
  def cpus_info(self):
@@ -1352,7 +1468,9 @@ class OSExtended(System):
1352
1468
  """
1353
1469
  return self._cpusinfo
1354
1470
 
1355
- def cpus_ids_per_blocks(self, blocksize=1, topology='raw', hexmask=False): # @UnusedVariable
1471
+ def cpus_ids_per_blocks(
1472
+ self, blocksize=1, topology="raw", hexmask=False
1473
+ ): # @UnusedVariable
1356
1474
  """Get the list of CPUs IDs ordered for subsequent binding.
1357
1475
 
1358
1476
  :param int blocksize: The number of thread consumed by one task
@@ -1361,14 +1479,16 @@ class OSExtended(System):
1361
1479
  """
1362
1480
  return []
1363
1481
 
1364
- def cpus_ids_dispenser(self, topology='raw'):
1482
+ def cpus_ids_dispenser(self, topology="raw"):
1365
1483
  """Get a dispenser of CPUs IDs for nicely ordered for subsequent binding.
1366
1484
 
1367
1485
  :param str topology: The task distribution scheme
1368
1486
  """
1369
1487
  return None
1370
1488
 
1371
- def cpus_affinity_get(self, taskid, blocksize=1, method='default', topology='raw'): # @UnusedVariable
1489
+ def cpus_affinity_get(
1490
+ self, taskid, blocksize=1, method="default", topology="raw"
1491
+ ): # @UnusedVariable
1372
1492
  """Get the necessary command/environment to set the CPUs affinity.
1373
1493
 
1374
1494
  :param int taskid: the task number
@@ -1421,7 +1541,9 @@ class OSExtended(System):
1421
1541
  netstat may not be implemented.
1422
1542
  """
1423
1543
  if self.netstatsinfo is None:
1424
- raise NotImplementedError('This function is not implemented on this system.')
1544
+ raise NotImplementedError(
1545
+ "This function is not implemented on this system."
1546
+ )
1425
1547
  return self.netstatsinfo.available_localport()
1426
1548
 
1427
1549
  def check_localport(self, port):
@@ -1431,12 +1553,14 @@ class OSExtended(System):
1431
1553
  netstat may not be implemented.
1432
1554
  """
1433
1555
  if self.netstatsinfo is None:
1434
- raise NotImplementedError('This function is not implemented on this system.')
1556
+ raise NotImplementedError(
1557
+ "This function is not implemented on this system."
1558
+ )
1435
1559
  return self.netstatsinfo.check_localport(port)
1436
1560
 
1437
1561
  def clear(self):
1438
1562
  """Clone of the unix eponymous command."""
1439
- self._os.system('clear')
1563
+ self._os.system("clear")
1440
1564
 
1441
1565
  @property
1442
1566
  def cls(self):
@@ -1444,8 +1568,14 @@ class OSExtended(System):
1444
1568
  self.clear()
1445
1569
  return None
1446
1570
 
1447
- def rawopts(self, cmdline=None, defaults=None,
1448
- isnone=isnonedef, istrue=istruedef, isfalse=isfalsedef):
1571
+ def rawopts(
1572
+ self,
1573
+ cmdline=None,
1574
+ defaults=None,
1575
+ isnone=isnonedef,
1576
+ istrue=istruedef,
1577
+ isfalse=isfalsedef,
1578
+ ):
1449
1579
  """Parse a simple options command line that looks like `` key=value``.
1450
1580
 
1451
1581
  :param str cmdline: The command line to be processed (if *None*, ``sys.argv``
@@ -1461,11 +1591,13 @@ class OSExtended(System):
1461
1591
  try:
1462
1592
  opts.update(defaults)
1463
1593
  except (ValueError, TypeError):
1464
- logger.warning('Could not update options default: %s', defaults)
1594
+ logger.warning(
1595
+ "Could not update options default: %s", defaults
1596
+ )
1465
1597
 
1466
1598
  if cmdline is None:
1467
1599
  cmdline = sys.argv[1:]
1468
- opts.update(dict([x.split('=') for x in cmdline]))
1600
+ opts.update(dict([x.split("=") for x in cmdline]))
1469
1601
  for k, v in opts.items():
1470
1602
  if v not in (None, True, False):
1471
1603
  if istrue.match(v):
@@ -1479,9 +1611,10 @@ class OSExtended(System):
1479
1611
  def is_iofile(self, iocandidate):
1480
1612
  """Check if actual **iocandidate** is a valid filename or io stream."""
1481
1613
  return iocandidate is not None and (
1482
- (isinstance(iocandidate, str) and self.path.exists(iocandidate)) or
1483
- isinstance(iocandidate, io.IOBase) or
1484
- isinstance(iocandidate, io.BytesIO) or isinstance(iocandidate, io.StringIO)
1614
+ (isinstance(iocandidate, str) and self.path.exists(iocandidate))
1615
+ or isinstance(iocandidate, io.IOBase)
1616
+ or isinstance(iocandidate, io.BytesIO)
1617
+ or isinstance(iocandidate, io.StringIO)
1485
1618
  )
1486
1619
 
1487
1620
  @contextlib.contextmanager
@@ -1512,20 +1645,28 @@ class OSExtended(System):
1512
1645
  hostname = self.glove.default_fthost
1513
1646
  if not hostname:
1514
1647
  if fatal:
1515
- raise ValueError('An *hostname* must be provided one way or another')
1648
+ raise ValueError(
1649
+ "An *hostname* must be provided one way or another"
1650
+ )
1516
1651
  return hostname
1517
1652
 
1518
1653
  def fix_ftuser(self, hostname, logname, fatal=True, defaults_to_user=True):
1519
1654
  """Given *hostname*, if *logname* is None, tries to find a default value for it."""
1520
1655
  if logname is None:
1521
1656
  if self.glove is not None:
1522
- logname = self.glove.getftuser(hostname, defaults_to_user=defaults_to_user)
1657
+ logname = self.glove.getftuser(
1658
+ hostname, defaults_to_user=defaults_to_user
1659
+ )
1523
1660
  else:
1524
1661
  if fatal:
1525
- raise ValueError("Either a *logname* or a glove must be set-up")
1662
+ raise ValueError(
1663
+ "Either a *logname* or a glove must be set-up"
1664
+ )
1526
1665
  return logname
1527
1666
 
1528
- def ftp(self, hostname, logname=None, delayed=False, port=DEFAULT_FTP_PORT):
1667
+ def ftp(
1668
+ self, hostname, logname=None, delayed=False, port=DEFAULT_FTP_PORT
1669
+ ):
1529
1670
  """Return an FTP client object.
1530
1671
 
1531
1672
  :param str hostname: the remote host's name for FTP.
@@ -1555,20 +1696,36 @@ class OSExtended(System):
1555
1696
  logname = self.fix_ftuser(hostname, logname)
1556
1697
  if port is None:
1557
1698
  port = DEFAULT_FTP_PORT
1558
- if self.ftpflavour == FTP_FLAVOUR.CONNECTION_POOLS and self._current_ftppool is not None:
1559
- return self._current_ftppool.deal(hostname, logname, port=port, delayed=delayed)
1699
+ if (
1700
+ self.ftpflavour == FTP_FLAVOUR.CONNECTION_POOLS
1701
+ and self._current_ftppool is not None
1702
+ ):
1703
+ return self._current_ftppool.deal(
1704
+ hostname, logname, port=port, delayed=delayed
1705
+ )
1560
1706
  else:
1561
- ftpclass = AutoRetriesFtp if self.ftpflavour != FTP_FLAVOUR.STD else StdFtp
1707
+ ftpclass = (
1708
+ AutoRetriesFtp
1709
+ if self.ftpflavour != FTP_FLAVOUR.STD
1710
+ else StdFtp
1711
+ )
1562
1712
  ftpbox = ftpclass(self, hostname, port=port)
1563
1713
  rc = ftpbox.fastlogin(logname, delayed=delayed)
1564
1714
  if rc:
1565
1715
  return ftpbox
1566
1716
  else:
1567
- logger.warning('Could not login on %s as %s [%s]', hostname, logname, str(rc))
1717
+ logger.warning(
1718
+ "Could not login on %s as %s [%s]",
1719
+ hostname,
1720
+ logname,
1721
+ str(rc),
1722
+ )
1568
1723
  return None
1569
1724
 
1570
1725
  @contextlib.contextmanager
1571
- def ftpcontext(self, hostname, logname=None, delayed=False, port=DEFAULT_FTP_PORT):
1726
+ def ftpcontext(
1727
+ self, hostname, logname=None, delayed=False, port=DEFAULT_FTP_PORT
1728
+ ):
1572
1729
  """Create an FTP object and close it when the context exits.
1573
1730
 
1574
1731
  For a description of the context's arguments, see :func:`ftp`.
@@ -1591,8 +1748,15 @@ class OSExtended(System):
1591
1748
  return remote
1592
1749
 
1593
1750
  @fmtshcmd
1594
- def ftget(self, source, destination, hostname=None, logname=None, port=DEFAULT_FTP_PORT,
1595
- cpipeline=None):
1751
+ def ftget(
1752
+ self,
1753
+ source,
1754
+ destination,
1755
+ hostname=None,
1756
+ logname=None,
1757
+ port=DEFAULT_FTP_PORT,
1758
+ cpipeline=None,
1759
+ ):
1596
1760
  """Proceed to a direct ftp get on the specified target (using Vortex's FTP client).
1597
1761
 
1598
1762
  :param str source: the remote path to get data
@@ -1616,10 +1780,12 @@ class OSExtended(System):
1616
1780
  if cpipeline is None:
1617
1781
  rc = ftp.get(source, destination)
1618
1782
  else:
1619
- with cpipeline.stream2uncompress(destination) as cdestination:
1783
+ with cpipeline.stream2uncompress(
1784
+ destination
1785
+ ) as cdestination:
1620
1786
  rc = ftp.get(source, cdestination)
1621
1787
  except ftplib.all_errors as e:
1622
- logger.warning('An FTP error occured: %s', str(e))
1788
+ logger.warning("An FTP error occured: %s", str(e))
1623
1789
  rc = False
1624
1790
  finally:
1625
1791
  ftp.close()
@@ -1628,8 +1794,16 @@ class OSExtended(System):
1628
1794
  return False
1629
1795
 
1630
1796
  @fmtshcmd
1631
- def ftput(self, source, destination, hostname=None, logname=None, port=DEFAULT_FTP_PORT,
1632
- cpipeline=None, sync=False): # @UnusedVariable
1797
+ def ftput(
1798
+ self,
1799
+ source,
1800
+ destination,
1801
+ hostname=None,
1802
+ logname=None,
1803
+ port=DEFAULT_FTP_PORT,
1804
+ cpipeline=None,
1805
+ sync=False,
1806
+ ): # @UnusedVariable
1633
1807
  """Proceed to a direct ftp put on the specified target (using Vortex's FTP client).
1634
1808
 
1635
1809
  :param source: The source of data (either a path to file or a
@@ -1655,33 +1829,45 @@ class OSExtended(System):
  if cpipeline is None:
  rc = ftp.put(source, destination)
  else:
- with cpipeline.compress2stream(source, iosponge=True) as csource:
+ with cpipeline.compress2stream(
+ source, iosponge=True
+ ) as csource:
  # csource is an IoSponge consequently the size attribute exists
- rc = ftp.put(csource, destination, size=csource.size)
+ rc = ftp.put(
+ csource, destination, size=csource.size
+ )
  except ftplib.all_errors as e:
- logger.warning('An FTP error occured: %s', str(e))
+ logger.warning("An FTP error occured: %s", str(e))
  rc = False
  finally:
  ftp.close()
  else:
- raise OSError('No such file or directory: {!r}'.format(source))
+ raise OSError("No such file or directory: {!r}".format(source))
  return rc

  def ftspool_cache(self):
  """Return a cache object for the FtSpool."""
- if self._ftspool_cache is None:
- self._ftspool_cache = footprints.proxy.cache(kind='ftstash')
+ if self._ftspool_cache is not None:
+ return self._ftspool_cache
+ self._ftspool_cache = footprints.proxy.cache(
+ entry=os.path.join(
+ vortex.data.stores.get_cache_location(), "ftspool"
+ ),
+ )
  return self._ftspool_cache

  def copy2ftspool(self, source, nest=False, **kwargs):
  """Make a copy of **source** to the FtSpool cache."""
- h = hashlib.new('md5')
- h.update(source.encode(encoding='utf-8'))
- outputname = 'vortex_{:s}_P{:06d}_{:s}'.format(date.now().strftime('%Y%m%d%H%M%S-%f'),
- self.getpid(), h.hexdigest())
+ h = hashlib.new("md5")
+ h.update(source.encode(encoding="utf-8"))
+ outputname = "vortex_{:s}_P{:06d}_{:s}".format(
+ date.now().strftime("%Y%m%d%H%M%S-%f"),
+ self.getpid(),
+ h.hexdigest(),
+ )
  if nest:
  outputname = self.path.join(outputname, self.path.basename(source))
- kwargs['intent'] = 'in' # Force intent=in
+ kwargs["intent"] = "in" # Force intent=in
  if self.ftspool_cache().insert(outputname, source, **kwargs):
  return self.ftspool_cache().fullpath(outputname)
  else:
@@ -1691,42 +1877,69 @@ class OSExtended(System):
  """Given **source** and **destination**, is FtServ usable ?"""
  return isinstance(source, str) and isinstance(destination, str)

- def ftserv_put(self, source, destination, hostname=None, logname=None, port=None,
- specialshell=None, sync=False):
+ def ftserv_put(
+ self,
+ source,
+ destination,
+ hostname=None,
+ logname=None,
+ port=None,
+ specialshell=None,
+ sync=False,
+ ):
  """Asynchronous put of a file using FtServ."""
  if self.ftserv_allowed(source, destination):
  if self.path.exists(source):
- ftcmd = self.ftputcmd or 'ftput'
+ ftcmd = self.ftputcmd or "ftput"
  hostname = self.fix_fthostname(hostname, fatal=False)
  logname = self.fix_ftuser(hostname, logname, fatal=False)
  extras = list()
  if not sync:
- extras.extend(['-q', ])
+ extras.extend(
+ [
+ "-q",
+ ]
+ )
  if hostname:
  if port is not None:
- hostname += ':{:s}'.format(port)
- extras.extend(['-h', hostname])
+ hostname += ":{:s}".format(port)
+ extras.extend(["-h", hostname])
  if logname:
- extras.extend(['-u', logname])
+ extras.extend(["-u", logname])
  if specialshell:
- extras.extend(['-s', specialshell])
+ extras.extend(["-s", specialshell])
  # Remove ~/ and ~logname/ from the destinations' path
- actual_dest = re.sub('^~/+', '', destination)
+ actual_dest = re.sub("^~/+", "", destination)
  if logname:
- actual_dest = re.sub('^~{:s}/+'.format(logname), '', actual_dest)
+ actual_dest = re.sub(
+ "^~{:s}/+".format(logname), "", actual_dest
+ )
  try:
- rc = self.spawn([ftcmd,
- '-o', 'mkdir', ] + # Automatically create subdirectories
- extras + [source, actual_dest], output=False)
+ rc = self.spawn(
+ [
+ ftcmd,
+ "-o",
+ "mkdir",
+ ] # Automatically create subdirectories
+ + extras
+ + [source, actual_dest],
+ output=False,
+ )
  except ExecutionError:
  rc = False
  else:
- raise OSError('No such file or directory: {!s}'.format(source))
+ raise OSError("No such file or directory: {!s}".format(source))
  else:
- raise OSError('Source or destination is not a plain file path: {!r}'.format(source))
+ raise OSError(
+ "Source or destination is not a plain file path: {!r}".format(
+ source
+ )
+ )
  return rc

- def ftserv_get(self, source, destination, hostname=None, logname=None, port=None):
+ def ftserv_get(
+ self, source, destination, hostname=None, logname=None, port=None
+ ):
  """Get a file using FtServ."""
  if self.ftserv_allowed(source, destination):
  if self.filecocoon(destination):
@@ -1736,67 +1949,103 @@ class OSExtended(System):
  extras = list()
  if hostname:
  if port is not None:
- hostname += ':{:s}'.format(port)
- extras.extend(['-h', hostname])
+ hostname += ":{:s}".format(port)
+ extras.extend(["-h", hostname])
  if logname:
- extras.extend(['-u', logname])
- ftcmd = self.ftgetcmd or 'ftget'
+ extras.extend(["-u", logname])
+ ftcmd = self.ftgetcmd or "ftget"
  try:
- rc = self.spawn([ftcmd, ] + extras + [source, destination], output=False)
+ rc = self.spawn(
+ [
+ ftcmd,
+ ]
+ + extras
+ + [source, destination],
+ output=False,
+ )
  except ExecutionError:
  rc = False
  else:
- raise OSError('Could not cocoon: {!s}'.format(destination))
+ raise OSError("Could not cocoon: {!s}".format(destination))
  else:
- raise OSError('Source or destination is not a plain file path: {!r}'.format(source))
+ raise OSError(
+ "Source or destination is not a plain file path: {!r}".format(
+ source
+ )
+ )
  return rc

- def ftserv_batchget(self, source, destination, hostname=None, logname=None, port=None):
+ def ftserv_batchget(
+ self, source, destination, hostname=None, logname=None, port=None
+ ):
  """Get a list of files using FtServ.

  :note: **source** and **destination** are list or tuple.
  """
- if all([self.ftserv_allowed(s, d) for s, d in zip(source, destination)]):
+ if all(
+ [self.ftserv_allowed(s, d) for s, d in zip(source, destination)]
+ ):
  for d in destination:
  if not self.filecocoon(d):
- raise OSError('Could not cocoon: {!s}'.format(d))
+ raise OSError("Could not cocoon: {!s}".format(d))
  extras = list()
  hostname = self.fix_fthostname(hostname, fatal=False)
  logname = self.fix_ftuser(hostname, logname, fatal=False)
  if hostname:
  if port is not None:
- hostname += ':{:s}'.format(port)
- extras.extend(['-h', hostname])
+ hostname += ":{:s}".format(port)
+ extras.extend(["-h", hostname])
  if logname:
- extras.extend(['-u', logname])
- ftcmd = self.ftgetcmd or 'ftget'
- plocale = locale.getlocale()[1] or 'ascii'
- with tempfile.TemporaryFile(dir=self.path.dirname(self.path.abspath(destination[0])),
- mode='wb') as tmpio:
- tmpio.writelines(['{:s} {:s}\n'.format(s, d).encode(plocale)
- for s, d in zip(source, destination)])
+ extras.extend(["-u", logname])
+ ftcmd = self.ftgetcmd or "ftget"
+ plocale = locale.getlocale()[1] or "ascii"
+ with tempfile.TemporaryFile(
+ dir=self.path.dirname(self.path.abspath(destination[0])),
+ mode="wb",
+ ) as tmpio:
+ tmpio.writelines(
+ [
+ "{:s} {:s}\n".format(s, d).encode(plocale)
+ for s, d in zip(source, destination)
+ ]
+ )
  tmpio.seek(0)
- with tempfile.TemporaryFile(dir=self.path.dirname(self.path.abspath(destination[0])),
- mode='w+b') as tmpoutput:
+ with tempfile.TemporaryFile(
+ dir=self.path.dirname(self.path.abspath(destination[0])),
+ mode="w+b",
+ ) as tmpoutput:
  try:
- rc = self.spawn([ftcmd, ] + extras, output=tmpoutput, stdin=tmpio)
+ rc = self.spawn(
+ [
+ ftcmd,
+ ]
+ + extras,
+ output=tmpoutput,
+ stdin=tmpio,
+ )
  except ExecutionError:
  rc = False
  # Process output data
  tmpoutput.seek(0)
  ft_outputs = tmpoutput.read()
- ft_outputs = ft_outputs.decode(locale.getlocale()[1] or 'ascii', 'replace')
- logger.info('Here is the ftget command output: \n%s', ft_outputs)
+ ft_outputs = ft_outputs.decode(
+ locale.getlocale()[1] or "ascii", "replace"
+ )
+ logger.info("Here is the ftget command output: \n%s", ft_outputs)
  # Expand the return codes
  if rc:
- x_rc = [True, ] * len(source)
+ x_rc = [
+ True,
+ ] * len(source)
  else:
- ack_re = re.compile(r'.*FT_(OK|ABORT)\s*:\s*GET\s+(.*)$')
+ ack_re = re.compile(r".*FT_(OK|ABORT)\s*:\s*GET\s+(.*)$")
  ack_lines = dict()
- for line in ft_outputs.split('\n'):
+ for line in ft_outputs.split("\n"):
  ack_match = ack_re.match(line)
  if ack_match:
- ack_lines[ack_match.group(2)] = ack_match.group(1) == 'OK'
+ ack_lines[ack_match.group(2)] = (
+ ack_match.group(1) == "OK"
+ )
  x_rc = []
  for a_source in source:
  my_rc = None
@@ -1806,7 +2055,11 @@ class OSExtended(System):
  break
  x_rc.append(my_rc)
  else:
- raise OSError('Source or destination is not a plain file path: {!r}'.format(source))
+ raise OSError(
+ "Source or destination is not a plain file path: {!r}".format(
+ source
+ )
+ )
  return x_rc

  def rawftput_worthy(self, source, destination):
@@ -1814,8 +2067,16 @@ class OSExtended(System):
  return self.ftraw and self.ftserv_allowed(source, destination)

  @fmtshcmd
- def rawftput(self, source, destination, hostname=None, logname=None, port=None,
- cpipeline=None, sync=False):
+ def rawftput(
+ self,
+ source,
+ destination,
+ hostname=None,
+ logname=None,
+ port=None,
+ cpipeline=None,
+ sync=False,
+ ):
  """Proceed with some external ftput command on the specified target.

  :param str source: Path to the source filename
@@ -1829,21 +2090,43 @@ class OSExtended(System):
  """
  if cpipeline is not None:
  if cpipeline.compress2rawftp(source):
- return self.ftserv_put(source, destination, hostname,
- logname=logname, port=port,
- specialshell=cpipeline.compress2rawftp(source),
- sync=sync)
+ return self.ftserv_put(
+ source,
+ destination,
+ hostname,
+ logname=logname,
+ port=port,
+ specialshell=cpipeline.compress2rawftp(source),
+ sync=sync,
+ )
  else:
  if port is None:
  port = DEFAULT_FTP_PORT
- return self.ftput(source, destination, hostname, logname=logname,
- port=port, cpipeline=cpipeline, sync=sync)
+ return self.ftput(
+ source,
+ destination,
+ hostname,
+ logname=logname,
+ port=port,
+ cpipeline=cpipeline,
+ sync=sync,
+ )
  else:
- return self.ftserv_put(source, destination, hostname, logname,
- port=port, sync=sync)
+ return self.ftserv_put(
+ source, destination, hostname, logname, port=port, sync=sync
+ )

- def smartftput(self, source, destination, hostname=None, logname=None, port=None,
- cpipeline=None, sync=False, fmt=None):
+ def smartftput(
+ self,
+ source,
+ destination,
+ hostname=None,
+ logname=None,
+ port=None,
+ cpipeline=None,
+ sync=False,
+ fmt=None,
+ ):
  """Select the best alternative between ``ftput`` and ``rawftput``.

  :param source: The source of data (either a path to file or a
@@ -1867,21 +2150,48 @@ class OSExtended(System):
  * **destination** is a string (as opposed to a File like object)
  """
  if self.rawftput_worthy(source, destination):
- return self.rawftput(source, destination, hostname=hostname, logname=logname,
- port=port, cpipeline=cpipeline, sync=sync, fmt=fmt)
+ return self.rawftput(
+ source,
+ destination,
+ hostname=hostname,
+ logname=logname,
+ port=port,
+ cpipeline=cpipeline,
+ sync=sync,
+ fmt=fmt,
+ )
  else:
  if port is None:
  port = DEFAULT_FTP_PORT
- return self.ftput(source, destination, hostname=hostname, logname=logname,
- port=port, cpipeline=cpipeline, sync=sync, fmt=fmt)
+ return self.ftput(
+ source,
+ destination,
+ hostname=hostname,
+ logname=logname,
+ port=port,
+ cpipeline=cpipeline,
+ sync=sync,
+ fmt=fmt,
+ )

  def rawftget_worthy(self, source, destination, cpipeline=None):
  """Is it allowed to use FtServ given **source** and **destination**."""
- return self.ftraw and cpipeline is None and self.ftserv_allowed(source, destination)
+ return (
+ self.ftraw
+ and cpipeline is None
+ and self.ftserv_allowed(source, destination)
+ )

  @fmtshcmd
- def rawftget(self, source, destination, hostname=None, logname=None, port=None,
- cpipeline=None):
+ def rawftget(
+ self,
+ source,
+ destination,
+ hostname=None,
+ logname=None,
+ port=None,
+ cpipeline=None,
+ ):
  """Proceed with some external ftget command on the specified target.

  :param str source: the remote path to get data
@@ -1893,11 +2203,20 @@ class OSExtended(System):
1893
2203
  """
1894
2204
  if cpipeline is not None:
1895
2205
  raise OSError("cpipeline is not supported by this method.")
1896
- return self.ftserv_get(source, destination, hostname, logname, port=port)
2206
+ return self.ftserv_get(
2207
+ source, destination, hostname, logname, port=port
2208
+ )
1897
2209
 
1898
2210
  @fmtshcmd
1899
- def batchrawftget(self, source, destination, hostname=None, logname=None,
1900
- port=None, cpipeline=None):
2211
+ def batchrawftget(
2212
+ self,
2213
+ source,
2214
+ destination,
2215
+ hostname=None,
2216
+ logname=None,
2217
+ port=None,
2218
+ cpipeline=None,
2219
+ ):
1901
2220
  """Proceed with some external ftget command on the specified target.
1902
2221
 
1903
2222
  :param source: A list of remote paths to get data
@@ -1909,10 +2228,20 @@ class OSExtended(System):
1909
2228
  """
1910
2229
  if cpipeline is not None:
1911
2230
  raise OSError("cpipeline is not supported by this method.")
1912
- return self.ftserv_batchget(source, destination, hostname, logname, port=port)
2231
+ return self.ftserv_batchget(
2232
+ source, destination, hostname, logname, port=port
2233
+ )
1913
2234
 
1914
- def smartftget(self, source, destination, hostname=None, logname=None, port=None,
1915
- cpipeline=None, fmt=None):
2235
+ def smartftget(
2236
+ self,
2237
+ source,
2238
+ destination,
2239
+ hostname=None,
2240
+ logname=None,
2241
+ port=None,
2242
+ cpipeline=None,
2243
+ fmt=None,
2244
+ ):
1916
2245
  """Select the best alternative between ``ftget`` and ``rawftget``.
1917
2246
 
1918
2247
  :param str source: the remote path to get data
@@ -1937,16 +2266,38 @@ class OSExtended(System):
1937
2266
  """
1938
2267
  if self.rawftget_worthy(source, destination, cpipeline):
1939
2268
  # FtServ is uninteresting when dealing with compression
1940
- return self.rawftget(source, destination, hostname=hostname, logname=logname,
1941
- port=port, cpipeline=cpipeline, fmt=fmt)
2269
+ return self.rawftget(
2270
+ source,
2271
+ destination,
2272
+ hostname=hostname,
2273
+ logname=logname,
2274
+ port=port,
2275
+ cpipeline=cpipeline,
2276
+ fmt=fmt,
2277
+ )
1942
2278
  else:
1943
2279
  if port is None:
1944
2280
  port = DEFAULT_FTP_PORT
1945
- return self.ftget(source, destination, hostname=hostname, logname=logname,
1946
- port=port, cpipeline=cpipeline, fmt=fmt)
2281
+ return self.ftget(
2282
+ source,
2283
+ destination,
2284
+ hostname=hostname,
2285
+ logname=logname,
2286
+ port=port,
2287
+ cpipeline=cpipeline,
2288
+ fmt=fmt,
2289
+ )
1947
2290
 
1948
- def smartbatchftget(self, source, destination, hostname=None, logname=None,
1949
- port=None, cpipeline=None, fmt=None):
2291
+ def smartbatchftget(
2292
+ self,
2293
+ source,
2294
+ destination,
2295
+ hostname=None,
2296
+ logname=None,
2297
+ port=None,
2298
+ cpipeline=None,
2299
+ fmt=None,
2300
+ ):
1950
2301
  """
1951
2302
  Select the best alternative between ``ftget`` and ``batchrawftget``
1952
2303
  when retrieving several files.
@@ -1964,18 +2315,37 @@ class OSExtended(System):
1964
2315
  uncompress the data during the file transfer.
1965
2316
  :param str fmt: The format of data.
1966
2317
  """
1967
- if all([self.rawftget_worthy(s, d, cpipeline) for s, d in zip(source, destination)]):
2318
+ if all(
2319
+ [
2320
+ self.rawftget_worthy(s, d, cpipeline)
2321
+ for s, d in zip(source, destination)
2322
+ ]
2323
+ ):
1968
2324
  # FtServ is uninteresting when dealing with compression
1969
- return self.batchrawftget(source, destination, hostname=hostname, logname=logname,
1970
- port=None, cpipeline=cpipeline, fmt=fmt)
2325
+ return self.batchrawftget(
2326
+ source,
2327
+ destination,
2328
+ hostname=hostname,
2329
+ logname=logname,
2330
+ port=None,
2331
+ cpipeline=cpipeline,
2332
+ fmt=fmt,
2333
+ )
1971
2334
  else:
1972
2335
  rc = True
1973
2336
  if port is None:
1974
2337
  port = DEFAULT_FTP_PORT
1975
2338
  with self.ftppool():
1976
2339
  for s, d in zip(source, destination):
1977
- rc = rc and self.ftget(s, d, hostname=hostname, logname=logname,
1978
- port=port, cpipeline=cpipeline, fmt=fmt)
2340
+ rc = rc and self.ftget(
2341
+ s,
2342
+ d,
2343
+ hostname=hostname,
2344
+ logname=logname,
2345
+ port=port,
2346
+ cpipeline=cpipeline,
2347
+ fmt=fmt,
2348
+ )
1979
2349
  return rc
1980
2350
 
1981
2351
  def ssh(self, hostname, logname=None, *args, **kw):
@@ -1993,7 +2363,9 @@ class OSExtended(System):
1993
2363
  return AssistedSsh(self, hostname, logname, *args, **kw)
1994
2364
 
1995
2365
  @fmtshcmd
1996
- def scpput(self, source, destination, hostname, logname=None, cpipeline=None):
2366
+ def scpput(
2367
+ self, source, destination, hostname, logname=None, cpipeline=None
2368
+ ):
1997
2369
  """Perform an scp to the specified target.
1998
2370
 
1999
2371
  :param source: The source of data (either a path to file or a
@@ -2005,14 +2377,16 @@ class OSExtended(System):
2005
2377
  :param CompressionPipeline cpipeline: If not *None*, the object used to
2006
2378
  compress the data during the file transfer (default: *None*).
2007
2379
  """
2008
- logname = self.fix_ftuser(hostname, logname, fatal=False, defaults_to_user=False)
2009
- msg = '[hostname={!s} logname={!s}]'.format(hostname, logname)
2380
+ logname = self.fix_ftuser(
2381
+ hostname, logname, fatal=False, defaults_to_user=False
2382
+ )
2383
+ msg = "[hostname={!s} logname={!s}]".format(hostname, logname)
2010
2384
  ssh = self.ssh(hostname, logname)
2011
2385
  if isinstance(source, str) and cpipeline is None:
2012
- self.stderr('scpput', source, destination, msg)
2386
+ self.stderr("scpput", source, destination, msg)
2013
2387
  return ssh.scpput(source, destination)
2014
2388
  else:
2015
- self.stderr('scpput_stream', source, destination, msg)
2389
+ self.stderr("scpput_stream", source, destination, msg)
2016
2390
  if cpipeline is None:
2017
2391
  return ssh.scpput_stream(source, destination)
2018
2392
  else:
@@ -2020,7 +2394,9 @@ class OSExtended(System):
2020
2394
  return ssh.scpput_stream(csource, destination)
2021
2395
 
2022
2396
  @fmtshcmd
2023
- def scpget(self, source, destination, hostname, logname=None, cpipeline=None):
2397
+ def scpget(
2398
+ self, source, destination, hostname, logname=None, cpipeline=None
2399
+ ):
2024
2400
  """Perform an scp to get the specified source.
2025
2401
 
2026
2402
  :param str source: the remote path to get data
@@ -2032,14 +2408,16 @@ class OSExtended(System):
2032
2408
  :param CompressionPipeline cpipeline: If not *None*, the object used to
2033
2409
  uncompress the data during the file transfer (default: *None*).
2034
2410
  """
2035
- logname = self.fix_ftuser(hostname, logname, fatal=False, defaults_to_user=False)
2036
- msg = '[hostname={!s} logname={!s}]'.format(hostname, logname)
2411
+ logname = self.fix_ftuser(
2412
+ hostname, logname, fatal=False, defaults_to_user=False
2413
+ )
2414
+ msg = "[hostname={!s} logname={!s}]".format(hostname, logname)
2037
2415
  ssh = self.ssh(hostname, logname)
2038
2416
  if isinstance(destination, str) and cpipeline is None:
2039
- self.stderr('scpget', source, destination, msg)
2417
+ self.stderr("scpget", source, destination, msg)
2040
2418
  return ssh.scpget(source, destination)
2041
2419
  else:
2042
- self.stderr('scpget_stream', source, destination, msg)
2420
+ self.stderr("scpget_stream", source, destination, msg)
2043
2421
  if cpipeline is None:
2044
2422
  return ssh.scpget_stream(source, destination)
2045
2423
  else:
@@ -2048,7 +2426,7 @@ class OSExtended(System):
2048
2426
 
2049
2427
  def softlink(self, source, destination):
2050
2428
  """Set a symbolic link if **source** is not **destination**."""
2051
- self.stderr('softlink', source, destination)
2429
+ self.stderr("softlink", source, destination)
2052
2430
  if source == destination:
2053
2431
  return False
2054
2432
  else:
@@ -2057,7 +2435,7 @@ class OSExtended(System):
2057
2435
  def size(self, filepath):
2058
2436
  """Returns the actual size in bytes of the specified **filepath**."""
2059
2437
  filepath = self.path.expanduser(filepath)
2060
- self.stderr('size', filepath)
2438
+ self.stderr("size", filepath)
2061
2439
  try:
2062
2440
  return self.stat(filepath).st_size
2063
2441
  except Exception:
@@ -2076,7 +2454,9 @@ class OSExtended(System):
2076
2454
  total_size = self.size(objpath)
2077
2455
  for dirpath, dirnames, filenames in self.walk(objpath):
2078
2456
  for f in filenames + dirnames:
2079
- total_size += self.lstat(self.path.join(dirpath, f)).st_size
2457
+ total_size += self.lstat(
2458
+ self.path.join(dirpath, f)
2459
+ ).st_size
2080
2460
  return total_size
2081
2461
  return self.lstat(objpath).st_size
2082
2462
 
@@ -2084,8 +2464,8 @@ class OSExtended(System):
2084
2464
  """Normalises path name of **dirpath** and recursively creates this directory."""
2085
2465
  normdir = self.path.normpath(self.path.expanduser(dirpath))
2086
2466
  if normdir and not self.path.isdir(normdir):
2087
- logger.debug('Cocooning directory %s', normdir)
2088
- self.stderr('mkdir', normdir)
2467
+ logger.debug("Cocooning directory %s", normdir)
2468
+ self.stderr("mkdir", normdir)
2089
2469
  try:
2090
2470
  self.makedirs(normdir)
2091
2471
  return True
@@ -2102,17 +2482,17 @@ class OSExtended(System):
2102
2482
  """Normalises path name of ``destination`` and creates **destination**'s directory."""
2103
2483
  return self.mkdir(self.path.dirname(self.path.expanduser(destination)))
2104
2484
 
2105
- _SAFE_SUFFIX_RE = re.compile('_[a-f0-9]{32}$')
2485
+ _SAFE_SUFFIX_RE = re.compile("_[a-f0-9]{32}$")
2106
2486
 
2107
2487
  def safe_filesuffix(self):
2108
2488
  """Returns a file suffix that should be unique across the system."""
2109
- return '_' + uuid.uuid1().hex
2489
+ return "_" + uuid.uuid1().hex
2110
2490
 
2111
2491
  def safe_fileaddsuffix(self, name):
2112
2492
  """Returns a file path that will look like name + a unique suffix."""
2113
2493
  d_name = self.path.dirname(name)
2114
2494
  b_name = self.path.basename(name)
2115
- b_name = self._SAFE_SUFFIX_RE.sub('', b_name)
2495
+ b_name = self._SAFE_SUFFIX_RE.sub("", b_name)
2116
2496
  return self.path.join(d_name, b_name + self.safe_filesuffix())
2117
2497
 
2118
2498
  def _validate_symlink_below(self, symlink, valid_below):
@@ -2125,20 +2505,26 @@ class OSExtended(System):
2125
2505
  """
2126
2506
  link_to = self._os.readlink(symlink)
2127
2507
  # Is it relative ?
2128
- if re.match('^([^{0:s}]|..{0:s}|.{0:s})'.format(re.escape(os.path.sep)),
2129
- link_to):
2508
+ if re.match(
2509
+ "^([^{0:s}]|..{0:s}|.{0:s})".format(re.escape(os.path.sep)),
2510
+ link_to,
2511
+ ):
2130
2512
  symlink_dir = self.path.realpath(
2131
- self.path.abspath(
2132
- self.path.dirname(symlink)
2133
- )
2513
+ self.path.abspath(self.path.dirname(symlink))
2134
2514
  )
2135
2515
  abspath_to = self.path.normpath(
2136
2516
  self.path.join(symlink_dir, link_to)
2137
2517
  )
2138
2518
  # Valid ?
2139
- valid = self.path.commonprefix([valid_below, abspath_to]) == valid_below
2140
- return (self.path.relpath(abspath_to, start=symlink_dir)
2141
- if valid else None)
2519
+ valid = (
2520
+ self.path.commonprefix([valid_below, abspath_to])
2521
+ == valid_below
2522
+ )
2523
+ return (
2524
+ self.path.relpath(abspath_to, start=symlink_dir)
2525
+ if valid
2526
+ else None
2527
+ )
2142
2528
  else:
2143
2529
  return None
2144
2530
 
@@ -2150,10 +2536,11 @@ class OSExtended(System):
2150
2536
 
2151
2537
  The destination directory must not already exist.
2152
2538
  """
2153
- self.stderr('_copydatatree', src, dst)
2539
+ self.stderr("_copydatatree", src, dst)
2154
2540
  with self.mute_stderr():
2155
- keep_symlinks_below = (keep_symlinks_below or
2156
- self.path.realpath(self.path.abspath(src)))
2541
+ keep_symlinks_below = keep_symlinks_below or self.path.realpath(
2542
+ self.path.abspath(src)
2543
+ )
2157
2544
  names = self._os.listdir(src)
2158
2545
  self._os.makedirs(dst)
2159
2546
  errors = []
@@ -2162,14 +2549,19 @@ class OSExtended(System):
2162
2549
  dstname = self._os.path.join(dst, name)
2163
2550
  try:
2164
2551
  if self.path.isdir(srcname):
2165
- self._copydatatree(srcname, dstname,
2166
- keep_symlinks_below=keep_symlinks_below)
2552
+ self._copydatatree(
2553
+ srcname,
2554
+ dstname,
2555
+ keep_symlinks_below=keep_symlinks_below,
2556
+ )
2167
2557
  elif self._os.path.islink(srcname):
2168
- linkto = self._validate_symlink_below(srcname, keep_symlinks_below)
2558
+ linkto = self._validate_symlink_below(
2559
+ srcname, keep_symlinks_below
2560
+ )
2169
2561
  if linkto is not None:
2170
2562
  self._os.symlink(linkto, dstname)
2171
2563
  else:
2172
- rc = self._sh.copyfile(srcname, dstname)
2564
+ self._sh.copyfile(srcname, dstname)
2173
2565
  else:
2174
2566
  # Will raise a SpecialFileError for unsupported file types
2175
2567
  self._sh.copyfile(srcname, dstname)
@@ -2191,7 +2583,7 @@ class OSExtended(System):
2191
2583
  """
2192
2584
  source = self.path.expanduser(source)
2193
2585
  destination = self.path.expanduser(destination)
2194
- self.stderr('rawcp', source, destination)
2586
+ self.stderr("rawcp", source, destination)
2195
2587
  tmp = self.safe_fileaddsuffix(destination)
2196
2588
  if self.path.isdir(source):
2197
2589
  self._copydatatree(source, tmp)
@@ -2218,29 +2610,35 @@ class OSExtended(System):
2218
2610
  If **destination** is a real-word file name (i.e. not e File-like object),
2219
2611
  the operation is atomic.
2220
2612
  """
2221
- self.stderr('hybridcp', source, destination)
2613
+ self.stderr("hybridcp", source, destination)
2222
2614
  if isinstance(source, str):
2223
2615
  if not self.path.exists(source):
2224
2616
  if not silent:
2225
- logger.error('Missing source %s', source)
2617
+ logger.error("Missing source %s", source)
2226
2618
  return False
2227
- source = open(self.path.expanduser(source), 'rb')
2619
+ source = open(self.path.expanduser(source), "rb")
2228
2620
  xsource = True
2229
2621
  else:
2230
2622
  xsource = False
2231
2623
  try:
2232
2624
  source.seek(0)
2233
2625
  except AttributeError:
2234
- logger.warning('Could not rewind io source before cp: ' + str(source))
2626
+ logger.warning(
2627
+ "Could not rewind io source before cp: " + str(source)
2628
+ )
2235
2629
  if isinstance(destination, str):
2236
2630
  if self.filecocoon(destination):
2237
2631
  # Write to a temp file
2238
2632
  original_dest = self.path.expanduser(destination)
2239
- tmp_dest = self.safe_fileaddsuffix(self.path.expanduser(destination))
2240
- destination = open(tmp_dest, 'wb')
2633
+ tmp_dest = self.safe_fileaddsuffix(
2634
+ self.path.expanduser(destination)
2635
+ )
2636
+ destination = open(tmp_dest, "wb")
2241
2637
  xdestination = True
2242
2638
  else:
2243
- logger.error('Could not create a cocoon for file %s', destination)
2639
+ logger.error(
2640
+ "Could not create a cocoon for file %s", destination
2641
+ )
2244
2642
  return False
2245
2643
  else:
2246
2644
  destination.seek(0)
@@ -2253,8 +2651,13 @@ class OSExtended(System):
2253
2651
  if xdestination:
2254
2652
  destination.close()
2255
2653
  # Move the tmp_file to the real destination
2256
- if not self.move(tmp_dest, original_dest): # Move is atomic for a file
2257
- logger.error('Cannot move the tmp file to the final destination %s', original_dest)
2654
+ if not self.move(
2655
+ tmp_dest, original_dest
2656
+ ): # Move is atomic for a file
2657
+ logger.error(
2658
+ "Cannot move the tmp file to the final destination %s",
2659
+ original_dest,
2660
+ )
2258
2661
  return False
2259
2662
  return rc
2260
2663
 
@@ -2265,7 +2668,7 @@ class OSExtended(System):
2265
2668
  return st1.st_dev == st2.st_dev and not self.path.islink(path1)
2266
2669
 
2267
2670
  def _rawcp_instead_of_hardlink(self, source, destination, securecopy=True):
2268
- self.stderr('rawcp_instead_of_hardlink', source, destination)
2671
+ self.stderr("rawcp_instead_of_hardlink", source, destination)
2269
2672
  if securecopy:
2270
2673
  rc = self.rawcp(source, destination)
2271
2674
  else:
@@ -2294,19 +2697,29 @@ class OSExtended(System):
2294
2697
  except OSError as e:
2295
2698
  if e.errno == errno.EMLINK:
2296
2699
  # Too many links
2297
- logger.warning('Too many links for the source file (%s).', source)
2700
+ logger.warning(
2701
+ "Too many links for the source file (%s).", source
2702
+ )
2298
2703
  if self.usr_file(source):
2299
- rc = self._rawcp_instead_of_hardlink(source, destination, securecopy=securecopy)
2704
+ rc = self._rawcp_instead_of_hardlink(
2705
+ source, destination, securecopy=securecopy
2706
+ )
2300
2707
  if rc:
2301
2708
  try:
2302
- logger.warning('Replacing the orignal file with a copy...')
2709
+ logger.warning(
2710
+ "Replacing the orignal file with a copy..."
2711
+ )
2303
2712
  self.move(destination, source)
2304
2713
  except OSError as ebis:
2305
2714
  if ebis.errno == errno.EACCES:
2306
2715
  # Permission denied
2307
- logger.warning('No permissions to create a copy of the source file (%s)',
2308
- source)
2309
- logger.warning('Going on with the copy instead of the link...')
2716
+ logger.warning(
2717
+ "No permissions to create a copy of the source file (%s)",
2718
+ source,
2719
+ )
2720
+ logger.warning(
2721
+ "Going on with the copy instead of the link..."
2722
+ )
2310
2723
  else:
2311
2724
  raise
2312
2725
  else:
@@ -2319,9 +2732,15 @@ class OSExtended(System):
2319
2732
  rc = self.path.samefile(source, destination)
2320
2733
  return rc
2321
2734
 
2322
- def hardlink(self, source, destination,
2323
- link_threshold=0, readonly=True, securecopy=True,
2324
- keep_symlinks_below=None):
2735
+ def hardlink(
2736
+ self,
2737
+ source,
2738
+ destination,
2739
+ link_threshold=0,
2740
+ readonly=True,
2741
+ securecopy=True,
2742
+ keep_symlinks_below=None,
2743
+ ):
2325
2744
  """Create hardlinks for both single files or directories.
2326
2745
 
2327
2746
  :param int link_threshold: if the source file size is smaller than
@@ -2337,10 +2756,17 @@ class OSExtended(System):
2337
2756
  directory (if omitted, **source** is used)
2338
2757
  """
2339
2758
  if self.path.isdir(source):
2340
- self.stderr('hardlink', source, destination,
2341
- '#', 'directory,', 'readonly={!s}'.format(readonly))
2342
- keep_symlinks_below = (keep_symlinks_below or
2343
- self.path.realpath(self.path.abspath(source)))
2759
+ self.stderr(
2760
+ "hardlink",
2761
+ source,
2762
+ destination,
2763
+ "#",
2764
+ "directory,",
2765
+ "readonly={!s}".format(readonly),
2766
+ )
2767
+ keep_symlinks_below = keep_symlinks_below or self.path.realpath(
2768
+ self.path.abspath(source)
2769
+ )
2344
2770
  with self.mute_stderr():
2345
2771
  # Mimics 'cp -al'
2346
2772
  names = self._os.listdir(source)
@@ -2350,30 +2776,53 @@ class OSExtended(System):
2350
2776
  srcname = self._os.path.join(source, name)
2351
2777
  dstname = self._os.path.join(destination, name)
2352
2778
  if self._os.path.islink(srcname):
2353
- linkto = self._validate_symlink_below(srcname, keep_symlinks_below)
2779
+ linkto = self._validate_symlink_below(
2780
+ srcname, keep_symlinks_below
2781
+ )
2354
2782
  if linkto is None:
2355
- link_target = self.path.join(self.path.dirname(srcname),
2356
- self._os.readlink(srcname))
2357
- rc = self.hardlink(link_target, dstname,
2358
- link_threshold=link_threshold,
2359
- readonly=readonly, securecopy=securecopy,
2360
- keep_symlinks_below=keep_symlinks_below)
2783
+ link_target = self.path.join(
2784
+ self.path.dirname(srcname),
2785
+ self._os.readlink(srcname),
2786
+ )
2787
+ rc = self.hardlink(
2788
+ link_target,
2789
+ dstname,
2790
+ link_threshold=link_threshold,
2791
+ readonly=readonly,
2792
+ securecopy=securecopy,
2793
+ keep_symlinks_below=keep_symlinks_below,
2794
+ )
2361
2795
  else:
2362
2796
  self._os.symlink(linkto, dstname)
2363
2797
  elif self.path.isdir(srcname):
2364
- rc = self.hardlink(srcname, dstname,
2365
- link_threshold=link_threshold,
2366
- readonly=readonly, securecopy=securecopy,
2367
- keep_symlinks_below=keep_symlinks_below)
2798
+ rc = self.hardlink(
2799
+ srcname,
2800
+ dstname,
2801
+ link_threshold=link_threshold,
2802
+ readonly=readonly,
2803
+ securecopy=securecopy,
2804
+ keep_symlinks_below=keep_symlinks_below,
2805
+ )
2368
2806
  else:
2369
- if link_threshold and self.size(srcname) < link_threshold:
2370
- rc = self._rawcp_instead_of_hardlink(srcname, dstname, securecopy=securecopy)
2807
+ if (
2808
+ link_threshold
2809
+ and self.size(srcname) < link_threshold
2810
+ ):
2811
+ rc = self._rawcp_instead_of_hardlink(
2812
+ srcname, dstname, securecopy=securecopy
2813
+ )
2371
2814
  else:
2372
- rc = self._safe_hardlink(srcname, dstname, securecopy=securecopy)
2815
+ rc = self._safe_hardlink(
2816
+ srcname, dstname, securecopy=securecopy
2817
+ )
2373
2818
  if readonly and rc:
2374
2819
  self.readonly(dstname)
2375
2820
  if not rc:
2376
- logger.error('Error while processing %s (rc=%s)', srcname, str(rc))
2821
+ logger.error(
2822
+ "Error while processing %s (rc=%s)",
2823
+ srcname,
2824
+ str(rc),
2825
+ )
2377
2826
  break
2378
2827
  if rc:
2379
2828
  self._sh.copystat(source, destination)
@@ -2381,16 +2830,27 @@ class OSExtended(System):
2381
2830
  return rc
2382
2831
  else:
2383
2832
  if link_threshold and self.size(source) < link_threshold:
2384
- rc = self._rawcp_instead_of_hardlink(source, destination, securecopy=securecopy)
2833
+ rc = self._rawcp_instead_of_hardlink(
2834
+ source, destination, securecopy=securecopy
2835
+ )
2385
2836
  else:
2386
- self.stderr('hardlink', source, destination)
2387
- rc = self._safe_hardlink(source, destination, securecopy=securecopy)
2837
+ self.stderr("hardlink", source, destination)
2838
+ rc = self._safe_hardlink(
2839
+ source, destination, securecopy=securecopy
2840
+ )
2388
2841
  if readonly and rc:
2389
2842
  self.readonly(destination)
2390
2843
  return rc
2391
2844
 
2392
- def _smartcp_cross_users_links_fallback(self, source, destination, smartcp_threshold, silent,
2393
- exc, tmp_destination=None):
2845
+ def _smartcp_cross_users_links_fallback(
2846
+ self,
2847
+ source,
2848
+ destination,
2849
+ smartcp_threshold,
2850
+ silent,
2851
+ exc,
2852
+ tmp_destination=None,
2853
+ ):
2394
2854
  """Catch errors related to Kernel configuration."""
2395
2855
  if (exc.errno == errno.EPERM) and not self.usr_file(source):
2396
2856
  # This is expected to fail if the fs.protected_hardlinks
@@ -2400,8 +2860,12 @@ class OSExtended(System):
2400
2860
  logger.info("Force System's allow_cross_users_links to False")
2401
2861
  self.allow_cross_users_links = False
2402
2862
  logger.info("Re-running the smartcp command")
2403
- return self.smartcp(source, destination,
2404
- smartcp_threshold=smartcp_threshold, silent=silent)
2863
+ return self.smartcp(
2864
+ source,
2865
+ destination,
2866
+ smartcp_threshold=smartcp_threshold,
2867
+ silent=silent,
2868
+ )
2405
2869
  else:
2406
2870
  raise
2407
2871
 
@@ -2415,30 +2879,39 @@ class OSExtended(System):
2415
2879
  When working on a file, the operation is atomic. When working on a
2416
2880
  directory some restrictions apply (see :meth:`rawcp`)
2417
2881
  """
2418
- self.stderr('smartcp', source, destination)
2882
+ self.stderr("smartcp", source, destination)
2419
2883
  if not isinstance(source, str) or not isinstance(destination, str):
2420
2884
  return self.hybridcp(source, destination)
2421
2885
  source = self.path.expanduser(source)
2422
2886
  if not self.path.exists(source):
2423
2887
  if not silent:
2424
- logger.error('Missing source %s', source)
2888
+ logger.error("Missing source %s", source)
2425
2889
  return False
2426
2890
  if self.filecocoon(destination):
2427
2891
  destination = self.path.expanduser(destination)
2428
2892
  if self.path.islink(source):
2429
2893
  # Solve the symbolic link: this may avoid a rawcp
2430
2894
  source = self.path.realpath(source)
2431
- if (self.is_samefs(source, destination) and
2432
- (self.allow_cross_users_links or self.usr_file(source))):
2895
+ if self.is_samefs(source, destination) and (
2896
+ self.allow_cross_users_links or self.usr_file(source)
2897
+ ):
2433
2898
  tmp_destination = self.safe_fileaddsuffix(destination)
2434
2899
  if self.path.isdir(source):
2435
2900
  try:
2436
- rc = self.hardlink(source, tmp_destination,
2437
- link_threshold=smartcp_threshold, securecopy=False)
2901
+ rc = self.hardlink(
2902
+ source,
2903
+ tmp_destination,
2904
+ link_threshold=smartcp_threshold,
2905
+ securecopy=False,
2906
+ )
2438
2907
  except OSError as e:
2439
2908
  rc = self._smartcp_cross_users_links_fallback(
2440
- source, destination,
2441
- smartcp_threshold, silent, e, tmp_destination=tmp_destination
2909
+ source,
2910
+ destination,
2911
+ smartcp_threshold,
2912
+ silent,
2913
+ e,
2914
+ tmp_destination=tmp_destination,
2442
2915
  )
2443
2916
  else:
2444
2917
  if rc:
@@ -2446,44 +2919,71 @@ class OSExtended(System):
2446
2919
  with self.secure_directory_move(destination):
2447
2920
  rc = self.move(tmp_destination, destination)
2448
2921
  if not rc:
2449
- logger.error('Cannot move the tmp directory to the final destination %s',
2450
- destination)
2451
- self.remove(tmp_destination) # Anyway, try to clean-up things
2922
+ logger.error(
2923
+ "Cannot move the tmp directory to the final destination %s",
2924
+ destination,
2925
+ )
2926
+ self.remove(
2927
+ tmp_destination
2928
+ ) # Anyway, try to clean-up things
2452
2929
  else:
2453
- logger.error('Cannot copy the data to the tmp directory %s', tmp_destination)
2454
- self.remove(tmp_destination) # Anyway, try to clean-up things
2930
+ logger.error(
2931
+ "Cannot copy the data to the tmp directory %s",
2932
+ tmp_destination,
2933
+ )
2934
+ self.remove(
2935
+ tmp_destination
2936
+ ) # Anyway, try to clean-up things
2455
2937
  return rc
2456
2938
  else:
2457
2939
  try:
2458
- rc = self.hardlink(source, tmp_destination,
2459
- link_threshold=smartcp_threshold, securecopy=False)
2940
+ rc = self.hardlink(
2941
+ source,
2942
+ tmp_destination,
2943
+ link_threshold=smartcp_threshold,
2944
+ securecopy=False,
2945
+ )
2460
2946
  except OSError as e:
2461
- rc = self._smartcp_cross_users_links_fallback(source, destination,
2462
- smartcp_threshold, silent, e)
2947
+ rc = self._smartcp_cross_users_links_fallback(
2948
+ source, destination, smartcp_threshold, silent, e
2949
+ )
2463
2950
  else:
2464
- rc = rc and self.move(tmp_destination, destination) # Move is atomic for a file
2951
+ rc = rc and self.move(
2952
+ tmp_destination, destination
2953
+ ) # Move is atomic for a file
2465
2954
  # On some systems, the temporary file may remain (if the
2466
2955
  # destination's inode is identical to the tmp_destination's
2467
2956
  # inode). The following call to remove will remove leftovers.
2468
2957
  self.remove(tmp_destination)
2469
2958
  return rc
2470
2959
  else:
2471
- rc = self.rawcp(source, destination) # Rawcp is atomic as much as possible
2960
+ rc = self.rawcp(
2961
+ source, destination
2962
+ ) # Rawcp is atomic as much as possible
2472
2963
  if rc:
2473
2964
  if self.path.isdir(destination):
2474
2965
  for copiedfile in self.ffind(destination):
2475
- if not self.path.islink(copiedfile): # This make no sense to chmod symlinks
2966
+ if not self.path.islink(
2967
+ copiedfile
2968
+ ): # This make no sense to chmod symlinks
2476
2969
  self.chmod(copiedfile, 0o444)
2477
2970
  else:
2478
2971
  self.readonly(destination)
2479
2972
  return rc
2480
2973
  else:
2481
- logger.error('Could not create a cocoon for file %s', destination)
2974
+ logger.error("Could not create a cocoon for file %s", destination)
2482
2975
  return False
2483
2976
 
2484
2977
  @fmtshcmd
2485
- def cp(self, source, destination, intent='inout',
2486
- smartcp=True, smartcp_threshold=0, silent=False):
2978
+ def cp(
2979
+ self,
2980
+ source,
2981
+ destination,
2982
+ intent="inout",
2983
+ smartcp=True,
2984
+ smartcp_threshold=0,
2985
+ silent=False,
2986
+ ):
2487
2987
  """Copy the **source** file to a safe **destination**.
2488
2988
 
2489
2989
  :param source: The source of data (either a path to file or a
@@ -2504,27 +3004,31 @@ class OSExtended(System):
2504
3004
 
2505
3005
  The fastest option should be used...
2506
3006
  """
2507
- self.stderr('cp', source, destination)
3007
+ self.stderr("cp", source, destination)
2508
3008
  if not isinstance(source, str) or not isinstance(destination, str):
2509
3009
  return self.hybridcp(source, destination, silent=silent)
2510
3010
  if not self.path.exists(source):
2511
3011
  if not silent:
2512
- logger.error('Missing source %s', source)
3012
+ logger.error("Missing source %s", source)
2513
3013
  return False
2514
- if smartcp and intent == 'in':
2515
- return self.smartcp(source, destination,
2516
- smartcp_threshold=smartcp_threshold, silent=silent)
3014
+ if smartcp and intent == "in":
3015
+ return self.smartcp(
3016
+ source,
3017
+ destination,
3018
+ smartcp_threshold=smartcp_threshold,
3019
+ silent=silent,
3020
+ )
2517
3021
  if self.filecocoon(destination):
2518
3022
  return self.rawcp(source, destination)
2519
3023
  else:
2520
- logger.error('Could not create a cocoon for file %s', destination)
3024
+ logger.error("Could not create a cocoon for file %s", destination)
2521
3025
  return False
2522
3026
 
2523
3027
  def glob(self, *args):
2524
3028
  """Glob file system entries according to ``args``. Returns a list."""
2525
3029
  entries = []
2526
3030
  for entry in args:
2527
- if entry.startswith(':'):
3031
+ if entry.startswith(":"):
2528
3032
  entries.append(entry[1:])
2529
3033
  else:
2530
3034
  entries.extend(glob.glob(self.path.expanduser(entry)))
@@ -2544,15 +3048,23 @@ class OSExtended(System):
2544
3048
  """
2545
3049
  safe = True
2546
3050
  if len(thispath.split(self._os.sep)) < self._rmtreemin + 1:
2547
- logger.warning('Unsafe starting point depth %s (min is %s)', thispath, self._rmtreemin)
3051
+ logger.warning(
3052
+ "Unsafe starting point depth %s (min is %s)",
3053
+ thispath,
3054
+ self._rmtreemin,
3055
+ )
2548
3056
  safe = False
2549
3057
  else:
2550
3058
  for safepack in safedirs:
2551
3059
  (safedir, d) = safepack
2552
3060
  rp = self.path.relpath(thispath, safedir)
2553
- if not rp.startswith('..'):
3061
+ if not rp.startswith(".."):
2554
3062
  if len(rp.split(self._os.sep)) < d:
2555
- logger.warning('Unsafe access to %s relative to %s', thispath, safedir)
3063
+ logger.warning(
3064
+ "Unsafe access to %s relative to %s",
3065
+ thispath,
3066
+ safedir,
3067
+ )
2556
3068
  safe = False
2557
3069
  return safe
2558
3070
 
@@ -2565,43 +3077,47 @@ class OSExtended(System):
2565
3077
  if isinstance(pathlist, str):
2566
3078
  pathlist = [pathlist]
2567
3079
  for pname in pathlist:
2568
- for entry in filter(lambda x: self.safepath(x, safedirs), self.glob(pname)):
3080
+ for entry in filter(
3081
+ lambda x: self.safepath(x, safedirs), self.glob(pname)
3082
+ ):
2569
3083
  ok = self.remove(entry) and ok
2570
3084
  return ok
2571
3085
 
2572
3086
  def _globcmd(self, cmd, args, **kw):
2573
3087
  """Globbing files or directories as arguments before running ``cmd``."""
2574
- cmd.extend([opt for opt in args if opt.startswith('-')])
3088
+ cmd.extend([opt for opt in args if opt.startswith("-")])
2575
3089
  cmdlen = len(cmd)
2576
3090
  cmdargs = False
2577
- globtries = [self.path.expanduser(x) for x in args if not x.startswith('-')]
3091
+ globtries = [
3092
+ self.path.expanduser(x) for x in args if not x.startswith("-")
3093
+ ]
2578
3094
  for pname in globtries:
2579
3095
  cmdargs = True
2580
3096
  cmd.extend(self.glob(pname))
2581
3097
  if cmdargs and len(cmd) == cmdlen:
2582
- logger.warning('Could not find any matching pattern %s', globtries)
3098
+ logger.warning("Could not find any matching pattern %s", globtries)
2583
3099
  return False
2584
3100
  else:
2585
- kw.setdefault('ok', [0])
3101
+ kw.setdefault("ok", [0])
2586
3102
  return self.spawn(cmd, **kw)
2587
3103
 
2588
3104
  @_kw2spawn
2589
3105
  def wc(self, *args, **kw):
2590
3106
  """Word count on globbed files."""
2591
- return self._globcmd(['wc'], args, **kw)
3107
+ return self._globcmd(["wc"], args, **kw)
2592
3108
 
2593
3109
  @_kw2spawn
2594
3110
  def ls(self, *args, **kw):
2595
3111
  """Clone of the eponymous unix command."""
2596
- return self._globcmd(['ls'], args, **kw)
3112
+ return self._globcmd(["ls"], args, **kw)
2597
3113
 
2598
3114
  @_kw2spawn
2599
3115
  def ll(self, *args, **kw):
2600
3116
  """Clone of the eponymous unix alias (ls -l)."""
2601
- kw['output'] = True
2602
- llresult = self._globcmd(['ls', '-l'], args, **kw)
3117
+ kw["output"] = True
3118
+ llresult = self._globcmd(["ls", "-l"], args, **kw)
2603
3119
  if llresult:
2604
- for lline in [x for x in llresult if not x.startswith('total')]:
3120
+ for lline in [x for x in llresult if not x.startswith("total")]:
2605
3121
  print(lline)
2606
3122
  else:
2607
3123
  return False
@@ -2609,25 +3125,25 @@ class OSExtended(System):
2609
3125
  @_kw2spawn
2610
3126
  def dir(self, *args, **kw):
2611
3127
  """Proxy to ``ls('-l')``."""
2612
- return self._globcmd(['ls', '-l'], args, **kw)
3128
+ return self._globcmd(["ls", "-l"], args, **kw)
2613
3129
 
2614
3130
  @_kw2spawn
2615
3131
  def cat(self, *args, **kw):
2616
3132
  """Clone of the eponymous unix command."""
2617
- return self._globcmd(['cat'], args, **kw)
3133
+ return self._globcmd(["cat"], args, **kw)
2618
3134
 
2619
3135
  @fmtshcmd
2620
3136
  @_kw2spawn
2621
3137
  def diff(self, *args, **kw):
2622
3138
  """Clone of the eponymous unix command."""
2623
- kw.setdefault('ok', [0, 1])
2624
- kw.setdefault('output', False)
2625
- return self._globcmd(['cmp'], args, **kw)
3139
+ kw.setdefault("ok", [0, 1])
3140
+ kw.setdefault("output", False)
3141
+ return self._globcmd(["cmp"], args, **kw)
2626
3142
 
2627
3143
  @_kw2spawn
2628
3144
  def rmglob(self, *args, **kw):
2629
3145
  """Wrapper of the shell's ``rm`` command through the :meth:`globcmd` method."""
2630
- return self._globcmd(['rm'], args, **kw)
3146
+ return self._globcmd(["rm"], args, **kw)
2631
3147
 
2632
3148
  @fmtshcmd
2633
3149
  def move(self, source, destination):
@@ -2636,20 +3152,23 @@ class OSExtended(System):
2636
3152
  :param str source: The source object (file, directory, ...)
2637
3153
  :param str destination: The destination object (file, directory, ...)
2638
3154
  """
2639
- self.stderr('move', source, destination)
3155
+ self.stderr("move", source, destination)
2640
3156
  try:
2641
3157
  self._sh.move(source, destination)
2642
3158
  except Exception:
2643
- logger.critical('Could not move <%s> to <%s>', source, destination)
3159
+ logger.critical("Could not move <%s> to <%s>", source, destination)
2644
3160
  raise
2645
3161
  else:
2646
3162
  return True
2647
3163
 
2648
3164
  @contextlib.contextmanager
2649
3165
  def secure_directory_move(self, destination):
2650
- with self.lockdir_context(destination + '.vortex-lockdir', sloppy=True):
2651
- do_cleanup = (isinstance(destination, str) and
2652
- self.path.exists(destination))
3166
+ with self.lockdir_context(
3167
+ destination + ".vortex-lockdir", sloppy=True
3168
+ ):
3169
+ do_cleanup = isinstance(destination, str) and self.path.exists(
3170
+ destination
3171
+ )
2653
3172
  if do_cleanup:
2654
3173
  # Warning: Not an atomic portion of code (sorry)
2655
3174
  tmp_destination = self.safe_fileaddsuffix(destination)
@@ -2668,7 +3187,7 @@ class OSExtended(System):
2668
3187
  :param source: The source object (file, directory, File-like object, ...)
2669
3188
  :param destination: The destination object (file, directory, File-like object, ...)
2670
3189
  """
2671
- self.stderr('mv', source, destination)
3190
+ self.stderr("mv", source, destination)
2672
3191
  if not isinstance(source, str) or not isinstance(destination, str):
2673
3192
  self.hybridcp(source, destination)
2674
3193
  if isinstance(source, str):
@@ -2679,13 +3198,13 @@ class OSExtended(System):
2679
3198
  @_kw2spawn
2680
3199
  def mvglob(self, *args):
2681
3200
  """Wrapper of the shell's ``mv`` command through the :meth:`globcmd` method."""
2682
- return self._globcmd(['mv'], args)
3201
+ return self._globcmd(["mv"], args)
2683
3202
 
2684
3203
  def listdir(self, *args):
2685
3204
  """Proxy to standard :mod:`os` directory listing function."""
2686
3205
  if not args:
2687
- args = ('.',)
2688
- self.stderr('listdir', *args)
3206
+ args = (".",)
3207
+ self.stderr("listdir", *args)
2689
3208
  return self._os.listdir(self.path.expanduser(args[0]))
2690
3209
 
2691
3210
  def pyls(self, *args):
@@ -2694,10 +3213,10 @@ class OSExtended(System):
2694
3213
  :meth:`ls` method except that that shell's ``ls`` command is not actually
2695
3214
  called.
2696
3215
  """
2697
- rl = [x for x in args if not x.startswith('-')]
3216
+ rl = [x for x in args if not x.startswith("-")]
2698
3217
  if not rl:
2699
- rl.append('*')
2700
- self.stderr('pyls', *rl)
3218
+ rl.append("*")
3219
+ self.stderr("pyls", *rl)
2701
3220
  return self.glob(*rl)
2702
3221
 
2703
3222
  def ldirs(self, *args):
@@ -2706,23 +3225,23 @@ class OSExtended(System):
2706
3225
  :meth:`ls` method except that that shell's ``ls`` command is not actually
2707
3226
  called.
2708
3227
  """
2709
- rl = [x for x in args if not x.startswith('-')]
3228
+ rl = [x for x in args if not x.startswith("-")]
2710
3229
  if not rl:
2711
- rl.append('*')
2712
- self.stderr('ldirs', *rl)
3230
+ rl.append("*")
3231
+ self.stderr("ldirs", *rl)
2713
3232
  return [x for x in self.glob(*rl) if self.path.isdir(x)]
2714
3233
 
2715
3234
  @_kw2spawn
2716
3235
  def gzip(self, *args, **kw):
2717
3236
  """Simple gzip compression of a file."""
2718
- cmd = ['gzip', '-vf', args[0]]
3237
+ cmd = ["gzip", "-vf", args[0]]
2719
3238
  cmd.extend(args[1:])
2720
3239
  return self.spawn(cmd, **kw)
2721
3240
 
2722
3241
  @_kw2spawn
2723
3242
  def gunzip(self, *args, **kw):
2724
3243
  """Simple gunzip of a gzip-compressed file."""
2725
- cmd = ['gunzip', args[0]]
3244
+ cmd = ["gunzip", args[0]]
2726
3245
  cmd.extend(args[1:])
2727
3246
  return self.spawn(cmd, **kw)
2728
3247
 
@@ -2734,21 +3253,21 @@ class OSExtended(System):
2734
3253
  """Build a proper string sequence of tar options."""
2735
3254
  zopt = set(opts)
2736
3255
  if verbose:
2737
- zopt.add('v')
3256
+ zopt.add("v")
2738
3257
  else:
2739
- zopt.discard('v')
3258
+ zopt.discard("v")
2740
3259
  if autocompress:
2741
- if tarfile.endswith('gz'):
3260
+ if tarfile.endswith("gz"):
2742
3261
  # includes the conventional "*.tgz"
2743
- zopt.add('z')
3262
+ zopt.add("z")
2744
3263
  else:
2745
- zopt.discard('z')
2746
- if tarfile.endswith('bz') or tarfile.endswith('bz2'):
3264
+ zopt.discard("z")
3265
+ if tarfile.endswith("bz") or tarfile.endswith("bz2"):
2747
3266
  # includes the conventional "*.tbz"
2748
- zopt.add('j')
3267
+ zopt.add("j")
2749
3268
  else:
2750
- zopt.discard('j')
2751
- return ''.join(zopt)
3269
+ zopt.discard("j")
3270
+ return "".join(zopt)
2752
3271
 
2753
3272
  @_kw2spawn
2754
3273
  def tar(self, *args, **kw):
@@ -2756,8 +3275,13 @@ class OSExtended(System):
2756
3275
 
2757
3276
  :example: ``self.tar('destination.tar', 'directory1', 'directory2')``
2758
3277
  """
2759
- opts = self.taropts(args[0], 'cf', kw.pop('verbose', True), kw.pop('autocompress', True))
2760
- cmd = ['tar', opts, args[0]]
3278
+ opts = self.taropts(
3279
+ args[0],
3280
+ "cf",
3281
+ kw.pop("verbose", True),
3282
+ kw.pop("autocompress", True),
3283
+ )
3284
+ cmd = ["tar", opts, args[0]]
2761
3285
  cmd.extend(self.glob(*args[1:]))
2762
3286
  return self.spawn(cmd, **kw)
2763
3287
 
@@ -2768,8 +3292,13 @@ class OSExtended(System):
2768
3292
  :example: ``self.untar('source.tar')``
2769
3293
  :example: ``self.untar('source.tar', 'to_untar1', 'to_untar2')``
2770
3294
  """
2771
- opts = self.taropts(args[0], 'xf', kw.pop('verbose', True), kw.pop('autocompress', True))
2772
- cmd = ['tar', opts, args[0]]
3295
+ opts = self.taropts(
3296
+ args[0],
3297
+ "xf",
3298
+ kw.pop("verbose", True),
3299
+ kw.pop("autocompress", True),
3300
+ )
3301
+ cmd = ["tar", opts, args[0]]
2773
3302
  cmd.extend(args[1:])
2774
3303
  return self.spawn(cmd, **kw)
2775
3304
 
@@ -2784,56 +3313,69 @@ class OSExtended(System):
2784
3313
  This is done in a relatively safe way since it is checked that no existing
2785
3314
  files/directories are overwritten.
2786
3315
  """
2787
- uniquelevel_ignore = kw.pop('uniquelevel_ignore', False)
3316
+ uniquelevel_ignore = kw.pop("uniquelevel_ignore", False)
2788
3317
  fullsource = self.path.realpath(source)
2789
3318
  self.mkdir(destination)
2790
- loctmp = tempfile.mkdtemp(prefix='untar_', dir=destination)
3319
+ loctmp = tempfile.mkdtemp(prefix="untar_", dir=destination)
2791
3320
  with self.cdcontext(loctmp, clean_onexit=True):
2792
- output_setting = kw.pop('output', True)
3321
+ output_setting = kw.pop("output", True)
2793
3322
  output_txt = self.untar(fullsource, output=output_setting, **kw)
2794
3323
  if output_setting and output_txt:
2795
- logger.info('Untar command output:\n%s', '\n'.join(output_txt))
2796
- unpacked = self.glob('*')
2797
- unpacked_prefix = '.'
3324
+ logger.info("Untar command output:\n%s", "\n".join(output_txt))
3325
+ unpacked = self.glob("*")
3326
+ unpacked_prefix = "."
2798
3327
  # If requested, ignore the first level of directory
2799
- if (uniquelevel_ignore and len(unpacked) == 1 and
2800
- self.path.isdir(self.path.join(unpacked[0]))):
3328
+ if (
3329
+ uniquelevel_ignore
3330
+ and len(unpacked) == 1
3331
+ and self.path.isdir(self.path.join(unpacked[0]))
3332
+ ):
2801
3333
  unpacked_prefix = unpacked[0]
2802
- logger.info('Moving contents one level up: %s', unpacked_prefix)
3334
+ logger.info(
3335
+ "Moving contents one level up: %s", unpacked_prefix
3336
+ )
2803
3337
  with self.cdcontext(unpacked_prefix):
2804
- unpacked = self.glob('*')
3338
+ unpacked = self.glob("*")
2805
3339
  for untaritem in unpacked:
2806
- itemtarget = self.path.join('..', self.path.basename(untaritem))
3340
+ itemtarget = self.path.join(
3341
+ "..", self.path.basename(untaritem)
3342
+ )
2807
3343
  if self.path.exists(itemtarget):
2808
- logger.error('Some previous item exists before untar [%s]', untaritem)
3344
+ logger.error(
3345
+ "Some previous item exists before untar [%s]",
3346
+ untaritem,
3347
+ )
2809
3348
  else:
2810
- self.mv(self.path.join(unpacked_prefix, untaritem),
2811
- itemtarget)
3349
+ self.mv(
3350
+ self.path.join(unpacked_prefix, untaritem), itemtarget
3351
+ )
2812
3352
  return unpacked
2813
3353
 
2814
3354
  def is_tarname(self, objname):
2815
3355
  """Check if a ``objname`` is a string with ``.tar`` suffix."""
2816
- return isinstance(objname, str) and (objname.endswith('.tar') or
2817
- objname.endswith('.tar.gz') or
2818
- objname.endswith('.tgz') or
2819
- objname.endswith('.tar.bz2') or
2820
- objname.endswith('.tbz'))
3356
+ return isinstance(objname, str) and (
3357
+ objname.endswith(".tar")
3358
+ or objname.endswith(".tar.gz")
3359
+ or objname.endswith(".tgz")
3360
+ or objname.endswith(".tar.bz2")
3361
+ or objname.endswith(".tbz")
3362
+ )
2821
3363
 
2822
3364
  def tarname_radix(self, objname):
2823
3365
  """Remove any ``.tar`` specific suffix."""
2824
3366
  if not self.is_tarname(objname):
2825
3367
  return objname
2826
3368
  radix = self.path.splitext(objname)[0]
2827
- if radix.endswith('.tar'):
3369
+ if radix.endswith(".tar"):
2828
3370
  radix = radix[:-4]
2829
3371
  return radix
2830
3372
 
2831
3373
  def tarname_splitext(self, objname):
2832
3374
  """Like os.path.splitext, but for tar names (e.g. might return ``.tar.gz``)."""
2833
3375
  if not self.is_tarname(objname):
2834
- return (objname, '')
3376
+ return (objname, "")
2835
3377
  radix = self.tarname_radix(objname)
2836
- ext = objname.replace(radix, '')
3378
+ ext = objname.replace(radix, "")
2837
3379
  return (radix, ext)
2838
3380
 
2839
3381
  @fmtshcmd
@@ -2852,12 +3394,14 @@ class OSExtended(System):
2852
3394
  (either a file descriptor or a filename).
2853
3395
  """
2854
3396
  rc = None
2855
- if hasattr(destination, 'write'):
3397
+ if hasattr(destination, "write"):
2856
3398
  rc = gateway.dump(obj, destination, **opts)
2857
3399
  else:
2858
3400
  if self.filecocoon(destination):
2859
- with open(self.path.expanduser(destination),
2860
- 'w' + ('b' if bytesdump else '')) as fd:
3401
+ with open(
3402
+ self.path.expanduser(destination),
3403
+ "w" + ("b" if bytesdump else ""),
3404
+ ) as fd:
2861
3405
  rc = gateway.dump(obj, fd, **opts)
2862
3406
  return rc
2863
3407
 
@@ -2866,7 +3410,9 @@ class OSExtended(System):
2866
3410
  Dump a pickled representation of specified **obj** in file **destination**,
2867
3411
  (either a file descriptor or a filename).
2868
3412
  """
2869
- return self.blind_dump(pickle, obj, destination, bytesdump=True, **opts)
3413
+ return self.blind_dump(
3414
+ pickle, obj, destination, bytesdump=True, **opts
3415
+ )
2870
3416
 
2871
3417
  def json_dump(self, obj, destination, **opts):
2872
3418
  """
@@ -2880,11 +3426,12 @@ class OSExtended(System):
2880
3426
  Use **gateway** to blindly load the representation stored in file **source**,
2881
3427
  (either a file descriptor or a filename).
2882
3428
  """
2883
- if hasattr(source, 'read'):
3429
+ if hasattr(source, "read"):
2884
3430
  obj = gateway.load(source)
2885
3431
  else:
2886
- with open(self.path.expanduser(source),
2887
- 'r' + ('b' if bytesload else '')) as fd:
3432
+ with open(
3433
+ self.path.expanduser(source), "r" + ("b" if bytesload else "")
3434
+ ) as fd:
2888
3435
  obj = gateway.load(fd)
2889
3436
  return obj
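blind_dump and blind_load simply delegate to any module exposing ``dump``/``load`` (pickle, json, ...) and accept either an already open file object or a path. A minimal sketch of that dispatch outside the System class (file names are illustrative):

import json
import pickle

def blind_dump(gateway, obj, destination, bytesdump=False):
    # *destination* may be a file-like object or a filename
    if hasattr(destination, "write"):
        return gateway.dump(obj, destination)
    with open(destination, "w" + ("b" if bytesdump else "")) as fd:
        return gateway.dump(obj, fd)

def blind_load(gateway, source, bytesload=False):
    if hasattr(source, "read"):
        return gateway.load(source)
    with open(source, "r" + ("b" if bytesload else "")) as fd:
        return gateway.load(fd)

blind_dump(json, {"cutoff": "production"}, "conf.json")
blind_dump(pickle, {"cutoff": "production"}, "conf.pickle", bytesdump=True)
print(blind_load(json, "conf.json"))
print(blind_load(pickle, "conf.pickle", bytesload=True))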
2890
3437
 
@@ -2909,13 +3456,17 @@ class OSExtended(System):
2909
3456
  def utlines(self, *args):
2910
3457
  """Return number of significant code or configuration lines in specified directories."""
2911
3458
  lookfiles = [
2912
- x for x in self.ffind(*args)
2913
- if self.path.splitext[1] in ['.py', '.ini', '.tpl', '.rst']
3459
+ x
3460
+ for x in self.ffind(*args)
3461
+ if self.path.splitext[1] in [".py", ".ini", ".tpl", ".rst"]
2914
3462
  ]
2915
- return len([
2916
- x for x in self.cat(*lookfiles)
2917
- if re.search(r'\S', x) and re.search(r'[^\'\"\)\],\s]', x)
2918
- ])
3463
+ return len(
3464
+ [
3465
+ x
3466
+ for x in self.cat(*lookfiles)
3467
+ if re.search(r"\S", x) and re.search(r"[^\'\"\)\],\s]", x)
3468
+ ]
3469
+ )
2919
3470
 
2920
3471
  def _signal_intercept_init(self):
2921
3472
  """Initialise the signal handler object (but do not activate it)."""
@@ -2935,7 +3486,9 @@ class OSExtended(System):
2935
3486
  """
2936
3487
  self._sighandler.deactivate()
2937
3488
 
2938
- _LDD_REGEX = re.compile(r'^\s*([^\s]+)\s+=>\s*(?:([^\s]+)\s+\(0x.+\)|not found)$')
3489
+ _LDD_REGEX = re.compile(
3490
+ r"^\s*([^\s]+)\s+=>\s*(?:([^\s]+)\s+\(0x.+\)|not found)$"
3491
+ )
2939
3492
 
2940
3493
  def ldd(self, filename):
2941
3494
  """Call ldd on **filename**.
@@ -2943,14 +3496,14 @@ class OSExtended(System):
2943
3496
  Return the mapping between the library name and its physical path.
2944
3497
  """
2945
3498
  if self.path.isfile(filename):
2946
- ldd_out = self.spawn(('ldd', filename))
3499
+ ldd_out = self.spawn(("ldd", filename))
2947
3500
  libs = dict()
2948
3501
  for ldd_match in [self._LDD_REGEX.match(l) for l in ldd_out]:
2949
3502
  if ldd_match is not None:
2950
3503
  libs[ldd_match.group(1)] = ldd_match.group(2) or None
2951
3504
  return libs
2952
3505
  else:
2953
- raise ValueError('{} is not a regular file'.format(filename))
3506
+ raise ValueError("{} is not a regular file".format(filename))
2954
3507
 
2955
3508
  def generic_compress(self, pipelinedesc, source, destination=None):
2956
3509
  """Compress a file using the :class:`CompressionPipeline` class.
@@ -2964,7 +3517,9 @@ class OSExtended(System):
2964
3517
  if isinstance(source, str):
2965
3518
  destination = source + cp.suffix
2966
3519
  else:
2967
- raise ValueError("If destination is omitted, source must be a filename.")
3520
+ raise ValueError(
3521
+ "If destination is omitted, source must be a filename."
3522
+ )
2968
3523
  return cp.compress2file(source, destination)
2969
3524
 
2970
3525
  def generic_uncompress(self, pipelinedesc, source, destination=None):
@@ -2978,11 +3533,17 @@ class OSExtended(System):
2978
3533
  if destination is None:
2979
3534
  if isinstance(source, str):
2980
3535
  if source.endswith(cp.suffix):
2981
- destination = source[:-len(cp.suffix)]
3536
+ destination = source[: -len(cp.suffix)]
2982
3537
  else:
2983
- raise ValueError("Source do not exhibit the appropriate suffix ({:s})".format(cp.suffix))
3538
+ raise ValueError(
3539
+ "Source do not exhibit the appropriate suffix ({:s})".format(
3540
+ cp.suffix
3541
+ )
3542
+ )
2984
3543
  else:
2985
- raise ValueError("If destination is omitted, source must be a filename.")
3544
+ raise ValueError(
3545
+ "If destination is omitted, source must be a filename."
3546
+ )
2986
3547
  return cp.file2uncompress(source, destination)
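When no destination is given, compression appends the pipeline suffix and decompression strips it, refusing sources that do not carry it. A sketch of that naming logic with a plain gzip pipeline (the suffix handling mirrors the methods above; this is not the CompressionPipeline API):

import gzip
import shutil

SUFFIX = ".gz"

def generic_compress(source, destination=None):
    if destination is None:
        destination = source + SUFFIX
    with open(source, "rb") as fin, gzip.open(destination, "wb") as fout:
        shutil.copyfileobj(fin, fout)
    return destination

def generic_uncompress(source, destination=None):
    if destination is None:
        if not source.endswith(SUFFIX):
            raise ValueError("Source does not exhibit the appropriate suffix ({:s})".format(SUFFIX))
        destination = source[: -len(SUFFIX)]
    with gzip.open(source, "rb") as fin, open(destination, "wb") as fout:
        shutil.copyfileobj(fin, fout)
    return destination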
2987
3548
 
2988
3549
  def find_mount_point(self, path):
@@ -2993,7 +3554,7 @@ class OSExtended(System):
2993
3554
  :rtype: str
2994
3555
  """
2995
3556
  if not self._os.path.exists(path):
2996
- logger.warning('Path does not exist: <%s>', path)
3557
+ logger.warning("Path does not exist: <%s>", path)
2997
3558
 
2998
3559
  path = self._os.path.abspath(path)
2999
3560
  while not self._os.path.ismount(path):
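find_mount_point climbs the directory tree until ``os.path.ismount`` is true; the loop always terminates because ``/`` is itself a mount point. Standalone equivalent:

import os

def find_mount_point(path):
    """Return the mount point of the filesystem holding *path*."""
    path = os.path.abspath(path)
    while not os.path.ismount(path):
        path = os.path.dirname(path)
    return path

print(find_mount_point(os.path.expanduser("~")))  # e.g. '/home' or '/'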
@@ -3013,7 +3574,9 @@ class OSExtended(System):
3013
3574
  """
3014
3575
  rc = None
3015
3576
  t0 = time.time()
3016
- while rc is None or (not rc and blocking and time.time() - t0 < timeout):
3577
+ while rc is None or (
3578
+ not rc and blocking and time.time() - t0 < timeout
3579
+ ):
3017
3580
  if rc is not None:
3018
3581
  self.sleep(sleeptime)
3019
3582
  try:
@@ -3021,7 +3584,7 @@ class OSExtended(System):
3021
3584
  # since we need to get an error if the target directory already
3022
3585
  # exists
3023
3586
  self._os.mkdir(ldir)
3024
- except FileExistsError as os_e:
3587
+ except FileExistsError:
3025
3588
  rc = False
3026
3589
  else:
3027
3590
  rc = True
@@ -3038,8 +3601,14 @@ class OSExtended(System):
3038
3601
  logger.warning("'%s' did not exists... that's odd", ldir)
3039
3602
 
3040
3603
  @contextlib.contextmanager
3041
- def lockdir_context(self, ldir,
3042
- sloppy=False, timeout=120, sleeptime_min=0.1, sleeptime_max=0.3):
3604
+ def lockdir_context(
3605
+ self,
3606
+ ldir,
3607
+ sloppy=False,
3608
+ timeout=120,
3609
+ sleeptime_min=0.1,
3610
+ sleeptime_max=0.3,
3611
+ ):
3043
3612
  """Try to acquire a lock directory and after that remove it.
3044
3613
 
3045
3614
  :param bool sloppy: If the lock cannot be acquired within *timeout* seconds, go on anyway
@@ -3049,14 +3618,20 @@ class OSExtended(System):
3049
3618
  :param float sleeptime_max: When blocking, wait at most **sleeptime_max** seconds
3050
3619
  between two attempts to acquire the lock.
3051
3620
  """
3052
- sleeptime = sleeptime_min + (sleeptime_max - sleeptime_min) * random.random()
3621
+ sleeptime = (
3622
+ sleeptime_min + (sleeptime_max - sleeptime_min) * random.random()
3623
+ )
3053
3624
  self.filecocoon(ldir)
3054
- rc = self._lockdir_create(ldir, blocking=True, timeout=timeout, sleeptime=sleeptime)
3625
+ rc = self._lockdir_create(
3626
+ ldir, blocking=True, timeout=timeout, sleeptime=sleeptime
3627
+ )
3055
3628
  try:
3056
3629
  if not rc:
3057
- msg = "Could not acquire lockdir < {:s} >. Already exists.".format(ldir)
3630
+ msg = "Could not acquire lockdir < {:s} >. Already exists.".format(
3631
+ ldir
3632
+ )
3058
3633
  if sloppy:
3059
- logger.warning(msg + '.. but going on.')
3634
+ logger.warning(msg + ".. but going on.")
3060
3635
  else:
3061
3636
  raise OSError(msg)
3062
3637
  yield
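The lock relies on ``os.mkdir`` being atomic: whoever creates the directory first owns the lock, everybody else retries until *timeout*. A compact standalone sketch of the same idea (simplified with respect to the method above; the lock path in the usage line is illustrative):

import contextlib
import os
import random
import shutil
import time

@contextlib.contextmanager
def lockdir_context(ldir, sloppy=False, timeout=120, sleeptime_min=0.1, sleeptime_max=0.3):
    sleeptime = sleeptime_min + (sleeptime_max - sleeptime_min) * random.random()
    t0 = time.time()
    acquired = False
    while not acquired and time.time() - t0 < timeout:
        try:
            os.mkdir(ldir)  # atomic: fails if the lock is already held
            acquired = True
        except FileExistsError:
            time.sleep(sleeptime)
    if not acquired and not sloppy:
        raise OSError("Could not acquire lockdir < {:s} >. Already exists.".format(ldir))
    try:
        yield
    finally:
        if acquired:
            shutil.rmtree(ldir, ignore_errors=True)

with lockdir_context("/tmp/my_app.lock"):
    pass  # critical section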
@@ -3070,9 +3645,11 @@ class OSExtended(System):
3070
3645
  if self.glove is not None:
3071
3646
  myglove = self.glove
3072
3647
  rcdir = myglove.configrc
3073
- lockdir = self.path.join(rcdir,
3074
- 'appwide_locks',
3075
- '{0.vapp:s}-{0.vconf:s}'.format(myglove))
3648
+ lockdir = self.path.join(
3649
+ rcdir,
3650
+ "appwide_locks",
3651
+ "{0.vapp:s}-{0.vconf:s}".format(myglove),
3652
+ )
3076
3653
  self.mkdir(lockdir)
3077
3654
  return lockdir
3078
3655
  else:
@@ -3097,10 +3674,9 @@ class OSExtended(System):
3097
3674
  attempts to acquire the lock.
3098
3675
  """
3099
3676
  ldir = self._appwide_lockdir_path(label)
3100
- return self._lockdir_create(ldir,
3101
- blocking=blocking,
3102
- timeout=timeout,
3103
- sleeptime=sleeptime)
3677
+ return self._lockdir_create(
3678
+ ldir, blocking=blocking, timeout=timeout, sleeptime=sleeptime
3679
+ )
3104
3680
 
3105
3681
  def appwide_unlock(self, label):
3106
3682
  """Pseudo-lock mechanism based on atomic directory creation: release lock.
@@ -3124,9 +3700,8 @@ class Python34:
3124
3700
  """
3125
3701
 
3126
3702
  # Optional, netcdf comparison tool
3127
- b_netcdf_checker = ExternalCodeImportChecker('netdcf')
3128
- with b_netcdf_checker as npregister:
3129
- from bronx.datagrip import netcdf as b_netcdf
3703
+ b_netcdf_checker = ExternalCodeImportChecker("netdcf")
3704
+ from bronx.datagrip import netcdf as b_netcdf
3130
3705
 
3131
3706
  if b_netcdf_checker.is_available():
3132
3707
  # Unfortunately, the netCDF4 package seems to leak memory,
@@ -3136,25 +3711,28 @@ class Python34:
3136
3711
  """Function started by the subprocess."""
3137
3712
  outcome.value = int(b_netcdf.netcdf_file_diff(nc1, nc2))
3138
3713
 
3139
- rc = multiprocessing.Value('i', 0)
3140
- p = multiprocessing.Process(target=_compare_function,
3141
- args=(netcdf1, netcdf2, rc))
3714
+ rc = multiprocessing.Value("i", 0)
3715
+ p = multiprocessing.Process(
3716
+ target=_compare_function, args=(netcdf1, netcdf2, rc)
3717
+ )
3142
3718
  p.start()
3143
3719
  p.join()
3144
3720
  return bool(rc.value)
3145
3721
  else:
3146
- logger.error("Unable to load the 'bronx.datagrip.netcdf' package. " +
3147
- "The netcdf library and/or 'netCDF4' python package are probably missing.")
3722
+ logger.error(
3723
+ "Unable to load the 'bronx.datagrip.netcdf' package. "
3724
+ + "The netcdf library and/or 'netCDF4' python package are probably missing."
3725
+ )
3148
3726
  return False
3149
3727
 
3150
3728
  # Let's make this method compatible with fmtshcmd...
3151
3729
  netcdf_diff.func_extern = True
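Running the comparison in a ``multiprocessing.Process`` confines the netCDF4 memory leak to a short-lived child process; the integer outcome travels back through a shared ``Value``. A generic sketch of that isolation pattern (``compare`` is a stand-in for ``netcdf_file_diff``, and the file names in the guarded usage are illustrative):

import multiprocessing

def compare(file1, file2):
    # Stand-in for a leaky comparison routine such as netcdf_file_diff
    with open(file1, "rb") as f1, open(file2, "rb") as f2:
        return f1.read() == f2.read()

def _compare_in_child(file1, file2, outcome):
    outcome.value = int(compare(file1, file2))

def isolated_diff(file1, file2):
    rc = multiprocessing.Value("i", 0)  # shared int, 0 means "files differ"
    p = multiprocessing.Process(target=_compare_in_child, args=(file1, file2, rc))
    p.start()
    p.join()  # any leaked memory is released when the child exits
    return bool(rc.value)

if __name__ == "__main__":
    print(isolated_diff("a.nc", "b.nc"))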
3152
3730
 
3153
3731
 
3154
- _python34_fp = footprints.Footprint(info='An abstract footprint to be used with the Python34 Mixin',
3155
- only=dict(
3156
- after_python=PythonSimplifiedVersion('3.4.0')
3157
- ))
3732
+ _python34_fp = footprints.Footprint(
3733
+ info="An abstract footprint to be used with the Python34 Mixin",
3734
+ only=dict(after_python=PythonSimplifiedVersion("3.4.0")),
3735
+ )
3158
3736
 
3159
3737
 
3160
3738
  class Garbage(OSExtended):
@@ -3166,20 +3744,18 @@ class Garbage(OSExtended):
3166
3744
 
3167
3745
  _abstract = True
3168
3746
  _footprint = dict(
3169
- info = 'Garbage base system',
3170
- attr = dict(
3171
- sysname = dict(
3172
- outcast = ['Linux', 'Darwin', 'UnitTestLinux', 'UnitTestable']
3747
+ info="Garbage base system",
3748
+ attr=dict(
3749
+ sysname=dict(
3750
+ outcast=["Linux", "Darwin", "UnitTestLinux", "UnitTestable"]
3173
3751
  )
3174
3752
  ),
3175
- priority = dict(
3176
- level = footprints.priorities.top.DEFAULT
3177
- )
3753
+ priority=dict(level=footprints.priorities.top.DEFAULT),
3178
3754
  )
3179
3755
 
3180
3756
  def __init__(self, *args, **kw):
3181
3757
  """Gateway to parent method after debug logging."""
3182
- logger.debug('Garbage system init %s', self.__class__)
3758
+ logger.debug("Garbage system init %s", self.__class__)
3183
3759
  super().__init__(*args, **kw)
3184
3760
 
3185
3761
 
@@ -3188,7 +3764,7 @@ class Garbage34p(Garbage, Python34):
3188
3764
 
3189
3765
  _footprint = [
3190
3766
  _python34_fp,
3191
- dict(info = 'Garbage base system withh a blazing Python version')
3767
+ dict(info="Garbage base system withh a blazing Python version"),
3192
3768
  ]
3193
3769
 
3194
3770
 
@@ -3197,12 +3773,8 @@ class Linux(OSExtended):
3197
3773
 
3198
3774
  _abstract = True
3199
3775
  _footprint = dict(
3200
- info = 'Abstract Linux base system',
3201
- attr = dict(
3202
- sysname = dict(
3203
- values = ['Linux']
3204
- )
3205
- )
3776
+ info="Abstract Linux base system",
3777
+ attr=dict(sysname=dict(values=["Linux"])),
3206
3778
  )
3207
3779
 
3208
3780
  def __init__(self, *args, **kw):
@@ -3212,78 +3784,86 @@ class Linux(OSExtended):
3212
3784
 
3213
3785
  * **psopts** - as default option for the ps command (default: ``-w -f -a``).
3214
3786
  """
3215
- logger.debug('Linux system init %s', self.__class__)
3216
- self._psopts = kw.pop('psopts', ['-w', '-f', '-a'])
3787
+ logger.debug("Linux system init %s", self.__class__)
3788
+ self._psopts = kw.pop("psopts", ["-w", "-f", "-a"])
3217
3789
  super().__init__(*args, **kw)
3218
- self.__dict__['_cpusinfo'] = LinuxCpusInfo()
3790
+ self.__dict__["_cpusinfo"] = LinuxCpusInfo()
3219
3791
  try:
3220
- self.__dict__['_numainfo'] = LibNumaNodesInfo()
3792
+ self.__dict__["_numainfo"] = LibNumaNodesInfo()
3221
3793
  except (OSError, NotImplementedError):
3222
3794
  # On very few Linux systems, libnuma is not available...
3223
3795
  pass
3224
- self.__dict__['_memoryinfo'] = LinuxMemInfo()
3225
- self.__dict__['_netstatsinfo'] = LinuxNetstats()
3796
+ self.__dict__["_memoryinfo"] = LinuxMemInfo()
3797
+ self.__dict__["_netstatsinfo"] = LinuxNetstats()
3226
3798
 
3227
3799
  @property
3228
3800
  def realkind(self):
3229
- return 'linux'
3801
+ return "linux"
3230
3802
 
3231
- def cpus_ids_per_blocks(self, blocksize=1, topology='raw', hexmask=False):
3803
+ def cpus_ids_per_blocks(self, blocksize=1, topology="raw", hexmask=False):
3232
3804
  """Get the list of CPUs IDs for nicely ordered for subsequent binding.
3233
3805
 
3234
3806
  :param int blocksize: the number of threads consumed by one task
3235
3807
  :param str topology: The task distribution scheme
3236
3808
  :param bool hexmask: Return a list of CPU masks in hexadecimal
3237
3809
  """
3238
- if topology.startswith('numa'):
3239
- if topology.endswith('_discardsmt'):
3810
+ if topology.startswith("numa"):
3811
+ if topology.endswith("_discardsmt"):
3240
3812
  topology = topology[:-11]
3241
3813
  smtlayout = None
3242
3814
  else:
3243
3815
  smtlayout = self.cpus_info.physical_cores_smtthreads
3244
3816
  try:
3245
- cpulist = getattr(self.numa_info,
3246
- topology + '_cpulist')(blocksize,
3247
- smtlayout=smtlayout)
3817
+ cpulist = getattr(self.numa_info, topology + "_cpulist")(
3818
+ blocksize, smtlayout=smtlayout
3819
+ )
3248
3820
  except AttributeError:
3249
- raise ValueError('Unknown topology ({:s}).'.format(topology))
3821
+ raise ValueError("Unknown topology ({:s}).".format(topology))
3250
3822
  else:
3251
3823
  try:
3252
- cpulist = getattr(self.cpus_info, topology + '_cpulist')(blocksize)
3824
+ cpulist = getattr(self.cpus_info, topology + "_cpulist")(
3825
+ blocksize
3826
+ )
3253
3827
  except AttributeError:
3254
- raise ValueError('Unknown topology ({:s}).'.format(topology))
3828
+ raise ValueError("Unknown topology ({:s}).".format(topology))
3255
3829
  cpulist = list(cpulist)
3256
- cpulist = [[cpulist[(taskid * blocksize + i)]
3257
- for i in range(blocksize)]
3258
- for taskid in range(len(cpulist) // blocksize)]
3830
+ cpulist = [
3831
+ [cpulist[(taskid * blocksize + i)] for i in range(blocksize)]
3832
+ for taskid in range(len(cpulist) // blocksize)
3833
+ ]
3259
3834
  if hexmask:
3260
3835
  cpulist = [hex(sum([1 << i for i in item])) for item in cpulist]
3261
3836
  return cpulist
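The tail of the method groups the flat CPU list into one block of *blocksize* IDs per task and, on request, folds each block into a hexadecimal affinity mask (bit *i* set for CPU *i*). The same grouping in isolation, assuming an already computed CPU ordering:

def group_cpus(cpulist, blocksize=1, hexmask=False):
    # One block of consecutive entries per task
    blocks = [
        [cpulist[taskid * blocksize + i] for i in range(blocksize)]
        for taskid in range(len(cpulist) // blocksize)
    ]
    if hexmask:
        # e.g. CPUs [0, 2] -> bits 0 and 2 -> 0b101 -> '0x5'
        blocks = [hex(sum(1 << cpu for cpu in block)) for block in blocks]
    return blocks

print(group_cpus([0, 2, 4, 6, 1, 3, 5, 7], blocksize=2))
# [[0, 2], [4, 6], [1, 3], [5, 7]]
print(group_cpus([0, 2, 4, 6, 1, 3, 5, 7], blocksize=2, hexmask=True))
# ['0x5', '0x50', '0xa', '0xa0']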
3262
3837
 
3263
- def cpus_ids_dispenser(self, topology='raw'):
3838
+ def cpus_ids_dispenser(self, topology="raw"):
3264
3839
  """Get a dispenser of CPUs IDs for nicely ordered for subsequent binding.
3265
3840
 
3266
3841
  :param str topology: The task distribution scheme
3267
3842
  """
3268
- if topology.startswith('numa'):
3269
- if topology.endswith('_discardsmt'):
3843
+ if topology.startswith("numa"):
3844
+ if topology.endswith("_discardsmt"):
3270
3845
  topology = topology[:-11]
3271
3846
  smtlayout = None
3272
3847
  else:
3273
3848
  smtlayout = self.cpus_info.physical_cores_smtthreads
3274
3849
  try:
3275
- cpudisp = getattr(self.numa_info,
3276
- topology + '_cpu_dispenser')(smtlayout=smtlayout)
3850
+ cpudisp = getattr(self.numa_info, topology + "_cpu_dispenser")(
3851
+ smtlayout=smtlayout
3852
+ )
3277
3853
  except AttributeError:
3278
- raise ValueError('Unknown topology ({:s}).'.format(topology))
3854
+ raise ValueError("Unknown topology ({:s}).".format(topology))
3279
3855
  else:
3280
3856
  try:
3281
- cpudisp = getattr(self.cpus_info, topology + '_cpu_dispenser')()
3857
+ cpudisp = getattr(
3858
+ self.cpus_info, topology + "_cpu_dispenser"
3859
+ )()
3282
3860
  except AttributeError:
3283
- raise ValueError('Unknown topology ({:s}).'.format(topology))
3861
+ raise ValueError("Unknown topology ({:s}).".format(topology))
3284
3862
  return cpudisp
3285
3863
 
3286
- def cpus_affinity_get(self, taskid, blocksize=1, topology='socketpacked', method='taskset'):
3864
+ def cpus_affinity_get(
3865
+ self, taskid, blocksize=1, topology="socketpacked", method="taskset"
3866
+ ):
3287
3867
  """Get the necessary command/environment to set the CPUs affinity.
3288
3868
 
3289
3869
  :param int taskid: the task number
@@ -3293,25 +3873,29 @@ class Linux(OSExtended):
3293
3873
  :return: A 3-element tuple (bool: BindingPossible,
3294
3874
  list: Starting command prefix, dict: Environment update)
3295
3875
  """
3296
- if method not in ('taskset', 'gomp', 'omp', 'ompverbose'):
3297
- raise ValueError('Unknown binding method ({:s}).'.format(method))
3298
- if method == 'taskset':
3299
- if not self.which('taskset'):
3300
- logger.warning("The taskset is program is missing. Going on without binding.")
3876
+ if method not in ("taskset", "gomp", "omp", "ompverbose"):
3877
+ raise ValueError("Unknown binding method ({:s}).".format(method))
3878
+ if method == "taskset":
3879
+ if not self.which("taskset"):
3880
+ logger.warning(
3881
+ "The taskset is program is missing. Going on without binding."
3882
+ )
3301
3883
  return (False, list(), dict())
3302
- cpulist = self.cpus_ids_per_blocks(blocksize=blocksize, topology=topology)
3884
+ cpulist = self.cpus_ids_per_blocks(
3885
+ blocksize=blocksize, topology=topology
3886
+ )
3303
3887
  cpus = cpulist[taskid % len(cpulist)]
3304
3888
  cmdl = list()
3305
3889
  env = dict()
3306
- if method == 'taskset':
3307
- cmdl += ['taskset', '--cpu-list', ','.join([str(c) for c in cpus])]
3308
- elif method == 'gomp':
3309
- env['GOMP_CPU_AFFINITY'] = ' '.join([str(c) for c in cpus])
3310
- elif method.startswith('omp'):
3311
- env['OMP_PLACES'] = ','.join(['{{{:d}}}'.format(c) for c in cpus])
3312
- if method.endswith('verbose'):
3313
- env['OMP_DISPLAY_ENV'] = 'TRUE'
3314
- env['OMP_DISPLAY_AFFINITY'] = 'TRUE'
3890
+ if method == "taskset":
3891
+ cmdl += ["taskset", "--cpu-list", ",".join([str(c) for c in cpus])]
3892
+ elif method == "gomp":
3893
+ env["GOMP_CPU_AFFINITY"] = " ".join([str(c) for c in cpus])
3894
+ elif method.startswith("omp"):
3895
+ env["OMP_PLACES"] = ",".join(["{{{:d}}}".format(c) for c in cpus])
3896
+ if method.endswith("verbose"):
3897
+ env["OMP_DISPLAY_ENV"] = "TRUE"
3898
+ env["OMP_DISPLAY_AFFINITY"] = "TRUE"
3315
3899
  return (True, cmdl, env)
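Given the CPU block assigned to a task, the method returns either a ``taskset --cpu-list`` command prefix or the matching OpenMP environment variables. A standalone sketch of that last step (the CPU lists in the usage lines are illustrative):

def binding_directives(cpus, method="taskset"):
    """Build the command prefix and/or environment update that pins a task to *cpus*."""
    cmdl, env = [], {}
    if method == "taskset":
        cmdl = ["taskset", "--cpu-list", ",".join(str(c) for c in cpus)]
    elif method == "gomp":
        env["GOMP_CPU_AFFINITY"] = " ".join(str(c) for c in cpus)
    elif method in ("omp", "ompverbose"):
        env["OMP_PLACES"] = ",".join("{{{:d}}}".format(c) for c in cpus)
        if method == "ompverbose":
            env["OMP_DISPLAY_ENV"] = "TRUE"
            env["OMP_DISPLAY_AFFINITY"] = "TRUE"
    else:
        raise ValueError("Unknown binding method ({:s}).".format(method))
    return cmdl, env

print(binding_directives([0, 2], method="taskset"))  # (['taskset', '--cpu-list', '0,2'], {})
print(binding_directives([0, 2], method="omp"))      # ([], {'OMP_PLACES': '{0},{2}'})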
3316
3900
 
3317
3901
 
@@ -3320,7 +3904,7 @@ class Linux34p(Linux, Python34):
3320
3904
 
3321
3905
  _footprint = [
3322
3906
  _python34_fp,
3323
- dict(info = 'Linux based system with a blazing Python version')
3907
+ dict(info="Linux based system with a blazing Python version"),
3324
3908
  ]
3325
3909
 
3326
3910
 
@@ -3328,26 +3912,24 @@ class LinuxDebug(Linux34p):
3328
3912
  """Special system class for crude debugging on Linux based systems."""
3329
3913
 
3330
3914
  _footprint = dict(
3331
- info = 'Linux debug system',
3332
- attr = dict(
3333
- version = dict(
3334
- optional = False,
3335
- values = ['dbug', 'debug'],
3336
- remap = dict(
3337
- dbug = 'debug'
3338
- )
3915
+ info="Linux debug system",
3916
+ attr=dict(
3917
+ version=dict(
3918
+ optional=False,
3919
+ values=["dbug", "debug"],
3920
+ remap=dict(dbug="debug"),
3339
3921
  )
3340
- )
3922
+ ),
3341
3923
  )
3342
3924
 
3343
3925
  def __init__(self, *args, **kw):
3344
3926
  """Gateway to parent method after debug logging."""
3345
- logger.debug('LinuxDebug system init %s', self.__class__)
3927
+ logger.debug("LinuxDebug system init %s", self.__class__)
3346
3928
  super().__init__(*args, **kw)
3347
3929
 
3348
3930
  @property
3349
3931
  def realkind(self):
3350
- return 'linuxdebug'
3932
+ return "linuxdebug"
3351
3933
 
3352
3934
 
3353
3935
  class Macosx(OSExtended):
@@ -3355,15 +3937,11 @@ class Macosx(OSExtended):
3355
3937
 
3356
3938
  _abstract = True
3357
3939
  _footprint = dict(
3358
- info = 'Apple Mac computer under Macosx',
3359
- attr = dict(
3360
- sysname = dict(
3361
- values = ['Darwin']
3362
- ),
3940
+ info="Apple Mac computer under Macosx",
3941
+ attr=dict(
3942
+ sysname=dict(values=["Darwin"]),
3363
3943
  ),
3364
- priority = dict(
3365
- level = footprints.priorities.top.TOOLBOX
3366
- )
3944
+ priority=dict(level=footprints.priorities.top.TOOLBOX),
3367
3945
  )
3368
3946
 
3369
3947
  def __init__(self, *args, **kw):
@@ -3373,18 +3951,18 @@ class Macosx(OSExtended):
3373
3951
 
3374
3952
  * **psopts** - as default option for the ps command (default: ``-w -f -a``).
3375
3953
  """
3376
- logger.debug('Darwin system init %s', self.__class__)
3377
- self._psopts = kw.pop('psopts', ['-w', '-f', '-a'])
3954
+ logger.debug("Darwin system init %s", self.__class__)
3955
+ self._psopts = kw.pop("psopts", ["-w", "-f", "-a"])
3378
3956
  super().__init__(*args, **kw)
3379
3957
 
3380
3958
  @property
3381
3959
  def realkind(self):
3382
- return 'darwin'
3960
+ return "darwin"
3383
3961
 
3384
3962
  @property
3385
3963
  def default_syslog(self):
3386
3964
  """Address to use in logging.handler.SysLogHandler()."""
3387
- return '/var/run/syslog'
3965
+ return "/var/run/syslog"
3388
3966
 
3389
3967
 
3390
3968
  class Macosx34p(Macosx, Python34):
@@ -3392,5 +3970,7 @@ class Macosx34p(Macosx, Python34):
3392
3970
 
3393
3971
  _footprint = [
3394
3972
  _python34_fp,
3395
- dict(info = 'Apple Mac computer under Macosx with a blazing Python version')
3973
+ dict(
3974
+ info="Apple Mac computer under Macosx with a blazing Python version"
3975
+ ),
3396
3976
  ]