vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (141)
  1. vortex/__init__.py +75 -47
  2. vortex/algo/__init__.py +3 -2
  3. vortex/algo/components.py +944 -618
  4. vortex/algo/mpitools.py +802 -497
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/serversynctools.py +34 -33
  7. vortex/config.py +19 -22
  8. vortex/data/__init__.py +9 -3
  9. vortex/data/abstractstores.py +593 -655
  10. vortex/data/containers.py +217 -162
  11. vortex/data/contents.py +65 -39
  12. vortex/data/executables.py +93 -102
  13. vortex/data/flow.py +40 -34
  14. vortex/data/geometries.py +228 -132
  15. vortex/data/handlers.py +436 -227
  16. vortex/data/outflow.py +15 -15
  17. vortex/data/providers.py +185 -163
  18. vortex/data/resources.py +48 -42
  19. vortex/data/stores.py +540 -417
  20. vortex/data/sync_templates/__init__.py +0 -0
  21. vortex/gloves.py +114 -87
  22. vortex/layout/__init__.py +1 -8
  23. vortex/layout/contexts.py +150 -84
  24. vortex/layout/dataflow.py +353 -202
  25. vortex/layout/monitor.py +264 -128
  26. vortex/nwp/__init__.py +5 -2
  27. vortex/nwp/algo/__init__.py +14 -5
  28. vortex/nwp/algo/assim.py +205 -151
  29. vortex/nwp/algo/clim.py +683 -517
  30. vortex/nwp/algo/coupling.py +447 -225
  31. vortex/nwp/algo/eda.py +437 -229
  32. vortex/nwp/algo/eps.py +403 -231
  33. vortex/nwp/algo/forecasts.py +416 -275
  34. vortex/nwp/algo/fpserver.py +683 -307
  35. vortex/nwp/algo/ifsnaming.py +205 -145
  36. vortex/nwp/algo/ifsroot.py +215 -122
  37. vortex/nwp/algo/monitoring.py +137 -76
  38. vortex/nwp/algo/mpitools.py +330 -190
  39. vortex/nwp/algo/odbtools.py +637 -353
  40. vortex/nwp/algo/oopsroot.py +454 -273
  41. vortex/nwp/algo/oopstests.py +90 -56
  42. vortex/nwp/algo/request.py +287 -206
  43. vortex/nwp/algo/stdpost.py +878 -522
  44. vortex/nwp/data/__init__.py +22 -4
  45. vortex/nwp/data/assim.py +125 -137
  46. vortex/nwp/data/boundaries.py +121 -68
  47. vortex/nwp/data/climfiles.py +193 -211
  48. vortex/nwp/data/configfiles.py +73 -69
  49. vortex/nwp/data/consts.py +426 -401
  50. vortex/nwp/data/ctpini.py +59 -43
  51. vortex/nwp/data/diagnostics.py +94 -66
  52. vortex/nwp/data/eda.py +50 -51
  53. vortex/nwp/data/eps.py +195 -146
  54. vortex/nwp/data/executables.py +440 -434
  55. vortex/nwp/data/fields.py +63 -48
  56. vortex/nwp/data/gridfiles.py +183 -111
  57. vortex/nwp/data/logs.py +250 -217
  58. vortex/nwp/data/modelstates.py +180 -151
  59. vortex/nwp/data/monitoring.py +72 -99
  60. vortex/nwp/data/namelists.py +254 -202
  61. vortex/nwp/data/obs.py +400 -308
  62. vortex/nwp/data/oopsexec.py +22 -20
  63. vortex/nwp/data/providers.py +90 -65
  64. vortex/nwp/data/query.py +71 -82
  65. vortex/nwp/data/stores.py +49 -36
  66. vortex/nwp/data/surfex.py +136 -137
  67. vortex/nwp/syntax/__init__.py +1 -1
  68. vortex/nwp/syntax/stdattrs.py +173 -111
  69. vortex/nwp/tools/__init__.py +2 -2
  70. vortex/nwp/tools/addons.py +22 -17
  71. vortex/nwp/tools/agt.py +24 -12
  72. vortex/nwp/tools/bdap.py +16 -5
  73. vortex/nwp/tools/bdcp.py +4 -1
  74. vortex/nwp/tools/bdm.py +3 -0
  75. vortex/nwp/tools/bdmp.py +14 -9
  76. vortex/nwp/tools/conftools.py +728 -378
  77. vortex/nwp/tools/drhook.py +12 -8
  78. vortex/nwp/tools/grib.py +65 -39
  79. vortex/nwp/tools/gribdiff.py +22 -17
  80. vortex/nwp/tools/ifstools.py +82 -42
  81. vortex/nwp/tools/igastuff.py +167 -143
  82. vortex/nwp/tools/mars.py +14 -2
  83. vortex/nwp/tools/odb.py +234 -125
  84. vortex/nwp/tools/partitioning.py +61 -37
  85. vortex/nwp/tools/satrad.py +27 -12
  86. vortex/nwp/util/async.py +83 -55
  87. vortex/nwp/util/beacon.py +10 -10
  88. vortex/nwp/util/diffpygram.py +174 -86
  89. vortex/nwp/util/ens.py +144 -63
  90. vortex/nwp/util/hooks.py +30 -19
  91. vortex/nwp/util/taskdeco.py +28 -24
  92. vortex/nwp/util/usepygram.py +278 -172
  93. vortex/nwp/util/usetnt.py +31 -17
  94. vortex/sessions.py +72 -39
  95. vortex/syntax/__init__.py +1 -1
  96. vortex/syntax/stdattrs.py +410 -171
  97. vortex/syntax/stddeco.py +31 -22
  98. vortex/toolbox.py +327 -192
  99. vortex/tools/__init__.py +11 -2
  100. vortex/tools/actions.py +110 -121
  101. vortex/tools/addons.py +111 -92
  102. vortex/tools/arm.py +42 -22
  103. vortex/tools/compression.py +72 -69
  104. vortex/tools/date.py +11 -4
  105. vortex/tools/delayedactions.py +242 -132
  106. vortex/tools/env.py +75 -47
  107. vortex/tools/folder.py +342 -171
  108. vortex/tools/grib.py +341 -162
  109. vortex/tools/lfi.py +423 -216
  110. vortex/tools/listings.py +109 -40
  111. vortex/tools/names.py +218 -156
  112. vortex/tools/net.py +655 -299
  113. vortex/tools/parallelism.py +93 -61
  114. vortex/tools/prestaging.py +55 -31
  115. vortex/tools/schedulers.py +172 -105
  116. vortex/tools/services.py +403 -334
  117. vortex/tools/storage.py +293 -358
  118. vortex/tools/surfex.py +24 -24
  119. vortex/tools/systems.py +1234 -643
  120. vortex/tools/targets.py +156 -100
  121. vortex/util/__init__.py +1 -1
  122. vortex/util/config.py +378 -327
  123. vortex/util/empty.py +2 -2
  124. vortex/util/helpers.py +56 -24
  125. vortex/util/introspection.py +18 -12
  126. vortex/util/iosponge.py +8 -4
  127. vortex/util/roles.py +4 -6
  128. vortex/util/storefunctions.py +39 -13
  129. vortex/util/structs.py +3 -3
  130. vortex/util/worker.py +29 -17
  131. vortex_nwp-2.1.0.dist-info/METADATA +67 -0
  132. vortex_nwp-2.1.0.dist-info/RECORD +144 -0
  133. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
  134. vortex/layout/appconf.py +0 -109
  135. vortex/layout/jobs.py +0 -1276
  136. vortex/layout/nodes.py +0 -1424
  137. vortex/layout/subjobs.py +0 -464
  138. vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
  139. vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
  140. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
  141. {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/nwp/tools/partitioning.py CHANGED
@@ -16,13 +16,16 @@ from bronx.fancies import loggers
 
 logger = loggers.getLogger(__name__)
 
-__all__ = ['PartitioningError',
-           'Rectangular2DPartitioner',
-           'setup_partitioning_in_namelist']
+__all__ = [
+    "PartitioningError",
+    "Rectangular2DPartitioner",
+    "setup_partitioning_in_namelist",
+]
 
 
 class PartitioningError(ValueError):
     """Any error raised during domain partitionings."""
+
     pass
 
 
@@ -40,24 +43,33 @@ class AbstratctPartitioner:
         :param p_method_specification: The partitioning method definition
         """
         # Process the partitioning method specification string
-        p_method_parts = p_method_specification.lower().split('_')
+        p_method_parts = p_method_specification.lower().split("_")
         self.p_method_name = None
         self.p_method_args = ()
         for a_method, a_method_n_args in self._REGISTERED_METHODS:
             if p_method_parts[0] == a_method:
                 self.p_method_name = p_method_parts[0]
                 if len(p_method_parts) - 1 != a_method_n_args:
-                    raise ValueError('Erroneous number of interger args ' +
-                                     'for the {:s} p_method ({:d} required)'
-                                     .format(a_method, a_method_n_args))
-                self.p_method_args = tuple([int(s) for s in p_method_parts[1:]])
+                    raise ValueError(
+                        "Erroneous number of interger args "
+                        + "for the {:s} p_method ({:d} required)".format(
+                            a_method, a_method_n_args
+                        )
+                    )
+                self.p_method_args = tuple(
+                    [int(s) for s in p_method_parts[1:]]
+                )
         # Unknown method -> crash
         if self.p_method_name is None:
-            raise PartitioningError("Unknown partitioning method ({:s})."
-                                    .format(p_method_specification))
+            raise PartitioningError(
+                "Unknown partitioning method ({:s}).".format(
+                    p_method_specification
+                )
+            )
         # The actual class' method that will be used to compute a and b
-        self.p_method = functools.partial(getattr(self, '_' + self.p_method_name),
-                                          * self.p_method_args)
+        self.p_method = functools.partial(
+            getattr(self, "_" + self.p_method_name), *self.p_method_args
+        )
         # Implement a caching mechanism
         self._p_cache = dict()
 
@@ -124,10 +136,10 @@ class Rectangular2DPartitioner(AbstratctPartitioner):
     """
 
     _REGISTERED_METHODS = (
-        ('xcloseto', 1),
-        ('ycloseto', 1),
-        ('square', 0),
-        ('aspect', 2)
+        ("xcloseto", 1),
+        ("ycloseto", 1),
+        ("square", 0),
+        ("aspect", 2),
     )
 
     @staticmethod
@@ -143,8 +155,7 @@ class Rectangular2DPartitioner(AbstratctPartitioner):
         """Find ``x`` as the closest possible value to **close_to_what**."""
         guesses = b_iter.interleave(
             range(close_to_what, 0, -1),
-            range(close_to_what + 1, min(close_to_what * 2,
-                                         ntasks))
+            range(close_to_what + 1, min(close_to_what * 2, ntasks)),
         )
         return self._test_and_return(ntasks, guesses)
 
@@ -164,17 +175,15 @@ class Rectangular2DPartitioner(AbstratctPartitioner):
     def _aspect(self, x_spec, y_spec, ntasks):
         """Find ``x`` and ``y`` so that ``x / y =~ x_spec / y_spec``."""
         aspect_ratio = x_spec / y_spec
-        return self._xcloseto(int(math.sqrt(ntasks * aspect_ratio)),
-                              ntasks)
+        return self._xcloseto(int(math.sqrt(ntasks * aspect_ratio)), ntasks)
 
 
 _PARTITIONERS_CACHE = dict()
 
 
-def setup_partitioning_in_namelist(namcontents,
-                                   effective_tasks,
-                                   effective_threads,
-                                   namlocal=None):
+def setup_partitioning_in_namelist(
+    namcontents, effective_tasks, effective_threads, namlocal=None
+):
     """Look in a namelist Content object and replace the macros related to partitioning.
 
     :param nwp.data.namelists.NamelistContent namcontents: The namelist's Content
@@ -199,10 +208,12 @@ def setup_partitioning_in_namelist(namcontents,
         partitioning class. Any value that is accepted by the partitioning class is
         fine.
     """
-    macrovalid = re.compile('PART_' +
-                            '(?P<what>TASKS|THREADS)(?P<cls>2D)_' +
-                            '(?P<dim>[XY])_(?P<def>.*)$')
-    partitioning_classes = {'2D': Rectangular2DPartitioner}
+    macrovalid = re.compile(
+        "PART_"
+        + "(?P<what>TASKS|THREADS)(?P<cls>2D)_"
+        + "(?P<dim>[XY])_(?P<def>.*)$"
+    )
+    partitioning_classes = {"2D": Rectangular2DPartitioner}
     namw = False
     # Find the list of existing macros
     all_macros = set()
@@ -212,23 +223,36 @@ def setup_partitioning_in_namelist(namcontents,
     for macroname in all_macros:
         macroname_re = macrovalid.match(macroname)
         if macroname_re:
-            cache_key = (macroname_re.group('cls'), macroname_re.group('def'))
+            cache_key = (macroname_re.group("cls"), macroname_re.group("def"))
             if cache_key not in _PARTITIONERS_CACHE:
-                partitioning_class = partitioning_classes[macroname_re.group('cls')]
-                _PARTITIONERS_CACHE[cache_key] = partitioning_class(macroname_re.group('def'))
-            effective_n = dict(TASKS=effective_tasks,
-                               THREADS=effective_threads)[macroname_re.group('what')]
+                partitioning_class = partitioning_classes[
+                    macroname_re.group("cls")
+                ]
+                _PARTITIONERS_CACHE[cache_key] = partitioning_class(
+                    macroname_re.group("def")
+                )
+            effective_n = dict(
+                TASKS=effective_tasks, THREADS=effective_threads
+            )[macroname_re.group("what")]
             part_x, part_y = _PARTITIONERS_CACHE[cache_key](effective_n)
-            final_result = part_x if macroname_re.group('dim') == 'X' else part_y
+            final_result = (
+                part_x if macroname_re.group("dim") == "X" else part_y
+            )
             if namlocal:
-                logger.info('Setup macro %s=%s in %s', macroname, final_result, namlocal)
+                logger.info(
+                    "Setup macro %s=%s in %s",
+                    macroname,
+                    final_result,
+                    namlocal,
+                )
             else:
-                logger.info('Setup macro %s=%s', macroname, final_result)
+                logger.info("Setup macro %s=%s", macroname, final_result)
             namcontents.setmacro(macroname, final_result)
             namw = True
     return namw
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     import doctest
+
    doctest.testmod()
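
For reference, a minimal sketch of how the PART_* namelist macros handled above are decoded. The regular expression is the one from this diff; the two macro names are made-up examples.

    import re

    # Pattern copied from setup_partitioning_in_namelist above.
    macrovalid = re.compile(
        "PART_(?P<what>TASKS|THREADS)(?P<cls>2D)_(?P<dim>[XY])_(?P<def>.*)$"
    )

    # Hypothetical macro names, only to illustrate the captured groups.
    for name in ("PART_TASKS2D_X_SQUARE", "PART_THREADS2D_Y_ASPECT_2_1"):
        match = macrovalid.match(name)
        print(name, "->", match.groupdict())
        # The 'def' group (e.g. 'ASPECT_2_1') is then lowercased and split on '_'
        # to select a registered method ('aspect') and its integer arguments (2, 1).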
vortex/nwp/tools/satrad.py CHANGED
@@ -29,28 +29,43 @@ class SatRadDecoMixin(AlgoComponentDecoMixin):
 
     def _satrad_coeffdir_setup(self, rh, opts):  # @UnusedVariable
         """Look for RTTOV coefficient files and act on it."""
-        rtcoefs = self.context.sequence.effective_inputs(role='RtCoef', kind='rtcoef')
+        rtcoefs = self.context.sequence.effective_inputs(
+            role="RtCoef", kind="rtcoef"
+        )
         if rtcoefs:
             sh = self.system
-            rtpaths = {sh.path.dirname(sh.path.realpath(rtcoef.rh.container.localpath()))
-                       for rtcoef in rtcoefs}
+            rtpaths = {
+                sh.path.dirname(
+                    sh.path.realpath(rtcoef.rh.container.localpath())
+                )
+                for rtcoef in rtcoefs
+            }
             if len(rtpaths) != 1:
-                raise AlgoComponentError('The Radiative Transfer Coefficients are scattered in' +
-                                         'several directories: {!s}'.format(rtpaths))
+                raise AlgoComponentError(
+                    "The Radiative Transfer Coefficients are scattered in"
+                    + "several directories: {!s}".format(rtpaths)
+                )
             rtpath = rtpaths.pop()
-            logger.info('Setting %s = %s', 'RTTOV_COEFDIR', rtpath)
-            self.env['RTTOV_COEFDIR'] = rtpath
+            logger.info("Setting %s = %s", "RTTOV_COEFDIR", rtpath)
+            self.env["RTTOV_COEFDIR"] = rtpath
 
-    _MIXIN_PREPARE_HOOKS = (_satrad_coeffdir_setup, )
+    _MIXIN_PREPARE_HOOKS = (_satrad_coeffdir_setup,)
 
     def setchannels(self):
         """Look up for channels namelists in effective inputs."""
         namchan = [
-            x.rh for x in self.context.sequence.effective_inputs(kind='namelist')
-            if 'channel' in x.rh.options
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelist")
+            if "channel" in x.rh.options
         ]
         for thisnam in namchan:
-            thisloc = re.sub(r'\d+$', '', thisnam.options['channel']) + 'channels'
+            thisloc = (
+                re.sub(r"\d+$", "", thisnam.options["channel"]) + "channels"
+            )
             if thisloc != thisnam.container.localpath():
-                logger.info('Linking < %s > to < %s >', thisnam.container.localpath(), thisloc)
+                logger.info(
+                    "Linking < %s > to < %s >",
+                    thisnam.container.localpath(),
+                    thisloc,
+                )
                 self.system.softlink(thisnam.container.localpath(), thisloc)
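
The setchannels logic above derives the local link name by stripping the trailing digits from the namelist's channel option. A small illustration, with hypothetical channel identifiers:

    import re

    def channels_localname(channel):
        """Strip a trailing numeric suffix and append 'channels', as setchannels does."""
        return re.sub(r"\d+$", "", channel) + "channels"

    # Made-up channel identifiers, only to show the transformation.
    print(channels_localname("iasi366"))  # -> iasichannels
    print(channels_localname("seviri"))   # -> sevirichannels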
vortex/nwp/util/async.py CHANGED
@@ -18,7 +18,7 @@ def _double_ssh(sh, loginnode, transfernode):
 
     May return None when network problems occur.
     """
-    cmd = ['ssh', '-x', loginnode, 'ssh', '-x', transfernode, 'hostname', '-s']
+    cmd = ["ssh", "-x", loginnode, "ssh", "-x", transfernode, "hostname", "-s"]
     rc = sh.spawn(cmd, shell=False, output=True, fatal=False)
     if not rc:
         return None
@@ -33,54 +33,73 @@ def system_ftput(pnum, ask, config, logger, **opts):
     obtained by a double ssh.
     """
 
-    logger.info('System', todo=ask.todo, pnum=pnum, opts=opts)
-    value = dict(rpool='retry')
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
 
-    phasemode = opts.get('phasemode', False)
-    nbtries = opts.get('attempts', 1)
+    phasemode = opts.get("phasemode", False)
+    nbtries = opts.get("attempts", 1)
     if phasemode:
         rawftput = False
     else:
-        rawftput = opts.get('rawftput', False)
+        rawftput = opts.get("rawftput", False)
     trynum = 0
 
-    profile = config['driver'].get('profile', None)
+    profile = config["driver"].get("profile", None)
     with VortexWorker(logger=logger, profile=profile) as vwork:
         sh = vwork.session.sh
-        sh.trace = 'log'
-        sh.ftpflavour = systems.FTP_FLAVOUR.STD  # Because errors are handled directly by jeeves
+        sh.trace = "log"
+        sh.ftpflavour = (
+            systems.FTP_FLAVOUR.STD
+        )  # Because errors are handled directly by jeeves
 
         data = vwork.get_dataset(ask)
-        logger.info('ftput', source=data.source, destination=data.destination)
+        logger.info("ftput", source=data.source, destination=data.destination)
         if not sh.path.exists(data.source):
-            logger.error('The source file is missing - sorry')
-            return pnum, False, dict(rpool='error')
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
 
         if phasemode:
-            data.hostname = _double_ssh(sh, data.phase_loginnode, data.phase_transfernode)
+            data.hostname = _double_ssh(
+                sh, data.phase_loginnode, data.phase_transfernode
+            )
             if data.hostname is None:
-                return pnum, False, dict(rpool='retry')
-
-        cpipeline = (None if not hasattr(data, 'cpipeline') or not data.cpipeline
-                     else compression.CompressionPipeline(sh, data.cpipeline))
-
-        logger.info('FTPut host', hostname=data.hostname, logname=data.logname)
-        logger.info('FTPut data', source=data.source, destination=data.destination)
+                return pnum, False, dict(rpool="retry")
+
+        cpipeline = (
+            None
+            if not hasattr(data, "cpipeline") or not data.cpipeline
+            else compression.CompressionPipeline(sh, data.cpipeline)
+        )
+
+        logger.info("FTPut host", hostname=data.hostname, logname=data.logname)
+        logger.info(
+            "FTPut data", source=data.source, destination=data.destination
+        )
         while trynum < nbtries:
             trynum += 1
             if nbtries > 1:
-                logger.info('FTPut loop', attempt=trynum)
+                logger.info("FTPut loop", attempt=trynum)
             try:
                 if rawftput:
-                    putrc = sh.rawftput(data.source, data.destination, hostname=data.hostname,
-                                        logname=data.logname, cpipeline=cpipeline,
-                                        fmt=data.fmt)
+                    putrc = sh.rawftput(
+                        data.source,
+                        data.destination,
+                        hostname=data.hostname,
+                        logname=data.logname,
+                        cpipeline=cpipeline,
+                        fmt=data.fmt,
+                    )
                 else:
-                    putrc = sh.ftput(data.source, data.destination, hostname=data.hostname,
-                                     logname=data.logname, cpipeline=cpipeline,
-                                     fmt=data.fmt)
+                    putrc = sh.ftput(
+                        data.source,
+                        data.destination,
+                        hostname=data.hostname,
+                        logname=data.logname,
+                        cpipeline=cpipeline,
+                        fmt=data.fmt,
+                    )
             except Exception as e:
-                logger.warning('FTPut failed', attempt=trynum, error=e)
+                logger.warning("FTPut failed", attempt=trynum, error=e)
                 putrc = False
             if putrc:
                 value = dict(clear=sh.rm(data.source, fmt=data.fmt))
@@ -92,23 +111,23 @@ def system_ftput(pnum, ask, config, logger, **opts):
 def system_cp(pnum, ask, config, logger, **opts):
     """Local transfers (between filesystems) on a given host."""
 
-    logger.info('System', todo=ask.todo, pnum=pnum, opts=opts)
-    value = dict(rpool='retry')
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
 
-    profile = config['driver'].get('profile', None)
+    profile = config["driver"].get("profile", None)
     with VortexWorker(logger=logger, profile=profile) as vwork:
         sh = vwork.session.sh
-        sh.trace = 'log'
+        sh.trace = "log"
         data = vwork.get_dataset(ask)
-        logger.info('cp', source=data.source, destination=data.destination)
+        logger.info("cp", source=data.source, destination=data.destination)
         if not sh.path.exists(data.source):
-            logger.error('The source file is missing - sorry')
-            return pnum, False, dict(rpool='error')
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
 
         try:
             rc = sh.cp(data.source, data.destination, fmt=data.fmt)
         except Exception as e:
-            logger.warning('cp failed', error=e)
+            logger.warning("cp failed", error=e)
             rc = False
         if rc:
             value = dict(clear=sh.rm(data.source, fmt=data.fmt))
@@ -123,33 +142,42 @@ def system_scp(pnum, ask, config, logger, **opts):
     In phase mode, raw ftp is not allowed, and the hostname is dynamically
     obtained by a double ssh.
     """
-    logger.info('System', todo=ask.todo, pnum=pnum, opts=opts)
-    value = dict(rpool='retry')
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
 
-    phasemode = opts.get('phasemode', False)
+    phasemode = opts.get("phasemode", False)
 
-    profile = config['driver'].get('profile', None)
+    profile = config["driver"].get("profile", None)
     with VortexWorker(logger=logger, profile=profile) as vwork:
         sh = vwork.session.sh
-        sh.trace = 'log'
+        sh.trace = "log"
 
         data = vwork.get_dataset(ask)
-        logger.info('scp', source=data.source, destination=data.destination)
+        logger.info("scp", source=data.source, destination=data.destination)
         if not sh.path.exists(data.source):
-            logger.error('The source file is missing - sorry')
-            return pnum, False, dict(rpool='error')
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
 
         if phasemode:
-            data.hostname = _double_ssh(sh, data.phase_loginnode, data.phase_transfernode)
+            data.hostname = _double_ssh(
+                sh, data.phase_loginnode, data.phase_transfernode
+            )
             if data.hostname is None:
                 return pnum, False, value
-        logger.info('scp host', hostname=data.hostname, logname=data.logname)
-        logger.info('scp data', source=data.source, destination=data.destination)
+        logger.info("scp host", hostname=data.hostname, logname=data.logname)
+        logger.info(
+            "scp data", source=data.source, destination=data.destination
+        )
         try:
-            putrc = sh.scpput(data.source, data.destination, hostname=data.hostname,
-                              logname=data.logname, fmt=data.fmt)
+            putrc = sh.scpput(
+                data.source,
+                data.destination,
+                hostname=data.hostname,
+                logname=data.logname,
+                fmt=data.fmt,
+            )
         except Exception as e:
-            logger.warning('scp failed', error=e)
+            logger.warning("scp failed", error=e)
             putrc = False
         if putrc:
             value = dict(clear=sh.rm(data.source, fmt=data.fmt))
@@ -162,23 +190,23 @@ def system_noop(pnum, ask, config, logger, **opts):
 
     Used to desactivate jeeves when mirroring the operational suite.
     """
-    logger.info('Noop', todo=ask.todo, pnum=pnum, opts=opts)
+    logger.info("Noop", todo=ask.todo, pnum=pnum, opts=opts)
 
-    profile = config['driver'].get('profile', None)
+    profile = config["driver"].get("profile", None)
     with VortexWorker(logger=logger, profile=profile) as vwork:
         sh = vwork.session.sh
-        sh.trace = 'log'
+        sh.trace = "log"
         data = vwork.get_dataset(ask)
         value = dict(clear=sh.rm(data.source, fmt=data.fmt))
 
     return pnum, vwork.rc, value
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     import vortex
 
     t = vortex.ticket()
     main_sh = t.sh
     main_sh.trace = True
     main_sh.verbose = True
-    print(_double_ssh(main_sh, 'beaufixoper', 'beaufixtransfert-agt'))
+    print(_double_ssh(main_sh, "beaufixoper", "beaufixtransfert-agt"))
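
These jeeves callbacks share one contract: they return (pnum, rc, value), where value asks the dispatcher to retry, flag an error, or clear the source file. A minimal sketch of the retry pattern used by system_ftput above, with a placeholder transfer callable instead of sh.ftput:

    def attempt_put(put, attempts=1):
        """Retry a transfer callable; return True as soon as one attempt succeeds."""
        for trynum in range(1, attempts + 1):
            try:
                putrc = put()
            except Exception as exc:  # transient failures are swallowed and retried
                print("attempt", trynum, "failed:", exc)
                putrc = False
            if putrc:
                return True
        return False

    # Dummy transfer that always succeeds, for illustration only.
    print(attempt_put(lambda: True, attempts=3))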
vortex/nwp/util/beacon.py CHANGED
@@ -23,18 +23,18 @@ def beaconfunction(options):
     rst = dict()
 
     # Find out if a resource handler is present and load the elements to be written
-    rhdict = options.get('rhandler', None)
+    rhdict = options.get("rhandler", None)
     if rhdict:
-        rst['vapp'] = rhdict.get('provider', {}).get('vapp', '')
-        rst['vconf'] = rhdict.get('provider', {}).get('vconf', '')
-        rst['model'] = rhdict.get('resource', {}).get('model', '')
-        rst['date'] = rhdict.get('resource', {}).get('date', '')
-        rst['cutoff'] = rhdict.get('resource', {}).get('cutoff', '')
-        member = rhdict.get('provider', {}).get('member', None)
+        rst["vapp"] = rhdict.get("provider", {}).get("vapp", "")
+        rst["vconf"] = rhdict.get("provider", {}).get("vconf", "")
+        rst["model"] = rhdict.get("resource", {}).get("model", "")
+        rst["date"] = rhdict.get("resource", {}).get("date", "")
+        rst["cutoff"] = rhdict.get("resource", {}).get("cutoff", "")
+        member = rhdict.get("provider", {}).get("member", None)
         if member is not None:
-            rst['member'] = member
+            rst["member"] = member
     else:
-        rst['error'] = 'No resource handler here'
+        rst["error"] = "No resource handler here"
     outstr = json.dumps(rst)
     # Return the string, which has to be converted to a file like object
-    return io.BytesIO(outstr.encode(encoding='utf_8'))
+    return io.BytesIO(outstr.encode(encoding="utf_8"))
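
beaconfunction serializes a handful of provider/resource attributes to JSON and wraps the result in a BytesIO. A sketch of the kind of payload it produces, using made-up rhandler contents:

    import io
    import json

    # Hypothetical 'rhandler' dictionary, mirroring the keys read above.
    rhdict = {
        "provider": {"vapp": "arpege", "vconf": "4dvarfr", "member": 3},
        "resource": {"model": "arpege", "date": "2024010100", "cutoff": "assim"},
    }
    rst = {
        "vapp": rhdict.get("provider", {}).get("vapp", ""),
        "vconf": rhdict.get("provider", {}).get("vconf", ""),
        "model": rhdict.get("resource", {}).get("model", ""),
        "date": rhdict.get("resource", {}).get("date", ""),
        "cutoff": rhdict.get("resource", {}).get("cutoff", ""),
        "member": rhdict.get("provider", {}).get("member", None),
    }
    payload = io.BytesIO(json.dumps(rst).encode(encoding="utf_8"))
    print(payload.read().decode("utf_8"))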