vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/tools/odb.py
ADDED
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Abstract classes for :mod:`taylorism` workers to be used in conjunction with
|
|
3
|
+
AlgoComponents based on the :class:`~vortex.algo.components.TaylorRun` class.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import io
|
|
7
|
+
import logging
|
|
8
|
+
import sys
|
|
9
|
+
|
|
10
|
+
from bronx.fancies import loggers
|
|
11
|
+
from bronx.stdtypes import date
|
|
12
|
+
import footprints
|
|
13
|
+
import taylorism
|
|
14
|
+
import vortex
|
|
15
|
+
from vortex.tools.systems import ExecutionError
|
|
16
|
+
|
|
17
|
+
#: No automatic export
|
|
18
|
+
__all__ = []
|
|
19
|
+
|
|
20
|
+
logger = loggers.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class TaylorVortexWorker(taylorism.Worker):
    """Vortex version of the :class:`taylorism.Worker` class.

    This class provides additional features:

    * Useful shortcuts (system, context, ...)
    * Setup a Context recorder to track changes in the Context (and replay them later)
    * Setup necessary hooks to record the logging messages and standard output. They
      are sent back to the main process where they are displayed using the
      :class:`ParallelResultParser` class.
    """

    _abstract = True
    _footprint = dict(
        attr = dict(
            kind = dict(),
            taskdebug = dict(
                info = 'Dump all stdout/stderr to a file (in real live !)',
                type = bool,
                default = False,
                optional = True,
            ),
        )
    )

    def _vortex_shortcuts(self):
        """Setup a few shortcuts."""
        # Shortcuts to the current session ticket, its context and system objects
        self.ticket = vortex.sessions.current()
        self.context = self.ticket.context
        self.system = self.context.system

    def _vortex_rc_wrapup(self, rc, psi_rc):
        """Complement the return code with the ParallelSilencer recording.

        :param rc: The raw return value of :meth:`vortex_task`
        :param dict psi_rc: The record exported by the :class:`ParallelSilencer`
        :return: A dictionary merging *rc* with *psi_rc*
        """
        # Update the return values
        if not isinstance(rc, dict):
            # Wrap non-dict return values so that psi_rc can be merged in
            rc = dict(msg=rc)
        rc.update(psi_rc)
        return rc

    def _task(self, **kwargs):
        """Should not be overridden anymore: see :meth:`vortex_task`."""
        self._vortex_shortcuts()
        # Silence/record all outputs and loggers while the real work runs,
        # then ship the recording back alongside the return code.
        with ParallelSilencer(self.context, self.name, debug=self.taskdebug) as psi:
            rc = self.vortex_task(**kwargs)
            psi_rc = psi.export_result()
        return self._vortex_rc_wrapup(rc, psi_rc)

    def vortex_task(self, **kwargs):
        """This method is to be implemented through inheritance: the real work happens here!"""
        raise NotImplementedError()
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class VortexWorkerBlindRun(TaylorVortexWorker):
    """Include utility methods to run a basic program (i.e no MPI)."""

    _abstract = True
    _footprint = dict(
        attr = dict(
            progname = dict(
            ),
            progargs = dict(
                type = footprints.FPList,
                default = footprints.FPList(),
                optional = True,
            ),
            progtaskset = dict(
                info = "Topology/Method to set up the CPU affinity of the child task.",
                default = None,
                optional = True,
            ),
            progtaskset_bsize = dict(
                info = 'The number of threads used by one task',
                type = int,
                default = 1,
                optional = True
            ),
            progenvdelta = dict(
                info = 'Any alteration to environment variables',
                type = footprints.FPDict,
                default = footprints.FPDict({}),
                optional = True
            ),
        )
    )

    def local_spawn_hook(self):
        """Last chance to say something before execution."""
        pass

    def local_spawn(self, stdoutfile):
        """Execute the command specified in the **progname** attributes.

        :param stdoutfile: Path to the file where the standard/error output will
                           be saved.
        """
        # Open the output file with a context manager so that the file
        # descriptor is always released (the previous implementation leaked it).
        with open(stdoutfile, 'wb') as tmpio:
            try:
                # Make sure any core dump goes to /dev/null
                self.system.softlink('/dev/null', 'core')
            except FileExistsError:
                # 'core' already exists: leave it alone
                pass
            self.local_spawn_hook()
            self.system.default_target.spawn_hook(self.system)
            logger.info("The program stdout/err will be saved to %s", stdoutfile)
            logger.info("Starting the following command: %s (taskset=%s, id=%d)",
                        " ".join([self.progname, ] + self.progargs),
                        str(self.progtaskset), self.scheduler_ticket)
            # Apply any environment-variable alterations just for the spawn
            with self.system.env.delta_context(** self.progenvdelta):
                self.system.spawn([self.progname, ] + self.progargs, output=tmpio,
                                  fatal=True, taskset=self.progtaskset,
                                  taskset_id=self.scheduler_ticket,
                                  taskset_bsize=self.progtaskset_bsize)

    def delayed_error_local_spawn(self, stdoutfile, rcdict):
        """local_spawn wrapped in a try/except in order to trigger delayed exceptions.

        :param stdoutfile: Path to the file where the standard/error output will be saved.
        :param dict rcdict: Dictionary in which a failure is recorded (under the 'rc' key)
        :return: The (possibly updated) *rcdict*
        """
        try:
            self.local_spawn(stdoutfile)
        except ExecutionError as e:
            logger.error("The execution failed.")
            # Store the exception: it will be re-raised later in the main process
            rcdict['rc'] = e
        return rcdict

    def find_namelists(self, opts=None):  # @UnusedVariable
        """Find any namelists candidates in actual context inputs.

        :return: The list of resource handlers of effective inputs of kind 'namelist'
        """
        namcandidates = [x.rh for x in self.context.sequence.effective_inputs(kind='namelist')]
        self.system.subtitle('Namelist candidates')
        for nam in namcandidates:
            nam.quickview()

        return namcandidates
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
class TeeLikeStringIO(io.StringIO):
    """A StringIO variant that can also write to several files."""

    def __init__(self):
        super().__init__()
        # The set of extra file objects every write is mirrored to
        self._tees = set()

    def record_teefile(self, filename, mode='w', line_buffering=True):
        """Add **filename** to the set of extra logfiles.

        :param filename: Path to the extra logfile
        :param mode: Mode used to open the file
        :param line_buffering: If True the file is opened line-buffered, so
                               that the content shows up in real time
        """
        self._tees.add(open(filename, mode=mode, buffering=int(line_buffering)))

    def discard_tees(self):
        """Dismiss (and close) all of the extra logfiles."""
        for teeio in self._tees:
            teeio.close()
        self._tees = set()

    def write(self, t):
        """Write in the present StringIO but also in the extra logfiles.

        :return: The number of characters written, as required by the
                 :class:`io.TextIOBase` contract (the previous implementation
                 returned ``None``).
        """
        for teeio in self._tees:
            teeio.write(t)
        return super().write(t)

    def filedump(self, filename, mode='w'):
        """Dump all of the captured data to **filename**."""
        with open(filename, mode=mode) as fhdump:
            self.seek(0)
            for line in self:
                fhdump.write(line)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
class ParallelSilencer:
    """Record everything and suppress all outputs (stdout, loggers, ...).

    The record is kept within the object: the *export_result* method returns
    the record as a dictionary that can be processed using the
    :class:`ParallelResultParser` class.

    :note: This object is designed to be used as a Context manager.

    :example:
    .. code-block:: python

        with ParallelSilencer(context) as psi:
            # do a lot of stuff here
            psi_record = psi.export_result()
        # do whatever you need with the psi_record
    """

    def __init__(self, context, taskname, debug=False):
        """
        :param vortex.layout.contexts.Context context: : The context we will record.
        :param taskname: Used to build the name of the real-time/emergency dump file
        :param bool debug: If True, stdout/stderr are also dumped to a file in real time
        """
        self._ctx = context
        self._taskdebug = debug
        # File used for the real-time dump (debug=True) or the emergency dump on error
        self._debugfile = '{:s}_{:s}_stdeo.txt'.format(taskname,
                                                       date.now().ymdhms)
        self._ctx_r = None  # The context recorder (created in __enter__)
        self._io_r = io.StringIO()  # Placeholder; replaced by a TeeLikeStringIO in _reset_records
        # Other temporary stuff
        self._reset_temporary()

    def _reset_records(self):
        """Reset variables where the records are stored."""
        self._io_r = TeeLikeStringIO()
        if self._taskdebug:
            # In debug mode, mirror everything to the debug file in real time
            self._io_r.record_teefile(self._debugfile)
        # Handler that redirects every log record into our in-memory buffer
        self._stream_h = logging.StreamHandler(self._io_r)
        self._stream_h.setLevel(logging.DEBUG)
        self._stream_h.setFormatter(loggers.default_console.formatter)

    def _reset_temporary(self):
        """Reset other temporary stuff."""
        # Maps each logger to the list of handlers removed from it (restored later)
        self._removed_h = dict()
        # The original sys.stdout/sys.stderr (restored in _stop_recording)
        (self._prev_stdo, self._prev_stde) = (None, None)

    def __enter__(self):
        """The beginning of a new context."""
        # Reset all
        self._reset_records()
        # Start the recording of the context (to be replayed in the main process)
        self._ctx_r = self._ctx.get_recorder()
        # Reset all the log handlers and slurp everything
        r_logger = logging.getLogger()
        self._removed_h[r_logger] = list(r_logger.handlers)
        # NB: add our handler before removing the old ones so the root logger
        # is never left without any handler
        r_logger.addHandler(self._stream_h)
        for a_handler in self._removed_h[r_logger]:
            r_logger.removeHandler(a_handler)
        # Strip the handlers of every logger known to the bronx loggers registry
        for a_logger in [logging.getLogger(x) for x in loggers.lognames | loggers.roots]:
            self._removed_h[a_logger] = list(a_logger.handlers)
            for a_handler in self._removed_h[a_logger]:
                a_logger.removeHandler(a_handler)
        # Do not speak on stdout/err
        self._prev_stdo = sys.stdout
        self._prev_stde = sys.stderr
        sys.stdout = self._io_r
        sys.stderr = self._io_r
        return self

    def __exit__(self, exctype, excvalue, exctb):  # @UnusedVariable
        """The end of a context."""
        self._stop_recording()
        if (exctype is not None and
                not self._taskdebug and self._io_r is not None):
            # Emergency dump of the outputs (even with debug=False) !
            self._io_r.filedump(self._debugfile)

    def _stop_recording(self):
        """Stop recording and restore everything."""
        # _prev_stdo is None when recording is not active: makes this idempotent
        if self._prev_stdo is not None:
            # Stop recording the context
            self._ctx_r.unregister()
            # Restore the loggers
            r_logger = logging.getLogger()
            for a_handler in self._removed_h[r_logger]:
                r_logger.addHandler(a_handler)
            r_logger.removeHandler(self._stream_h)
            for a_logger in [logging.getLogger(x) for x in loggers.roots | loggers.lognames]:
                # .get(): a logger may have been created after __enter__ ran
                for a_handler in self._removed_h.get(a_logger, ()):
                    a_logger.addHandler(a_handler)
            # flush
            self._stream_h.flush()
            # Restore stdout/err
            sys.stdout = self._prev_stdo
            sys.stderr = self._prev_stde
            # Remove all tees
            self._io_r.discard_tees()
            # Cleanup
            self._reset_temporary()

    def export_result(self):
        """Return everything that has been recorded.

        :return: A dictionary that can be processed with the :class:`ParallelResultParser` class.
        """
        self._stop_recording()
        self._io_r.seek(0)
        return dict(context_record=self._ctx_r,
                    stdoe_record=self._io_r.readlines())
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
class ParallelResultParser:
    """Summarise the results of a parallel execution.

    Just pass to this object the `rc` of a `taylorism` worker based on
    :class:`TaylorVortexWorker`. It will:

    * update the context with the changes made by the worker ;
    * display the standard output/error of the worker
    """

    def __init__(self, context):
        """
        :param vortex.layout.contexts.Context context: The context where the results will be replayed.
        """
        self.context = context

    def slurp(self, res):
        """Summarise the results of a parallel execution.

        :param dict res: A result record
        """
        # A raw exception coming back from the worker is re-raised as-is
        if isinstance(res, Exception):
            raise res
        sys.stdout.flush()
        name = res['name']
        report = res['report']
        logger.info('Parallel processing results for %s', name)
        # Replay the recorded context changes into our own context
        logger.info('... Updating the current context ...')
        report['context_record'].replay_in(self.context)
        # Echo the worker's captured stdout/stderr, if any
        captured = report['stdoe_record']
        if captured:
            logger.info('... Dump of the mixed standard/error output generated by the subprocess ...')
            for outline in captured:
                sys.stdout.write(outline)
        logger.info("... That's all for all for %s ...", name)

        return report.get('rc', True)

    def __call__(self, res):
        return self.slurp(res)
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Advanced tools that deal with resources pre-staging.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from collections import namedtuple
|
|
6
|
+
|
|
7
|
+
from bronx.fancies import loggers
|
|
8
|
+
from bronx.fancies.dump import lightdump
|
|
9
|
+
from bronx.patterns import getbytag
|
|
10
|
+
from bronx.stdtypes.catalog import Catalog
|
|
11
|
+
|
|
12
|
+
import footprints
|
|
13
|
+
from footprints import proxy as fpx
|
|
14
|
+
|
|
15
|
+
from vortex.tools.systems import OSExtended
|
|
16
|
+
|
|
17
|
+
#: No automatic export
|
|
18
|
+
__all__ = []
|
|
19
|
+
|
|
20
|
+
logger = loggers.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
#: Named-tuple type holding the three pre-staging priority levels.
PrestagingPriorityTuple = namedtuple('PrestagingPriorityTuple', 'urgent normal low')

#: Predefined priority values for urgent, normal and low requests.
prestaging_p = PrestagingPriorityTuple(99, 50, 0)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# Module Interface
|
|
30
|
+
def get_hub(**kw):
    """Return the actual PrestagingHub object matching the *tag* (or create one)."""
    hub = PrestagingHub(**kw)
    return hub
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class PrestagingTool(footprints.FootprintBase, Catalog):
    """Abstract class that deals with pre-staging for a given storage target.

    Instances also act as a :class:`~bronx.stdtypes.catalog.Catalog`: the
    locations to pre-stage are accumulated in the catalog and sent away by
    :meth:`flush` (to be implemented by concrete subclasses).
    """

    _abstract = True
    _collector = ('prestagingtool',)
    _footprint = dict(
        info = "Abstract class that deal with pre-staging for a given storage target.",
        attr = dict(
            system = dict(
                info = "The current system object",
                type = OSExtended
            ),
            issuerkind = dict(
                info = 'The kind of store issuing the prestaging request'
            ),
            priority = dict(
                info = 'The prestaging request priority',
                type = int,
                values = list(prestaging_p)
            )
        )
    )

    def __init__(self, *kargs, **kwargs):
        """Abstract PrestagingTools init.

        Initialises both parents explicitly: the Catalog part first, so that
        the item list exists before the footprint machinery runs.
        """
        # Call both inits
        Catalog.__init__(self)
        footprints.FootprintBase.__init__(self, *kargs, **kwargs)

    def __str__(self):
        # Short, item-less description
        return self.describe(fulldump=False)

    def describe(self, fulldump=False):
        """Print the object's characteristics and content.

        :param bool fulldump: If True, also list the recorded locations
        :return: The description as a string
        """
        res = 'PrestagingTool object of class: {!s}\n'.format(self.__class__)
        for k, v in self.footprint_as_shallow_dict().items():
            res += ' * {:s}: {!s}\n'.format(k, v)
        if fulldump:
            res += '\n * Todo list:\n'
            res += '\n'.join([' - {:s}'.format(item) for item in sorted(self.items())])
        return res

    def flush(self, email=None):
        """Send the prestaging request to the appropriate location.

        :param email: Optional e-mail address (usage is subclass-specific)
        :raises NotImplementedError: Concrete subclasses must implement this
        """
        raise NotImplementedError()
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class PrivatePrestagingHub:
    """
    Manages pre-staging requests by forwarding them to the appropriate
    :class:`PrestagingTool` object.

    If no :class:`PrestagingTool` class is able to handle the pre-staging
    request, just do nothing.

    :note: When calling the :meth:`record` method, the pre-staging request is
        just stored away. To actually request the pre-staging, one must call the
        :meth:`flush` method.
    """

    def __init__(self, sh, email=None):
        """
        :param sh: The current system object
        :param email: Optional e-mail address forwarded to the tools on :meth:`flush`
        """
        self._email = email
        self._sh = sh
        self._prestagingtools_default_opts = dict()
        # The set of currently active PrestagingTool objects
        self._prestagingtools = set()

    @property
    def prestagingtools_default_opts(self):
        """The dictionary of defaults that will be used when creating prestagingtool objects."""
        return self._prestagingtools_default_opts

    def record(self, location, priority=prestaging_p.normal, **kwargs):
        """Take into consideration a pre-staging request.

        :param str location: The location of the requested data
        :param int priority: The prestaging request priority
        :param dict kwargs: Any argument that will be used to create the :class:`PrestagingTool` object
        """
        # Prestaging tool descriptions
        myptool_desc = self.prestagingtools_default_opts.copy()
        myptool_desc.update(kwargs)
        myptool_desc['priority'] = priority
        myptool_desc['system'] = self._sh
        myptool = None
        # Scan pre-existing prestaging tools to find a suitable one
        for ptool in self._prestagingtools:
            if ptool.footprint_reusable() and ptool.footprint_compatible(myptool_desc):
                logger.debug("Re-usable prestaging tool found: %s", lightdump(myptool_desc))
                myptool = ptool
                break
        # If necessary, create a new one
        if myptool is None:
            myptool = fpx.prestagingtool(_emptywarning=False, **myptool_desc)
            if myptool is not None:
                logger.debug("Fresh prestaging tool created: %s", lightdump(myptool_desc))
                self._prestagingtools.add(myptool)
        # Let's roll
        if myptool is None:
            # No PrestagingTool class matched the description: silently ignore
            logger.debug("Unable to perform prestaging with: %s", lightdump(myptool_desc))
        else:
            logger.debug("Prestaging request accepted for: %s", location)
            myptool.add(location)

    def _get_ptools(self, priority_threshold=prestaging_p.low):
        """Return the set of tools whose priority is >= *priority_threshold*."""
        return {ptool for ptool in self._prestagingtools
                if ptool.priority >= priority_threshold}

    def __repr__(self, *args, **kwargs):
        return ('{:s} | n_prestagingtools={:d}>'
                .format(super().__repr__().rstrip('>'),
                        len(self._prestagingtools)))

    def __str__(self):
        return (repr(self) + "\n\n" +
                "\n\n".join([ptool.describe(fulldump=True) for ptool in self._prestagingtools]))

    def flush(self, priority_threshold=prestaging_p.low):
        """Actually send the pre-staging request to the appropriate location.

        :param int priority_threshold: Only requests with a priority >= *priority_threshold*
                                       will be sent.
        """
        for ptool in self._get_ptools(priority_threshold):
            print()
            rc = ptool.flush(email=self._email)
            if rc:
                # Successfully flushed: forget about this tool
                self._prestagingtools.discard(ptool)
            else:
                logger.error("Something went wrong when flushing the %s prestaging tool", ptool)

    def clear(self, priority_threshold=prestaging_p.low):
        """Erase the pre-staging requests list.

        :param int priority_threshold: Only requests with a priority >= *priority_threshold*
                                       will be deleted.
        """
        for ptool in self._get_ptools(priority_threshold):
            self._prestagingtools.discard(ptool)
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
class PrestagingHub(PrivatePrestagingHub, getbytag.GetByTag):
    """
    A subclass of :class:`PrivatePrestagingHub` that uses :class:`GetByTag`
    to remain persistent in memory.

    Therefore, a *tag* attribute needs to be specified when building/retrieving
    an object of this class.
    """
    pass
|
vortex/tools/rawfiles.py
ADDED