vortex-nwp 2.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/tools/storage.py
ADDED
|
@@ -0,0 +1,1061 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This package handles :class:`Storage` objects that could be in charge of
|
|
3
|
+
hosting data resources either locally ("Cache") or on a remote host ("Archive").
|
|
4
|
+
|
|
5
|
+
* :class:`Storage` is the main abstract class that defines the user-interface for
|
|
6
|
+
every class of this module. :meth:`Storage.fullpath`, :meth:`Storage.check`,
|
|
7
|
+
:meth:`Storage.insert`, :meth:`Storage.retrieve` and :meth:`Storage.delete` are
|
|
8
|
+
frequently used from a user point of view.
|
|
9
|
+
* The :class:`Cache` abstract class is a specialisation of the :class:`Storage`
|
|
10
|
+
class that handles data resources locally (i.e. data hosted on the same machine
|
|
11
|
+
that are readily and timelessly accessible). In this module, various concrete
|
|
12
|
+
implementations are provided for this class in order to support various cache
|
|
13
|
+
flavors.
|
|
14
|
+
* The :class:`Archive` class (readily usable) is a specialisation of the
|
|
15
|
+
:class:`Storage` class dedicated to data resources stored remotely (e.g on a
|
|
16
|
+
mass archive system).
|
|
17
|
+
|
|
18
|
+
These classes purely focus on the technical aspects (e.g. how to transfer a given
|
|
19
|
+
filename, directory or file like object to its storage place). For :class:`Cache`
|
|
20
|
+
based storage it determines the location of the data on the filesystem, in a
|
|
21
|
+
database, ... For :class:`Archive` based storage it smoothly handles communication
|
|
22
|
+
protocol between the local host and the remote archive.
|
|
23
|
+
|
|
24
|
+
These classes are used by :class:`Store` objects to access data. Thus,
|
|
25
|
+
:class:`Store` objects do not need to worry anymore about the technical
|
|
26
|
+
aspects. Using the :mod:`footprints` package, for a given execution target, it
|
|
27
|
+
allows to customise the way data are accessed leaving the :class:`Store` objects
|
|
28
|
+
unchanged.
|
|
29
|
+
"""
|
|
30
|
+
import contextlib
|
|
31
|
+
import ftplib
|
|
32
|
+
import re
|
|
33
|
+
import time
|
|
34
|
+
from collections import defaultdict
|
|
35
|
+
from datetime import datetime
|
|
36
|
+
import os
|
|
37
|
+
|
|
38
|
+
import footprints
|
|
39
|
+
from bronx.fancies import loggers
|
|
40
|
+
from bronx.stdtypes.history import History
|
|
41
|
+
from bronx.syntax.decorators import nicedeco
|
|
42
|
+
from vortex import sessions
|
|
43
|
+
from vortex.tools.actions import actiond as ad
|
|
44
|
+
from vortex.tools.delayedactions import d_action_status
|
|
45
|
+
from vortex.tools.systems import istruedef
|
|
46
|
+
# TODO clean instances of GenericConfigParser
|
|
47
|
+
from vortex.util.config import GenericConfigParser
|
|
48
|
+
from vortex import config
|
|
49
|
+
|
|
50
|
+
#: No automatic export
|
|
51
|
+
__all__ = []
|
|
52
|
+
|
|
53
|
+
logger = loggers.getLogger(__name__)
|
|
54
|
+
|
|
55
|
+
# If the source file size exceed this threshold, a hard link will be
|
|
56
|
+
# used (as much as possible). Otherwise a simple copy will be used.
|
|
57
|
+
HARDLINK_THRESHOLD = 1048576
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
# Decorators: for internal use in the Storage class
|
|
61
|
+
# -------------------------------------------------
|
|
62
|
+
|
|
63
|
+
def do_recording(flag):
    """Add a record line in the History object (if sensible)."""

    @nicedeco
    def do_flagged_recording(func):
        # Wrap *func* so that its return code and any extra information it
        # provides are pushed to the storage place's history under the
        # *flag* action name.
        def wrapped_action(self, item, *kargs, **kwargs):
            record_infos = self._findout_record_infos(kwargs)
            rc, extra_infos = func(self, item, *kargs, **kwargs)
            record_infos.update(extra_infos)
            self.addrecord(flag, item, status=rc, **record_infos)
            return rc

        return wrapped_action

    return do_flagged_recording
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@nicedeco
def enforce_readonly(func):
    """Check that the current storage object is not readonly."""

    def wrapped_action(self, item, *kargs, **kwargs):
        # Any write-like action is forbidden on a readonly storage place
        if not self.readonly:
            return func(self, item, *kargs, **kwargs)
        raise OSError("This Storage place is readonly.")

    return wrapped_action
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
# Main Storage abstract class
|
|
93
|
+
# ---------------------------
|
|
94
|
+
|
|
95
|
+
class Storage(footprints.FootprintBase):
    """Root class for any Storage class, ex: Cache, Archive, ...

    Tips for developers:

    The following methods needs to be defined in the child classes:

    * *_actual_fullpath*
    * *_actual_prestageinfo*
    * *_actual_check*
    * *_actual_list*
    * *_actual_insert*
    * *_actual_retrieve*
    * *_actual_delete*

    They must return a two elements tuple consisting of a returncode and a
    dictionary whose items will be written in the object's record.
    """

    # Fixed: this previously read "_abstract = True," — the stray trailing
    # comma turned the value into the one-element tuple (True,). It worked
    # only because a non-empty tuple is truthy. Use a plain boolean, as in
    # the Cache and AbstractArchive subclasses.
    _abstract = True
    _footprint = dict(
        info = 'Default/Abstract storage place description.',
        attr = dict(
            kind=dict(
                info="The storage place's kind.",
                values=['std'],
            ),
            storage=dict(
                info="The storage target.",
            ),
            record=dict(
                info="Record insert, retrieve, delete actions in an History object.",
                type=bool,
                optional=True,
                default=False,
                access='rwx',
            ),
            readonly=dict(
                info="Disallow insert and delete action for this storage place.",
                type=bool,
                optional=True,
                default=False,
            ),
        )
    )

    def __init__(self, *args, **kw):
        logger.debug('Abstract storage init %s', self.__class__)
        super().__init__(*args, **kw)
        # One History object per tag (see the "history" property docstring)
        self._history = History(tag=self.tag)

    @property
    def tag(self):
        """The identifier of the storage place.

        Abstract: concrete subclasses must provide it.
        """
        raise NotImplementedError()

    @property
    def realkind(self):
        return 'storage'

    def _str_more(self):
        return 'tag={:s}'.format(self.tag)

    @property
    def context(self):
        """Shortcut to the active context object."""
        return sessions.get().context

    @property
    def session(self):
        """Shortcut to the current session object."""
        return sessions.current()

    @property
    def sh(self):
        """Shortcut to the active System object."""
        return sessions.system()

    @property
    def history(self):
        """The History object that will be used by this storage place.

        :note: History objects are associated with the self.tag identifier. i.e.
            all Storage's objects with the same tag will use the same History
            object.
        """
        return self._history

    def addrecord(self, action, item, **infos):
        """Push a new record to the storage place log/history."""
        if self.record:
            self.history.append(action, item, infos)

    def flush(self, dumpfile=None):
        """Flush actual history to the specified ``dumpfile`` if record is on.

        :note: May raise the :class:`NotImplementedError` exception.
        """
        raise NotImplementedError()

    def _findout_record_infos(self, kwargs):
        """Extract the generic pieces of information worth recording."""
        return dict(info=kwargs.get("info", None))

    def allow_reads(self, item):  # @UnusedVariable
        """
        This method can be used to determine whether or not the present object
        supports reads for **item**.

        :note: This is different from **check** since, **item**'s existence is
            not checked. It just tells if reads to **item** are supported...
        """
        return True

    def allow_writes(self, item):  # @UnusedVariable
        """
        This method can be used to determine whether or not the present object
        supports writes for **item**.

        :note: This is different from **check** since, **item**'s existence is
            not checked. It just tells if writes to **item** are supported...
        """
        return True

    def fullpath(self, item, **kwargs):
        """Return the path/URI to the **item**'s storage location."""
        # Currently no recording is performed for the fullpath action
        (rc, _) = self._actual_fullpath(item, **kwargs)
        return rc

    def prestageinfo(self, item, **kwargs):
        """Return the prestage infos for an **item** in the current storage place."""
        # Currently no recording is performed for the prestageinfo action
        (rc, _) = self._actual_prestageinfo(item, **kwargs)
        return rc

    def check(self, item, **kwargs):
        """Check/Stat an **item** from the current storage place."""
        # Currently no recording is performed for the check action
        (rc, _) = self._actual_check(item, **kwargs)
        return rc

    def list(self, item, **kwargs):
        """List all data resources available in the **item** directory."""
        # Currently no recording is performed for the list action
        (rc, _) = self._actual_list(item, **kwargs)
        return rc

    @enforce_readonly
    @do_recording('INSERT')
    def insert(self, item, local, **kwargs):
        """Insert an **item** in the current storage place.

        :note: **local** may be a path to a file or any kind of file like objects.
        """
        return self._actual_insert(item, local, **kwargs)

    @do_recording('RETRIEVE')
    def retrieve(self, item, local, **kwargs):
        """Retrieve an **item** from the current storage place.

        :note: **local** may be a path to a file or any kind of file like objects.
        """
        return self._actual_retrieve(item, local, **kwargs)

    def earlyretrieve(self, item, local, **kwargs):
        """Trigger a delayed retrieve of **item** from the current storage place.

        :note: **local** may be a path to a file or any kind of file like objects.
        """
        return self._actual_earlyretrieve(item, local, **kwargs)

    def _actual_earlyretrieve(self, item, local, **kwargs):  # @UnusedVariable
        """No earlyretrieve implemented by default."""
        return None

    def finaliseretrieve(self, retrieve_id, item, local, **kwargs):
        """Finalise a delayed retrieve from the current storage place.

        :note: **local** may be a path to a file or any kind of file like objects.
        """
        rc, idict = self._actual_finaliseretrieve(retrieve_id, item, local, **kwargs)
        # A None return code means nothing was actually done: record nothing
        if rc is not None:
            infos = self._findout_record_infos(kwargs)
            infos.update(idict)
            self.addrecord('RETRIEVE', item, status=rc, **infos)
        return rc

    def _actual_finaliseretrieve(self, retrieve_id, item, local, **kwargs):  # @UnusedVariable
        """No delayedretrieve implemented by default."""
        return None, dict()

    @enforce_readonly
    @do_recording('DELETE')
    def delete(self, item, **kwargs):
        """Delete an **item** from the current storage place."""
        return self._actual_delete(item, **kwargs)
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
# Defining the two main flavours of storage places
|
|
293
|
+
# -----------------------------------------------
|
|
294
|
+
|
|
295
|
+
class Cache(Storage):
    """Root class for any :class:`Cache` subclasses."""

    _abstract = True
    _collector = ('cache',)
    _footprint = dict(
        info = 'Default cache description',
        attr = dict(
            headdir = dict(
                info = "The cache's subdirectory (within **rootdir**).",
                optional = True,
                default = 'cache',
            ),
            # TODO is 'storage' used in any way?
            storage = dict(
                optional = True,
                default = 'localhost',
            ),
            rtouch = dict(
                info = "Perform the recursive touch command on the directory structure.",
                type = bool,
                optional = True,
                default = False,
            ),
            rtouchskip = dict(
                info = "Do not 'touch' the first **rtouchskip** directories.",
                type = int,
                optional = True,
                default = 0,
            ),
            rtouchdelay = dict(
                info = ("Do not perfom a touch if it has already been done in " +
                        "the last X seconds."),
                type = float,
                optional = True,
                default = 600.,  # 10 minutes
            ),
        )
    )

    def __init__(self, *kargs, **kwargs):
        super().__init__(*kargs, **kwargs)
        # Maps a path to the timestamp of its last effective touch (see _xtouch)
        self._touch_tracker = dict()

    @property
    def realkind(self):
        return 'cache'

    @property
    def tag(self):
        """The identifier of this cache place."""
        return '{:s}_{:s}_{:s}'.format(self.realkind, self.kind, self.headdir)

    def _formatted_path(self, subpath, **kwargs):  # @UnusedVariable
        """Return the actual storage path for **subpath**.

        Abstract: concrete Cache subclasses must implement it. May return
        ``None`` when no sensible location exists (callers check for that).
        """
        raise NotImplementedError()

    def catalog(self):
        """List all files present in this cache.

        :note: It might be quite slow...
        """
        raise NotImplementedError()

    def _xtouch(self, path):
        """
        Perform a touch operation only if the last one, on the same path, was
        more than ``self.rtouchdelay`` seconds ago.
        """
        ts = time.time()
        # Seconds elapsed since the last recorded touch (0 if never touched)
        ts_delay = ts - self._touch_tracker.get(path, 0)
        if ts_delay > self.rtouchdelay:
            logger.debug('Touching: %s (delay was %.2f)', path, ts_delay)
            self.sh.touch(path)
            self._touch_tracker[path] = ts
        else:
            logger.debug('Skipping touch: %s (delay was %.2f)', path, ts_delay)

    def _recursive_touch(self, rc, item, writing=False):
        """Make recursive touches on parent directories.

        It might be useful for cleaning scripts.

        :param rc: return code of the action that preceded; nothing is touched
            when it is falsy.
        :param item: the cache item whose parent directories are touched.
        :param writing: True when the action was a write (insert).
        """
        if self.rtouch and (not self.readonly) and rc:
            items = item.lstrip('/').split('/')
            # Drop the leaf (the item itself): only parents are touched
            items = items[:-1]
            if writing:
                # It's useless to touch the rightmost directory
                items = items[:-1] if len(items) > 1 else []
            # Touch from the deepest parent upwards, skipping the first
            # self.rtouchskip top-level directories
            for index in range(len(items), self.rtouchskip, -1):
                self._xtouch(self._formatted_path(self.sh.path.join(*items[:index])))

    def _actual_fullpath(self, item, **kwargs):
        """Return the path/URI to the **item**'s storage location."""
        return self._formatted_path(item, **kwargs), dict()

    def _actual_prestageinfo(self, item, **kwargs):
        """Returns pre-staging informations."""
        return dict(strategy=self.kind,
                    location=self.fullpath(item, **kwargs)), dict()

    def _actual_check(self, item, **kwargs):
        """Check/Stat an **item** from the current storage place.

        :return: the stat result (or ``None`` when the item is unreachable),
            plus an empty record-info dict.
        """
        path = self._formatted_path(item, **kwargs)
        if path is None:
            return None, dict()
        try:
            st = self.sh.stat(path)
        except OSError:
            # Missing file: report "not here" rather than raising
            st = None
        return st, dict()

    def _actual_list(self, item, **kwargs):
        """List all data resources available in the **item** directory.

        :return: a directory listing for directories, ``True`` for plain
            files, ``None`` when **item** does not exist.
        """
        path = self.fullpath(item, **kwargs)
        if path is not None and self.sh.path.exists(path):
            if self.sh.path.isdir(path):
                return self.sh.listdir(path), dict()
            else:
                return True, dict()
        else:
            return None, dict()

    def _actual_insert(self, item, local, **kwargs):
        """Insert an **item** in the current storage place."""
        # Get the relevant options
        intent = kwargs.get("intent", "in")
        fmt = kwargs.get("fmt", "foo")
        # Insert the element
        tpath = self._formatted_path(item)
        if tpath is not None:
            # Above HARDLINK_THRESHOLD, cp may hard-link instead of copying
            rc = self.sh.cp(local, tpath, intent=intent, fmt=fmt,
                            smartcp_threshold=HARDLINK_THRESHOLD)
        else:
            logger.warning('No target location for < %s >', item)
            rc = False
        self._recursive_touch(rc, item, writing=True)
        return rc, dict(intent=intent, fmt=fmt)

    def _actual_retrieve(self, item, local, **kwargs):
        """Retrieve an **item** from the current storage place."""
        # Get the relevant options
        intent = kwargs.get("intent", "in")
        fmt = kwargs.get("fmt", "foo")
        silent = kwargs.get("silent", False)
        dirextract = kwargs.get("dirextract", False)
        tarextract = kwargs.get("tarextract", False)
        uniquelevel_ignore = kwargs.get("uniquelevel_ignore", True)
        source = self._formatted_path(item)
        if source is not None:
            # If auto_dirextract, copy recursively each file contained in source
            if dirextract and self.sh.path.isdir(source) and self.sh.is_tarname(local):
                rc = True
                destdir = self.sh.path.dirname(self.sh.path.realpath(local))
                logger.info('Automatic directory extract to: %s', destdir)
                for subpath in self.sh.glob(source + '/*'):
                    rc = rc and self.sh.cp(subpath,
                                           self.sh.path.join(destdir, self.sh.path.basename(subpath)),
                                           intent=intent, fmt=fmt,
                                           smartcp_threshold=HARDLINK_THRESHOLD)
                    # For the insitu feature to work...
                    rc = rc and self.sh.touch(local)
            # The usual case: just copy source
            else:
                rc = self.sh.cp(source, local, intent=intent, fmt=fmt, silent=silent,
                                smartcp_threshold=HARDLINK_THRESHOLD)
                # If auto_tarextract, a potential tar file is extracted
                if (rc and tarextract and not self.sh.path.isdir(local) and
                        self.sh.is_tarname(local) and self.sh.is_tarfile(local)):
                    destdir = self.sh.path.dirname(self.sh.path.realpath(local))
                    logger.info('Automatic Tar extract to: %s', destdir)
                    rc = rc and self.sh.smartuntar(local, destdir,
                                                   uniquelevel_ignore=uniquelevel_ignore)
        else:
            # On a silent retrieve, downgrade the message to info level
            getattr(logger, 'info' if silent else 'warning')('No readable source for < %s >', item)
            rc = False
        self._recursive_touch(rc, item)
        return rc, dict(intent=intent, fmt=fmt)

    def _actual_delete(self, item, **kwargs):
        """Delete an **item** from the current storage place."""
        # Get the relevant options
        fmt = kwargs.get("fmt", "foo")
        # Delete the element
        tpath = self._formatted_path(item)
        if tpath is not None:
            rc = self.sh.remove(tpath, fmt=fmt)
        else:
            logger.warning('No target location for < %s >', item)
            rc = False
        return rc, dict(fmt=fmt)
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
class AbstractArchive(Storage):
    """The default class to handle storage to some kind of Archive."""

    _abstract = True
    _collector = ('archive',)
    _footprint = dict(
        info = 'Default archive description',
        attr = dict(
            tube = dict(
                info = "How to communicate with the archive ?",
            ),
        )
    )

    @property
    def tag(self):
        """The identifier of this archive place."""
        return '{:s}_{:s}_{:s}'.format(self.realkind, self.storage, self.kind)

    @property
    def realkind(self):
        return 'archive'

    def _formatted_path(self, rawpath, **kwargs):
        """Return the archive-side path for **rawpath**.

        Optional keyword arguments: ``root`` is prepended to the path,
        ``compressionpipeline`` appends its suffix, ``fmt`` is forwarded to
        the format-aware rewrite.
        """
        root = kwargs.get('root', None)
        if root is not None:
            rawpath = self.sh.path.join(root, rawpath.lstrip('/'))
        # Deal with compression
        compressionpipeline = kwargs.get('compressionpipeline', None)
        if compressionpipeline is not None:
            rawpath += compressionpipeline.suffix
        return self.sh.anyft_remote_rewrite(rawpath, fmt=kwargs.get('fmt', 'foo'))

    def _actual_proxy_method(self, pmethod):
        """Create a proxy method based on the **pmethod** actual method.

        The proxy resolves the item into an archive path before calling
        **pmethod** with that path.
        """

        def actual_proxy(item, *kargs, **kwargs):
            path = self._formatted_path(item, **kwargs)
            if path is None:
                raise ValueError("The archive's path is void.")
            return pmethod(path, *kargs, **kwargs)

        actual_proxy.__name__ = pmethod.__name__
        actual_proxy.__doc__ = pmethod.__doc__
        return actual_proxy

    def __getattr__(self, attr):
        """Provides proxy methods for _actual_* methods.

        e.g. with ``tube='ftp'``, ``self._actual_check`` resolves to a proxy
        around ``self._ftpcheck`` (the tube-specific implementation defined
        in a concrete subclass).
        """
        methods = r'fullpath|prestageinfo|check|list|insert|retrieve|delete'
        mattr = re.match(r'_actual_(?P<action>' + methods + r')', attr)
        if mattr:
            pmethod = getattr(self, '_{:s}{:s}'.format(self.tube, mattr.group('action')))
            return self._actual_proxy_method(pmethod)
        else:
            raise AttributeError("The {:s} attribute was not found in this object"
                                 .format(attr))

    def _actual_earlyretrieve(self, item, local, **kwargs):
        """Proxy to the appropriate tube dependent earlyretrieve method (if available)."""
        pmethod = getattr(self, '_{:s}{:s}'.format(self.tube, 'earlyretrieve'), None)
        if pmethod:
            return self._actual_proxy_method(pmethod)(item, local, **kwargs)
        else:
            return None

    def _actual_finaliseretrieve(self, retrieve_id, item, local, **kwargs):
        """Proxy to the appropriate tube dependent finaliseretrieve method (if available).

        NOTE(review): the tube method is called as (item, local, retrieve_id)
        whereas this method receives (retrieve_id, item, local) — presumably
        the tube-specific signatures expect that order; confirm against the
        concrete *finaliseretrieve implementations.
        """
        pmethod = getattr(self, '_{:s}{:s}'.format(self.tube, 'finaliseretrieve'), None)
        if pmethod:
            return self._actual_proxy_method(pmethod)(item, local, retrieve_id, **kwargs)
        else:
            return None, dict()
|
|
559
|
+
|
|
560
|
+
|
|
561
|
+
class Archive(AbstractArchive):
|
|
562
|
+
"""The default class to handle storage to a remote location."""
|
|
563
|
+
|
|
564
|
+
_footprint = dict(
|
|
565
|
+
info = 'Default archive description',
|
|
566
|
+
attr = dict(
|
|
567
|
+
tube = dict(
|
|
568
|
+
values = ['ftp'],
|
|
569
|
+
),
|
|
570
|
+
)
|
|
571
|
+
)
|
|
572
|
+
|
|
573
|
+
def __init__(self, *kargs, **kwargs):
|
|
574
|
+
super().__init__(*kargs, **kwargs)
|
|
575
|
+
self.default_usejeeves = config.from_config(
|
|
576
|
+
section="storage", key="usejeeves",
|
|
577
|
+
)
|
|
578
|
+
|
|
579
|
+
@property
|
|
580
|
+
def _ftp_hostinfos(self):
|
|
581
|
+
"""Return the FTP hostname end port number."""
|
|
582
|
+
s_storage = self.storage.split(':', 1)
|
|
583
|
+
hostname = s_storage[0]
|
|
584
|
+
port = None
|
|
585
|
+
if len(s_storage) > 1:
|
|
586
|
+
try:
|
|
587
|
+
port = int(s_storage[1])
|
|
588
|
+
except ValueError:
|
|
589
|
+
logger.error('Invalid port number < %s >. Ignoring it', s_storage[1])
|
|
590
|
+
return hostname, port
|
|
591
|
+
|
|
592
|
+
def _ftp_client(self, logname=None, delayed=False):
|
|
593
|
+
"""Return a FTP client object."""
|
|
594
|
+
hostname, port = self._ftp_hostinfos
|
|
595
|
+
return self.sh.ftp(hostname, logname=logname, delayed=delayed, port=port)
|
|
596
|
+
|
|
597
|
+
def _ftpfullpath(self, item, **kwargs):
|
|
598
|
+
"""Actual _fullpath using ftp."""
|
|
599
|
+
username = kwargs.get('username', None)
|
|
600
|
+
rc = None
|
|
601
|
+
ftp = self._ftp_client(logname=username, delayed=True)
|
|
602
|
+
if ftp:
|
|
603
|
+
try:
|
|
604
|
+
rc = ftp.netpath(item)
|
|
605
|
+
finally:
|
|
606
|
+
ftp.close()
|
|
607
|
+
return rc, dict()
|
|
608
|
+
|
|
609
|
+
def _ftpprestageinfo(self, item, **kwargs):
|
|
610
|
+
"""Actual _prestageinfo using ftp."""
|
|
611
|
+
username = kwargs.get('username', None)
|
|
612
|
+
if username is None:
|
|
613
|
+
ftp = self._ftp_client(logname=username, delayed=True)
|
|
614
|
+
if ftp:
|
|
615
|
+
try:
|
|
616
|
+
username = ftp.logname
|
|
617
|
+
finally:
|
|
618
|
+
ftp.close()
|
|
619
|
+
baseinfo = dict(storage=self.storage,
|
|
620
|
+
logname=username,
|
|
621
|
+
location=item, )
|
|
622
|
+
return baseinfo, dict()
|
|
623
|
+
|
|
624
|
+
def _ftpcheck(self, item, **kwargs):
|
|
625
|
+
"""Actual _check using ftp."""
|
|
626
|
+
rc = None
|
|
627
|
+
ftp = self._ftp_client(logname=kwargs.get('username', None))
|
|
628
|
+
if ftp:
|
|
629
|
+
try:
|
|
630
|
+
rc = ftp.size(item)
|
|
631
|
+
except (ValueError, TypeError):
|
|
632
|
+
pass
|
|
633
|
+
except ftplib.all_errors:
|
|
634
|
+
pass
|
|
635
|
+
finally:
|
|
636
|
+
ftp.close()
|
|
637
|
+
return rc, dict()
|
|
638
|
+
|
|
639
|
+
def _ftplist(self, item, **kwargs):
    """Actual _list using ftp.

    Returns ``(rc, extra_info_dict)`` where *rc* is:

    * a list of names when *item* is a directory (``nlst`` of its content);
    * ``True`` when *item* is a plain file;
    * ``None`` when *item* does not exist or cannot be probed.
    """
    ftp = self._ftp_client(logname=kwargs.get('username', None))
    rc = None
    if ftp:
        try:
            # Is this a directory ?
            rc = ftp.cd(item)
        except ftplib.all_errors:
            # Apparently not...
            try:
                # Is it a file ?
                if ftp.size(item) is not None:
                    rc = True
            except (ValueError, TypeError):
                # Unparsable SIZE answer: leave rc as None.
                pass
            except ftplib.all_errors:
                # FTP-level failure: leave rc as None.
                pass
        else:
            # Content of the directory...
            # (the ``cd`` succeeded, so we are now inside *item*)
            if rc:
                rc = ftp.nlst('.')
        finally:
            # Close the connection whatever happened above.
            ftp.close()
    return rc, dict()
|
|
664
|
+
|
|
665
|
+
def _ftpretrieve(self, item, local, **kwargs):
    """Actual _retrieve using ftp: fetch *item* into *local* via smartftget."""
    logger.info('ftpget on ftp://%s/%s (to: %s)', self.storage, item, local)
    # Format and optional compression pipeline are forwarded to smartftget.
    extras = dict(fmt=kwargs.get('fmt', 'foo'),
                  cpipeline=kwargs.get('compressionpipeline', None))
    hostname, port = self._ftp_hostinfos
    if port is not None:
        extras['port'] = port
    rc = self.sh.smartftget(item,
                            local,
                            # Ftp control
                            hostname=hostname,
                            logname=kwargs.get('username', None),
                            **extras)
    return rc, extras
|
|
682
|
+
|
|
683
|
+
def _ftpearlyretrieve(self, item, local, **kwargs):
    """
    If FtServ/ftraw is used, trigger a delayed action in order to fetch
    several files at once.

    Returns the delayed-action identifier, or ``None`` when *item* is not
    eligible for a raw/batch transfer.
    """
    cpipeline = kwargs.get('compressionpipeline', None)
    if not self.sh.rawftget_worthy(item, local, cpipeline):
        # Not worth a raw ftget: no delayed action is registered.
        return None
    return self.context.delayedactions_hub.register(
        (item, kwargs.get('fmt', 'foo')),
        kind='archive',
        storage=self.storage,
        goal='get',
        tube='ftp',
        raw=True,
        logname=kwargs.get('username', None))
|
|
699
|
+
|
|
700
|
+
def _ftpfinaliseretrieve(self, item, local, retrieve_id, **kwargs):  # @UnusedVariable
    """
    Get the resource given the **retrieve_id** identifier returned by the
    :meth:`_ftpearlyretrieve` method.

    Returns ``(rc, extras)`` where *rc* is the move's return code when the
    delayed action is done, ``False`` when it failed, and ``None`` when it
    is still pending.

    :raises OSError: if the destination directory cannot be created.
    """
    extras = dict(fmt=kwargs.get('fmt', 'foo'), )
    # Fetch the bare delayed-action object to inspect its status/result.
    d_action = self.context.delayedactions_hub.retrieve(retrieve_id, bareobject=True)
    if d_action.status == d_action_status.done:
        # Make sure the target directory exists before moving the result in.
        if self.sh.filecocoon(local):
            rc = self.sh.mv(d_action.result, local, **extras)
        else:
            raise OSError('Could not cocoon: {!s}'.format(local))
    elif d_action.status == d_action_status.failed:
        logger.info('The earlyretrieve failed (retrieve_id=%s)', retrieve_id)
        rc = False
    else:
        # Still pending: the caller will have to retry/fallback.
        rc = None
    return rc, extras
|
|
718
|
+
|
|
719
|
+
def _ftpinsert(self, item, local, **kwargs):
    """Actual _insert using ftp.

    Two transport modes:

    * direct: ``smartftput`` is called synchronously;
    * delayed: the file is stashed in a hidden cache and a Jeeves ``ftput``
      request is posted (asynchronous upload).

    The mode is chosen by the ``usejeeves`` keyword, falling back to
    ``self.default_usejeeves`` when unset.
    """
    usejeeves = kwargs.get('usejeeves', None)
    if usejeeves is None:
        usejeeves = self.default_usejeeves
    hostname, port = self._ftp_hostinfos
    if not usejeeves:
        # Direct, possibly blocking, upload.
        logger.info('ftpput to ftp://%s/%s (from: %s)', self.storage, item, local)
        extras = dict(fmt=kwargs.get('fmt', 'foo'),
                      cpipeline=kwargs.get('compressionpipeline', None))
        if port is not None:
            extras['port'] = port
        rc = self.sh.smartftput(
            local,
            item,
            # Ftp control
            hostname=hostname,
            logname=kwargs.get('username', None),
            sync=kwargs.get('enforcesync', False),
            **extras
        )
    else:
        # Asynchronous upload through the Jeeves daemon.
        logger.info('delayed ftpput to ftp://%s/%s (from: %s)', self.storage, item, local)
        # The hiddencache service copies *local* to a safe place so that the
        # caller may delete/modify it before Jeeves processes the request.
        tempo = footprints.proxy.service(kind='hiddencache',
                                         asfmt=kwargs.get('fmt'))
        compressionpipeline = kwargs.get('compressionpipeline', '')
        if compressionpipeline:
            # Jeeves needs a serialisable description, not the object itself.
            compressionpipeline = compressionpipeline.description_string
        extras = dict(fmt=kwargs.get('fmt', 'foo'),
                      cpipeline=compressionpipeline)
        if port is not None:
            extras['port'] = port

        rc = ad.jeeves(
            hostname=hostname,
            # Explicitly resolve the logname (because jeeves FTP client is not
            # running with the same glove (i.e. Jeeves ftuser configuration may
            # be different).
            logname=self.sh.fix_ftuser(hostname,
                                       kwargs.get('username', None)),
            todo='ftput',
            rhandler=kwargs.get('info', None),
            source=tempo(local),
            destination=item,
            original=self.sh.path.abspath(local),
            **extras
        )
    return rc, extras
|
|
767
|
+
|
|
768
|
+
def _ftpdelete(self, item, **kwargs):
    """Actual _delete using ftp.

    The resource's existence is checked first; a delete is only attempted
    when the check succeeds.

    :return: ``(rc, extra_info_dict)`` where *rc* is the FTP client's
        return value, or ``None`` when nothing was deleted.
    """
    rc = None
    ftp = self._ftp_client(logname=kwargs.get('username', None))
    if ftp:
        try:
            if self._ftpcheck(item, **kwargs)[0]:
                logger.info('ftpdelete on ftp://%s/%s', self.storage, item)
                rc = ftp.delete(item)
            else:
                logger.error('Try to remove a non-existing resource <%s>', item)
        finally:
            # Bug fix: the original code closed the connection only on the
            # successful-delete path, leaking the FTP client whenever the
            # resource was missing or delete() raised.
            ftp.close()
    return rc, dict()
|
|
780
|
+
|
|
781
|
+
|
|
782
|
+
class AbstractLocalArchive(AbstractArchive):
    """The default class to handle storage to the same host.

    Implements the ``inplace`` tube: items are plain files on the local
    filesystem, accessed with copies instead of network transfers.
    """

    _abstract = True
    _footprint = dict(
        info = 'Generic local archive description',
        attr = dict(
            tube = dict(
                values = ['inplace', ],
            ),
        )
    )

    def _inplacefullpath(self, item, **kwargs):
        """Actual _fullpath: the item path is already a local full path."""
        return item, dict()

    def _inplacecheck(self, item, **kwargs):
        """Actual _check: return the file size, or ``None`` if unreachable."""
        try:
            st = self.sh.stat(item)
        except OSError:
            rc = None
        else:
            rc = st.st_size
        return rc, dict()

    def _inplacelist(self, item, **kwargs):
        """Actual _list: directory content, ``True`` for a file, else ``None``."""
        if self.sh.path.exists(item):
            if self.sh.path.isdir(item):
                return self.sh.listdir(item), dict()
            else:
                return True, dict()
        else:
            return None, dict()

    def _inplaceretrieve(self, item, local, **kwargs):
        """Actual _retrieve: local copy (with optional decompression)."""
        logger.info('inplaceget on file:///%s (to: %s)', item, local)
        fmt = kwargs.get('fmt', 'foo')
        cpipeline = kwargs.get('compressionpipeline', None)
        if cpipeline:
            rc = cpipeline.file2uncompress(item, local)
        else:
            # Do not use fmt=... on purpose (otherwise "forceunpack" may be called twice)
            rc = self.sh.cp(item, local, intent='in')
            rc = rc and self.sh.forceunpack(local, fmt=fmt)
        return rc, dict(fmt=fmt, cpipeline=cpipeline)

    @contextlib.contextmanager
    def _inplaceinsert_pack(self, local, fmt):
        """Context manager yielding a packed copy of *local* (cleaned up on exit).

        If packing produces a distinct file, it is removed once the caller
        is done with it; otherwise *local* itself is yielded untouched.
        """
        local_packed = self.sh.forcepack(local, fmt=fmt)
        if local_packed != local:
            try:
                yield local_packed
            finally:
                # Remove the temporary packed copy in every case.
                self.sh.rm(local_packed, fmt=fmt)
        else:
            yield local

    def _inplaceinsert(self, item, local, **kwargs):
        """Actual _insert: local copy (with optional packing/compression)."""
        logger.info('inplaceput to file:///%s (from: %s)', item, local)
        cpipeline = kwargs.get('compressionpipeline', None)
        fmt = kwargs.get('fmt', 'foo')
        with self._inplaceinsert_pack(local, fmt) as local_packed:
            if cpipeline:
                rc = cpipeline.compress2file(local_packed, item)
            else:
                # Do not use fmt=... on purpose (otherwise "forcepack" may be called twice)
                rc = self.sh.cp(local_packed, item, intent='in')
        return rc, dict(fmt=fmt, cpipeline=cpipeline)

    def _inplacedelete(self, item, **kwargs):
        """Actual _delete: remove the file if the check says it exists."""
        fmt = kwargs.get('fmt', 'foo')
        rc = None
        if self._inplacecheck(item, **kwargs)[0]:
            rc = self.sh.rm(item, fmt=fmt)
        return rc, dict(fmt=fmt)
|
|
863
|
+
|
|
864
|
+
|
|
865
|
+
class LocalArchive(AbstractLocalArchive):
    """The default class to handle storage to the same host."""

    _footprint = {
        'info': 'Default local archive description',
        'attr': {
            'storage': {
                'values': ['localhost', ],
            },
            'auto_self_expand': {
                'info': ('Automatically expand the current user home if ' +
                         'a relative path is given (should always be True ' +
                         'except during unit-testing)'),
                'type': bool,
                'default': True,
                'optional': True,
            },
        },
    }

    def _formatted_path(self, rawpath, **kwargs):
        """Expand user shortcuts in *rawpath*, then apply the generic formatting."""
        expanded = self.sh.path.expanduser(rawpath)
        if '~' in expanded:
            raise OSError('User expansion failed for "{:s}"'.format(expanded))
        if self.auto_self_expand and not self.sh.path.isabs(expanded):
            # Relative paths are anchored in the current user's home directory.
            expanded = self.sh.path.expanduser(self.sh.path.join('~', expanded))
        return super()._formatted_path(expanded, **kwargs)
|
|
892
|
+
|
|
893
|
+
|
|
894
|
+
# Concrete cache implementations
|
|
895
|
+
# ------------------------------
|
|
896
|
+
|
|
897
|
+
|
|
898
|
+
class FixedEntryCache(Cache):
    """Cache with a fixed, pre-computable entry point on a filesystem."""

    _abstract = True
    _footprint = dict(
        info = 'Default cache description (with a fixed entry point)',
        attr = dict(
            rootdir = dict(
                info = "The cache's location (usually on a filesystem).",
                optional = True,
                default = None,
            ),
        )
    )

    @property
    def entry(self):
        """Tries to figure out what could be the actual entry point for storage space."""
        # Fix: fall back to /tmp *without* mutating the footprint-managed
        # ``rootdir`` attribute — assigning to it from a property getter was
        # a surprising (and potentially forbidden) side effect.
        rootdir = self.rootdir or "/tmp"
        return self.sh.path.join(rootdir, self.kind, self.headdir)

    @property
    def tag(self):
        """The identifier of this cache place."""
        return '{:s}_{:s}'.format(self.realkind, self.entry)

    def _formatted_path(self, subpath, **kwargs):  # @UnusedVariable
        """Join *subpath* (stripped of leading slashes) under the cache entry."""
        return self.sh.path.join(self.entry, subpath.lstrip('/'))

    def catalog(self):
        """List all files present in this cache.

        :note: It might be quite slow...
        """
        entry = self.sh.path.expanduser(self.entry)
        files = self.sh.ffind(entry)
        # Strip the entry prefix so that paths are cache-relative.
        return [f[len(entry):] for f in files]

    def flush(self, dumpfile=None):
        """Flush actual history to the specified ``dumpfile`` if record is on."""
        if dumpfile is None:
            # Build a unique history file name: timestamp + pid + logname.
            logfile = '.'.join((
                'HISTORY',
                datetime.now().strftime('%Y%m%d%H%M%S.%f'),
                'P{:06d}'.format(self.sh.getpid()),
                self.sh.getlogname()
            ))
            dumpfile = self.sh.path.join(self.entry, '.history', logfile)
        if self.record:
            self.sh.pickle_dump(self.history, dumpfile)
|
|
947
|
+
|
|
948
|
+
|
|
949
|
+
class MtoolCache(FixedEntryCache):
    """Cache items for the MTOOL jobs (or any job that acts like it)."""

    _footprint = dict(
        info = 'MTOOL like Cache',
        attr = dict(
            kind = dict(
                values = ['mtool', 'swapp'],
                remap = dict(swapp = 'mtool'),
            ),
            headdir = dict(
                optional = True,
                default = "",
            ),
        )
    )

    @property
    def entry(self):
        """Tries to figure out what could be the actual entry point
        for cache space.

        Resolution order: explicit ``rootdir`` attribute, then the
        ``data-tree/rootdir`` configuration key, then ``$HOME/.vortex.d``.
        """
        if self.rootdir:
            # Consistency fix: use self.sh.path.join (as everywhere else in
            # this class) instead of the bare os.path module.
            return self.sh.path.join(self.rootdir, self.headdir)

        if config.is_defined(section="data-tree", key="rootdir"):
            rootdir = config.from_config(
                section="data-tree", key="rootdir",
            )
        else:
            rootdir = self.sh.path.join(os.environ["HOME"], ".vortex.d")

        return self.sh.path.join(rootdir, self.headdir)
|
|
983
|
+
|
|
984
|
+
|
|
985
|
+
class FtStashCache(MtoolCache):
    """A place to store file to be sent with ftserv."""

    # Same layout as MtoolCache, with a dedicated 'ftspool' head directory.
    _footprint = {
        'info': 'A place to store file to be sent with ftserv',
        'attr': {
            'kind': {
                'values': ['ftstash', ],
            },
            'headdir': {
                'optional': True,
                'default': 'ftspool',
            },
        },
    }
|
|
1000
|
+
|
|
1001
|
+
|
|
1002
|
+
class Op2ResearchCache(FixedEntryCache):
    """Cache of the operational suite (read-only)."""

    _footprint = {
        'info': 'MTOOL like Operations Cache (read-only)',
        'attr': {
            'kind': {
                'values': ['op2r'],
            },
            'headdir': {
                'optional': True,
                'default': 'vortex',
            },
            'readonly': {
                'values': [True, ],
                'default': True,
            },
        },
    }

    @property
    def entry(self):
        """Entry point of the operational cache tree."""
        if self.rootdir:
            base = self.rootdir
        else:
            # No explicit rootdir: read it from the configuration.
            base = config.from_config(section="data-tree", key="op_rootdir")
        return self.sh.path.join(base, self.headdir)
|
|
1029
|
+
|
|
1030
|
+
|
|
1031
|
+
class HackerCache(FixedEntryCache):
    """A dirty cache where users can hack things."""

    _footprint = {
        'info': 'A place to hack things...',
        'attr': {
            'kind': {
                'values': ['hack'],
            },
            'rootdir': {
                'optional': True,
                'default': 'auto',
            },
            'readonly': {
                'default': True,
            },
        },
    }

    @property
    def entry(self):
        """Tries to figure out what could be the actual entry point for cache space."""
        sh = self.sh
        if self.rootdir != 'auto':
            # Explicit rootdir: use it as-is.
            return sh.path.join(self.rootdir, self.headdir)
        # 'auto' mode: anchor the hack space inside the current glove's
        # configuration directory (created on the fly if needed).
        glove = sessions.current().glove
        sweethome = sh.path.join(glove.configrc, 'hack')
        sh.mkdir(sweethome)
        logger.debug('Using %s hack cache: %s', self.__class__, sweethome)
        return sh.path.join(sweethome, self.headdir)
|