vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/data/abstractstores.py
CHANGED

@@ -5,11 +5,7 @@ This module handles store objects in charge of physically accessing resources.
 Store objects use the :mod:`footprints` mechanism.
 """
 
-from collections import defaultdict
-import contextlib
 import copy
-import functools
-import re
 
 from bronx.fancies import loggers
 from bronx.patterns import observer
@@ -18,28 +14,29 @@ from bronx.system import hash as hashutils
 import footprints
 
 from vortex import sessions
-from vortex.config import from_config
-from vortex.
-
+from vortex.config import from_config, ConfigurationError
+from vortex.syntax.stdattrs import (
+    hashalgo,
+    hashalgo_avail_list,
+    compressionpipeline,
+)
 from vortex.tools import storage
 from vortex.tools import compression
-from vortex.tools import net
-from vortex.tools.env import vartrue
 from vortex.tools.systems import ExecutionError
 from vortex.syntax.stdattrs import Namespace
 
 #: Export base class
-__all__ = [
+__all__ = ["Store"]
 
 logger = loggers.getLogger(__name__)
 
-OBSERVER_TAG =
+OBSERVER_TAG = "Stores-Activity"
 
-CACHE_PUT_INTENT =
-CACHE_GET_INTENT_DEFAULT =
+CACHE_PUT_INTENT = "in"
+CACHE_GET_INTENT_DEFAULT = "in"
 
-ARCHIVE_PUT_INTENT =
-ARCHIVE_GET_INTENT_DEFAULT =
+ARCHIVE_PUT_INTENT = "in"
+ARCHIVE_GET_INTENT_DEFAULT = "in"
 
 
 def observer_board(obsname=None):
@@ -53,36 +50,31 @@ class Store(footprints.FootprintBase):
     """Root class for any :class:`Store` subclasses."""
 
     _abstract = True
-    _collector = (
+    _collector = ("store",)
     _footprint = [
         hashalgo,
         dict(
-            info
-            attr
-                scheme
-
+            info="Default store",
+            attr=dict(
+                scheme=dict(alias=("protocol",)),
+                netloc=dict(type=Namespace, alias=("domain", "namespace")),
+                storetrack=dict(
+                    type=bool,
+                    default=True,
+                    optional=True,
             ),
-
-            type
-
-
-            storetrack = dict(
-                type = bool,
-                default = True,
-                optional = True,
-            ),
-            readonly = dict(
-                type = bool,
-                optional = True,
-                default = False,
+                readonly=dict(
+                    type=bool,
+                    optional=True,
+                    default=False,
             ),
         ),
-    )
+        ),
     ]
 
     def __init__(self, *args, **kw):
-        logger.debug(
-        sh = kw.pop(
+        logger.debug("Abstract store init %s", self.__class__)
+        sh = kw.pop("system", sessions.system())
         super().__init__(*args, **kw)
         self._sh = sh
         self._observer = observer_board()
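The `Store` base class above is registered in the `"store"` footprints collector, so concrete stores are resolved by attribute matching rather than direct instantiation. A minimal sketch of that resolution, reusing the `footprints.proxy.store` call pattern that appears elsewhere in this diff (the scheme and netloc values are borrowed from the `ArchiveStore` footprint further down and are illustrative only):

```python
# Minimal sketch (illustrative values): resolve a Store subclass
# through the footprints collector, as done elsewhere in this module.
import footprints

store = footprints.proxy.store(
    scheme="inarchive",        # matched against 'scheme' (alias: protocol)
    netloc="open.archive.fr",  # matched against 'netloc' (alias: domain)
    storetrack=False,          # skip Stores-Activity notifications
)
if store is not None and not store.readonly:
    print(store.realkind)      # e.g. "archivestore"
```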
@@ -92,7 +84,7 @@ class Store(footprints.FootprintBase):
 
     @property
     def realkind(self):
-        return
+        return "store"
 
     @property
     def system(self):
@@ -112,15 +104,15 @@ class Store(footprints.FootprintBase):
         return False
 
     def _observer_notify(self, action, rc, remote, local=None, options=None):
-        strack = options is None or options.get(
+        strack = options is None or options.get("obs_notify", True)
         if self.storetrack and strack:
             infos = dict(action=action, status=rc, remote=remote)
             # Is a localpath provided ?
             if local is not None:
-                infos[
+                infos["local"] = local
             # We may want to cheat on the localpath...
-            if options is not None and
-                infos[
+            if options is not None and "obs_overridelocal" in options:
+                infos["local"] = options["obs_overridelocal"]
             self._observer.notify_upd(self, infos)
 
     def notyet(self, *args):
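`_observer_notify` above publishes every tracked `check`/`get`/`put`/`del` outcome on the `"Stores-Activity"` board. A hedged sketch of a listener, assuming the usual `bronx.patterns.observer` callback interface in which registered objects are called back through `updobsitem` (the listener class and `register` usage are assumptions, not shown in this diff):

```python
# Hedged sketch: observe store activity on the "Stores-Activity" board.
from vortex.data.abstractstores import observer_board

class StoreActivityPrinter:  # hypothetical listener
    def updobsitem(self, item, info):
        # ``info`` carries at least: action, status (the rc) and remote,
        # plus 'local' when a local path is known (see _observer_notify).
        print(info.get("action"), info.get("status"), info.get("remote"))

observer_board().register(StoreActivityPrinter())
```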
@@ -128,7 +120,7 @@ class Store(footprints.FootprintBase):
         Internal method to be used as a critical backup method
         when a specific method is not yet defined.
         """
-        logger.critical(
+        logger.critical("Scheme %s not yet implemented", self.scheme)
 
     @property
     def writeable(self):
@@ -136,12 +128,12 @@ class Store(footprints.FootprintBase):
 
     def enforce_readonly(self):
         if self.readonly:
-            raise OSError(
+            raise OSError("This store is in readonly mode")
 
     @staticmethod
     def _verbose_log(options, level, *kargs, **kwargs):
-        slevel = kwargs.pop(
-        if options is not None and options.get(
+        slevel = kwargs.pop("slevel", "debug")
+        if options is not None and options.get("silent", False):
             level = slevel
         getattr(logger, level)(*kargs, **kwargs)
 
@@ -149,10 +141,11 @@ class Store(footprints.FootprintBase):
     def _actual_cpipeline(self):
         """Check if the current store has a CompressionPipeline."""
         if self._cpipeline is False:
-            cpipeline_desc = getattr(self,
+            cpipeline_desc = getattr(self, "store_compressed", None)
             if cpipeline_desc is not None:
-                self._cpipeline = compression.CompressionPipeline(
-
+                self._cpipeline = compression.CompressionPipeline(
+                    self.system, cpipeline_desc
+                )
             else:
                 self._cpipeline = None
         return self._cpipeline
@@ -164,27 +157,39 @@ class Store(footprints.FootprintBase):
 
     def _incache_inarchive_check(self, options):
         rc = True
-        incache = options.get(
-        inarchive = options.get(
+        incache = options.get("incache", False)
+        inarchive = options.get("inarchive", False)
         if incache and inarchive:
-            raise ValueError(
+            raise ValueError(
+                "'incache=True' and 'inarchive=True' are mutually exclusive"
+            )
         if incache and not self.use_cache():
-            self._verbose_log(
-
+            self._verbose_log(
+                options,
+                "info",
+                'Skip this "%s" store because a cache is requested',
+                self.__class__,
+            )
             rc = False
         if inarchive and not self.use_archive():
-            self._verbose_log(
-
+            self._verbose_log(
+                options,
+                "info",
+                'Skip this "%s" store because an archive is requested',
+                self.__class__,
+            )
             rc = False
         return rc
 
     def _hash_check_or_delete(self, callback, remote, options):
         """Check or delete a hash file."""
-        if (self.storehash is None) or (
+        if (self.storehash is None) or (
+            remote["path"].endswith("." + self.storehash)
+        ):
             return True
         options = self._hash_store_defaults(options)
         remote = remote.copy()
-        remote[
+        remote["path"] = remote["path"] + "." + self.storehash
         return callback(remote, options)
 
     @staticmethod
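The hash-companion logic above derives the sidecar name by appending the hash algorithm to the resource path, while `_hash_store_defaults` forces ASCII format and silences observer notifications for the sidecar transfer. A tiny illustration of the naming rule:

```python
# Naming rule used by _hash_check_or_delete, _hash_get_check and _hash_put:
storehash = "md5"                   # one of hashalgo_avail_list
path = "/some/archive/file.grib"    # illustrative path
sidecar = path + "." + storehash    # -> "/some/archive/file.grib.md5"
```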
@@ -193,29 +198,33 @@ class Store(footprints.FootprintBase):
 
     def check(self, remote, options=None):
         """Proxy method to dedicated check method according to scheme."""
-        logger.debug(
+        logger.debug("Store check from %s", remote)
         options = self._options_fixup(options)
         if not self._incache_inarchive_check(options):
             return False
-        rc = getattr(self, self.scheme +
-        self._observer_notify(
+        rc = getattr(self, self.scheme + "check", self.notyet)(remote, options)
+        self._observer_notify("check", rc, remote, options=options)
         return rc
 
     def locate(self, remote, options=None):
         """Proxy method to dedicated locate method according to scheme."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Store locate %s", remote)
         if not self._incache_inarchive_check(options):
             return None
-        return getattr(self, self.scheme +
+        return getattr(self, self.scheme + "locate", self.notyet)(
+            remote, options
+        )
 
     def list(self, remote, options=None):
         """Proxy method to dedicated list method according to scheme."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Store list %s", remote)
         if not self._incache_inarchive_check(options):
             return None
-        return getattr(self, self.scheme +
+        return getattr(self, self.scheme + "list", self.notyet)(
+            remote, options
+        )
 
     def prestage_advertise(self, remote, options=None):
         """Use the Stores-Activity observer board to advertise the prestaging request.
@@ -224,65 +233,82 @@ class Store(footprints.FootprintBase):
         the request.
         """
         options = self._options_fixup(options)
-        logger.debug(
-        infos_cb = getattr(self, self.scheme +
+        logger.debug("Store prestage through hub %s", remote)
+        infos_cb = getattr(self, self.scheme + "prestageinfo", None)
         if infos_cb:
             infodict = infos_cb(remote, options)
-            infodict.setdefault(
-            infodict.setdefault(
-            if options and
-                infodict[
-            infodict[
+            infodict.setdefault("issuerkind", self.realkind)
+            infodict.setdefault("scheme", self.scheme)
+            if options and "priority" in options:
+                infodict["priority"] = options["priority"]
+            infodict["action"] = "prestage_req"
             self._observer.notify_upd(self, infodict)
         else:
-            logger.info(
+            logger.info(
+                "Prestaging is not supported for scheme: %s", self.scheme
+            )
         return True
 
     def prestage(self, remote, options=None):
         """Proxy method to dedicated prestage method according to scheme."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Store prestage %s", remote)
         if not self._incache_inarchive_check(options):
             return True
-        return getattr(
+        return getattr(
+            self, self.scheme + "prestage", self.prestage_advertise
+        )(remote, options)
 
     @staticmethod
     def _hash_store_defaults(options):
         """Update default options when fetching hash files."""
         options = options.copy()
-        options[
-        options[
-        options[
-        options[
-        options[
+        options["obs_notify"] = False
+        options["fmt"] = "ascii"
+        options["intent"] = CACHE_GET_INTENT_DEFAULT
+        options["auto_tarextract"] = False
+        options["auto_dirextract"] = False
         return options
 
     def _hash_get_check(self, callback, remote, local, options):
         """Update default options when fetching hash files."""
-        if (self.storehash is None) or (
+        if (self.storehash is None) or (
+            remote["path"].endswith("." + self.storehash)
+        ):
             return True
         if isinstance(local, str) and not self.system.path.isfile(local):
-            logger.info(
+            logger.info(
+                "< %s > is not a plain file. The control sum can't be checked.",
+                local,
+            )
             return True
         options = self._hash_store_defaults(options)
         remote = remote.copy()
-        remote[
-
+        remote["path"] = (
+            remote["path"] + "." + self.storehash
+        )  # Name of the hash file
+        remote["query"].pop("extract", None)  # Ignore any extract request
         try:
             tempcontainer = None
             try:
                 # First, try to fetch the sum in a real file
                 # (in order to potentially use ftserv...)
-                tempcontainer = footprints.proxy.container(
+                tempcontainer = footprints.proxy.container(
+                    shouldfly=True, mode="rb"
+                )
                 try:
                     rc = callback(remote, tempcontainer.iotarget(), options)
                 except (OSError, ExecutionError):
                     # This may happen if the user has insufficient rights on
                     # the current directory
                    tempcontainer = footprints.proxy.container(
+                        incore=True, mode="w+b"
+                    )
                     rc = callback(remote, tempcontainer.iotarget(), options)
         except (OSError, ExecutionError):
-            logger.warning(
+            logger.warning(
+                "Something went very wrong when fetching the hash file ! (assuming rc=False)"
+            )
             rc = False
         # check the hash key
         hadapt = hashutils.HashAdapter(self.storehash)
@@ -298,34 +324,40 @@ class Store(footprints.FootprintBase):
 
     def _actual_get(self, action, remote, local, options, result_id=None):
         """Proxy method to dedicated get method according to scheme."""
-        logger.debug(
+        logger.debug("Store %s from %s to %s", action, remote, local)
         if not self._incache_inarchive_check(options):
             return False
-        if not options.get(
+        if not options.get("insitu", False) or self.use_cache():
             if result_id:
-                rc = getattr(self, self.scheme + action, self.notyet)(
+                rc = getattr(self, self.scheme + action, self.notyet)(
+                    result_id, remote, local, options
+                )
             else:
-                rc = getattr(self, self.scheme + action, self.notyet)(
-
+                rc = getattr(self, self.scheme + action, self.notyet)(
+                    remote, local, options
+                )
+            self._observer_notify(
+                "get", rc, remote, local=local, options=options
+            )
             return rc
         else:
-            logger.error(
+            logger.error("Only cache stores can be used when insitu is True.")
             return False
 
     def get(self, remote, local, options=None):
         """Proxy method to dedicated get method according to scheme."""
         options = self._options_fixup(options)
-        return self._actual_get(
+        return self._actual_get("get", remote, local, options)
 
     def earlyget(self, remote, local, options=None):
         options = self._options_fixup(options)
         """Proxy method to dedicated earlyget method according to scheme."""
-        logger.debug(
+        logger.debug("Store earlyget from %s to %s", remote, local)
         if not self._incache_inarchive_check(options):
             return None
         rc = None
-        if not options.get(
-            available_dget = getattr(self, self.scheme +
+        if not options.get("insitu", False) or self.use_cache():
+            available_dget = getattr(self, self.scheme + "earlyget", None)
             if available_dget is not None:
                 rc = available_dget(remote, local, options)
         return rc
@@ -333,15 +365,19 @@ class Store(footprints.FootprintBase):
     def finaliseget(self, result_id, remote, local, options=None):
         options = self._options_fixup(options)
         """Proxy method to dedicated finaliseget method according to scheme."""
-        return self._actual_get(
+        return self._actual_get(
+            "finaliseget", remote, local, options, result_id=result_id
+        )
 
     def _hash_put(self, callback, local, remote, options):
         """Put a hash file next to the 'real' file."""
-        if (self.storehash is None) or (
+        if (self.storehash is None) or (
+            remote["path"].endswith("." + self.storehash)
+        ):
             return True
         options = self._hash_store_defaults(options)
         remote = remote.copy()
-        remote[
+        remote["path"] = remote["path"] + "." + self.storehash
         # Generate the hash sum
         hadapt = hashutils.HashAdapter(self.storehash)
         tmplocal = hadapt.file2hash_fh(local)
@@ -351,33 +387,41 @@ class Store(footprints.FootprintBase):
     def put(self, local, remote, options=None):
         """Proxy method to dedicated put method according to scheme."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Store put from %s to %s", local, remote)
         self.enforce_readonly()
         if not self._incache_inarchive_check(options):
             return True
         filtered = False
-        if options is not None and
-            filtered = options[
+        if options is not None and "urifilter" in options:
+            filtered = options["urifilter"](self, remote)
         if filtered:
             rc = True
-            logger.info(
+            logger.info(
+                "This remote URI has been filtered out: we are skipping it."
+            )
         else:
             dryrun = False
-            if options is not None and
-                dryrun = options[
-            rc = dryrun or getattr(self, self.scheme +
-
+            if options is not None and "dryrun" in options:
+                dryrun = options["dryrun"]
+            rc = dryrun or getattr(self, self.scheme + "put", self.notyet)(
+                local, remote, options
+            )
+            self._observer_notify(
+                "put", rc, remote, local=local, options=options
+            )
         return rc
 
     def delete(self, remote, options=None):
         """Proxy method to dedicated delete method according to scheme."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Store delete from %s", remote)
         self.enforce_readonly()
         if not self._incache_inarchive_check(options):
             return True
-        rc = getattr(self, self.scheme +
-
+        rc = getattr(self, self.scheme + "delete", self.notyet)(
+            remote, options
+        )
+        self._observer_notify("del", rc, remote, options=options)
         return rc
 
@@ -385,46 +429,41 @@ class MultiStore(footprints.FootprintBase):
     """Agregate various :class:`Store` items."""
 
     _abstract = True
-    _collector = (
+    _collector = ("store",)
     _footprint = [
         compressionpipeline,  # Not used by cache stores but ok, just in case...
         hashalgo,
         dict(
-            info
-            attr
-                scheme
-
-
-
-
-
-            ),
-            refillstore = dict(
-                type = bool,
-                optional = True,
-                default = False,
+            info="Multi store",
+            attr=dict(
+                scheme=dict(alias=("protocol",)),
+                netloc=dict(type=Namespace, alias=("domain", "namespace")),
+                refillstore=dict(
+                    type=bool,
+                    optional=True,
+                    default=False,
             ),
             storehash=dict(
-                values
+                values=hashalgo_avail_list,
             ),
             # ArchiveStores only be harmless for others...
-            storage
-                optional
-                default
+                storage=dict(
+                    optional=True,
+                    default=None,
             ),
-            storetube
-                optional
+                storetube=dict(
+                    optional=True,
+                ),
+                storeroot=dict(
+                    optional=True,
             ),
-            storeroot = dict(
-                optional = True,
-            )
             ),
-    )
+        ),
     ]
 
     def __init__(self, *args, **kw):
-        logger.debug(
-        sh = kw.pop(
+        logger.debug("Abstract multi store init %s", self.__class__)
+        sh = kw.pop("system", sessions.system())
         super().__init__(*args, **kw)
         self._sh = sh
         self._openedstores = self.loadstores()
@@ -432,7 +471,7 @@ class MultiStore(footprints.FootprintBase):
 
     @property
     def realkind(self):
-        return
+        return "multistore"
 
     @property
     def system(self):
@@ -441,8 +480,8 @@ class MultiStore(footprints.FootprintBase):
 
     @staticmethod
     def _verbose_log(options, level, *kargs, **kwargs):
-        slevel = kwargs.pop(
-        if options is not None and options.get(
+        slevel = kwargs.pop("slevel", "debug")
+        if options is not None and options.get("silent", False):
             level = slevel
         getattr(logger, level)(*kargs, **kwargs)
 
@@ -459,7 +498,9 @@ class MultiStore(footprints.FootprintBase):
             xstore = footprints.proxy.store(**desc)
             if xstore:
                 activestores.append(xstore)
-        logger.debug(
+        logger.debug(
+            "Multistore %s includes active stores %s", self, activestores
+        )
         return activestores
 
     @property
@@ -490,11 +531,17 @@ class MultiStore(footprints.FootprintBase):
         while loading alternates stores.
         """
         return [
-            dict(
-
-
-
-
+            dict(
+                system=self.system,
+                storehash=self.storehash,
+                store_compressed=self.store_compressed,
+                storage=self.storage,
+                storetube=self.storetube,
+                storeroot=self.storeroot,
+                scheme=x,
+                netloc=y,
+                **self.alternates_fpextras(),
+            )
             for x in self.alternates_scheme()
             for y in self.alternates_netloc()
         ]
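Each alternate store description produced by the comprehension above is a plain dict that combines the multistore's own settings with one (scheme, netloc) pair, later fed to `footprints.proxy.store(**desc)` in `loadstores`. A sketch of one such generated description (values are illustrative, not real endpoints):

```python
# One element of the list built above (illustrative values):
desc = dict(
    system=None,              # the shared System object in practice
    storehash="md5",          # inherited from the multistore footprint
    store_compressed=None,
    storage=None,
    storetube=None,
    storeroot=None,
    scheme="inarchive",       # one value from alternates_scheme()
    netloc="open.archive.fr", # one value from alternates_netloc()
)
# Resolved later with: footprints.proxy.store(**desc)
```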
@@ -526,7 +573,7 @@ class MultiStore(footprints.FootprintBase):
     def check(self, remote, options=None):
         """Go through internal opened stores and check for the resource."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore check from %s", remote)
         rc = False
         for sto in self.filtered_readable_openedstores(remote):
             rc = sto.check(remote.copy(), options)
@@ -537,25 +584,25 @@ class MultiStore(footprints.FootprintBase):
     def locate(self, remote, options=None):
         """Go through internal opened stores and locate the expected resource for each of them."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore locate %s", remote)
         f_ostores = self.filtered_readable_openedstores(remote)
         if not f_ostores:
             return False
         rloc = list()
         for sto in f_ostores:
-            logger.debug(
+            logger.debug("Multistore locate at %s", sto)
             tmp_rloc = sto.locate(remote.copy(), options)
             if tmp_rloc:
                 rloc.append(tmp_rloc)
-        return
+        return ";".join(rloc)
 
     def list(self, remote, options=None):
         """Go through internal opened stores and list the expected resource for each of them."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore list %s", remote)
         rlist = set()
         for sto in self.filtered_readable_openedstores(remote):
-            logger.debug(
+            logger.debug("Multistore list at %s", sto)
             tmp_rloc = sto.list(remote.copy(), options)
             if isinstance(tmp_rloc, (list, tuple, set)):
                 rlist.update(tmp_rloc)
@@ -566,18 +613,18 @@ class MultiStore(footprints.FootprintBase):
     def prestage(self, remote, options=None):
         """Go through internal opened stores and prestage the resource for each of them."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore prestage %s", remote)
         f_ostores = self.filtered_readable_openedstores(remote)
         if not f_ostores:
             return False
         if len(f_ostores) == 1:
-            logger.debug(
+            logger.debug("Multistore prestage at %s", f_ostores[0])
             rc = f_ostores[0].prestage(remote.copy(), options)
         else:
             rc = True
             for sto in f_ostores:
                 if sto.check(remote.copy(), options):
-                    logger.debug(
+                    logger.debug("Multistore prestage at %s", sto)
                     rc = sto.prestage(remote.copy(), options)
                     break
         return rc
@@ -590,13 +637,17 @@ class MultiStore(footprints.FootprintBase):
         if self.refillstore:
             f_wr_ostores = self.filtered_writeable_openedstores(remote)
         get_options = copy.copy(options)
-        get_options[
+        get_options["silent"] = True
         while refill_in_progress:
             for num, sto in enumerate(f_rd_ostores):
-                logger.debug(
+                logger.debug("Multistore get at %s", sto)
                 if result_id and num == len(f_rd_ostores) - 1:
-                    rc = sto.finaliseget(
-
+                    rc = sto.finaliseget(
+                        result_id, remote.copy(), local, get_options
+                    )
+                    result_id = (
+                        None  # result_ids can not be re-used during refill
+                    )
                 else:
                     rc = sto.get(remote.copy(), local, get_options)
                 if rc:
@@ -604,51 +655,82 @@ class MultiStore(footprints.FootprintBase):
                 # Are we trying a refill ? -> find the previous writeable store
                 restores = []
                 if rc and self.refillstore and num > 0:
-                    restores = [
-
-
+                    restores = [
+                        ostore
+                        for ostore in f_rd_ostores[:num]
+                        if (
+                            ostore.writeable
+                            and ostore in f_wr_ostores
+                            and ostore.use_cache()
+                        )
+                    ]
                 # Do the refills and check if one of them succeed
                 refill_in_progress = False
                 for restore in restores:
                     # Another refill may have filled the gap...
                     if not restore.check(remote.copy(), options):
-                        logger.info(
+                        logger.info(
+                            "Refill back in writeable store [%s].", restore
+                        )
                         try:
-                            refill_in_progress = (
-
-
-
+                            refill_in_progress = (
+                                restore.put(local, remote.copy(), options)
+                                and (
+                                    options.get(
+                                        "intent", CACHE_GET_INTENT_DEFAULT
+                                    )
+                                    != CACHE_PUT_INTENT
+                                )
+                            ) or refill_in_progress
                         except (ExecutionError, OSError) as e:
-                            logger.error(
-
+                            logger.error(
+                                "An ExecutionError happened during the refill: %s",
+                                str(e),
+                            )
+                            logger.error(
+                                "This error is ignored... but that's ugly !"
+                            )
                 if refill_in_progress:
-                    logger.info(
+                    logger.info(
+                        "Starting another round because at least one refill succeeded."
+                    )
                 # Whatever the refill's outcome, that's fine
                 if rc:
                     break
         if not rc:
-            self._verbose_log(
-
-
+            self._verbose_log(
+                options,
+                "warning",
+                "Multistore get {:s}://{:s}: none of the opened store succeeded.".format(
+                    self.scheme, self.netloc
+                ),
+                slevel="info",
+            )
         return rc
 
     def get(self, remote, local, options=None):
         """Go through internal opened stores for the first available resource."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore get from %s to %s", remote, local)
         return self._refilling_get(remote, local, options)
 
     def earlyget(self, remote, local, options=None):
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore earlyget from %s to %s", remote, local)
         f_ostores = self.filtered_readable_openedstores(remote)
         get_options = copy.copy(options)
         if len(f_ostores) > 1:
             first_checkable = all([s.has_fast_check() for s in f_ostores[:-1]])
             # Early-fetch is only available on the last resort store...
-            if first_checkable and all(
-
+            if first_checkable and all(
+                [
+                    not s.check(remote.copy(), get_options)
+                    for s in f_ostores[:-1]
+                ]
+            ):
+                return f_ostores[-1].earlyget(
+                    remote.copy(), local, get_options
+                )
             else:
                 return None
         elif len(f_ostores) == 1:
@@ -658,33 +740,33 @@ class MultiStore(footprints.FootprintBase):
 
     def finaliseget(self, result_id, remote, local, options=None):
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore finaliseget from %s to %s", remote, local)
         return self._refilling_get(remote, local, options, result_id=result_id)
 
     def put(self, local, remote, options=None):
         """Go through internal opened stores and put resource for each of them."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore put from %s to %s", local, remote)
         f_ostores = self.filtered_writeable_openedstores(remote)
         if not f_ostores:
-            logger.warning(
+            logger.warning("Funny attempt to put on an empty multistore...")
             return False
         rc = True
         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
-            logger.debug(
+            logger.debug("Multistore put at %s", sto)
             rcloc = sto.put(local, remote.copy(), options)
-            logger.debug(
+            logger.debug("Multistore out = %s", rcloc)
             rc = rc and rcloc
         return rc
 
     def delete(self, remote, options=None):
         """Go through internal opened stores and delete the resource."""
         options = self._options_fixup(options)
-        logger.debug(
+        logger.debug("Multistore delete from %s", remote)
         f_ostores = self.filtered_writeable_openedstores(remote)
         rc = False
         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
-            logger.debug(
+            logger.debug("Multistore delete at %s", sto)
             rc = sto.delete(remote.copy(), options)
             if not rc:
                 break
@@ -700,40 +782,42 @@ class ArchiveStore(Store):
     _footprint = [
         compressionpipeline,
         dict(
-            info
-            attr
-                scheme
-                    values
+            info="Generic archive store",
+            attr=dict(
+                scheme=dict(
+                    values=[
+                        "inarchive",
+                    ],
                 ),
-            netloc
-                values
+                netloc=dict(
+                    values=["open.archive.fr"],
                 ),
-            storehash
-                values
+                storehash=dict(
+                    values=hashalgo_avail_list,
                 ),
-            storage
-                optional
+                storage=dict(
+                    optional=True,
                 ),
-            storetube
-                optional
+                storetube=dict(
+                    optional=True,
                 ),
-            storeroot
-                optional
+                storeroot=dict(
+                    optional=True,
                 ),
-            storehead
-                optional
+                storehead=dict(
+                    optional=True,
                 ),
-            storetrue
-                type
-                optional
-                default
+                storetrue=dict(
+                    type=bool,
+                    optional=True,
+                    default=True,
                 ),
-            )
+            ),
         ),
     ]
 
     def __init__(self, *args, **kw):
-        logger.debug(
+        logger.debug("Archive store init %s", self.__class__)
         self._archive = None
         self._actual_storage = None
         self._actual_storetube = None
@@ -743,33 +827,44 @@ class ArchiveStore(Store):
 
     @property
     def realkind(self):
-        return
+        return "archivestore"
 
     @property
     def tracking_extraargs(self):
         tea = super().tracking_extraargs
         if self.storage:
-            tea[
+            tea["storage"] = self.storage
         return tea
 
     def _str_more(self):
-        return
+        return "archive={!r}".format(self.archive)
 
     @property
     def underlying_archive_kind(self):
-        return
+        return "std"
 
     @property
     def actual_storage(self):
         """This archive network name (potentially read form the configuration file)."""
         if self._actual_storage is None:
-
-            self.
-
-
-
+            try:
+                self._actual_storage = (
+                    self.system.env.VORTEX_DEFAULT_STORAGE
+                    or self.system.glove.default_fthost
+                    or from_config(section="storage", key="address")
+                )
+            except ConfigurationError as e:
+                msg = (
+                    "Trying to access storage archive but no "
+                    "storage location configured.\n"
+                    'Make sure configuration section "section" and key '
+                    '"address" exist.\n'
+                    "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
+                )
+                logger.error(msg)
+                raise e
         if self._actual_storage is None:
-            raise ValueError(
+            raise ValueError("Unable to find the archive network name.")
         return self._actual_storage
 
     @property
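`actual_storage` above now resolves the archive host from three sources, tried in order: the `VORTEX_DEFAULT_STORAGE` environment variable, the glove's `default_fthost`, then the `[storage]` section of the configuration, and a `ConfigurationError` is logged and re-raised when none is set. A hedged sketch of supplying the address (the host name is a placeholder):

```python
# Hedged sketch: the three lookups tried by actual_storage, in order.
import os
os.environ["VORTEX_DEFAULT_STORAGE"] = "archive.example.org"  # placeholder host

# Otherwise the glove's default_fthost applies, and as a last resort the
# configuration is queried, mirroring the calls visible in this diff:
from vortex.config import from_config
# address = from_config(section="storage", key="address")
# protocol = from_config(section="storage", key="protocol")  # cf. actual_storetube
```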
@@ -777,10 +872,11 @@ class ArchiveStore(Store):
         """This archive network name (potentially read form the configuration file)."""
         if self._actual_storetube is None:
             self._actual_storetube = from_config(
-                section="storage",
+                section="storage",
+                key="protocol",
             )
         if self._actual_storetube is None:
-            raise ValueError(
+            raise ValueError("Unable to find the archive access method.")
         return self._actual_storetube
 
     def _get_archive(self):
@@ -808,12 +904,12 @@ class ArchiveStore(Store):
 
     def _inarchiveformatpath(self, remote):
         # Remove extra slashes
-        formatted = remote[
+        formatted = remote["path"].lstrip(self.system.path.sep)
         # Store head ?
         if self.storehead:
             formatted = self.system.path.join(self.storehead, formatted)
         # Store root (if specified)
-        pathroot = remote.get(
+        pathroot = remote.get("root", self.storeroot)
         if pathroot is not None:
             formatted = self.system.path.join(pathroot, formatted)
         return formatted
@@ -822,355 +918,145 @@ class ArchiveStore(Store):
         """Use the archive object to check if **remote** exists."""
         # Try to delete the md5 file but ignore errors...
         if self._hash_check_or_delete(self.inarchivecheck, remote, options):
-            return self.archive.check(
-
-
-
+            return self.archive.check(
+                self._inarchiveformatpath(remote),
+                username=remote.get("username", None),
+                fmt=options.get("fmt", "foo"),
+                compressionpipeline=self._actual_cpipeline,
+            )
         else:
             return False
 
     def inarchivelocate(self, remote, options):
         """Use the archive object to obtain **remote** physical location."""
-        return self.archive.fullpath(
-
-
-
+        return self.archive.fullpath(
+            self._inarchiveformatpath(remote),
+            username=remote.get("username", None),
+            fmt=options.get("fmt", "foo"),
+            compressionpipeline=self._actual_cpipeline,
+        )
 
     def inarchivelist(self, remote, options):
         """Use the archive object to list available files."""
-        return self.archive.list(
-
+        return self.archive.list(
+            self._inarchiveformatpath(remote),
+            username=remote.get("username", None),
+        )
 
     def inarchiveprestageinfo(self, remote, options):
         """Returns the prestaging informations"""
-        return self.archive.prestageinfo(
-
-
-
+        return self.archive.prestageinfo(
+            self._inarchiveformatpath(remote),
+            username=remote.get("username", None),
+            fmt=options.get("fmt", "foo"),
+            compressionpipeline=self._actual_cpipeline,
+        )
 
     def inarchiveget(self, remote, local, options):
         """Use the archive object to retrieve **remote** in **local**."""
-        logger.info(
-
+        logger.info(
+            "inarchiveget on %s://%s/%s (to: %s)",
+            self.scheme,
+            self.netloc,
+            self._inarchiveformatpath(remote),
+            local,
+        )
         rc = self.archive.retrieve(
-            self._inarchiveformatpath(remote),
-
-
-
-
+            self._inarchiveformatpath(remote),
+            local,
+            intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+            fmt=options.get("fmt", "foo"),
+            info=options.get("rhandler", None),
+            username=remote["username"],
             compressionpipeline=self._actual_cpipeline,
         )
-        return rc and self._hash_get_check(
+        return rc and self._hash_get_check(
+            self.inarchiveget, remote, local, options
+        )
 
     def inarchiveearlyget(self, remote, local, options):
         """Use the archive object to initiate an early get request on **remote**."""
-        logger.debug(
-
+        logger.debug(
+            "inarchiveearlyget on %s://%s/%s (to: %s)",
+            self.scheme,
+            self.netloc,
+            self._inarchiveformatpath(remote),
+            local,
+        )
         rc = self.archive.earlyretrieve(
-            self._inarchiveformatpath(remote),
-
-
-
-
+            self._inarchiveformatpath(remote),
+            local,
+            intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+            fmt=options.get("fmt", "foo"),
+            info=options.get("rhandler", None),
+            username=remote["username"],
             compressionpipeline=self._actual_cpipeline,
         )
         return rc
 
     def inarchivefinaliseget(self, result_id, remote, local, options):
         """Use the archive object to finalise the **result_id** early get request."""
-        logger.info(
-
+        logger.info(
+            "inarchivefinaliseget on %s://%s/%s (to: %s)",
+            self.scheme,
+            self.netloc,
+            self._inarchiveformatpath(remote),
+            local,
+        )
         rc = self.archive.finaliseretrieve(
             result_id,
-            self._inarchiveformatpath(remote),
-
-
-
+            self._inarchiveformatpath(remote),
+            local,
+            intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+            fmt=options.get("fmt", "foo"),
+            info=options.get("rhandler", None),
+            username=remote["username"],
             compressionpipeline=self._actual_cpipeline,
         )
-        return rc and self._hash_get_check(
+        return rc and self._hash_get_check(
+            self.inarchiveget, remote, local, options
+        )
 
     def inarchiveput(self, local, remote, options):
         """Use the archive object to put **local** to **remote**"""
-        logger.info(
-
+        logger.info(
+            "inarchiveput to %s://%s/%s (from: %s)",
+            self.scheme,
+            self.netloc,
+            self._inarchiveformatpath(remote),
+            local,
+        )
         rc = self.archive.insert(
-            self._inarchiveformatpath(remote),
+            self._inarchiveformatpath(remote),
+            local,
             intent=ARCHIVE_PUT_INTENT,
-            fmt=options.get(
-            info=options.get(
-            username=remote[
+            fmt=options.get("fmt", "foo"),
+            info=options.get("rhandler"),
+            username=remote["username"],
             compressionpipeline=self._actual_cpipeline,
-            enforcesync=options.get(
-            usejeeves=options.get(
+            enforcesync=options.get("enforcesync", False),
+            usejeeves=options.get("delayed", None),
         )
         return rc and self._hash_put(self.inarchiveput, local, remote, options)
 
     def inarchivedelete(self, remote, options):
-        logger.info(
-
+        logger.info(
+            "inarchivedelete on %s://%s/%s",
+            self.scheme,
+            self.netloc,
+            self._inarchiveformatpath(remote),
+        )
         # Try to delete the md5 file but ignore errors...
         self._hash_check_or_delete(self.inarchivedelete, remote, options)
         return self.archive.delete(
             self._inarchiveformatpath(remote),
-            fmt=options.get(
-            info=options.get(
-            username=remote[
+            fmt=options.get("fmt", "foo"),
+            info=options.get("rhandler", None),
+            username=remote["username"],
             compressionpipeline=self._actual_cpipeline,
         )
 
 
-def _default_remoteconfig_dict():
-    """Just an utility method for ConfigurableArchiveStore."""
-    return dict(restrict=None, seen=False)
-
-
-class ConfigurableArchiveStore:
-    """Generic Archive Store with the ability to read a configuration file.
-
-    This is a mixin class...
-    """
-
-    #: Path to the Store configuration file (please overwrite !)
-    _store_global_config = None
-    _datastore_id = None
-    _re_subhosting = re.compile(r'(.*)\s+hosted\s+by\s+([-\w]+)$')
-
-    @staticmethod
-    def _get_remote_config(store, url, container):
-        """Fetch a configuration file from **url** using **store**."""
-        rc = store.get(url, container.iotarget(), dict(fmt='ascii'))
-        if rc:
-            return config.GenericConfigParser(inifile=container.iotarget())
-        else:
-            return None
-
-    @staticmethod
-    def _please_fix(what):
-        logger.error('Please fix that quickly... Meanwhile, "%s" is ignored !', what)
-
-    def _process_location_section(self, section, section_items):
-        section_data = dict()
-        m_section = self._re_subhosting.match(section)
-        if m_section:
-            # A "hosted by" section
-            section_data['idrestricts'] = list()
-            for k, v in section_items:
-                if k.endswith('_idrestrict'):
-                    try:
-                        compiled_re = re.compile(v)
-                        section_data['idrestricts'].append(compiled_re)
-                    except re.error as e:
-                        logger.error('The regex provided for "%s" in section "%s" does not compile !: "%s".',
-                                     k, section, str(e))
-                        self._please_fix(k)
-                elif k == 'idrestricts':
-                    logger.error('A "%s" entrey was found in section "%s". This is not ok.', k, section)
-                    self._please_fix(k)
-                else:
-                    section_data[k] = v
-            if section_data['idrestricts']:
-                return m_section.group(1), m_section.group(2), section_data
-            else:
-                logger.error('No acceptable "_idrestrict" entry was found in section "%s".', section)
-                self._please_fix(section)
-                return None, None, None
-        else:
-            # The usual/generic section
-            for k, v in section_items:
-                if k.endswith('_idrestrict') or k == 'idrestricts':
-                    logger.error('A "*idrestrict*" entry was found in section "%s". This is not ok.', section)
-                    self._please_fix(section)
-                    return None, None, None
-                section_data[k] = v
-            return section, None, section_data
-
-    def _ingest_remote_config(self, r_id, r_confdict, global_confdict):
-        logger.info("Reading config file: %s (id=%s)", r_confdict['uri'], r_id)
-        url = net.uriparse(r_confdict['uri'])
-        tempstore = footprints.proxy.store(
-            scheme=url['scheme'],
-            netloc=url['netloc'],
-            storetrack=False,
-        )
-        retry = False
-        # First, try with a temporary ShouldFly
-        try:
-            tempcontainer = footprints.proxy.container(shouldfly=True)
-            remotecfg_parser = self._get_remote_config(tempstore, url, tempcontainer)
-        except OSError:
-            # This may happen if the user has insufficient rights on
-            # the current directory
-            retry = True
-        finally:
-            self.system.remove(tempcontainer.filename)
-        # Is retry needed ? This time a completely virtual file is used.
-        if retry:
-            remotecfg_parser = self._get_remote_config(tempstore, url,
-                                                       footprints.proxy.container(incore=True))
-        # Update the configuration using the parser
-        if remotecfg_parser is not None:
-            for section in remotecfg_parser.sections():
-                s_loc, s_entry, s_data = self._process_location_section(
-                    section,
-                    remotecfg_parser.items(section)
-                )
-                if s_loc is not None:
-                    logger.debug("New location entry found: %s (subentry: %s)", s_loc, s_entry)
-                    # Filtering based on the regex : No collisions allowed !
-                    if r_confdict['restrict'] is not None:
-                        if r_confdict['restrict'].search(s_loc):
-                            global_confdict['locations'][s_loc][s_entry] = s_data
-                        else:
-                            logger.error('According to the "restrict" clause, ' +
-                                         'you are not allowed to define the "%s" location !', s_loc)
-                            self._please_fix(section)
-                    else:
-                        global_confdict['locations'][s_loc][s_entry] = s_data
-            r_confdict['seen'] = True
-        else:
-            raise OSError("The remote configuration {:s} couldn't be found."
-                          .format(r_confdict['uri']))
-
-    def _load_config(self, conf, tlocation):
-        """Load the store configuration.
-
-        1. The global store's configuration file is read (see
-           ``self.__store_global_config``)
-        2. Given ``self.storage``, the proper section of the global configuration
-           file is read: it may contain localconf or remoteconfXXX options that
-           describe additional configuration files
-        3. First, the local configuration file is read
-        4. Then, the remote configuration files are read
-
-        The relevant content of the configuration file is stored in the ``conf``
-        dictionary.
-        """
-        # Because _store_global_config and _datastore_id must be overwritten...
-        assert self._store_global_config is not None
-        assert self._datastore_id is not None
-
-        if not conf:
-            # This is the first call to this method
-            logger.info("Some store configuration data is needed (for %s://%s)",
-                        self.scheme, self.netloc)
-
-            # Global configuration file
-            logger.info("Reading config file: %s", self._store_global_config)
-            maincfg = config.GenericConfigParser(inifile=self._store_global_config)
-            if self.actual_storage in maincfg.sections():
-                conf['host'] = dict(maincfg.items(self.actual_storage))
-            else:
-                conf['host'] = dict(maincfg.defaults())
-
-            conf['locations'] = defaultdict(functools.partial(defaultdict, dict))
-            conf['remoteconfigs'] = defaultdict(_default_remoteconfig_dict)
-            conf['uuids_cache'] = dict()
-
-            # Look for a local configuration file
-            localcfg = conf['host'].get('localconf', None)
-            if localcfg is not None:
-                logger.info("Reading config file: %s", localcfg)
-                localcfg = config.GenericConfigParser(inifile=localcfg)
-                conf['locations']['generic'][None] = localcfg.defaults()
-                for section in localcfg.sections():
-                    s_loc, s_entry, s_data = self._process_location_section(
-                        section,
-                        localcfg.items(section)
-                    )
-                    if s_loc is not None:
-                        logger.debug("New location entry found: %s (subentry: %s)", s_loc, s_entry)
-                        conf['locations'][s_loc][s_entry] = s_data
-
-            # Look for remote configurations
-            tg_inet = self.system.default_target.inetname
-            for key in conf['host'].keys():
-                k_match = re.match(r'generic_(remoteconf\w*)_uri$', key)
-                if k_match:
-                    r_id = k_match.group(1)
-                    g_uri_key = key
-                    i_uri_key = '{:s}_{:s}_uri'.format(tg_inet, r_id)
-                    g_restrict_key = 'generic_{:s}_restrict'.format(r_id)
-                    i_restrict_key = '{:s}_{:s}_restrict'.format(tg_inet, r_id)
-                    if i_uri_key in conf['host'].keys():
-                        conf['remoteconfigs'][r_id]['uri'] = conf['host'][i_uri_key]
-                    else:
-                        conf['remoteconfigs'][r_id]['uri'] = conf['host'][g_uri_key]
-                    if i_restrict_key in conf['host'].keys():
-                        conf['remoteconfigs'][r_id]['restrict'] = conf['host'][i_restrict_key]
-                    elif g_restrict_key in conf['host'].keys():
-                        conf['remoteconfigs'][r_id]['restrict'] = conf['host'][g_restrict_key]
-                    # Trying to compile the regex !
-                    if conf['remoteconfigs'][r_id]['restrict'] is not None:
-                        try:
-                            compiled_re = re.compile(conf['remoteconfigs'][r_id]['restrict'])
-                            conf['remoteconfigs'][r_id]['restrict'] = compiled_re
-                        except re.error as e:
-                            logger.error('The regex provided for "%s" does not compile !: "%s".',
-                                         r_id, str(e))
-                            self._please_fix(r_id)
-                            del conf['remoteconfigs'][r_id]
-
-        for r_confk, r_conf in conf['remoteconfigs'].items():
-            if r_conf['restrict'] is None:
-                self._ingest_remote_config(r_confk, r_conf, conf)
-
-        for r_confk, r_conf in conf['remoteconfigs'].items():
-            if ((not r_conf['seen']) and r_conf['restrict'] is not None and
-                    r_conf['restrict'].search(tlocation)):
-                self._ingest_remote_config(r_confk, r_conf, conf)
-
-    def _actual_fromconf(self, uuid, item):
-        """For a given **uuid**, Find the corresponding value of the **item** key
-        in the configuration data.
-
-        Access the session's datastore to get the configuration data. If
-        necessary, configuration data are read in using the :meth:`_load_config`
-        method
-        """
-        ds = sessions.current().datastore
-        conf = ds.get(self._datastore_id, dict(storage=self.actual_storage),
-                      default_payload=dict(), readonly=True)
|
|
1135
|
-
if (uuid, item) in conf.get('uuids_cache', dict()):
|
|
1136
|
-
return conf['uuids_cache'][(uuid, item)]
|
|
1137
|
-
else:
|
|
1138
|
-
logger.debug('Looking for %s''s "%s" in config.', uuid, item)
|
|
1139
|
-
mylocation = uuid.location
|
|
1140
|
-
self._load_config(conf, mylocation)
|
|
1141
|
-
st_item = None
|
|
1142
|
-
if mylocation in conf['locations']:
|
|
1143
|
-
# The default
|
|
1144
|
-
if None in conf['locations'][mylocation]:
|
|
1145
|
-
st_item = conf['locations'][mylocation][None].get(item, None)
|
|
1146
|
-
# Id based
|
|
1147
|
-
for s_entry, s_entry_d in conf['locations'][mylocation].items():
|
|
1148
|
-
if s_entry is not None:
|
|
1149
|
-
if any([idrestrict.search(uuid.id)
|
|
1150
|
-
for idrestrict in s_entry_d['idrestricts']]):
|
|
1151
|
-
st_item = s_entry_d.get(item, None)
|
|
1152
|
-
st_item = st_item or conf['locations']['generic'][None].get(item, None)
|
|
1153
|
-
conf['uuids_cache'][(uuid, item)] = st_item
|
|
1154
|
-
return st_item
|
|
1155
|
-
|
|
1156
|
-
def _actual_storeroot(self, uuid):
|
|
1157
|
-
"""For a given **uuid**, determine the proper storeroot."""
|
|
1158
|
-
if self.storeroot is None:
|
|
1159
|
-
# Read the storeroot from the configuration data
|
|
1160
|
-
st_root = self._actual_fromconf(uuid, 'storeroot')
|
|
1161
|
-
if st_root is None:
|
|
1162
|
-
raise OSError("No valid storeroot could be found.")
|
|
1163
|
-
# The location may be an alias: find the real username
|
|
1164
|
-
realname = self._actual_fromconf(uuid, 'realname')
|
|
1165
|
-
if realname is None:
|
|
1166
|
-
mylocation = uuid.location
|
|
1167
|
-
else:
|
|
1168
|
-
mylocation = realname
|
|
1169
|
-
return st_root.format(location=mylocation)
|
|
1170
|
-
else:
|
|
1171
|
-
return self.storeroot
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
1060
|
class CacheStore(Store):
|
|
1175
1061
|
"""Generic Cache Store."""
|
|
1176
1062
|
|
|
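Editor's note on the removed configuration machinery above: `conf['locations']` is a two-level `defaultdict`, which is why assignments such as `global_confdict['locations'][s_loc][s_entry] = s_data` never need to pre-create intermediate dictionaries. A minimal stdlib sketch of that pattern (the location names and storeroot values below are invented for illustration):

```python
# Illustration only: the nested-defaultdict pattern used by the removed
# _load_config code; "hendrix" and the storeroot values are made-up data.
import functools
from collections import defaultdict

locations = defaultdict(functools.partial(defaultdict, dict))

# Assigning to an unseen (location, entry) pair works without KeyError:
locations["hendrix"][None] = {"storeroot": "/home/{location}"}
locations["hendrix"]["ens"] = {"storeroot": "/scratch/{location}"}

print(locations["hendrix"][None]["storeroot"])  # /home/{location}
print(dict(locations["somewhere-new"]))         # {} (auto-created)
```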
@@ -1180,46 +1066,46 @@ class CacheStore(Store):

     _abstract = True
     _footprint = dict(
-        info = 'Generic cache store',
-        attr = dict(
-            scheme = dict(
-                values = ['incache'],
+        info="Generic cache store",
+        attr=dict(
+            scheme=dict(
+                values=["incache"],
             ),
-            netloc = dict(
-                values = ['open.cache.fr'],
+            netloc=dict(
+                values=["open.cache.fr"],
             ),
-            storehash = dict(
-                values = hashalgo_avail_list,
+            storehash=dict(
+                values=hashalgo_avail_list,
             ),
-            strategy = dict(
-                optional = True,
-                default = 'std',
+            strategy=dict(
+                optional=True,
+                default="std",
             ),
-            headdir = dict(
-                optional = True,
-                default = 'conf',
+            headdir=dict(
+                optional=True,
+                default="conf",
             ),
-            rtouch = dict(
-                type = bool,
-                optional = True,
-                default = False,
+            rtouch=dict(
+                type=bool,
+                optional=True,
+                default=False,
             ),
-            rtouchskip = dict(
-                type = int,
-                optional = True,
-                default = 0,
+            rtouchskip=dict(
+                type=int,
+                optional=True,
+                default=0,
             ),
-        )
+        ),
     )

     def __init__(self, *args, **kw):
         del self.cache
-        logger.debug('Generic cache store init %s', self.__class__)
+        logger.debug("Generic cache store init %s", self.__class__)
         super().__init__(*args, **kw)

     @property
     def realkind(self):
-        return 'cachestore'
+        return "cachestore"

     def use_cache(self):
         """Boolean value to insure that this store is using a cache."""
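The `CacheStore` footprint hunk above is a pure restyle (black-style keyword arguments and double quotes); no attribute changes its meaning. A quick sanity check of that claim, using plain dicts in place of the real footprint machinery:

```python
# Illustration only: both spellings build identical dictionaries, so the
# footprint reformat above is behaviour-neutral.
old_style = dict(
    strategy = dict(
        optional = True,
        default = 'std',
    ),
)
new_style = dict(
    strategy=dict(
        optional=True,
        default="std",
    ),
)
assert old_style == new_style
```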
@@ -1237,11 +1123,10 @@ class CacheStore(Store):
     def _get_cache(self):
         if not self._cache:
             self._cache = footprints.proxy.caches.default(
-                kind=self.strategy,
-                headdir=self.headdir,
+                entry=self.location,
                 rtouch=self.rtouch,
                 rtouchskip=self.rtouchskip,
-                readonly=self.readonly
+                readonly=self.readonly,
             )
             self._caches_object_stack.add(self._cache)
         return self._cache
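Note that `CacheStore.__init__` starts with `del self.cache`, which works because `cache` is declared as `property(_get_cache, _set_cache, _del_cache)` (see the context lines above): deleting runs `_del_cache`, and the next read lazily rebuilds the cache object. A self-contained sketch of that pattern, with an invented `Owner` class standing in for the store:

```python
# Illustration only: the lazy property(fget, fset, fdel) pattern behind
# CacheStore.cache; Owner and its object() payload are stand-ins.
class Owner:
    def __init__(self):
        del self.cache  # runs _del_cache, resetting the backing attribute

    def _get_cache(self):
        if not self._cache:
            self._cache = object()  # stands in for footprints.proxy.caches.default(...)
        return self._cache

    def _set_cache(self, value):
        self._cache = value

    def _del_cache(self):
        self._cache = None

    cache = property(_get_cache, _set_cache, _del_cache)

owner = Owner()
assert owner.cache is owner.cache  # built once, then reused
```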
@@ -1258,74 +1143,98 @@ class CacheStore(Store):
     cache = property(_get_cache, _set_cache, _del_cache)

     def _str_more(self):
-        return 'entry={:s}'.format(self.cache.entry)
+        return "entry={:s}".format(self.cache.entry)

     def incachecheck(self, remote, options):
         """Returns a stat-like object if the ``remote`` exists in the current cache."""
         if self._hash_check_or_delete(self.incachecheck, remote, options):
-            st = self.cache.check(remote['path'])
-            if options.get('isfile', False) and st:
-                st = self.system.path.isfile(self.incachelocate(remote, options))
+            st = self.cache.check(remote["path"])
+            if options.get("isfile", False) and st:
+                st = self.system.path.isfile(
+                    self.incachelocate(remote, options)
+                )
             return st
         else:
             return False

     def incachelocate(self, remote, options):
         """Agregates cache to remote subpath."""
-        return self.cache.fullpath(remote['path'])
+        return self.cache.fullpath(remote["path"])

     def incachelist(self, remote, options):
         """List the content of a remote path."""
-        return self.cache.list(remote['path'])
+        return self.cache.list(remote["path"])

     def incacheprestageinfo(self, remote, options):
         """Returns pre-staging informations."""
-        return self.cache.prestageinfo(remote['path'])
+        return self.cache.prestageinfo(remote["path"])

     def incacheget(self, remote, local, options):
         """Simple copy from current cache cache to ``local``."""
-        logger.info('incacheget on %s://%s/%s (to: %s)',
-                    self.scheme, self.netloc, remote['path'], local)
+        logger.info(
+            "incacheget on %s://%s/%s (to: %s)",
+            self.scheme,
+            self.netloc,
+            remote["path"],
+            local,
+        )
         rc = self.cache.retrieve(
-            remote['path'],
+            remote["path"],
             local,
-            intent=options.get('intent', CACHE_GET_INTENT_DEFAULT),
-            fmt=options.get('fmt'),
-            info=options.get('rhandler', None),
-            tarextract=options.get('auto_tarextract', False),
-            dirextract=options.get('auto_dirextract', False),
-            uniquelevel_ignore=options.get('uniquelevel_ignore', True),
-            silent=options.get('silent', False),
+            intent=options.get("intent", CACHE_GET_INTENT_DEFAULT),
+            fmt=options.get("fmt"),
+            info=options.get("rhandler", None),
+            tarextract=options.get("auto_tarextract", False),
+            dirextract=options.get("auto_dirextract", False),
+            uniquelevel_ignore=options.get("uniquelevel_ignore", True),
+            silent=options.get("silent", False),
+        )
+        if rc or not options.get("silent", False):
+            logger.info(
+                "incacheget retrieve rc=%s location=%s",
+                str(rc),
+                str(self.incachelocate(remote, options)),
+            )
+        return rc and self._hash_get_check(
+            self.incacheget, remote, local, options
         )
-        if rc or not options.get('silent', False):
-            logger.info('incacheget retrieve rc=%s location=%s', str(rc),
-                        str(self.incachelocate(remote, options)))
-        return rc and self._hash_get_check(self.incacheget, remote, local, options)

     def incacheput(self, local, remote, options):
         """Simple copy from ``local`` to the current cache in readonly mode."""
-        logger.info('incacheput to %s://%s/%s (from: %s)',
-                    self.scheme, self.netloc, remote['path'], local)
+        logger.info(
+            "incacheput to %s://%s/%s (from: %s)",
+            self.scheme,
+            self.netloc,
+            remote["path"],
+            local,
+        )
         rc = self.cache.insert(
-            remote['path'],
+            remote["path"],
             local,
             intent=CACHE_PUT_INTENT,
-            fmt=options.get('fmt'),
-            info=options.get('rhandler', None),
+            fmt=options.get("fmt"),
+            info=options.get("rhandler", None),
+        )
+        logger.info(
+            "incacheput insert rc=%s location=%s",
+            str(rc),
+            str(self.incachelocate(remote, options)),
         )
-        logger.info('incacheput insert rc=%s location=%s', str(rc),
-                    str(self.incachelocate(remote, options)))
         return rc and self._hash_put(self.incacheput, local, remote, options)

     def incachedelete(self, remote, options):
         """Simple removing of the remote resource in cache."""
-        logger.info('incachedelete on %s://%s/%s',
-                    self.scheme, self.netloc, remote['path'])
+        logger.info(
+            "incachedelete on %s://%s/%s",
+            self.scheme,
+            self.netloc,
+            remote["path"],
+        )
         self._hash_check_or_delete(self.incachedelete, remote, options)
         return self.cache.delete(
-            remote['path'],
-            fmt=options.get('fmt'),
-            info=options.get('rhandler', None),
+            remote["path"],
+            fmt=options.get("fmt"),
+            info=options.get("rhandler", None),
         )


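The `incacheget`/`incacheput`/`incachedelete` hunks above only re-wrap the `options.get(...)` calls; the defaulting behaviour is unchanged. For readers new to the pattern, this is how omitted keys fall back (the option values and the `"in"` intent below are stand-ins, not the actual `CACHE_GET_INTENT_DEFAULT` definition):

```python
# Illustration only: option defaulting as used by incacheget above;
# CACHE_GET_INTENT_DEFAULT is a module-level constant in abstractstores.py,
# set here to a stand-in value.
CACHE_GET_INTENT_DEFAULT = "in"

options = {"fmt": "grib", "silent": True}
print(options.get("intent", CACHE_GET_INTENT_DEFAULT))  # "in" (defaulted)
print(options.get("fmt"))                               # "grib"
print(options.get("uniquelevel_ignore", True))          # True (defaulted)
```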
@@ -1333,38 +1242,33 @@ class PromiseStore(footprints.FootprintBase):
     """Combined a Promise Store for expected resources and any other matching Store."""

     _abstract = True
-    _collector = ('store',)
+    _collector = ("store",)
     _footprint = dict(
-        info = 'Promise store',
-        attr = dict(
-            scheme = dict(
-                alias = ('protocol',)
+        info="Promise store",
+        attr=dict(
+            scheme=dict(alias=("protocol",)),
+            netloc=dict(type=Namespace, alias=("domain", "namespace")),
+            storetrack=dict(
+                type=bool,
+                default=True,
+                optional=True,
             ),
-            netloc = dict(
-                type = Namespace,
-                alias = ('domain', 'namespace')
-            ),
-            storetrack = dict(
-                type = bool,
-                default = True,
-                optional = True,
-            ),
-            prstorename = dict(
-                type = Namespace,
-                optional = True,
-                default = 'promise.cache.fr',
+            prstorename=dict(
+                type=Namespace,
+                optional=True,
+                default="promise.cache.fr",
             ),
         ),
     )

     def __init__(self, *args, **kw):
-        logger.debug('Abstract promise store init %s', self.__class__)
-        sh = kw.pop('system', sessions.system())
+        logger.debug("Abstract promise store init %s", self.__class__)
+        sh = kw.pop("system", sessions.system())
         super().__init__(*args, **kw)
         self._sh = sh

         # Assume that the actual scheme is the current scheme without "x" prefix
-        self.proxyscheme = self.scheme.lstrip('x')
+        self.proxyscheme = self.scheme.lstrip("x")

         # Find a store for the promised resources
         self.promise = footprints.proxy.store(
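One detail worth noting in `PromiseStore.__init__`: `self.scheme.lstrip("x")` strips every leading `x`, not just the first one, which is fine for the intended `x`-prefixed schemes. A quick demonstration (the scheme names are examples only):

```python
# Illustration only: str.lstrip removes all leading occurrences, so an
# "x"-prefixed scheme maps cleanly onto its proxied scheme.
for scheme in ("xftp", "xcache", "ftp"):
    print(scheme, "->", scheme.lstrip("x"))
# xftp -> ftp
# xcache -> cache
# ftp -> ftp
```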
@@ -1373,9 +1277,12 @@ class PromiseStore(footprints.FootprintBase):
             storetrack=self.storetrack,
         )
         if self.promise is None:
-            logger.critical('Could not find store scheme <%s> netloc <%s>',
-                            self.proxyscheme, self.prstorename)
-            raise ValueError('Could not get a Promise Store')
+            logger.critical(
+                "Could not find store scheme <%s> netloc <%s>",
+                self.proxyscheme,
+                self.prstorename,
+            )
+            raise ValueError("Could not get a Promise Store")

         # Find the other "real" store (could be a multi-store)
         self.other = footprints.proxy.store(
@@ -1384,15 +1291,19 @@ class PromiseStore(footprints.FootprintBase):
             storetrack=self.storetrack,
         )
         if self.other is None:
-            logger.critical('Could not find store scheme <%s> netloc <%s>',
-                            self.proxyscheme, self.netloc)
+            logger.critical(
+                "Could not find store scheme <%s> netloc <%s>",
+                self.proxyscheme,
+                self.netloc,
+            )
+            raise ValueError("Could not get an Other Store")

         self.openedstores = (self.promise, self.other)
         self.delayed = False

     @property
     def realkind(self):
-        return 'promisestore'
+        return "promisestore"

     @property
     def system(self):
@@ -1410,13 +1321,13 @@ class PromiseStore(footprints.FootprintBase):
             stamp=date.stamp(),
             itself=self.promise.locate(remote, options),
             locate=self.other.locate(remote, options),
-            datafmt=options.get('fmt', None),
-            rhandler=options.get('rhandler', None),
+            datafmt=options.get("fmt", None),
+            rhandler=options.get("rhandler", None),
         )

     def mkpromise_file(self, info, local):
         """Build a virtual container with specified informations."""
-        pfile = local + '.pr'
+        pfile = local + ".pr"
         self.system.json_dump(info, pfile, sort_keys=True, indent=4)
         return pfile
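`mkpromise_file` above serialises the promise description next to the expected resource, under the same name plus a `.pr` suffix. A stdlib-only equivalent, with an invented payload (`self.system.json_dump` is assumed here to behave like `json.dump` with the same keyword arguments):

```python
# Illustration only: a stdlib sketch of mkpromise_file; the info payload
# is hypothetical.
import json

def mkpromise_file(info, local):
    pfile = local + ".pr"
    with open(pfile, "w") as fh:
        json.dump(info, fh, sort_keys=True, indent=4)
    return pfile

print(mkpromise_file({"stamp": "...", "locate": "..."}, "tmpfile"))  # tmpfile.pr
```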
@@ -1427,45 +1338,49 @@ class PromiseStore(footprints.FootprintBase):
     def check(self, remote, options=None):
         """Go through internal opened stores and check for the resource."""
         options = self._options_fixup(options)
-        logger.debug('Promise check from %s', remote)
-        return self.other.check(remote.copy(), options) or self.promise.check(remote.copy(), options)
+        logger.debug("Promise check from %s", remote)
+        return self.other.check(remote.copy(), options) or self.promise.check(
+            remote.copy(), options
+        )

     def locate(self, remote, options=None):
         """Go through internal opened stores and locate the expected resource for each of them."""
         options = self._options_fixup(options)
-        logger.debug('Promise locate %s', remote)
+        logger.debug("Promise locate %s", remote)
         inpromise = True
         if options:
-            inpromise = options.get('inpromise', True)
+            inpromise = options.get("inpromise", True)

         locate_other = self.other.locate(remote.copy(), options)
         if inpromise:
             locate_promised = self.promise.locate(remote.copy(), options)
-            return locate_promised + ';' + locate_other
+            return locate_promised + ";" + locate_other
         return locate_other

     def get(self, remote, local, options=None):
         """Go through internal opened stores for the first available resource."""
         options = self._options_fixup(options)
-        logger.debug('Promise get %s', remote)
+        logger.debug("Promise get %s", remote)
         self.delayed = False
-        logger.info('Try promise from store %s', self.promise)
+        logger.info("Try promise from store %s", self.promise)
         try:
             rc = self.promise.get(remote.copy(), local, options)
         except OSError as e:
             # If something goes wrong, assume that the promise file had been
             # deleted during the execution of self.promise.check (which can cause
             # IOError or OSError to be raised).
-            logger.info('An error occurred while fetching the promise file: %s', str(e))
-            logger.info('Assuming this is a negative result...')
+            logger.info(
+                "An error occurred while fetching the promise file: %s", str(e)
+            )
+            logger.info("Assuming this is a negative result...")
             rc = False
         if rc:
             self.delayed = True
         else:
-            logger.info('Try promise from store %s', self.other)
+            logger.info("Try promise from store %s", self.other)
             rc = self.other.get(remote.copy(), local, options)
-        if not rc and options.get('pretend', False):
-            logger.warning('Pretending to get a promise for <%s>', local)
+        if not rc and options.get("pretend", False):
+            logger.warning("Pretending to get a promise for <%s>", local)
             pr_info = self.mkpromise_info(remote, options)
             pr_file = self.mkpromise_file(pr_info, local)
             self.system.move(pr_file, local)
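The control flow of `get` above is easy to misread in diff form: a successful fetch from the promise store marks the handler as delayed, any `OSError` there is downgraded to a negative result, and only then is the real store tried. Reduced to plain functions (the two stand-in store callables are invented):

```python
# Illustration only: the promise-first fallback order of PromiseStore.get,
# with stand-in callables instead of real stores.
def promise_get():
    raise OSError("promise file vanished")  # treated as a negative result

def other_get():
    return True

try:
    rc = promise_get()
except OSError:
    rc = False
delayed = bool(rc)  # True only when the promise file itself was fetched
if not rc:
    rc = other_get()
print(rc, delayed)  # True False
```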
@@ -1475,14 +1390,18 @@ class PromiseStore(footprints.FootprintBase):
     def earlyget(self, remote, local, options=None):
         """Possible early-get on the target store."""
         options = self._options_fixup(options)
-        logger.debug('Promise early-get %s', remote)
+        logger.debug("Promise early-get %s", remote)
         result_id = None
         try:
-            rc = (self.promise.has_fast_check and
-                  self.promise.check(remote.copy(), options))
+            rc = self.promise.has_fast_check and self.promise.check(
+                remote.copy(), options
+            )
         except OSError as e:
-            logger.debug('An error occurred while checking for the promise file: %s', str(e))
-            logger.debug('Assuming this is a negative result...')
+            logger.debug(
+                "An error occurred while checking for the promise file: %s",
+                str(e),
+            )
+            logger.debug("Assuming this is a negative result...")
             rc = False
         if not rc:
             result_id = self.other.earlyget(remote.copy(), local, options)
@@ -1490,70 +1409,87 @@ class PromiseStore(footprints.FootprintBase):

     def finaliseget(self, result_id, remote, local, options=None):
         options = self._options_fixup(options)
-        logger.debug('Promise finalise-get %s', remote)
+        logger.debug("Promise finalise-get %s", remote)
         self.delayed = False
-        logger.info('Try promise from store %s', self.promise)
+        logger.info("Try promise from store %s", self.promise)
         try:
             rc = self.promise.get(remote.copy(), local, options)
         except OSError as e:
-            logger.debug('An error occurred while fetching the promise file: %s', str(e))
-            logger.debug('Assuming this is a negative result...')
+            logger.debug(
+                "An error occurred while fetching the promise file: %s", str(e)
+            )
+            logger.debug("Assuming this is a negative result...")
             rc = False
         if rc:
             self.delayed = True
         else:
-            logger.info('Try promise from store %s', self.other)
-            rc = self.other.finaliseget(result_id, remote.copy(), local, options)
+            logger.info("Try promise from store %s", self.other)
+            rc = self.other.finaliseget(
+                result_id, remote.copy(), local, options
+            )
         return rc

     @staticmethod
     def _clean_pr_json(prjson):
-        del prjson['stamp']
-        if 'options' in prjson['rhandler']:
-            prjson['rhandler']['options'].pop('storetrack', False)
+        del prjson["stamp"]
+        if "options" in prjson["rhandler"]:
+            prjson["rhandler"]["options"].pop("storetrack", False)
         return prjson

     def put(self, local, remote, options=None):
         """Put a promise or the actual resource if available."""
         options = self._options_fixup(options)
-        logger.debug('Multistore put from %s to %s', local, remote)
-        if options.get('force', False) or not self.system.path.exists(local):
+        logger.debug("Multistore put from %s to %s", local, remote)
+        if options.get("force", False) or not self.system.path.exists(local):
             options = options.copy()
             if not self.other.use_cache():
-                logger.critical('Could not promise resource without other cache <%s>',
-                                self.other)
+                logger.critical(
+                    "Could not promise resource without other cache <%s>",
+                    self.other,
+                )
+                raise ValueError(
+                    "Could not promise: other store does not use cache"
+                )
             pr_info = self.mkpromise_info(remote, options)
             pr_file = self.mkpromise_file(pr_info, local)
             # Check if a previous promise with the same description exists
             preexisting = self.promise.check(remote.copy(), options)
             if preexisting:
                 pr_old_file = self.promise.locate(remote.copy())
-                prcheck = self._clean_pr_json(self.system.json_load(pr_old_file))
+                prcheck = self._clean_pr_json(
+                    self.system.json_load(pr_old_file)
+                )
                 prnew = self._clean_pr_json(self.system.json_load(pr_file))
                 preexisting = prcheck == prnew
                 if preexisting:
-                    logger.info('The promise file <%s> preexisted and is compatible',
-                                pr_old_file)
+                    logger.info(
+                        "The promise file <%s> preexisted and is compatible",
+                        pr_old_file,
+                    )
                     rc = True
                 else:
-                    logger.warning("The promise file <%s> already exists but doesn't match",
-                                   pr_old_file)
+                    logger.warning(
+                        "The promise file <%s> already exists but doesn't match",
+                        pr_old_file,
+                    )

             # Put the new promise file in the PromiseCache
-            options['obs_overridelocal'] = local  # Pretty nasty :-(
+            options["obs_overridelocal"] = local  # Pretty nasty :-(
             if not preexisting:
-                logger.warning('Log a promise instead of missing resource <%s>', local)
+                logger.warning(
+                    "Log a promise instead of missing resource <%s>", local
+                )
                 rc = self.promise.put(pr_file, remote.copy(), options)
                 if rc:
-                    del options['obs_overridelocal']
+                    del options["obs_overridelocal"]
                     self.other.delete(remote.copy(), options)
             else:
-                options['dryrun'] = True  # Just update the tracker
+                options["dryrun"] = True  # Just update the tracker
                 rc = self.promise.put(pr_file, remote.copy(), options)
                 self.system.remove(pr_file)

         else:
-            logger.info('Actual promise does exists <%s>', local)
+            logger.info("Actual promise does exists <%s>", local)
             rc = self.other.put(local, remote.copy(), options)
             if rc:
                 self.promise.delete(remote.copy(), options)
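`_clean_pr_json` above exists so that `put` can compare an old and a new promise description while ignoring volatile fields: the timestamp, and the `storetrack` option when present. A minimal sketch of why the comparison would otherwise always fail (payloads invented):

```python
# Illustration only: comparing promise descriptions after dropping the
# volatile "stamp" entry, as _clean_pr_json does.
def clean(prjson):
    prjson = dict(prjson)  # shallow copy for the demo
    del prjson["stamp"]
    return prjson

old = {"stamp": "20240101T0000", "rhandler": {"kind": "gridpoint"}}
new = {"stamp": "20240102T1200", "rhandler": {"kind": "gridpoint"}}
assert old != new
assert clean(old) == clean(new)  # same promise, different timestamps
```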
@@ -1562,11 +1498,13 @@ class PromiseStore(footprints.FootprintBase):
     def delete(self, remote, options=None):
         """Go through internal opened stores and delete the resource."""
         options = self._options_fixup(options)
-        logger.debug('Promise delete from %s', remote)
-        return self.promise.delete(remote.copy(), options) and self.other.delete(remote.copy(), options)
+        logger.debug("Promise delete from %s", remote)
+        return self.promise.delete(
+            remote.copy(), options
+        ) and self.other.delete(remote.copy(), options)


 # Activate the footprint's fasttrack on the stores collector
-fcollect = footprints.collectors.get(tag='store')
-fcollect.fasttrack = ('netloc', 'scheme')
+fcollect = footprints.collectors.get(tag="store")
+fcollect.fasttrack = ("netloc", "scheme")
 del fcollect
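A last behavioural note on `delete` above: the two store deletions are combined with `and`, so if the promise-side delete fails, the other store's delete is never attempted. A stand-in demonstration of that short-circuit:

```python
# Illustration only: "and" short-circuits, mirroring PromiseStore.delete.
calls = []

def promise_delete(ok):
    calls.append("promise")
    return ok

def other_delete():
    calls.append("other")
    return True

rc = promise_delete(False) and other_delete()
print(rc, calls)  # False ['promise'] -- the other store is never reached
```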