vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/data/stores.py
CHANGED
@@ -8,6 +8,7 @@ Store objects use the :mod:`footprints` mechanism.
 import copy
 import ftplib
 import io
+import os
 import re
 
 from bronx.fancies import loggers
@@ -15,12 +16,15 @@ import footprints
 
 from vortex import sessions
 from vortex import config
-from vortex.data.abstractstores import Store, ArchiveStore, CacheStore
+from vortex.data.abstractstores import (
+    Store,
+    ArchiveStore,
+    CacheStore,
+)
 from vortex.data.abstractstores import MultiStore, PromiseStore
 from vortex.data.abstractstores import ARCHIVE_GET_INTENT_DEFAULT
 from vortex.layout import dataflow
 from vortex.syntax.stdattrs import hashalgo_avail_list
-from vortex.syntax.stdattrs import FreeXPid
 from vortex.syntax.stdattrs import DelayedEnvValue
 from vortex.tools.systems import ExecutionError
 
@@ -30,24 +34,35 @@ __all__ = []
 logger = loggers.getLogger(__name__)
 
 
+def get_cache_location():
+    try:
+        cacheloc = config.from_config(
+            section="data-tree",
+            key="rootdir",
+        )
+    except config.ConfigurationError:
+        cacheloc = os.path.join(os.environ["HOME"], ".vortex.d")
+    return cacheloc
+
+
 class MagicPlace(Store):
     """Somewhere, over the rainbow!"""
 
     _footprint = dict(
-        info = 'Evanescent physical store',
-        attr = dict(
-            scheme = dict(
-                values = ['magic'],
+        info="Evanescent physical store",
+        attr=dict(
+            scheme=dict(
+                values=["magic"],
             ),
         ),
-        priority = dict(
-            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
-        )
+        priority=dict(
+            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
+        ),
    )
 
    @property
    def realkind(self):
-        return 'magicstore'
+        return "magicstore"
 
    def has_fast_check(self):
        """A void check is very fast !"""
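The new `get_cache_location()` helper shown above resolves the cache root from the `data-tree` section of the configuration and falls back to `$HOME/.vortex.d` when the key is missing. A minimal self-contained sketch of that lookup-with-fallback pattern; the `_CONFIG` table and the `from_config`/`ConfigurationError` stand-ins below are hypothetical, since the real implementations live in `vortex/config.py`, which is only summarized in this diff:

```python
import os


class ConfigurationError(KeyError):
    """Stand-in for vortex.config.ConfigurationError."""


# Hypothetical configuration table standing in for the parsed config file
_CONFIG = {("data-tree", "rootdir"): "/scratch/me/vortex-data"}


def from_config(section, key):
    """Stand-in for vortex.config.from_config."""
    try:
        return _CONFIG[(section, key)]
    except KeyError as e:
        raise ConfigurationError(f"{section}/{key} is not defined") from e


def get_cache_location():
    # Prefer the configured data tree, fall back to a per-user default
    try:
        return from_config(section="data-tree", key="rootdir")
    except ConfigurationError:
        return os.path.join(os.environ["HOME"], ".vortex.d")


print(get_cache_location())  # -> /scratch/me/vortex-data
```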
@@ -59,7 +74,7 @@ class MagicPlace(Store):
 
     def magiclocate(self, remote, options):
         """Void - Empty string returned."""
-        return ''
+        return ""
 
     def magicget(self, remote, local, options):
         """Void - Always True."""
@@ -108,23 +123,23 @@ class FunctionStore(Store):
     """
 
     _footprint = dict(
-        info = 'Dummy store that calls a function',
-        attr = dict(
-            scheme = dict(
-                values = ['function'],
+        info="Dummy store that calls a function",
+        attr=dict(
+            scheme=dict(
+                values=["function"],
+            ),
+            netloc=dict(
+                values=[""],
             ),
-            netloc = dict(
-                values = [''],
-            )
         ),
-        priority = dict(
-            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
-        )
+        priority=dict(
+            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
+        ),
     )
 
     @property
     def realkind(self):
-        return 'functionstore'
+        return "functionstore"
 
     def has_fast_check(self):
         """A void check is very fast !"""
@@ -136,20 +151,21 @@ class FunctionStore(Store):
 
     def functionlocate(self, remote, options):
         """The name of the function that will be called."""
-        cleanname = remote['path'][1:]
-        if cleanname.endswith('/'):
+        cleanname = remote["path"][1:]
+        if cleanname.endswith("/"):
             cleanname = cleanname[:-1]
         return cleanname
 
     def functionget(self, remote, local, options):
         """Calls the appropriate function and writes the result."""
         # Find the appropriate function
-        cbfunc = self.system.import_function(
-            self.functionlocate(remote, options))
+        cbfunc = self.system.import_function(
+            self.functionlocate(remote, options)
+        )
         # ... and call it
         opts = dict()
         opts.update(options)
-        opts.update(remote['query'])
+        opts.update(remote["query"])
         try:
             fres = cbfunc(opts)
         except FunctionStoreCallbackError as e:
@@ -157,15 +173,15 @@ class FunctionStore(Store):
             logger.error("Here is the exception: %s", str(e))
             fres = None
         if fres is not None:
-            if 'intent' in options and options['intent'] == dataflow.intent.IN:
-                logger.info('Ignore intent <in> for function input.')
+            if "intent" in options and options["intent"] == dataflow.intent.IN:
+                logger.info("Ignore intent <in> for function input.")
             # Handle StringIO objects, by changing them to ByteIOs...
             if isinstance(fres, io.StringIO):
                 s_fres = fres
                 s_fres.seek(0)
                 fres = io.BytesIO()
                 for l in s_fres:
-                    fres.write(l.encode(encoding='utf-8'))
+                    fres.write(l.encode(encoding="utf-8"))
                 fres.seek(0)
             # NB: fres should be a file like object (BytesIO will do the trick)
             return self.system.cp(fres, local)
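For context, `FunctionStore` turns a `function://` URI into a dotted function name (`functionlocate`), imports it via `system.import_function`, calls it with the merged options/query dictionary, and copies the returned file-like object into the local container (`functionget`). A hedged sketch of a compatible callback; the module path and the `member` query argument are made up for illustration:

```python
import io


def build_extra_namelist(opts):
    """Callback for a URI such as function:///mymodule.build_extra_namelist?member=3.

    ``opts`` merges the store options with the URI query arguments
    (query values arrive as sequences, hence the ``[0]``).
    """
    member = opts.get("member", ["0"])[0]
    # Returning a StringIO is fine: functionget re-encodes it into a BytesIO
    return io.StringIO("&NAMPAR\n  MEMBER={:s}\n/\n".format(member))
```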
@@ -187,30 +203,30 @@ class Finder(Store):
     """The most usual store: your current filesystem!"""
 
     _footprint = dict(
-        info = 'Miscellaneous file access',
-        attr = dict(
-            scheme = dict(
-                values = ['file', 'ftp', 'symlink', 'rcp', 'scp'],
+        info="Miscellaneous file access",
+        attr=dict(
+            scheme=dict(
+                values=["file", "ftp", "symlink", "rcp", "scp"],
             ),
-            netloc = dict(
-                outcast = ['oper.inline.fr'],
+            netloc=dict(
+                outcast=["oper.inline.fr"],
             ),
-            storehash = dict(
-                values = hashalgo_avail_list,
+            storehash=dict(
+                values=hashalgo_avail_list,
             ),
         ),
-        priority = dict(
-            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
-        )
+        priority=dict(
+            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
+        ),
     )
 
     def __init__(self, *args, **kw):
-        logger.debug('Finder store init %s', self.__class__)
+        logger.debug("Finder store init %s", self.__class__)
         super().__init__(*args, **kw)
 
     @property
     def realkind(self):
-        return 'finder'
+        return "finder"
 
     def hostname(self):
         """Returns the current :attr:`netloc`."""
@@ -218,21 +234,28 @@ class Finder(Store):
 
     def fullpath(self, remote):
         """Return actual path unless explicitly defined as relative path."""
-        if remote['query'].get('relative', False):
-            return remote['path'].lstrip('/')
+        if remote["query"].get("relative", False):
+            return remote["path"].lstrip("/")
         else:
-            return remote['path']
+            return remote["path"]
 
     def _localtarfix(self, local):
-        if (isinstance(local, str) and
-                self.system.path.isfile(local) and self.system.is_tarfile(local)):
-            destdir = self.system.path.dirname(self.system.path.realpath(local))
+        if (
+            isinstance(local, str)
+            and self.system.path.isfile(local)
+            and self.system.is_tarfile(local)
+        ):
+            destdir = self.system.path.dirname(
+                self.system.path.realpath(local)
+            )
             try:
                 self.system.smartuntar(local, destdir)
             except ExecutionError:
                 if not self.system.is_tarname(local):
-                    logger.warning('An automatic untar was attempted but it failed. ' +
-                                   "Maybe the system's is_tarfile got it wrong ?")
+                    logger.warning(
+                        "An automatic untar was attempted but it failed. "
+                        + "Maybe the system's is_tarfile got it wrong ?"
+                    )
                 else:
                     raise
 
@@ -251,10 +274,12 @@ class Finder(Store):
     def fileget(self, remote, local, options):
         """Delegates to ``system`` the copy of ``remote`` to ``local``."""
         rpath = self.fullpath(remote)
-        logger.info('fileget on %s (to: %s)', rpath, local)
-        if 'intent' in options and options['intent'] == dataflow.intent.IN:
-            logger.info('Ignore intent <in> for remote input %s', rpath)
-        rc = self.system.cp(rpath, local, fmt=options.get('fmt'), intent=dataflow.intent.INOUT)
+        logger.info("fileget on %s (to: %s)", rpath, local)
+        if "intent" in options and options["intent"] == dataflow.intent.IN:
+            logger.info("Ignore intent <in> for remote input %s", rpath)
+        rc = self.system.cp(
+            rpath, local, fmt=options.get("fmt"), intent=dataflow.intent.INOUT
+        )
         rc = rc and self._hash_get_check(self.fileget, remote, local, options)
         if rc:
             self._localtarfix(local)
@@ -263,8 +288,8 @@ class Finder(Store):
     def fileput(self, local, remote, options):
         """Delegates to ``system`` the copy of ``local`` to ``remote``."""
         rpath = self.fullpath(remote)
-        logger.info('fileput to %s (from: %s)', rpath, local)
-        rc = self.system.cp(local, rpath, fmt=options.get('fmt'))
+        logger.info("fileput to %s (from: %s)", rpath, local)
+        rc = self.system.cp(local, rpath, fmt=options.get("fmt"))
         return rc and self._hash_put(self.fileput, local, remote, options)
 
     def filedelete(self, remote, options):
@@ -272,10 +297,13 @@ class Finder(Store):
         rc = None
         if self.filecheck(remote, options):
             rpath = self.fullpath(remote)
-            logger.info('filedelete on %s', rpath)
-            rc = self.system.remove(rpath, fmt=options.get('fmt'))
+            logger.info("filedelete on %s", rpath)
+            rc = self.system.remove(rpath, fmt=options.get("fmt"))
         else:
-            logger.error('Try to remove a non-existing resource <%s>', self.fullpath(remote))
+            logger.error(
+                "Try to remove a non-existing resource <%s>",
+                self.fullpath(remote),
+            )
         return rc
 
     symlinkcheck = filecheck
@@ -283,34 +311,40 @@ class Finder(Store):
 
     def symlinkget(self, remote, local, options):
         rpath = self.fullpath(remote)
-        if 'intent' in options and options['intent'] == dataflow.intent.INOUT:
-            logger.error('It is unsafe to have a symlink with intent=inout: %s', rpath)
+        if "intent" in options and options["intent"] == dataflow.intent.INOUT:
+            logger.error(
+                "It is unsafe to have a symlink with intent=inout: %s", rpath
+            )
             return False
         rc = self.system.remove(local)
         self.system.symlink(rpath, local)
         return rc and self.system.path.exists(local)
 
     def symlinkput(self, local, remote, options):
-        logger.error('The Finder store with scheme:symlink is not able to perform Puts.')
+        logger.error(
+            "The Finder store with scheme:symlink is not able to perform Puts."
+        )
         return False
 
     def symlinkdelete(self, remote, options):
-        logger.error('The Finder store with scheme:symlink is not able to perform Deletes.')
+        logger.error(
+            "The Finder store with scheme:symlink is not able to perform Deletes."
+        )
         return False
 
     def _ftpinfos(self, remote, **kwargs):
         args = kwargs.copy()
-        args['hostname'] = self.hostname()
-        args['logname'] = remote['username']
+        args["hostname"] = self.hostname()
+        args["logname"] = remote["username"]
         port = self.hostname().netport
         if port is not None:
-            args['port'] = port
+            args["port"] = port
         return args
 
     def ftpcheck(self, remote, options):
         """Delegates to ``system.ftp`` a distant check."""
         rc = None
-        ftp = self.system.ftp(** self._ftpinfos(remote))
+        ftp = self.system.ftp(**self._ftpinfos(remote))
         if ftp:
             try:
                 rc = ftp.size(self.fullpath(remote))
@@ -324,7 +358,7 @@ class Finder(Store):
 
     def ftplocate(self, remote, options):
         """Delegates to ``system`` qualified name creation."""
-        ftp = self.system.ftp(** self._ftpinfos(remote, delayed=True))
+        ftp = self.system.ftp(**self._ftpinfos(remote, delayed=True))
         if ftp:
             rloc = ftp.netpath(self.fullpath(remote))
             ftp.close()
@@ -335,13 +369,15 @@ class Finder(Store):
     def ftpget(self, remote, local, options):
         """Delegates to ``system`` the file transfer of ``remote`` to ``local``."""
         rpath = self.fullpath(remote)
-        logger.info('ftpget on ftp://%s/%s (to: %s)', self.hostname(), rpath, local)
+        logger.info(
+            "ftpget on ftp://%s/%s (to: %s)", self.hostname(), rpath, local
+        )
         rc = self.system.smartftget(
             rpath,
             local,
-            fmt=options.get('fmt'),
+            fmt=options.get("fmt"),
             # ftp control
-            ** self._ftpinfos(remote),
+            **self._ftpinfos(remote),
         )
         rc = rc and self._hash_get_check(self.ftpget, remote, local, options)
         if rc:
@@ -352,14 +388,16 @@ class Finder(Store):
         """Delegates to ``system`` the file transfer of ``local`` to ``remote``."""
         rpath = self.fullpath(remote)
         put_opts = dict()
-        put_opts['fmt'] = options.get('fmt')
-        put_opts['sync'] = options.get('enforcesync', False)
-        logger.info('ftpput to ftp://%s/%s (from: %s)', self.hostname(), rpath, local)
+        put_opts["fmt"] = options.get("fmt")
+        put_opts["sync"] = options.get("enforcesync", False)
+        logger.info(
+            "ftpput to ftp://%s/%s (from: %s)", self.hostname(), rpath, local
+        )
         rc = self.system.smartftput(
             local,
             rpath,
             # ftp control
-            ** self._ftpinfos(remote, **put_opts),
+            **self._ftpinfos(remote, **put_opts),
         )
         return rc and self._hash_put(self.ftpput, local, remote, options)
 
@@ -368,7 +406,9 @@ class Finder(Store):
         rc = None
         actualpath = self.fullpath(remote)
         if self.ftpcheck(remote, options):
-            logger.info('ftpdelete on ftp://%s/%s', self.hostname(), actualpath)
+            logger.info(
+                "ftpdelete on ftp://%s/%s", self.hostname(), actualpath
+            )
             ftp = self.system.ftp(**self._ftpinfos(remote))
             if ftp:
                 try:
@@ -376,14 +416,16 @@ class Finder(Store):
                 finally:
                     ftp.close()
         else:
-            logger.error('Try to remove a non-existing resource <%s>', actualpath)
+            logger.error(
+                "Try to remove a non-existing resource <%s>", actualpath
+            )
         return rc
 
 
 class _VortexStackedStorageMixin:
     """Mixin class that adds utility functions to work with stacked data."""
 
-    _STACKED_RE = re.compile('stacked-')
+    _STACKED_RE = re.compile("stacked-")
 
     @property
     def stackedstore(self):
@@ -391,25 +433,31 @@ class _VortexStackedStorageMixin:
         return self._STACKED_RE.search(self.netloc)
 
     def _stacked_remainder(self, remote, stackpath):
-        path_remainder = remote['path'].strip('/').split('/')
-        for a_spath in stackpath.split('/'):
+        path_remainder = remote["path"].strip("/").split("/")
+        for a_spath in stackpath.split("/"):
             if path_remainder and path_remainder[0] == a_spath:
                 del path_remainder[0]
             else:
                 break
-        return '/'.join(path_remainder)
+        return "/".join(path_remainder)
 
     def _stacked_xremote(self, remote):
         """The path to **remote** with its stack."""
         if self.stackedstore:
             remote = remote.copy()
-            remote['query'] = remote['query'].copy()
-            stackpath = remote['query'].pop('stackpath', (None,))[0]
-            stackfmt = remote['query'].pop('stackfmt', (None,))[0]
+            remote["query"] = remote["query"].copy()
+            stackpath = remote["query"].pop("stackpath", (None,))[0]
+            stackfmt = remote["query"].pop("stackfmt", (None,))[0]
             if stackpath is None or stackfmt is None:
-                raise ValueError('"stackpath" and "stackfmt" are not available in the query.')
+                raise ValueError(
+                    '"stackpath" and "stackfmt" are not available in the query.'
+                )
             else:
-                remote['path'] = stackpath + '/' + self._stacked_remainder(remote, stackpath)
+                remote["path"] = (
+                    stackpath
+                    + "/"
+                    + self._stacked_remainder(remote, stackpath)
+                )
         return remote
 
     def _stacked_xegglocate(self, remote):
@@ -423,14 +471,16 @@ class _VortexStackedStorageMixin:
 
         """
         remote = remote.copy()
-        remote['query'] = remote['query'].copy()
-        stackpath = remote['query'].pop('stackpath', (None,))[0].strip('/')
-        stackfmt = remote['query'].pop('stackfmt', (None,))[0]
+        remote["query"] = remote["query"].copy()
+        stackpath = remote["query"].pop("stackpath", (None,))[0].strip("/")
+        stackfmt = remote["query"].pop("stackfmt", (None,))[0]
         if stackpath is None or stackfmt is None:
-            raise ValueError('"stackpath" and "stackfmt" are not available in the query.')
+            raise ValueError(
+                '"stackpath" and "stackfmt" are not available in the query.'
+            )
         else:
             resource_remainder = self._stacked_remainder(remote, stackpath)
-            remote['path'] = '/' + stackpath
+            remote["path"] = "/" + stackpath
         return remote, stackfmt, resource_remainder
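`_stacked_remainder` strips the leading stack-egg components from a resource path so the resource can be addressed relative to the retrieved stack. A worked sketch of the same prefix-stripping logic on made-up paths (the directory names are illustrative only):

```python
def stacked_remainder(path, stackpath):
    # Drop the leading components of `path` that match `stackpath`
    path_remainder = path.strip("/").split("/")
    for a_spath in stackpath.split("/"):
        if path_remainder and path_remainder[0] == a_spath:
            del path_remainder[0]
        else:
            break
    return "/".join(path_remainder)


# Hypothetical layout: the stack egg holds the first four path components
print(stacked_remainder(
    "/arome/3dvarfr/ABCD/20240523T0600A/stack/listing.tar",
    "arome/3dvarfr/ABCD/20240523T0600A",
))  # -> stack/listing.tar
```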
@@ -438,11 +488,13 @@ _vortex_readonly_store = footprints.Footprint(
     info="Abstract store' readonly=True attribute",
     attr=dict(
         readonly=dict(
-            values=[True, ],
+            values=[
+                True,
+            ],
             optional=True,
-            default=True
+            default=True,
         )
-    )
+    ),
 )
@@ -451,46 +503,49 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
 
     _abstract = True
     _footprint = dict(
-        info = 'VORTEX archive access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
-            ),
-            netloc = dict(
+        info="VORTEX archive access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            storehead = dict(
-                optional = True,
-                default = 'vortex',
-                outcast = ['xp'],
+            netloc=dict(),
+            storehead=dict(
+                optional=True,
+                default="vortex",
+                outcast=["xp"],
             ),
-        )
+        ),
     )
 
-    _STACKS_AUTOREFILL_CRIT = 'stacked-archive-smart'
+    _STACKS_AUTOREFILL_CRIT = "stacked-archive-smart"
 
     def __init__(self, *args, **kw):
-        logger.debug('Vortex archive store init %s', self.__class__)
+        logger.debug("Vortex archive store init %s", self.__class__)
         super().__init__(*args, **kw)
 
     def remap_read(self, remote, options):
         """Remap actual remote path to distant store path for intrusive actions."""
-        pass
+        raise NotImplementedError
+
+    def remap_write(self, remote, options):
+        """Remap actual remote path to distant store path for intrusive actions."""
+        raise NotImplementedError
 
     def remap_list(self, remote, options):
         """Reformulates the remote path to compatible vortex namespace."""
-        if len(remote['path'].split('/')) >= 4:
+        if len(remote["path"].split("/")) >= 4:
             return self.remap_read(remote, options)
         else:
-            logger.critical('The << %s >> path is not listable.', remote['path'])
+            logger.critical(
+                "The << %s >> path is not listable.", remote["path"]
+            )
             return None
 
-    remap_write = remap_read
-
     @property
     def stacks_autorefill(self):
         """Where to refill a stack retrieved from the archive."""
         if self._STACKS_AUTOREFILL_CRIT in self.netloc:
-            return self.netloc.replace(self._STACKS_AUTOREFILL_CRIT, 'cache')
+            return self.netloc.replace(self._STACKS_AUTOREFILL_CRIT, "cache")
         else:
             return None
@@ -500,24 +555,34 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         rundir = sessions.current().context.rundir
         if not rundir:
             rundir = self.system.pwd()
-        rundir = self.system.path.join(rundir, 'vortex_stacks_xeggs')
-        target = self.system.path.join(rundir, *remote['path'].strip('/').split('/'))
+        rundir = self.system.path.join(rundir, "vortex_stacks_xeggs")
+        target = self.system.path.join(
+            rundir, *remote["path"].strip("/").split("/")
+        )
         targetopts = dict(fmt=remotefmt, intent=dataflow.intent.IN)
         if self.system.path.exists(target):
-            logger.info('Stack previously retrieved (in %s). Using it.', target)
+            logger.info(
+                "Stack previously retrieved (in %s). Using it.", target
+            )
             rc = True
         else:
             if result_id:
-                rc = self._vortexfinaliseget(result_id, remote, target, targetopts)
+                rc = self._vortexfinaliseget(
+                    result_id, remote, target, targetopts
+                )
             else:
                 rc = self._vortexget(remote, target, targetopts)
             if rc and self.stacks_autorefill:
-                rstore = footprints.proxy.store(scheme=self.scheme, netloc=self.stacks_autorefill)
+                rstore = footprints.proxy.store(
+                    scheme=self.scheme, netloc=self.stacks_autorefill
+                )
                 logger.info("Refilling the stack egg to [%s]", rstore)
                 try:
                     rstore.put(target, remote.copy(), targetopts)
                 except (ExecutionError, OSError) as e:
-                    logger.error('An ExecutionError happened during the refill: %s', str(e))
+                    logger.error(
+                        "An ExecutionError happened during the refill: %s", str(e)
+                    )
                     logger.error("This error is ignored... but that's ugly !")
         return rc, target, remainder
@@ -526,11 +591,15 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         if self.stackedstore:
             s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
             options = options.copy()
-            options['fmt'] = s_remotefmt
+            options["fmt"] = s_remotefmt
             rc = self._vortexcheck(s_remote, options)
             if rc:
-                rc, target, remainder = self._vortex_stacked_egg_retrieve(remote)
-                rc = rc and self.system.path.exists(self.system.path.join(target, remainder))
+                rc, target, remainder = self._vortex_stacked_egg_retrieve(
+                    remote
+                )
+                rc = rc and self.system.path.exists(
+                    self.system.path.join(target, remainder)
+                )
             return rc
         else:
             return self._vortexcheck(remote, options)
@@ -545,7 +614,7 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         if self.stackedstore:
             remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
             options = options.copy()
-            options['fmt'] = s_remotefmt
+            options["fmt"] = s_remotefmt
         return self._vortexlocate(remote, options)
 
     def _vortexlocate(self, remote, options):
@@ -573,7 +642,7 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         if self.stackedstore:
             remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
             options = options.copy()
-            options['fmt'] = s_remotefmt
+            options["fmt"] = s_remotefmt
         return self._vortexprestageinfo(remote, options)
 
     def _vortexprestageinfo(self, remote, options):
@@ -585,10 +654,12 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         """Vortex' archive get sequence."""
         if self.stackedstore:
             rc, target, remainder = self._vortex_stacked_egg_retrieve(remote)
-            rc = rc and self.system.cp(
-                self.system.path.join(target, remainder), local,
-                fmt=options.get('fmt'),
-                intent=options.get('intent', ARCHIVE_GET_INTENT_DEFAULT))
+            rc = rc and self.system.cp(
+                self.system.path.join(target, remainder),
+                local,
+                fmt=options.get("fmt"),
+                intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+            )
             return rc
         else:
             return self._vortexget(remote, local, options)
@@ -603,7 +674,7 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
         if self.stackedstore:
             s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
             targetopts = dict(fmt=s_remotefmt, intent=dataflow.intent.IN)
-            return self._vortexearlyget(s_remote, 'somelocalfile', targetopts)
+            return self._vortexearlyget(s_remote, "somelocalfile", targetopts)
         else:
             return self._vortexearlyget(remote, local, options)
 
@@ -615,11 +686,15 @@ class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
     def vortexfinaliseget(self, result_id, remote, local, options):
         """Vortex' archive finaliseget sequence."""
         if self.stackedstore:
-            rc, target, remainder = self._vortex_stacked_egg_retrieve(
-                remote, result_id=result_id)
-            rc = rc and self.system.cp(
-                self.system.path.join(target, remainder), local,
-                fmt=options.get('fmt'),
-                intent=options.get('intent', ARCHIVE_GET_INTENT_DEFAULT))
+            rc, target, remainder = self._vortex_stacked_egg_retrieve(
+                remote, result_id=result_id
+            )
+            rc = rc and self.system.cp(
+                self.system.path.join(target, remainder),
+                local,
+                fmt=options.get("fmt"),
+                intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+            )
             return rc
         else:
             return self._vortexfinaliseget(result_id, remote, local, options)
@@ -655,90 +730,38 @@ class VortexStdBaseArchiveStore(_VortexBaseArchiveStore):
     """
 
     _footprint = dict(
-        info = 'VORTEX archive access for casual experiments',
-        attr = dict(
-            netloc = dict(
-                values = ['vortex.archive-legacy.fr'],
+        info="VORTEX archive access for casual experiments",
+        attr=dict(
+            netloc=dict(
+                values=["vortex.archive-legacy.fr"],
             ),
-        )
+        ),
     )
 
-    @property
-    def _actual_mappingroot(self):
-        """Read the get entry point form configuration."""
-        return config.from_config(
-            section="storage", key="rootdir",
-        )
-
     def remap_read(self, remote, options):
         """Reformulates the remote path to compatible vortex namespace."""
         remote = copy.copy(remote)
-        xpath = remote['path'].strip('/').split('/')
-        if 'root' not in remote:
-            remote['root'] = self._actual_mappingroot
-
-        xpath[3:4] = list(xpath[3])
-        remote['path'] = self.system.path.join(*xpath)
-        return remote
-
-
-class VortexStdStackedArchiveStore(VortexStdBaseArchiveStore):
-    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.
-
-    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
-    the stack associated to the resource. The '-smart' variant, has the ability
-    to refill the whole stack into local cache (to be faster in the future).
-    """
-
-    _footprint = [
-        _vortex_readonly_store,
-        dict(
-            attr = dict(
-                netloc = dict(
-                    values = ['vortex.stacked-archive-legacy.fr',
-                              'vortex.stacked-archive-smart.fr'],
-                ),
+        try:
+            remote["root"] = config.from_config(
+                section="storage",
+                key="rootdir",
             )
-        )]
-
-
-class VortexFreeStdBaseArchiveStore(VortexStdBaseArchiveStore):
-    """Archive for casual VORTEX experiments: Support for Free XPIDs.
-
-
-
-
-    """
-
-    #: Path to the vortex-free Store configuration file
-    _store_global_config = '@store-vortex-free.ini'
-    _datastore_id = 'store-vortex-free-conf'
-
-    _footprint = dict(
-        info = 'VORTEX archive access for casual experiments',
-        attr = dict(
-            netloc = dict(
-                values = ['vortex-free.archive-legacy.fr'],
-            ),
-        )
-    )
-
-    def remap_read(self, remote, options):
-        """Reformulates the remote path to compatible vortex namespace."""
-        remote = copy.copy(remote)
-        xpath = remote['path'].strip('/').split('/')
-        f_xpid = FreeXPid(xpath[2])
-        xpath[2] = f_xpid.id
-        if 'root' not in remote:
-            remote['root'] = self._actual_storeroot(f_xpid)
-        remote['path'] = self.system.path.join(*xpath)
+        except config.ConfigurationError as e:
+            msg = (
+                "Trying to write to archive but location is not configured.\n"
+                'Make sure key "rootdir" is defined in storage section of '
+                "the configuration.\n"
+                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
+            )
+            logger.error(msg)
+            raise e
         return remote
 
     remap_write = remap_read
 
 
-class VortexFreeStdStackedArchiveStore(VortexFreeStdBaseArchiveStore):
-    """Archive for casual VORTEX experiments: Support for Free XPIDs.
+class VortexStdStackedArchiveStore(VortexStdBaseArchiveStore):
+    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.
 
     This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
     the stack associated to the resource. The '-smart' variant, has the ability
@@ -748,13 +771,16 @@ class VortexFreeStdStackedArchiveStore(VortexFreeStdBaseArchiveStore):
     _footprint = [
         _vortex_readonly_store,
         dict(
-            attr = dict(
-                netloc = dict(
-                    values = ['vortex-free.stacked-archive-legacy.fr',
-                              'vortex-free.stacked-archive-smart.fr'],
+            attr=dict(
+                netloc=dict(
+                    values=[
+                        "vortex.stacked-archive-legacy.fr",
+                        "vortex.stacked-archive-smart.fr",
+                    ],
                 ),
             )
-        )]
+        ),
+    ]
 
 
 class VortexOpBaseArchiveStore(_VortexBaseArchiveStore):
@@ -765,39 +791,43 @@ class VortexOpBaseArchiveStore(_VortexBaseArchiveStore):
     """
 
     _footprint = dict(
-        info = 'VORTEX archive access for op experiments',
-        attr = dict(
-            netloc = dict(
-                values = ['vsop.archive-legacy.fr'],
+        info="VORTEX archive access for op experiments",
+        attr=dict(
+            netloc=dict(
+                values=["vsop.archive-legacy.fr"],
             ),
-            storetrue = dict(
-                default = DelayedEnvValue('op_archive', True),
+            storetrue=dict(
+                default=DelayedEnvValue("op_archive", True),
             ),
-        )
+        ),
     )
 
-    @property
-    def _actual_storeroot(self):
-        return (
-            self.storeroot or
-            config.from_config(
-                section="storage", key="op_rootdir",
-            )
-        )
-
     def remap_read(self, remote, options):
         """Reformulates the remote path to compatible vortex namespace."""
         remote = copy.copy(remote)
-        remote['root'] = self._actual_storeroot
-        xpath = remote['path'].split('/')
-        if len(xpath) >= 5 and re.match(r'^\d{8}T\d{2,4}', xpath[4]):
+        try:
+            remote["root"] = config.from_config(
+                section="storage",
+                key="op_rootdir",
+            )
+        except config.ConfigurationError as e:
+            msg = (
+                "Trying to write to operational data archive but location"
+                'is not configured.\nMake sure key "rootdir" is defined in '
+                "the storage section of the configuration.\n"
+                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
+            )
+            logger.error(msg)
+            raise e
+        xpath = remote["path"].split("/")
+        if len(xpath) >= 5 and re.match(r"^\d{8}T\d{2,4}", xpath[4]):
             # If a date is detected
             vxdate = list(xpath[4])
-            vxdate.insert(4, '/')
-            vxdate.insert(7, '/')
-            vxdate.insert(10, '/')
-            xpath[4] = ''.join(vxdate)
-            remote['path'] = self.system.path.join(*xpath)
+            vxdate.insert(4, "/")
+            vxdate.insert(7, "/")
+            vxdate.insert(10, "/")
+            xpath[4] = "".join(vxdate)
+            remote["path"] = self.system.path.join(*xpath)
         return remote
 
     remap_write = remap_read
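The date handling in `remap_read` above splits an eight-digit vortex date stamp into a year/month/day hierarchy by inserting slashes at fixed offsets. A worked sketch of just that transformation, on an illustrative stamp:

```python
import re

# Illustrative path component: a vortex date stamp (YYYYMMDDTHHmm)
stamp = "20240523T0600"

if re.match(r"^\d{8}T\d{2,4}", stamp):
    vxdate = list(stamp)
    # Offsets 4, 7 and 10 split the stamp into YYYY / MM / DD / THHmm
    vxdate.insert(4, "/")
    vxdate.insert(7, "/")
    vxdate.insert(10, "/")
    stamp = "".join(vxdate)

print(stamp)  # -> 2024/05/23/T0600
```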
@@ -814,13 +844,16 @@ class VortexOpStackedArchiveStore(VortexOpBaseArchiveStore):
     _footprint = [
         _vortex_readonly_store,
         dict(
-            attr = dict(
-                netloc = dict(
-                    values = ['vsop.stacked-archive-legacy.fr',
-                              'vsop.stacked-archive-smart.fr'],
+            attr=dict(
+                netloc=dict(
+                    values=[
+                        "vsop.stacked-archive-legacy.fr",
+                        "vsop.stacked-archive-smart.fr",
+                    ],
                 ),
             )
-        )]
+        ),
+    ]
 
 
 class VortexArchiveStore(MultiStore):
@@ -835,42 +868,58 @@ class VortexArchiveStore(MultiStore):
     """
 
     _footprint = dict(
-        info = 'VORTEX archive access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
+        info="VORTEX archive access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            netloc = dict(
-                values = ['vortex.archive.fr', 'vortex-free.archive.fr', 'vsop.archive.fr'],
+            netloc=dict(
+                values=[
+                    "vortex.archive.fr",
+                    "vortex-free.archive.fr",
+                    "vsop.archive.fr",
+                ],
             ),
-            refillstore = dict(
-                default = False,
+            refillstore=dict(
+                default=False,
             ),
-            storehead = dict(
-                optional = True,
+            storehead=dict(
+                optional=True,
             ),
-            storesync = dict(
-                alias = ('archsync', 'synchro'),
-                type = bool,
-                optional = True,
+            storesync=dict(
+                alias=("archsync", "synchro"),
+                type=bool,
+                optional=True,
             ),
-        )
+        ),
     )
 
     def filtered_readable_openedstores(self, remote):
         """Only use the stacked store if sensible."""
-        ostores = [self.openedstores[0], ]
-        ostores.extend([sto for sto in self.openedstores[1:]
-                        if not sto.stackedstore or
-                        'stackpath' in remote['query']])
+        ostores = [
+            self.openedstores[0],
+        ]
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if not sto.stackedstore or "stackpath" in remote["query"]
+            ]
+        )
         return ostores
 
     def alternates_netloc(self):
         """Return netlocs describing both base and stacked archives."""
-        netloc_m = re.match(r'(?P<base>v.*)\.archive\.(?P<country>\w+)', self.netloc)
+        netloc_m = re.match(
+            r"(?P<base>v.*)\.archive\.(?P<country>\w+)", self.netloc
+        )
         return [
-            '{base:s}.archive-legacy.{country:s}'.format(** netloc_m.groupdict()),
-            '{base:s}.stacked-archive-legacy.{country:s}'.format(** netloc_m.groupdict()),
+            "{base:s}.archive-legacy.{country:s}".format(
+                **netloc_m.groupdict()
+            ),
+            "{base:s}.stacked-archive-legacy.{country:s}".format(
+                **netloc_m.groupdict()
+            ),
         ]
 
     def alternates_fpextras(self):
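`alternates_netloc` derives the legacy and stacked archive netlocs from the multistore's own netloc with a named-group regex. A quick sketch of that derivation on one of the supported netlocs:

```python
import re

netloc = "vortex-free.archive.fr"
netloc_m = re.match(r"(?P<base>v.*)\.archive\.(?P<country>\w+)", netloc)
alternates = [
    "{base:s}.archive-legacy.{country:s}".format(**netloc_m.groupdict()),
    "{base:s}.stacked-archive-legacy.{country:s}".format(**netloc_m.groupdict()),
]
print(alternates)
# -> ['vortex-free.archive-legacy.fr', 'vortex-free.stacked-archive-legacy.fr']
```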
@@ -883,26 +932,28 @@ class _VortexCacheBaseStore(CacheStore, _VortexStackedStorageMixin):
 
     _abstract = True
     _footprint = dict(
-        info = 'VORTEX cache access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
+        info="VORTEX cache access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            headdir = dict(
-                default = '',
-                outcast = ['xp', ],
+            headdir=dict(
+                default="",
+                outcast=[
+                    "xp",
+                ],
             ),
-            rtouch = dict(
-                default = True,
+            rtouch=dict(
+                default=True,
             ),
-            rtouchskip = dict(
-                default = 3,
+            rtouchskip=dict(
+                default=3,
             ),
-        )
+        ),
     )
 
     def __init__(self, *args, **kw):
-        logger.debug('Vortex cache store init %s', self.__class__)
+        logger.debug("Vortex cache store init %s", self.__class__)
         del self.cache
         super().__init__(*args, **kw)
 
@@ -939,45 +990,56 @@ class VortexCacheMtStore(_VortexCacheBaseStore):
     """Some kind of MTOOL cache for VORTEX experiments."""
 
     _footprint = dict(
-        info = 'VORTEX MTOOL like Cache access',
-        attr = dict(
-            netloc = dict(
-                values = ['{:s}.{:s}cache-mt.fr'.format(v, s)
-                          for v in ('vortex',
-                                    'vortex-free',
-                                    'vsop')
-                          for s in ('', 'stacked-')],
+        info="VORTEX MTOOL like Cache access",
+        attr=dict(
+            netloc=dict(
+                values=[
+                    "{:s}.{:s}cache-mt.fr".format(v, s)
+                    for v in ("vortex", "vortex-free", "vsop")
+                    for s in ("", "stacked-")
+                ]
             ),
-        )
+        ),
     )
 
+    def __init__(self, *args, **kw):
+        super().__init__(*args, **kw)
+        self.location = get_cache_location()
+
 
-# TODO Not sure this class is needed anymore
 class VortexCacheOp2ResearchStore(_VortexCacheBaseStore):
     """The DSI/OP VORTEX cache where researchers can get the freshest data."""
 
     _footprint = dict(
-        info = 'VORTEX Mtool cache access',
-        attr = dict(
-            netloc = dict(
-                values = ['vsop.{:s}cache-op2r.fr'.format(s)
-                          for s in ('',
-                                    'stacked-')
+        info="VORTEX Mtool cache access",
+        attr=dict(
+            netloc=dict(
+                values=[
+                    "vsop.{:s}cache-op2r.fr".format(s)
+                    for s in ("", "stacked-")
                 ],
             ),
-            readonly = dict(
-                default = True,
+            readonly=dict(
+                default=True,
             ),
-            rtouch = dict(
-                default = True,
-            )
-        )
+            ),
     )
 
-
-
-
-
+    def __init__(self, *args, **kw):
+        super().__init__(*args, **kw)
+        try:
+            cachepath = config.from_config(
+                section="data-tree",
+                key="op_rootdir",
+            )
+        except config.ConfigurationError as e:
+            logger.error(
+                "Cannot use special experiment cache without providing",
+                "cache location",
+            )
+            raise e
+
+        self.location = os.path.join(cachepath, "vortex")
 
 
 class _AbstractVortexCacheMultiStore(MultiStore):
@@ -985,33 +1047,49 @@ class _AbstractVortexCacheMultiStore(MultiStore):
 
     _abstract = True
     _footprint = dict(
-        info = 'VORTEX cache access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
+        info="VORTEX cache access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            refillstore = dict(
-                default = False,
-            )
-        )
+            refillstore=dict(
+                default=False,
+            ),
+        ),
     )
 
     def filtered_readable_openedstores(self, remote):
         """Deals with stacked stores that are not always active."""
-        ostores = [self.openedstores[0], ]
+        ostores = [
+            self.openedstores[0],
+        ]
         # TODO is the call to cache.allow_reads still required without
         # marketplace stores?
-        ostores.extend(
-            [sto for sto in self.openedstores[1:]
-             if ((not sto.stackedstore or 'stackpath' in remote['query']) and
-                 sto.cache.allow_reads(remote['path']))])
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if (
+                    (not sto.stackedstore or "stackpath" in remote["query"])
+                    and sto.cache.allow_reads(remote["path"])
+                )
+            ]
+        )
         return ostores
 
     def filtered_writeable_openedstores(self, remote):
         """never writes into stack stores."""
-        ostores = [self.openedstores[0], ]
-        ostores.extend([sto for sto in self.openedstores[1:]
-                        if not sto.stackedstore and sto.cache.allow_writes(remote['path'])])
+        ostores = [
+            self.openedstores[0],
+        ]
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if not sto.stackedstore
+                and sto.cache.allow_writes(remote["path"])
+            ]
+        )
         return ostores
 
@@ -1019,19 +1097,22 @@ class VortexCacheStore(_AbstractVortexCacheMultiStore):
     """The go to store for data cached by VORTEX R&D experiments."""
 
     _footprint = dict(
-        attr = dict(
-            netloc = dict(
-                values = ['vortex.cache.fr', 'vortex-free.cache.fr'],
+        attr=dict(
+            netloc=dict(
+                values=[
+                    "vortex.cache.fr",
+                    "vortex-free.cache.fr",
+                ],
             ),
         )
     )
 
     def alternates_netloc(self):
         """For Non-Op users, Op caches may be accessed in read-only mode."""
-        netloc_m = re.match(r'(?P<base>v.*)\.cache\.(?P<country>\w+)', self.netloc)
-        return ['{base:s}.cache-mt.{country:s}'.format(** netloc_m.groupdict()),
-                '{base:s}.stacked-cache-mt.{country:s}'.format(** netloc_m.groupdict()),
-                ]
+        return [
+            f"{self.netloc.firstname}.cache-mt.fr",
+            f"{self.netloc.firstname}.stacked-cache-mt.fr",
+        ]
 
 
 class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
@@ -1042,35 +1123,36 @@ class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
     """
 
     _footprint = dict(
-        info = 'VORTEX vsop magic cache access',
-        attr = dict(
-            netloc = dict(
-                values = ['vsop.cache.fr', ],
+        info="VORTEX vsop magic cache access",
+        attr=dict(
+            netloc=dict(
+                values=[
+                    "vsop.cache.fr",
+                ],
             ),
-            glovekind = dict(
-                optional = True,
-                default = '[glove::realkind]',
+            glovekind=dict(
+                optional=True,
+                default="[glove::realkind]",
             ),
-        )
+        ),
     )
 
     def alternates_netloc(self):
         """For Non-Op users, Op caches may be accessed in read-only mode."""
         todo = [
-            'vsop.cache-mt.fr',
-            'vsop.stacked-cache-mt.fr',
+            "vsop.cache-mt.fr",
+            "vsop.stacked-cache-mt.fr",
         ]
 
         # Only set up op2r cache if the associated filepath
         # is configured
-        if (
-            (self.glovekind != 'opuser') and
-            config.is_defined(
-                section="data-tree", key="op_rootdir",
-            )
+        if (self.glovekind != "opuser") and config.is_defined(
+            section="data-tree",
+            key="op_rootdir",
         ):
             todo += [
-                'vsop.cache-op2r.fr', 'vsop.stacked-cache-op2r.fr',
+                "vsop.cache-op2r.fr",
+                "vsop.stacked-cache-op2r.fr",
             ]
         return todo
 
@@ -1080,30 +1162,44 @@ class _AbstractVortexStackMultiStore(MultiStore):
 
     _abstract = True
     _footprint = dict(
-        info = 'VORTEX stack access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
+        info="VORTEX stack access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            refillstore = dict(
-                default = False,
-            )
-        )
+            refillstore=dict(
+                default=False,
+            ),
+        ),
    )
 
    # TODO is this still needed without marketplace stores?
    def filtered_readable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
-        ostores = [self.openedstores[0], ]
-        ostores.extend([sto for sto in self.openedstores[1:]
-                        if sto.cache.allow_reads(remote['path'])])
+        ostores = [
+            self.openedstores[0],
+        ]
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if sto.cache.allow_reads(remote["path"])
+            ]
+        )
        return ostores
 
    def filtered_writeable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
-        ostores = [self.openedstores[0], ]
-        ostores.extend([sto for sto in self.openedstores[1:]
-                        if sto.cache.allow_writes(remote['path'])])
+        ostores = [
+            self.openedstores[0],
+        ]
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if sto.cache.allow_writes(remote["path"])
+            ]
+        )
        return ostores
 
@@ -1111,41 +1207,41 @@ class VortexStackStore(_AbstractVortexStackMultiStore):
     """Store intended to read and write data into VORTEX R&D stacks."""
 
     _footprint = dict(
-        info = 'VORTEX stack access',
-        attr = dict(
-            netloc = dict(
-                values = ['vortex.stack.fr', 'vortex-free.stack.fr'],
+        info="VORTEX stack access",
+        attr=dict(
+            netloc=dict(
+                values=["vortex.stack.fr", "vortex-free.stack.fr"],
             ),
-        )
+        ),
     )
 
     def alternates_netloc(self):
         """Go through the various stacked stores."""
-        netloc_m = re.match(r'(?P<base>v.*)\.stack\.(?P<country>\w+)', self.netloc)
-        s_mt_netloc = '{base:s}.stacked-cache-mt.{country:s}'.format(** netloc_m.groupdict())
-        return [s_mt_netloc]
+        return [f"{self.netloc.firstname}.stacked-cache-mt.fr"]
 
 
 class VortexVsopStackStore(_AbstractVortexStackMultiStore):
     """Store intended to read and write data into VORTEX R&D stacks."""
 
     _footprint = dict(
-        info = 'VORTEX stack access',
-        attr = dict(
-            netloc = dict(
-                values = ['vsop.stack.fr'],
+        info="VORTEX stack access",
+        attr=dict(
+            netloc=dict(
+                values=["vsop.stack.fr"],
             ),
-            glovekind = dict(
-                optional = True,
-                default = '[glove::realkind]',
+            glovekind=dict(
+                optional=True,
+                default="[glove::realkind]",
             ),
-        )
+        ),
     )
 
     def alternates_netloc(self):
         """For Non-Op users, Op caches may be accessed in read-only mode."""
-        todo = ['vsop.stacked-cache-mt.fr', ]
-        if self.glovekind != 'opuser':
+        todo = [
+            "vsop.stacked-cache-mt.fr",
+        ]
+        if self.glovekind != "opuser":
             todo.append("vsop.stacked-cache-op2r.fr")
         return todo
 
@@ -1157,24 +1253,30 @@ class VortexStoreLegacy(MultiStore):
     """
 
     _footprint = dict(
-        info='VORTEX multi access',
+        info="VORTEX multi access",
         attr=dict(
             scheme=dict(
-                values=['vortex'],
+                values=["vortex"],
             ),
             netloc=dict(
-                values=['vortex.multi-legacy.fr', 'vortex-free.multi-legacy.fr', 'vsop.multi-legacy.fr'],
+                values=[
+                    "vortex.multi-legacy.fr",
+                    "vortex-free.multi-legacy.fr",
+                    "vsop.multi-legacy.fr",
+                ],
             ),
             refillstore=dict(
                 default=True,
-            )
-        )
+            ),
+        ),
     )
 
     def alternates_netloc(self):
         """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
-        return [self.netloc.firstname + d
-                for d in ('.cache.fr', '.archive-legacy.fr')]
+        return [
+            self.netloc.firstname + d
+            for d in (".cache.fr", ".archive-legacy.fr")
+        ]
 
 
 class VortexStore(MultiStore):
@@ -1184,64 +1286,81 @@ class VortexStore(MultiStore):
     """
 
     _footprint = dict(
-        info = 'VORTEX multi access',
-        attr = dict(
-            scheme = dict(
-                values = ['vortex'],
+        info="VORTEX multi access",
+        attr=dict(
+            scheme=dict(
+                values=["vortex"],
             ),
-            netloc = dict(
-                values = ['vortex.multi.fr', 'vortex-free.multi.fr', 'vsop.multi.fr'],
+            netloc=dict(
+                values=[
+                    "vortex.multi.fr",
+                    "vortex-free.multi.fr",
+                    "vsop.multi.fr",
+                ],
             ),
-            refillstore = dict(
-                default = False,
-            )
-        )
+            refillstore=dict(default=False),
+        ),
     )
 
     def filtered_readable_openedstores(self, remote):
         """Deals with stacked stores that are not always active."""
-        ostores = [self.openedstores[0], ]
-        ostores.extend([sto for sto in self.openedstores[1:]
-                        if not sto.stackedstore or
-                        'stackpath' in remote['query']])
+        ostores = [
+            self.openedstores[0],
+        ]
+        ostores.extend(
+            [
+                sto
+                for sto in self.openedstores[1:]
+                if not sto.stackedstore or "stackpath" in remote["query"]
+            ]
+        )
         return ostores
 
     def alternates_netloc(self):
         """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
-        return [self.netloc.firstname + d
-                for d in ('.multi-legacy.fr', '.stacked-archive-smart.fr')]
+        return [
+            self.netloc.firstname + d
+            for d in (
+                ".multi-legacy.fr",
+                ".stacked-archive-smart.fr",
+            )
+        ]
 
 
 class PromiseCacheStore(VortexCacheMtStore):
     """Some kind of vortex cache for EXPECTED resources."""
 
     _footprint = dict(
-        info = 'EXPECTED cache access',
-        attr = dict(
-            netloc = dict(
-                values = ['promise.cache.fr'],
+        info="EXPECTED cache access",
+        attr=dict(
+            netloc=dict(
+                values=["promise.cache.fr"],
             ),
-            headdir = dict(
-                default = 'promise',
-                outcast = ['xp', 'vortex'],
+            headdir=dict(
+                default="promise",
+                outcast=["xp", "vortex"],
             ),
-        )
+        ),
     )
 
     @staticmethod
     def _add_default_options(options):
         options_upd = options.copy()
-        options_upd['fmt'] = 'ascii'  # Promises are always JSON files
-        options_upd['intent'] = 'in'  # Promises are always read-only
+        options_upd["fmt"] = "ascii"  # Promises are always JSON files
+        options_upd["intent"] = "in"  # Promises are always read-only
         return options_upd
 
     def vortexget(self, remote, local, options):
         """Proxy to :meth:`incacheget`."""
-        return super().vortexget(remote, local, self._add_default_options(options))
+        return super().vortexget(
+            remote, local, self._add_default_options(options)
+        )
 
     def vortexput(self, local, remote, options):
         """Proxy to :meth:`incacheput`."""
-        return super().vortexput(local, remote, self._add_default_options(options))
+        return super().vortexput(
+            local, remote, self._add_default_options(options)
+        )
 
     def vortexdelete(self, remote, options):
         """Proxy to :meth:`incachedelete`."""
@@ -1252,20 +1371,24 @@ class VortexPromiseStore(PromiseStore):
     """Combine a Promise Store for expected resources and any VORTEX Store."""
 
     _footprint = dict(
-        info = 'VORTEX promise store',
-        attr = dict(
-            scheme = dict(
-                values = ['xvortex'],
+        info="VORTEX promise store",
+        attr=dict(
+            scheme=dict(
+                values=["xvortex"],
             ),
             netloc=dict(
-                outcast = ['vortex-demo.cache.fr', 'vortex-demo.multi.fr',
-                           'vortex.testcache.fr', 'vortex.testmulti.fr'],
+                outcast=[
+                    "vortex-demo.cache.fr",
+                    "vortex-demo.multi.fr",
+                    "vortex.testcache.fr",
+                    "vortex.testmulti.fr",
+                ],
             ),
-        )
+        ),
     )
 
 
 # Activate the footprint's fasttrack on the stores collector
-fcollect = footprints.collectors.get(tag='store')
-fcollect.fasttrack = ('netloc', 'scheme')
+fcollect = footprints.collectors.get(tag="store")
+fcollect.fasttrack = ("netloc", "scheme")
 del fcollect