vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/data/stores.py ADDED
@@ -0,0 +1,1271 @@
1
+ # pylint: disable=unused-argument
2
+
3
+ """
4
+ This module handles store objects in charge of physically accessing resources.
5
+ Store objects use the :mod:`footprints` mechanism.
6
+ """
7
+
8
+ import copy
9
+ import ftplib
10
+ import io
11
+ import re
12
+
13
+ from bronx.fancies import loggers
14
+ import footprints
15
+
16
+ from vortex import sessions
17
+ from vortex import config
18
+ from vortex.data.abstractstores import Store, ArchiveStore, ConfigurableArchiveStore, CacheStore
19
+ from vortex.data.abstractstores import MultiStore, PromiseStore
20
+ from vortex.data.abstractstores import ARCHIVE_GET_INTENT_DEFAULT
21
+ from vortex.layout import dataflow
22
+ from vortex.syntax.stdattrs import hashalgo_avail_list
23
+ from vortex.syntax.stdattrs import FreeXPid
24
+ from vortex.syntax.stdattrs import DelayedEnvValue
25
+ from vortex.tools.systems import ExecutionError
26
+
27
+ #: Export base class
28
+ __all__ = []
29
+
30
+ logger = loggers.getLogger(__name__)
31
+
32
+
33
class MagicPlace(Store):
    """A purely virtual store: nothing is ever stored, yet get/put always succeed."""

    _footprint = dict(
        info = 'Evanescent physical store',
        attr = dict(
            scheme = dict(
                values = ['magic'],
            ),
        ),
        priority = dict(
            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
        )
    )

    @property
    def realkind(self):
        return 'magicstore'

    def has_fast_check(self):
        """Checking nothing takes no time at all."""
        return True

    def magiccheck(self, remote, options):
        """No data can ever be found here: always False."""
        return False

    def magiclocate(self, remote, options):
        """There is no actual location: always the empty string."""
        return ''

    def magicget(self, remote, local, options):
        """Pretend the resource was fetched: always True."""
        return True

    def magicput(self, local, remote, options):
        """Pretend the resource was stored: always True."""
        return True

    def magicdelete(self, remote, options):
        """Nothing can be removed: always False."""
        return False
75
+
76
+
77
class FunctionStoreCallbackError(Exception):
    """Raised by a :class:`FunctionStore` callback to signal that the get failed."""
79
+
80
+
81
class FunctionStore(Store):
    """Calls a function that returns a File like object (get only).

    This store can only perform the get action: it imports and calls
    the function named in the URI path. That function must return a
    file like object, whose content is written to the local container.

    The function receives an option dictionary gathering all of the
    options given to the store's get method, plus any additional
    information found in the 'query' part of the URI.

    :Example:

    Lets consider the following URI:

    ``function:///sandbox.util.storefunctions.echofunction?msg=toto&msg=titi``

    It will be seen as follows:

    * scheme: ``'function'``
    * netloc: ``''``
    * path: ``'/sandbox.util.storefunctions.echofunction'``
    * query: ``dict(msg=['toto', 'titi'])``

    As a result, the :func:`sandbox.util.storefunctions.echofunction` will be
    called with an option dictionary that contains ['toto', 'titi'] for the
    'msg' key (plus any other options passed to the store's get method).
    """

    _footprint = dict(
        info = 'Dummy store that calls a function',
        attr = dict(
            scheme = dict(
                values = ['function'],
            ),
            netloc = dict(
                values = [''],
            )
        ),
        priority = dict(
            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
        )
    )

    @property
    def realkind(self):
        return 'functionstore'

    def has_fast_check(self):
        """Checking nothing is immediate."""
        return True

    def functioncheck(self, remote, options):
        """There is no actual data to check: always False."""
        return False

    def functionlocate(self, remote, options):
        """The name of the function that will be called."""
        funcname = remote['path'][1:]
        return funcname[:-1] if funcname.endswith('/') else funcname

    def functionget(self, remote, local, options):
        """Calls the appropriate function and writes the result."""
        # Import the target function from its dotted name...
        cbfunc = self.system.import_function(self.functionlocate(remote, options))
        # ... and build its single "options" argument (URI query entries
        # override the store's own options).
        opts = dict()
        opts.update(options)
        opts.update(remote['query'])
        try:
            fres = cbfunc(opts)
        except FunctionStoreCallbackError as e:
            logger.error("An exception was raised in the Callback function")
            logger.error("Here is the exception: %s", str(e))
            fres = None
        if fres is None:
            return False
        if options.get('intent') == dataflow.intent.IN:
            logger.info('Ignore intent <in> for function input.')
        # StringIO results are transcoded into BytesIO before the copy
        if isinstance(fres, io.StringIO):
            text_res = fres
            text_res.seek(0)
            fres = io.BytesIO()
            for line in text_res:
                fres.write(line.encode(encoding='utf-8'))
            fres.seek(0)
        # NB: fres should be a file like object (BytesIO will do the trick)
        return self.system.cp(fres, local)

    def functionput(self, local, remote, options):
        """This should not happen - Always False."""
        logger.error("The function store is not able to perform PUTs.")
        return False

    def functiondelete(self, remote, options):
        """This should not happen - Always False."""
        logger.error("The function store is not able to perform Deletes.")
        return False
184
+
185
+
186
class Finder(Store):
    """The most usual store: your current filesystem!"""

    _footprint = dict(
        info = 'Miscellaneous file access',
        attr = dict(
            scheme = dict(
                values = ['file', 'ftp', 'symlink', 'rcp', 'scp'],
            ),
            netloc = dict(
                outcast = ['oper.inline.fr'],
            ),
            storehash = dict(
                values = hashalgo_avail_list,
            ),
        ),
        priority = dict(
            level = footprints.priorities.top.DEFAULT  # @UndefinedVariable
        )
    )

    def __init__(self, *args, **kw):
        logger.debug('Finder store init %s', self.__class__)
        super().__init__(*args, **kw)

    @property
    def realkind(self):
        return 'finder'

    def hostname(self):
        """Returns the current :attr:`netloc`."""
        return self.netloc

    def fullpath(self, remote):
        """Return actual path unless explicitly defined as relative path."""
        # The 'relative' flag comes from the query part of the URI.
        if remote['query'].get('relative', False):
            return remote['path'].lstrip('/')
        else:
            return remote['path']

    def _localtarfix(self, local):
        # Automatically expand a retrieved tar file next to the local copy.
        if (isinstance(local, str) and self.system.path.isfile(local) and
                self.system.is_tarfile(local)):
            destdir = self.system.path.dirname(self.system.path.realpath(local))
            try:
                self.system.smartuntar(local, destdir)
            except ExecutionError:
                # Tolerate a failed untar when the name does not look like a
                # tar file: the system's is_tarfile may give false positives.
                if not self.system.is_tarname(local):
                    logger.warning("An automatic untar was attempted but it failed. " +
                                   "Maybe the system's is_tarfile got it wrong ?")
                else:
                    raise

    def filecheck(self, remote, options):
        """Returns a stat-like object if the ``remote`` exists on the ``system`` provided."""
        try:
            st = self.system.stat(self.fullpath(remote))
        except OSError:
            # Missing file (or any stat failure) is reported as "not found".
            st = None
        return st

    def filelocate(self, remote, options):
        """Returns the real path."""
        return self.fullpath(remote)

    def fileget(self, remote, local, options):
        """Delegates to ``system`` the copy of ``remote`` to ``local``."""
        rpath = self.fullpath(remote)
        logger.info('fileget on %s (to: %s)', rpath, local)
        if 'intent' in options and options['intent'] == dataflow.intent.IN:
            logger.info('Ignore intent <in> for remote input %s', rpath)
        # The copy is always performed with intent 'inout'.
        rc = self.system.cp(rpath, local, fmt=options.get('fmt'), intent=dataflow.intent.INOUT)
        # Possibly verify the retrieved data against its hash sidecar.
        rc = rc and self._hash_get_check(self.fileget, remote, local, options)
        if rc:
            self._localtarfix(local)
        return rc

    def fileput(self, local, remote, options):
        """Delegates to ``system`` the copy of ``local`` to ``remote``."""
        rpath = self.fullpath(remote)
        logger.info('fileput to %s (from: %s)', rpath, local)
        rc = self.system.cp(local, rpath, fmt=options.get('fmt'))
        # On success, also store the hash sidecar data.
        return rc and self._hash_put(self.fileput, local, remote, options)

    def filedelete(self, remote, options):
        """Delegates to ``system`` the removing of ``remote``."""
        rc = None
        if self.filecheck(remote, options):
            rpath = self.fullpath(remote)
            logger.info('filedelete on %s', rpath)
            rc = self.system.remove(rpath, fmt=options.get('fmt'))
        else:
            logger.error('Try to remove a non-existing resource <%s>', self.fullpath(remote))
        return rc

    # The 'symlink' scheme shares check/locate with the 'file' scheme.
    symlinkcheck = filecheck
    symlinklocate = filelocate

    def symlinkget(self, remote, local, options):
        """Create ``local`` as a symbolic link pointing to ``remote``."""
        rpath = self.fullpath(remote)
        if 'intent' in options and options['intent'] == dataflow.intent.INOUT:
            # Writing through the link would alter the original data.
            logger.error('It is unsafe to have a symlink with intent=inout: %s', rpath)
            return False
        rc = self.system.remove(local)
        self.system.symlink(rpath, local)
        return rc and self.system.path.exists(local)

    def symlinkput(self, local, remote, options):
        """Unsupported with scheme:symlink - Always False."""
        logger.error("The Finder store with scheme:symlink is not able to perform Puts.")
        return False

    def symlinkdelete(self, remote, options):
        """Unsupported with scheme:symlink - Always False."""
        logger.error("The Finder store with scheme:symlink is not able to perform Deletes.")
        return False

    def _ftpinfos(self, remote, **kwargs):
        """Build the keyword arguments for the ``system.ftp``-like calls."""
        args = kwargs.copy()
        args['hostname'] = self.hostname()
        args['logname'] = remote['username']
        # NOTE(review): the netloc object is expected to expose a 'netport'
        # attribute - confirm against the netloc attribute's declared type.
        port = self.hostname().netport
        if port is not None:
            args['port'] = port
        return args

    def ftpcheck(self, remote, options):
        """Delegates to ``system.ftp`` a distant check."""
        rc = None
        ftp = self.system.ftp(** self._ftpinfos(remote))
        if ftp:
            try:
                # The SIZE request doubles as an existence check.
                rc = ftp.size(self.fullpath(remote))
            except (ValueError, TypeError):
                pass
            except ftplib.all_errors:
                # Any FTP-level failure means "not found": rc stays None.
                pass
            finally:
                ftp.close()
        return rc

    def ftplocate(self, remote, options):
        """Delegates to ``system`` qualified name creation."""
        ftp = self.system.ftp(** self._ftpinfos(remote, delayed=True))
        if ftp:
            rloc = ftp.netpath(self.fullpath(remote))
            ftp.close()
            return rloc
        else:
            return None

    def ftpget(self, remote, local, options):
        """Delegates to ``system`` the file transfer of ``remote`` to ``local``."""
        rpath = self.fullpath(remote)
        logger.info('ftpget on ftp://%s/%s (to: %s)', self.hostname(), rpath, local)
        rc = self.system.smartftget(
            rpath,
            local,
            fmt=options.get('fmt'),
            # ftp control
            ** self._ftpinfos(remote)
        )
        # Possibly verify the retrieved data against its hash sidecar.
        rc = rc and self._hash_get_check(self.ftpget, remote, local, options)
        if rc:
            self._localtarfix(local)
        return rc

    def ftpput(self, local, remote, options):
        """Delegates to ``system`` the file transfer of ``local`` to ``remote``."""
        rpath = self.fullpath(remote)
        put_opts = dict()
        put_opts['fmt'] = options.get('fmt')
        # 'enforcesync' requests a synchronous (blocking) transfer.
        put_opts['sync'] = options.get('enforcesync', False)
        logger.info('ftpput to ftp://%s/%s (from: %s)', self.hostname(), rpath, local)
        rc = self.system.smartftput(
            local,
            rpath,
            # ftp control
            ** self._ftpinfos(remote, ** put_opts)
        )
        return rc and self._hash_put(self.ftpput, local, remote, options)

    def ftpdelete(self, remote, options):
        """Delegates to ``system`` a distant remove."""
        rc = None
        actualpath = self.fullpath(remote)
        if self.ftpcheck(remote, options):
            logger.info('ftpdelete on ftp://%s/%s', self.hostname(), actualpath)
            ftp = self.system.ftp(**self._ftpinfos(remote))
            if ftp:
                try:
                    rc = ftp.delete(actualpath)
                finally:
                    ftp.close()
        else:
            logger.error('Try to remove a non-existing resource <%s>', actualpath)
        return rc
381
+
382
+
383
+ class _VortexStackedStorageMixin:
384
+ """Mixin class that adds utility functions to work with stacked data."""
385
+
386
+ _STACKED_RE = re.compile('stacked-')
387
+
388
+ @property
389
+ def stackedstore(self):
390
+ """Tell if the present store is looking into a stack of resources."""
391
+ return self._STACKED_RE.search(self.netloc)
392
+
393
+ def _stacked_remainder(self, remote, stackpath):
394
+ path_remainder = remote['path'].strip('/').split('/')
395
+ for a_spath in stackpath.split('/'):
396
+ if path_remainder and path_remainder[0] == a_spath:
397
+ del path_remainder[0]
398
+ else:
399
+ break
400
+ return '/'.join(path_remainder)
401
+
402
+ def _stacked_xremote(self, remote):
403
+ """The path to **remote** with its stack."""
404
+ if self.stackedstore:
405
+ remote = remote.copy()
406
+ remote['query'] = remote['query'].copy()
407
+ stackpath = remote['query'].pop('stackpath', (None, ))[0]
408
+ stackfmt = remote['query'].pop('stackfmt', (None, ))[0]
409
+ if stackpath is None or stackfmt is None:
410
+ raise ValueError('"stackpath" and "stackfmt" are not available in the query.')
411
+ else:
412
+ remote['path'] = stackpath + '/' + self._stacked_remainder(remote, stackpath)
413
+ return remote
414
+
415
+ def _stacked_xegglocate(self, remote):
416
+ """Return various informations about the stack associated with **remote**.
417
+
418
+ It returns a 3 elements tuple:
419
+
420
+ * The remote-like dictionary to the stack resource
421
+ * The format of the stack resource
422
+ * The path to **remote** within the stacked resource
423
+
424
+ """
425
+ remote = remote.copy()
426
+ remote['query'] = remote['query'].copy()
427
+ stackpath = remote['query'].pop('stackpath', (None, ))[0].strip('/')
428
+ stackfmt = remote['query'].pop('stackfmt', (None, ))[0]
429
+ if stackpath is None or stackfmt is None:
430
+ raise ValueError('"stackpath" and "stackfmt" are not available in the query.')
431
+ else:
432
+ resource_remainder = self._stacked_remainder(remote, stackpath)
433
+ remote['path'] = '/' + stackpath
434
+ return remote, stackfmt, resource_remainder
435
+
436
+
437
#: Footprint fragment that forces ``readonly=True`` on the stores that include
#: it (used below by the stacked archive store variants, which never write).
_vortex_readonly_store = footprints.Footprint(
    info="Abstract store' readonly=True attribute",
    attr=dict(
        readonly=dict(
            values=[True, ],
            optional=True,
            default=True
        )
    )
)
447
+
448
+
449
class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
    """Some kind of archive for VORTEX experiments.

    Each public ``vortex*`` action has two behaviours: when the store is a
    "stacked" one (see :class:`_VortexStackedStorageMixin`), the stack
    resource is fetched first and the target resource is extracted from it;
    otherwise the action is delegated to the matching ``_vortex*`` helper,
    which remaps the path and calls the ``inarchive*`` method inherited from
    :class:`ArchiveStore`.
    """

    _abstract = True
    _footprint = dict(
        info = 'VORTEX archive access',
        attr = dict(
            scheme = dict(
                values = ['vortex'],
            ),
            netloc = dict(
            ),
            storehead = dict(
                optional = True,
                default = 'vortex',
                outcast = ['xp'],
            ),
        )
    )

    #: Netloc substring that triggers the automatic stack refill to cache.
    _STACKS_AUTOREFILL_CRIT = 'stacked-archive-smart'

    def __init__(self, *args, **kw):
        logger.debug('Vortex archive store init %s', self.__class__)
        super().__init__(*args, **kw)

    def remap_read(self, remote, options):
        """Remap actual remote path to distant store path for intrusive actions."""
        # Identity mapping here; subclasses override this.
        return copy.copy(remote)

    def remap_list(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace."""
        # Listing requires at least vapp/vconf/experiment depth -
        # presumably; confirm with the path layout used by the providers.
        if len(remote['path'].split('/')) >= 4:
            return self.remap_read(remote, options)
        else:
            logger.critical('The << %s >> path is not listable.', remote['path'])
            return None

    remap_write = remap_read

    @property
    def stacks_autorefill(self):
        """Where to refill a stack retrieved from the archive."""
        if self._STACKS_AUTOREFILL_CRIT in self.netloc:
            # e.g. 'vortex.stacked-archive-smart.fr' -> 'vortex.cache.fr'
            return self.netloc.replace(self._STACKS_AUTOREFILL_CRIT, 'cache')
        else:
            return None

    def _vortex_stacked_egg_retrieve(self, remote, result_id=None):
        """Retrieve the stack associated with **remote**.

        The stack is fetched once into a 'vortex_stacks_xeggs' directory under
        the current rundir (or the working directory) and re-used afterwards.

        :return: a 3-elements tuple (return code, path to the retrieved
            stack, path of the resource within the stack).
        """
        remote, remotefmt, remainder = self._stacked_xegglocate(remote)
        rundir = sessions.current().context.rundir
        if not rundir:
            rundir = self.system.pwd()
        rundir = self.system.path.join(rundir, 'vortex_stacks_xeggs')
        target = self.system.path.join(rundir, * remote['path'].strip('/').split('/'))
        targetopts = dict(fmt=remotefmt, intent=dataflow.intent.IN)
        if self.system.path.exists(target):
            # The stack was already fetched during this run: re-use it.
            logger.info("Stack previously retrieved (in %s). Using it.", target)
            rc = True
        else:
            if result_id:
                # Finalise a previously started delayed get.
                rc = self._vortexfinaliseget(result_id, remote, target, targetopts)
            else:
                rc = self._vortexget(remote, target, targetopts)
            if rc and self.stacks_autorefill:
                # 'smart' variant: push the whole stack to the cache store so
                # that subsequent accesses are faster.
                rstore = footprints.proxy.store(scheme=self.scheme, netloc=self.stacks_autorefill)
                logger.info("Refilling the stack egg to [%s]", rstore)
                try:
                    rstore.put(target, remote.copy(), targetopts)
                except (ExecutionError, OSError) as e:
                    # A failed refill is deliberately non-fatal.
                    logger.error("An ExecutionError happened during the refill: %s", str(e))
                    logger.error("This error is ignored... but that's ugly !")
        return rc, target, remainder

    def vortexcheck(self, remote, options):
        """Vortex' archive check sequence."""
        if self.stackedstore:
            # First check the stack resource itself...
            s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options['fmt'] = s_remotefmt
            rc = self._vortexcheck(s_remote, options)
            if rc:
                # ... then fetch it and verify the target is actually inside.
                rc, target, remainder = self._vortex_stacked_egg_retrieve(remote)
                rc = rc and self.system.path.exists(self.system.path.join(target, remainder))
            return rc
        else:
            return self._vortexcheck(remote, options)

    def _vortexcheck(self, remote, options):
        """Remap and ftpcheck sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivecheck(remote, options)

    def vortexlocate(self, remote, options):
        """Vortex' archive locate sequence."""
        if self.stackedstore:
            # Locate the stack resource, not the resource within it.
            remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options['fmt'] = s_remotefmt
        return self._vortexlocate(remote, options)

    def _vortexlocate(self, remote, options):
        """Remap and ftplocate sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivelocate(remote, options)

    def vortexlist(self, remote, options):
        """Vortex' archive list sequence."""
        if self.stackedstore:
            # Listing inside a stack is not supported.
            return None
        else:
            return self._vortexlist(remote, options)

    def _vortexlist(self, remote, options):
        """Remap and ftplist sequence."""
        remote = self.remap_list(remote, options)
        if remote:
            return self.inarchivelist(remote, options)
        else:
            return None

    def vortexprestageinfo(self, remote, options):
        """Vortex' archive prestageinfo sequence."""
        if self.stackedstore:
            # Prestage the stack resource as a whole.
            remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options['fmt'] = s_remotefmt
        return self._vortexprestageinfo(remote, options)

    def _vortexprestageinfo(self, remote, options):
        """Remap and ftpprestageinfo sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveprestageinfo(remote, options)

    def vortexget(self, remote, local, options):
        """Vortex' archive get sequence."""
        if self.stackedstore:
            # Fetch (or re-use) the stack, then copy the resource out of it.
            rc, target, remainder = self._vortex_stacked_egg_retrieve(remote)
            rc = rc and self.system.cp(self.system.path.join(target, remainder), local,
                                       fmt=options.get('fmt'),
                                       intent=options.get('intent',
                                                          ARCHIVE_GET_INTENT_DEFAULT))
            return rc
        else:
            return self._vortexget(remote, local, options)

    def _vortexget(self, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveget(remote, local, options)

    def vortexearlyget(self, remote, local, options):
        """Vortex' archive earlyget sequence."""
        if self.stackedstore:
            s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            targetopts = dict(fmt=s_remotefmt, intent=dataflow.intent.IN)
            # 'somelocalfile' is a placeholder target - presumably only the
            # delayed-action request matters at this stage; the real copy
            # happens in vortexfinaliseget. TODO confirm.
            return self._vortexearlyget(s_remote, 'somelocalfile', targetopts)
        else:
            return self._vortexearlyget(remote, local, options)

    def _vortexearlyget(self, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveearlyget(remote, local, options)

    def vortexfinaliseget(self, result_id, remote, local, options):
        """Vortex' archive finaliseget sequence."""
        if self.stackedstore:
            # Finalise the stack retrieval, then copy the resource out of it.
            rc, target, remainder = self._vortex_stacked_egg_retrieve(remote, result_id=result_id)
            rc = rc and self.system.cp(self.system.path.join(target, remainder), local,
                                       fmt=options.get('fmt'),
                                       intent=options.get('intent',
                                                          ARCHIVE_GET_INTENT_DEFAULT))
            return rc
        else:
            return self._vortexfinaliseget(result_id, remote, local, options)

    def _vortexfinaliseget(self, result_id, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivefinaliseget(result_id, remote, local, options)

    def vortexput(self, local, remote, options):
        """Remap root dir and ftpput sequence."""
        if self.stackedstore:
            raise RuntimeError("stacked archive stores are never writable.")
        if not self.storetrue:
            # Writing to the archive is disabled: report success anyway.
            logger.info("put deactivated for %s", str(local))
            return True
        remote = self.remap_write(remote, options)
        return self.inarchiveput(local, remote, options)

    def vortexdelete(self, remote, options):
        """Remap root dir and ftpdelete sequence."""
        if self.stackedstore:
            raise RuntimeError("stacked archive stores are never writable.")
        remote = self.remap_write(remote, options)
        return self.inarchivedelete(remote, options)
648
+
649
+
650
class VortexStdBaseArchiveStore(_VortexBaseArchiveStore):
    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.

    This 'archive-legacy' store looks into the resource 'main' location not
    into a potential stack.
    """

    _footprint = dict(
        info = 'VORTEX archive access for casual experiments',
        attr = dict(
            netloc = dict(
                values = ['vortex.archive-legacy.fr'],
            ),
        )
    )

    @property
    def _actual_mappingroot(self):
        """Read the get entry point from the [storage] configuration section."""
        return config.from_config(
            section="storage", key="rootdir",
        )

    def remap_read(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace."""
        new_remote = copy.copy(remote)
        components = new_remote['path'].split('/')
        mappingroot = self._actual_mappingroot
        # A configured mapping root is only used when no explicit storeroot
        # was provided on the store itself.
        if mappingroot and not self.storeroot:
            new_remote['root'] = mappingroot
        # Expand the 4th path component into one sub-directory per character.
        components[3:4] = list(components[3])
        new_remote['path'] = self.system.path.join(*components)
        return new_remote
683
+
684
+
685
class VortexStdStackedArchiveStore(VortexStdBaseArchiveStore):
    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.

    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
    the stack associated to the resource. The '-smart' variant, has the ability
    to refill the whole stack into local cache (to be faster in the future).
    """

    # The _vortex_readonly_store fragment forces readonly=True on this store.
    _footprint = [
        _vortex_readonly_store,
        dict(
            attr = dict(
                netloc = dict(
                    values = ['vortex.stacked-archive-legacy.fr',
                              'vortex.stacked-archive-smart.fr'],
                ),
            )
        )
    ]
704
+
705
+
706
class VortexFreeStdBaseArchiveStore(_VortexBaseArchiveStore, ConfigurableArchiveStore):
    """Archive for casual VORTEX experiments: Support for Free XPIDs.

    This 'archive-legacy' store looks into the resource 'main' location not
    into a potential stack.
    """

    #: Path to the vortex-free Store configuration file
    _store_global_config = '@store-vortex-free.ini'
    #: Datastore entry holding the parsed configuration
    _datastore_id = 'store-vortex-free-conf'

    _footprint = dict(
        info = 'VORTEX archive access for casual experiments',
        attr = dict(
            netloc = dict(
                values = ['vortex-free.archive-legacy.fr'],
            ),
        )
    )

    def remap_read(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace."""
        new_remote = copy.copy(remote)
        components = new_remote['path'].strip('/').split('/')
        xpid = FreeXPid(components[2])
        # Substitute the raw experiment id with its canonical form.
        components[2] = xpid.id
        if 'root' not in new_remote:
            # The store root depends on the experiment id.
            new_remote['root'] = self._actual_storeroot(xpid)
        new_remote['path'] = self.system.path.join(*components)
        return new_remote

    remap_write = remap_read
738
+
739
+
740
class VortexFreeStdStackedArchiveStore(VortexFreeStdBaseArchiveStore):
    """Archive for casual VORTEX experiments: Support for Free XPIDs.

    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
    the stack associated to the resource. The '-smart' variant, has the ability
    to refill the whole stack into local cache (to be faster in the future).
    """

    # The _vortex_readonly_store fragment forces readonly=True on this store.
    _footprint = [
        _vortex_readonly_store,
        dict(
            attr = dict(
                netloc = dict(
                    values = ['vortex-free.stacked-archive-legacy.fr',
                              'vortex-free.stacked-archive-smart.fr'],
                ),
            )
        )]
758
+
759
+
760
class VortexOpBaseArchiveStore(_VortexBaseArchiveStore):
    """Archive for op VORTEX experiments.

    This 'archive-legacy' store looks into the resource 'main' location, not
    into a potential stack.
    """

    _footprint = dict(
        info='VORTEX archive access for op experiments',
        attr=dict(
            netloc=dict(
                values=['vsop.archive-legacy.fr'],
            ),
            storetrue=dict(
                default=DelayedEnvValue('op_archive', True),
            ),
        ),
    )

    @property
    def _actual_storeroot(self):
        """The archive root: the explicit attribute or the configured default."""
        return self.storeroot or config.from_config(
            section="storage", key="op_rootdir",
        )

    def remap_read(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace."""
        remote = copy.copy(remote)
        segments = remote['path'].split('/')
        remote['root'] = self._actual_storeroot
        if len(segments) >= 5 and re.match(r'^\d{8}T\d{2,4}', segments[4]):
            # A date-like component (YYYYMMDDTHH[MM]) is expanded into
            # YYYY/MM/DD/THH[MM] sub-directories.
            chars = list(segments[4])
            chars.insert(4, '/')
            chars.insert(7, '/')
            chars.insert(10, '/')
            segments[4] = ''.join(chars)
        remote['path'] = self.system.path.join(*segments)
        return remote

    # Writes are remapped exactly like reads.
    remap_write = remap_read
804
+
805
+
806
class VortexOpStackedArchiveStore(VortexOpBaseArchiveStore):
    """Archive for op VORTEX experiments.

    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
    the stack associated to the resource. The '-smart' variant has the ability
    to refill the whole stack into the local cache (to be faster in the future).
    """

    # Stacks are never written to through this store: read-only footprint mixin.
    _footprint = [
        _vortex_readonly_store,
        dict(
            attr=dict(
                netloc=dict(
                    values=[
                        'vsop.stacked-archive-legacy.fr',
                        'vsop.stacked-archive-smart.fr',
                    ],
                ),
            ),
        ),
    ]
824
+
825
+
826
class VortexArchiveStore(MultiStore):
    """Archive store for any Vortex experiments.

    Depending on the netloc, legacy/Olive XPIDs ('vortex'), free XPIDs
    ('vortex-free') or operational experiments ('vsop') will be dealt with.

    First, this multi store will look onto the resource 'main' location. In a
    second phase, if sensible, it will also dig into the stack associated with
    the resource.
    """

    _footprint = dict(
        info='VORTEX archive access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            netloc=dict(
                values=['vortex.archive.fr', 'vortex-free.archive.fr', 'vsop.archive.fr'],
            ),
            refillstore=dict(
                default=False,
            ),
            storehead=dict(
                optional=True,
            ),
            storesync=dict(
                alias=('archsync', 'synchro'),
                type=bool,
                optional=True,
            ),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Only use the stacked store if sensible."""
        usable = [self.openedstores[0]]
        for sto in self.openedstores[1:]:
            # Stacked stores only make sense when a stack path is requested.
            if not sto.stackedstore or 'stackpath' in remote['query']:
                usable.append(sto)
        return usable

    def alternates_netloc(self):
        """Return netlocs describing both base and stacked archives."""
        matched = re.match(r'(?P<base>v.*)\.archive\.(?P<country>\w+)', self.netloc)
        bits = matched.groupdict()
        return [
            '{base:s}.archive-legacy.{country:s}'.format(**bits),
            '{base:s}.stacked-archive-legacy.{country:s}'.format(**bits),
        ]

    def alternates_fpextras(self):
        """Deal with some ArchiveStores' specific attributes."""
        return dict(storehead=self.storehead, storesync=self.storesync)
879
+
880
+
881
class _VortexCacheBaseStore(CacheStore, _VortexStackedStorageMixin):
    """Some kind of cache for VORTEX experiments: one still needs to choose the cache strategy."""

    _abstract = True
    _footprint = dict(
        info='VORTEX cache access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            headdir=dict(
                default="",
                outcast=['xp', ],
            ),
            rtouch=dict(
                default=True,
            ),
            rtouchskip=dict(
                default=3,
            ),
        ),
    )

    def __init__(self, *args, **kw):
        logger.debug('Vortex cache store init %s', self.__class__)
        # Discard any pre-existing cache object so that it is rebuilt
        # lazily with this store's settings.
        del self.cache
        super().__init__(*args, **kw)

    def vortexcheck(self, remote, options):
        """Proxy to :meth:`incachecheck`."""
        return self.incachecheck(self._stacked_xremote(remote), options)

    def vortexlocate(self, remote, options):
        """Proxy to :meth:`incachelocate`."""
        return self.incachelocate(self._stacked_xremote(remote), options)

    def vortexlist(self, remote, options):
        """Proxy to :meth:`incachelist`."""
        # NOTE: listing works on the raw remote (no stack remapping).
        return self.incachelist(remote, options)

    def vortexprestageinfo(self, remote, options):
        """Proxy to :meth:`incacheprestageinfo`."""
        return self.incacheprestageinfo(self._stacked_xremote(remote), options)

    def vortexget(self, remote, local, options):
        """Proxy to :meth:`incacheget`."""
        return self.incacheget(self._stacked_xremote(remote), local, options)

    def vortexput(self, local, remote, options):
        """Proxy to :meth:`incacheput`."""
        return self.incacheput(local, self._stacked_xremote(remote), options)

    def vortexdelete(self, remote, options):
        """Proxy to :meth:`incachedelete`."""
        return self.incachedelete(self._stacked_xremote(remote), options)
936
+
937
+
938
class VortexCacheMtStore(_VortexCacheBaseStore):
    """Some kind of MTOOL cache for VORTEX experiments."""

    _footprint = dict(
        info='VORTEX MTOOL like Cache access',
        attr=dict(
            netloc=dict(
                # Plain and stacked variants for each of the three vapp families.
                values=[
                    f'{vapp:s}.{stacked:s}cache-mt.fr'
                    for vapp in ('vortex', 'vortex-free', 'vsop')
                    for stacked in ('', 'stacked-')
                ],
            ),
            strategy=dict(
                default='mtool',
            ),
        ),
    )
953
+
954
+
955
# TODO Not sure this class is needed anymore
class VortexCacheOp2ResearchStore(_VortexCacheBaseStore):
    """The DSI/OP VORTEX cache where researchers can get the freshest data."""

    _footprint = dict(
        info='VORTEX Mtool cache access',
        attr=dict(
            netloc=dict(
                values=[
                    f'vsop.{stacked:s}cache-op2r.fr'
                    for stacked in ('', 'stacked-')
                ],
            ),
            strategy=dict(
                default='op2r',
            ),
            # Researchers only read from the op cache.
            readonly=dict(
                default=True,
            ),
        ),
    )

    @property
    def underlying_cache_kind(self):
        """The kind of cache that will be used."""
        return self.strategy
981
+
982
+
983
class _AbstractVortexCacheMultiStore(MultiStore):
    """Any Cache based Vortex multi store."""

    _abstract = True
    _footprint = dict(
        info='VORTEX cache access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            refillstore=dict(
                default=False,
            ),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Deals with stacked stores that are not always active."""
        usable = [self.openedstores[0]]
        # TODO is the call to cache.allow_reads still required without
        # marketplace stores?
        for sto in self.openedstores[1:]:
            stack_ok = not sto.stackedstore or 'stackpath' in remote['query']
            if stack_ok and sto.cache.allow_reads(remote['path']):
                usable.append(sto)
        return usable

    def filtered_writeable_openedstores(self, remote):
        """Never writes into stack stores."""
        usable = [self.openedstores[0]]
        for sto in self.openedstores[1:]:
            if not sto.stackedstore and sto.cache.allow_writes(remote['path']):
                usable.append(sto)
        return usable
1016
+
1017
+
1018
class VortexCacheStore(_AbstractVortexCacheMultiStore):
    """The go to store for data cached by VORTEX R&D experiments."""

    _footprint = dict(
        attr=dict(
            netloc=dict(
                values=['vortex.cache.fr', 'vortex-free.cache.fr', ],
            ),
        ),
    )

    def alternates_netloc(self):
        """Return the plain and stacked MTOOL cache netlocs."""
        matched = re.match(r'(?P<base>vortex.*)\.cache\.(?P<country>\w+)', self.netloc)
        bits = matched.groupdict()
        return [
            '{base:s}.cache-mt.{country:s}'.format(**bits),
            '{base:s}.stacked-cache-mt.{country:s}'.format(**bits),
        ]
1035
+
1036
+
1037
class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
    """The go to store for data cached by VORTEX operational experiments.

    It behaves differently depending on the profile of the user running the
    code (see the **glovekind** attribute).
    """

    _footprint = dict(
        info='VORTEX vsop magic cache access',
        attr=dict(
            netloc=dict(
                values=['vsop.cache.fr', ],
            ),
            glovekind=dict(
                optional=True,
                default='[glove::realkind]',
            ),
        ),
    )

    def alternates_netloc(self):
        """For Non-Op users, Op caches may be accessed in read-only mode."""
        todo = [
            'vsop.cache-mt.fr',
            'vsop.stacked-cache-mt.fr',
        ]
        # Only set up op2r cache if the associated filepath is configured
        # and the current user is not an op user.
        op2r_wanted = (
            self.glovekind != 'opuser' and
            config.is_defined(section="data-tree", key="op_rootdir")
        )
        if op2r_wanted:
            todo.extend(['vsop.cache-op2r.fr', 'vsop.stacked-cache-op2r.fr'])
        return todo
1076
+
1077
+
1078
class _AbstractVortexStackMultiStore(MultiStore):
    """Any stack oriented Vortex multi store."""

    _abstract = True
    _footprint = dict(
        info='VORTEX stack access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            refillstore=dict(
                default=False,
            ),
        ),
    )

    # TODO is this still needed without marketplace stores?
    def filtered_readable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
        usable = [self.openedstores[0]]
        for sto in self.openedstores[1:]:
            if sto.cache.allow_reads(remote['path']):
                usable.append(sto)
        return usable

    def filtered_writeable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
        usable = [self.openedstores[0]]
        for sto in self.openedstores[1:]:
            if sto.cache.allow_writes(remote['path']):
                usable.append(sto)
        return usable
1108
+
1109
+
1110
class VortexStackStore(_AbstractVortexStackMultiStore):
    """Store intended to read and write data into VORTEX R&D stacks."""

    _footprint = dict(
        info='VORTEX stack access',
        attr=dict(
            netloc=dict(
                values=['vortex.stack.fr', 'vortex-free.stack.fr'],
            ),
        ),
    )

    def alternates_netloc(self):
        """Go through the various stacked stores."""
        matched = re.match(r'(?P<base>vortex.*)\.stack\.(?P<country>\w+)', self.netloc)
        return [
            '{base:s}.stacked-cache-mt.{country:s}'.format(**matched.groupdict()),
        ]
1127
+
1128
+
1129
class VortexVsopStackStore(_AbstractVortexStackMultiStore):
    """Store intended to read and write data into VORTEX vsop stacks."""

    _footprint = dict(
        info='VORTEX stack access',
        attr=dict(
            netloc=dict(
                values=['vsop.stack.fr'],
            ),
            glovekind=dict(
                optional=True,
                default='[glove::realkind]',
            ),
        ),
    )

    def alternates_netloc(self):
        """For Non-Op users, Op caches may be accessed in read-only mode."""
        todo = ['vsop.stacked-cache-mt.fr', ]
        if self.glovekind != 'opuser':
            todo.append("vsop.stacked-cache-op2r.fr")
        return todo
1151
+
1152
+
1153
class VortexStoreLegacy(MultiStore):
    """Combined cache and archive legacy VORTEX stores.

    By '-legacy' we mean that stack resources are ignored.
    """

    _footprint = dict(
        info='VORTEX multi access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            netloc=dict(
                values=['vortex.multi-legacy.fr', 'vortex-free.multi-legacy.fr', 'vsop.multi-legacy.fr'],
            ),
            refillstore=dict(
                default=True,
            ),
        ),
    )

    def alternates_netloc(self):
        """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
        suffixes = ('.cache.fr', '.archive-legacy.fr')
        return [self.netloc.firstname + suffix for suffix in suffixes]
1178
+
1179
+
1180
class VortexStore(MultiStore):
    """Combined cache and archive VORTEX stores.

    If sensible, stack will be explored and might be refilled into cache.
    """

    _footprint = dict(
        info='VORTEX multi access',
        attr=dict(
            scheme=dict(
                values=['vortex'],
            ),
            netloc=dict(
                values=['vortex.multi.fr', 'vortex-free.multi.fr', 'vsop.multi.fr'],
            ),
            refillstore=dict(
                default=False,
            ),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Deals with stacked stores that are not always active."""
        usable = [self.openedstores[0]]
        for sto in self.openedstores[1:]:
            # Stacked stores only make sense when a stack path is requested.
            if not sto.stackedstore or 'stackpath' in remote['query']:
                usable.append(sto)
        return usable

    def alternates_netloc(self):
        """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
        suffixes = ('.multi-legacy.fr', '.stacked-archive-smart.fr')
        return [self.netloc.firstname + suffix for suffix in suffixes]
1213
+
1214
+
1215
class PromiseCacheStore(VortexCacheMtStore):
    """Some kind of vortex cache for EXPECTED resources."""

    _footprint = dict(
        info='EXPECTED cache access',
        attr=dict(
            netloc=dict(
                values=['promise.cache.fr'],
            ),
            headdir=dict(
                default='promise',
                outcast=['xp', 'vortex'],
            ),
        ),
    )

    @staticmethod
    def _add_default_options(options):
        """Return a copy of *options* with the promise-specific defaults set."""
        updated = options.copy()
        updated['fmt'] = 'ascii'   # Promises are always JSON files
        updated['intent'] = 'in'   # Promises are always read-only
        return updated

    def vortexget(self, remote, local, options):
        """Proxy to :meth:`incacheget`."""
        return super().vortexget(remote, local, self._add_default_options(options))

    def vortexput(self, local, remote, options):
        """Proxy to :meth:`incacheput`."""
        return super().vortexput(local, remote, self._add_default_options(options))

    def vortexdelete(self, remote, options):
        """Proxy to :meth:`incachedelete`."""
        return super().vortexdelete(remote, self._add_default_options(options))
1249
+
1250
+
1251
class VortexPromiseStore(PromiseStore):
    """Combine a Promise Store for expected resources and any VORTEX Store."""

    _footprint = dict(
        info='VORTEX promise store',
        attr=dict(
            scheme=dict(
                values=['xvortex'],
            ),
            netloc=dict(
                # Demo and test caches do not take part in the promise scheme.
                outcast=[
                    'vortex-demo.cache.fr', 'vortex-demo.multi.fr',
                    'vortex.testcache.fr', 'vortex.testmulti.fr',
                ],
            ),
        ),
    )
1266
+
1267
+
1268
# Activate the footprint's fasttrack on the stores collector.
_store_collector = footprints.collectors.get(tag='store')
_store_collector.fasttrack = ('netloc', 'scheme')
del _store_collector