vortex-nwp 2.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. vortex/__init__.py +135 -0
  2. vortex/algo/__init__.py +12 -0
  3. vortex/algo/components.py +2136 -0
  4. vortex/algo/mpitools.py +1648 -0
  5. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  7. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  8. vortex/algo/serversynctools.py +170 -0
  9. vortex/config.py +115 -0
  10. vortex/data/__init__.py +13 -0
  11. vortex/data/abstractstores.py +1572 -0
  12. vortex/data/containers.py +780 -0
  13. vortex/data/contents.py +596 -0
  14. vortex/data/executables.py +284 -0
  15. vortex/data/flow.py +113 -0
  16. vortex/data/geometries.ini +2689 -0
  17. vortex/data/geometries.py +703 -0
  18. vortex/data/handlers.py +1021 -0
  19. vortex/data/outflow.py +67 -0
  20. vortex/data/providers.py +465 -0
  21. vortex/data/resources.py +201 -0
  22. vortex/data/stores.py +1271 -0
  23. vortex/gloves.py +282 -0
  24. vortex/layout/__init__.py +27 -0
  25. vortex/layout/appconf.py +109 -0
  26. vortex/layout/contexts.py +511 -0
  27. vortex/layout/dataflow.py +1069 -0
  28. vortex/layout/jobs.py +1276 -0
  29. vortex/layout/monitor.py +833 -0
  30. vortex/layout/nodes.py +1424 -0
  31. vortex/layout/subjobs.py +464 -0
  32. vortex/nwp/__init__.py +11 -0
  33. vortex/nwp/algo/__init__.py +12 -0
  34. vortex/nwp/algo/assim.py +483 -0
  35. vortex/nwp/algo/clim.py +920 -0
  36. vortex/nwp/algo/coupling.py +609 -0
  37. vortex/nwp/algo/eda.py +632 -0
  38. vortex/nwp/algo/eps.py +613 -0
  39. vortex/nwp/algo/forecasts.py +745 -0
  40. vortex/nwp/algo/fpserver.py +927 -0
  41. vortex/nwp/algo/ifsnaming.py +403 -0
  42. vortex/nwp/algo/ifsroot.py +311 -0
  43. vortex/nwp/algo/monitoring.py +202 -0
  44. vortex/nwp/algo/mpitools.py +554 -0
  45. vortex/nwp/algo/odbtools.py +974 -0
  46. vortex/nwp/algo/oopsroot.py +735 -0
  47. vortex/nwp/algo/oopstests.py +186 -0
  48. vortex/nwp/algo/request.py +579 -0
  49. vortex/nwp/algo/stdpost.py +1285 -0
  50. vortex/nwp/data/__init__.py +12 -0
  51. vortex/nwp/data/assim.py +392 -0
  52. vortex/nwp/data/boundaries.py +261 -0
  53. vortex/nwp/data/climfiles.py +539 -0
  54. vortex/nwp/data/configfiles.py +149 -0
  55. vortex/nwp/data/consts.py +929 -0
  56. vortex/nwp/data/ctpini.py +133 -0
  57. vortex/nwp/data/diagnostics.py +181 -0
  58. vortex/nwp/data/eda.py +148 -0
  59. vortex/nwp/data/eps.py +383 -0
  60. vortex/nwp/data/executables.py +1039 -0
  61. vortex/nwp/data/fields.py +96 -0
  62. vortex/nwp/data/gridfiles.py +308 -0
  63. vortex/nwp/data/logs.py +551 -0
  64. vortex/nwp/data/modelstates.py +334 -0
  65. vortex/nwp/data/monitoring.py +220 -0
  66. vortex/nwp/data/namelists.py +644 -0
  67. vortex/nwp/data/obs.py +748 -0
  68. vortex/nwp/data/oopsexec.py +72 -0
  69. vortex/nwp/data/providers.py +182 -0
  70. vortex/nwp/data/query.py +217 -0
  71. vortex/nwp/data/stores.py +147 -0
  72. vortex/nwp/data/surfex.py +338 -0
  73. vortex/nwp/syntax/__init__.py +9 -0
  74. vortex/nwp/syntax/stdattrs.py +375 -0
  75. vortex/nwp/tools/__init__.py +10 -0
  76. vortex/nwp/tools/addons.py +35 -0
  77. vortex/nwp/tools/agt.py +55 -0
  78. vortex/nwp/tools/bdap.py +48 -0
  79. vortex/nwp/tools/bdcp.py +38 -0
  80. vortex/nwp/tools/bdm.py +21 -0
  81. vortex/nwp/tools/bdmp.py +49 -0
  82. vortex/nwp/tools/conftools.py +1311 -0
  83. vortex/nwp/tools/drhook.py +62 -0
  84. vortex/nwp/tools/grib.py +268 -0
  85. vortex/nwp/tools/gribdiff.py +99 -0
  86. vortex/nwp/tools/ifstools.py +163 -0
  87. vortex/nwp/tools/igastuff.py +249 -0
  88. vortex/nwp/tools/mars.py +56 -0
  89. vortex/nwp/tools/odb.py +548 -0
  90. vortex/nwp/tools/partitioning.py +234 -0
  91. vortex/nwp/tools/satrad.py +56 -0
  92. vortex/nwp/util/__init__.py +6 -0
  93. vortex/nwp/util/async.py +184 -0
  94. vortex/nwp/util/beacon.py +40 -0
  95. vortex/nwp/util/diffpygram.py +359 -0
  96. vortex/nwp/util/ens.py +198 -0
  97. vortex/nwp/util/hooks.py +128 -0
  98. vortex/nwp/util/taskdeco.py +81 -0
  99. vortex/nwp/util/usepygram.py +591 -0
  100. vortex/nwp/util/usetnt.py +87 -0
  101. vortex/proxy.py +6 -0
  102. vortex/sessions.py +341 -0
  103. vortex/syntax/__init__.py +9 -0
  104. vortex/syntax/stdattrs.py +628 -0
  105. vortex/syntax/stddeco.py +176 -0
  106. vortex/toolbox.py +982 -0
  107. vortex/tools/__init__.py +11 -0
  108. vortex/tools/actions.py +457 -0
  109. vortex/tools/addons.py +297 -0
  110. vortex/tools/arm.py +76 -0
  111. vortex/tools/compression.py +322 -0
  112. vortex/tools/date.py +20 -0
  113. vortex/tools/ddhpack.py +10 -0
  114. vortex/tools/delayedactions.py +672 -0
  115. vortex/tools/env.py +513 -0
  116. vortex/tools/folder.py +663 -0
  117. vortex/tools/grib.py +559 -0
  118. vortex/tools/lfi.py +746 -0
  119. vortex/tools/listings.py +354 -0
  120. vortex/tools/names.py +575 -0
  121. vortex/tools/net.py +1790 -0
  122. vortex/tools/odb.py +10 -0
  123. vortex/tools/parallelism.py +336 -0
  124. vortex/tools/prestaging.py +186 -0
  125. vortex/tools/rawfiles.py +10 -0
  126. vortex/tools/schedulers.py +413 -0
  127. vortex/tools/services.py +871 -0
  128. vortex/tools/storage.py +1061 -0
  129. vortex/tools/surfex.py +61 -0
  130. vortex/tools/systems.py +3396 -0
  131. vortex/tools/targets.py +384 -0
  132. vortex/util/__init__.py +9 -0
  133. vortex/util/config.py +1071 -0
  134. vortex/util/empty.py +24 -0
  135. vortex/util/helpers.py +184 -0
  136. vortex/util/introspection.py +63 -0
  137. vortex/util/iosponge.py +76 -0
  138. vortex/util/roles.py +51 -0
  139. vortex/util/storefunctions.py +103 -0
  140. vortex/util/structs.py +26 -0
  141. vortex/util/worker.py +150 -0
  142. vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
  143. vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
  144. vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
  145. vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
  146. vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/data/abstractstores.py
@@ -0,0 +1,1572 @@
+ # pylint: disable=unused-argument
+
+ """
+ This module handles store objects in charge of physically accessing resources.
+ Store objects use the :mod:`footprints` mechanism.
+ """
+
+ from collections import defaultdict
+ import contextlib
+ import copy
+ import functools
+ import re
+
+ from bronx.fancies import loggers
+ from bronx.patterns import observer
+ from bronx.stdtypes import date
+ from bronx.system import hash as hashutils
+ import footprints
+
+ from vortex import sessions
+ from vortex.config import from_config
+ from vortex.util import config
+ from vortex.syntax.stdattrs import hashalgo, hashalgo_avail_list, compressionpipeline
+ from vortex.tools import storage
+ from vortex.tools import compression
+ from vortex.tools import net
+ from vortex.tools.env import vartrue
+ from vortex.tools.systems import ExecutionError
+ from vortex.syntax.stdattrs import Namespace
+
+ #: Export base class
+ __all__ = ['Store']
+
+ logger = loggers.getLogger(__name__)
+
+ OBSERVER_TAG = 'Stores-Activity'
+
+ CACHE_PUT_INTENT = 'in'
+ CACHE_GET_INTENT_DEFAULT = 'in'
+
+ ARCHIVE_PUT_INTENT = 'in'
+ ARCHIVE_GET_INTENT_DEFAULT = 'in'
+
+
+ def observer_board(obsname=None):
+     """Proxy to :func:`bronx.patterns.observer.get`."""
+     if obsname is None:
+         obsname = OBSERVER_TAG
+     return observer.get(tag=obsname)
+
+
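Note: every store publishes its activity ('check', 'get', 'put', 'del' notifications, built in `Store._observer_notify` below) on this 'Stores-Activity' board. The following is an illustration only, a minimal listener sketch assuming the usual bronx observer interface (an `Observer` base class with an `updobsitem()` callback and a `register()` method on the board); it is not part of this module.

    from bronx.patterns import observer

    class StoreActivityLogger(observer.Observer):
        """Print every update notified on the 'Stores-Activity' board."""

        def updobsitem(self, item, info):
            # 'info' is the dict built by Store._observer_notify():
            # {'action': ..., 'status': rc, 'remote': ..., possibly 'local': ...}
            print('{action}: rc={status} remote={remote}'.format(**info))

    observer.get(tag='Stores-Activity').register(StoreActivityLogger())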
+ class Store(footprints.FootprintBase):
+     """Root class for any :class:`Store` subclasses."""
+
+     _abstract = True
+     _collector = ('store',)
+     _footprint = [
+         hashalgo,
+         dict(
+             info = 'Default store',
+             attr = dict(
+                 scheme = dict(
+                     alias = ('protocol',)
+                 ),
+                 netloc = dict(
+                     type = Namespace,
+                     alias = ('domain', 'namespace')
+                 ),
+                 storetrack = dict(
+                     type = bool,
+                     default = True,
+                     optional = True,
+                 ),
+                 readonly = dict(
+                     type = bool,
+                     optional = True,
+                     default = False,
+                 ),
+             ),
+         )
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug('Abstract store init %s', self.__class__)
+         sh = kw.pop('system', sessions.system())
+         super().__init__(*args, **kw)
+         self._sh = sh
+         self._observer = observer_board()
+         self._observer.notify_new(self, dict())
+         self._cpipeline = False
+         self.delayed = False
+
+     @property
+     def realkind(self):
+         return 'store'
+
+     @property
+     def system(self):
+         """Shortcut to current system interface."""
+         return self._sh
+
+     def use_cache(self):
+         """Boolean function to check if the current store uses a local cache."""
+         return False
+
+     def use_archive(self):
+         """Boolean function to check if the current store uses a remote archive."""
+         return not self.use_cache()
+
+     def has_fast_check(self):
+         """How fast and reliable is a check call?"""
+         return False
+
+     def _observer_notify(self, action, rc, remote, local=None, options=None):
+         strack = options is None or options.get('obs_notify', True)
+         if self.storetrack and strack:
+             infos = dict(action=action, status=rc, remote=remote)
+             # Is a localpath provided?
+             if local is not None:
+                 infos['local'] = local
+             # We may want to cheat on the localpath...
+             if options is not None and 'obs_overridelocal' in options:
+                 infos['local'] = options['obs_overridelocal']
+             self._observer.notify_upd(self, infos)
+
+     def notyet(self, *args):
+         """
+         Internal method to be used as a critical backup method
+         when a specific method is not yet defined.
+         """
+         logger.critical('Scheme %s not yet implemented', self.scheme)
+
+     @property
+     def writeable(self):
+         return not self.readonly
+
+     def enforce_readonly(self):
+         if self.readonly:
+             raise OSError('This store is in readonly mode')
+
+     @staticmethod
+     def _verbose_log(options, level, *kargs, **kwargs):
+         slevel = kwargs.pop('slevel', 'debug')
+         if options is not None and options.get('silent', False):
+             level = slevel
+         getattr(logger, level)(*kargs, **kwargs)
+
+     @property
+     def _actual_cpipeline(self):
+         """Check if the current store has a CompressionPipeline."""
+         if self._cpipeline is False:
+             cpipeline_desc = getattr(self, 'store_compressed', None)
+             if cpipeline_desc is not None:
+                 self._cpipeline = compression.CompressionPipeline(self.system,
+                                                                   cpipeline_desc)
+             else:
+                 self._cpipeline = None
+         return self._cpipeline
+
+     @property
+     def tracking_extraargs(self):
+         """When tracking get/put requests: extra args that will be added to the URI query."""
+         return dict()
+
+     def _incache_inarchive_check(self, options):
+         rc = True
+         incache = options.get('incache', False)
+         inarchive = options.get('inarchive', False)
+         if incache and inarchive:
+             raise ValueError("'incache=True' and 'inarchive=True' are mutually exclusive")
+         if incache and not self.use_cache():
+             self._verbose_log(options, 'info',
+                               'Skip this "%s" store because a cache is requested', self.__class__)
+             rc = False
+         if inarchive and not self.use_archive():
+             self._verbose_log(options, 'info',
+                               'Skip this "%s" store because an archive is requested', self.__class__)
+             rc = False
+         return rc
+
+     def _hash_check_or_delete(self, callback, remote, options):
+         """Check or delete a hash file."""
+         if (self.storehash is None) or (remote['path'].endswith('.' + self.storehash)):
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote['path'] = remote['path'] + '.' + self.storehash
+         return callback(remote, options)
+
+     @staticmethod
+     def _options_fixup(options):
+         return dict() if options is None else options
+
+     def check(self, remote, options=None):
+         """Proxy method to dedicated check method according to scheme."""
+         logger.debug('Store check from %s', remote)
+         options = self._options_fixup(options)
+         if not self._incache_inarchive_check(options):
+             return False
+         rc = getattr(self, self.scheme + 'check', self.notyet)(remote, options)
+         self._observer_notify('check', rc, remote, options=options)
+         return rc
+
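The proxy methods of this class (check above, locate/list/get/put below) all resolve `self.scheme + '<action>'` dynamically, falling back to `notyet()`, so a concrete subclass only has to provide per-scheme callbacks. A hypothetical sketch (the `FtpStore` name and its footprint values are made up for illustration):

    class FtpStore(Store):
        _footprint = dict(
            info = 'Hypothetical FTP-only store',
            attr = dict(
                scheme = dict(values = ['ftp']),
                netloc = dict(values = ['open.archive.fr']),
            )
        )

        def ftpcheck(self, remote, options):
            # Store.check() dispatches here whenever scheme == 'ftp'.
            # A real implementation would probe the remote server.
            return False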
+     def locate(self, remote, options=None):
+         """Proxy method to dedicated locate method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store locate %s', remote)
+         if not self._incache_inarchive_check(options):
+             return None
+         return getattr(self, self.scheme + 'locate', self.notyet)(remote, options)
+
+     def list(self, remote, options=None):
+         """Proxy method to dedicated list method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store list %s', remote)
+         if not self._incache_inarchive_check(options):
+             return None
+         return getattr(self, self.scheme + 'list', self.notyet)(remote, options)
+
+     def prestage_advertise(self, remote, options=None):
+         """Use the Stores-Activity observer board to advertise the prestaging request.
+
+         Hopefully, something will register to the observer board in order to process
+         the request.
+         """
+         options = self._options_fixup(options)
+         logger.debug('Store prestage through hub %s', remote)
+         infos_cb = getattr(self, self.scheme + 'prestageinfo', None)
+         if infos_cb:
+             infodict = infos_cb(remote, options)
+             infodict.setdefault('issuerkind', self.realkind)
+             infodict.setdefault('scheme', self.scheme)
+             if options and 'priority' in options:
+                 infodict['priority'] = options['priority']
+             infodict['action'] = 'prestage_req'
+             self._observer.notify_upd(self, infodict)
+         else:
+             logger.info('Prestaging is not supported for scheme: %s', self.scheme)
+         return True
+
+     def prestage(self, remote, options=None):
+         """Proxy method to dedicated prestage method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store prestage %s', remote)
+         if not self._incache_inarchive_check(options):
+             return True
+         return getattr(self, self.scheme + 'prestage', self.prestage_advertise)(remote, options)
+
+     @staticmethod
+     def _hash_store_defaults(options):
+         """Update default options when fetching hash files."""
+         options = options.copy()
+         options['obs_notify'] = False
+         options['fmt'] = 'ascii'
+         options['intent'] = CACHE_GET_INTENT_DEFAULT
+         options['auto_tarextract'] = False
+         options['auto_dirextract'] = False
+         return options
+
+     def _hash_get_check(self, callback, remote, local, options):
+         """Fetch the hash file and check **local** against it."""
+         if (self.storehash is None) or (remote['path'].endswith('.' + self.storehash)):
+             return True
+         if isinstance(local, str) and not self.system.path.isfile(local):
+             logger.info("< %s > is not a plain file. The checksum can't be checked.", local)
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote['path'] = remote['path'] + '.' + self.storehash  # Name of the hash file
+         remote['query'].pop('extract', None)  # Ignore any extract request
+         try:
+             tempcontainer = None
+             try:
+                 # First, try to fetch the sum in a real file
+                 # (in order to potentially use ftserv...)
+                 tempcontainer = footprints.proxy.container(shouldfly=True, mode='rb')
+                 try:
+                     rc = callback(remote, tempcontainer.iotarget(), options)
+                 except (OSError, ExecutionError):
+                     # This may happen if the user has insufficient rights on
+                     # the current directory
+                     tempcontainer = footprints.proxy.container(incore=True, mode='w+b')
+                     rc = callback(remote, tempcontainer.iotarget(), options)
+             except (OSError, ExecutionError):
+                 logger.warning('Something went very wrong when fetching the hash file! (assuming rc=False)')
+                 rc = False
+             # Check the hash key
+             hadapt = hashutils.HashAdapter(self.storehash)
+             rc = rc and hadapt.filecheck(local, tempcontainer)
+             if rc:
+                 logger.info("%s hash sanity check succeeded.", self.storehash)
+             else:
+                 logger.warning("%s hash sanity check failed.", self.storehash)
+         finally:
+             if tempcontainer is not None:
+                 tempcontainer.clear()
+         return rc
+
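For the record, the hash-file helpers above implement a sidecar-checksum convention: for a resource foo.grib stored with storehash='md5', a companion foo.grib.md5 travels with it (written by _hash_put below, verified by _hash_get_check through bronx's HashAdapter). A self-contained sketch of the same idea using only hashlib (illustration only, not the HashAdapter API):

    import hashlib

    def write_sidecar(path, algo='md5'):
        """Write path + '.<algo>' containing the hex digest of 'path'."""
        h = hashlib.new(algo)
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(65536), b''):
                h.update(chunk)
        with open(path + '.' + algo, 'w') as fh:
            fh.write(h.hexdigest() + '\n')
        return path + '.' + algo

    def check_sidecar(path, algo='md5'):
        """Re-compute the digest and compare it to the sidecar content."""
        h = hashlib.new(algo)
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(65536), b''):
                h.update(chunk)
        with open(path + '.' + algo) as fh:
            return fh.read().split()[0] == h.hexdigest()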
+     def _actual_get(self, action, remote, local, options, result_id=None):
+         """Proxy method to dedicated get method according to scheme."""
+         logger.debug('Store %s from %s to %s', action, remote, local)
+         if not self._incache_inarchive_check(options):
+             return False
+         if not options.get('insitu', False) or self.use_cache():
+             if result_id:
+                 rc = getattr(self, self.scheme + action, self.notyet)(result_id, remote, local, options)
+             else:
+                 rc = getattr(self, self.scheme + action, self.notyet)(remote, local, options)
+             self._observer_notify('get', rc, remote, local=local, options=options)
+             return rc
+         else:
+             logger.error('Only cache stores can be used when insitu is True.')
+             return False
+
+     def get(self, remote, local, options=None):
+         """Proxy method to dedicated get method according to scheme."""
+         options = self._options_fixup(options)
+         return self._actual_get('get', remote, local, options)
+
+     def earlyget(self, remote, local, options=None):
+         """Proxy method to dedicated earlyget method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store earlyget from %s to %s', remote, local)
+         if not self._incache_inarchive_check(options):
+             return None
+         rc = None
+         if not options.get('insitu', False) or self.use_cache():
+             available_dget = getattr(self, self.scheme + 'earlyget', None)
+             if available_dget is not None:
+                 rc = available_dget(remote, local, options)
+         return rc
+
+     def finaliseget(self, result_id, remote, local, options=None):
+         """Proxy method to dedicated finaliseget method according to scheme."""
+         options = self._options_fixup(options)
+         return self._actual_get('finaliseget', remote, local, options, result_id=result_id)
+
+     def _hash_put(self, callback, local, remote, options):
+         """Put a hash file next to the 'real' file."""
+         if (self.storehash is None) or (remote['path'].endswith('.' + self.storehash)):
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote['path'] = remote['path'] + '.' + self.storehash
+         # Generate the hash sum
+         hadapt = hashutils.HashAdapter(self.storehash)
+         tmplocal = hadapt.file2hash_fh(local)
+         # Write it wherever the original store wants to.
+         return callback(tmplocal, remote, options)
+
+     def put(self, local, remote, options=None):
+         """Proxy method to dedicated put method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store put from %s to %s', local, remote)
+         self.enforce_readonly()
+         if not self._incache_inarchive_check(options):
+             return True
+         filtered = False
+         if options is not None and 'urifilter' in options:
+             filtered = options['urifilter'](self, remote)
+         if filtered:
+             rc = True
+             logger.info("This remote URI has been filtered out: we are skipping it.")
+         else:
+             dryrun = False
+             if options is not None and 'dryrun' in options:
+                 dryrun = options['dryrun']
+             rc = dryrun or getattr(self, self.scheme + 'put', self.notyet)(local, remote, options)
+         self._observer_notify('put', rc, remote, local=local, options=options)
+         return rc
+
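Both hooks honoured by put() above are plain dictionary options. A hedged usage sketch (the `store` variable stands for any concrete, writeable Store; all paths are made up):

    # Skip any remote whose path lives under '/scratch', and do not
    # actually transfer anything (dryrun):
    options = dict(
        urifilter=lambda store, remote: remote['path'].startswith('/scratch'),
        dryrun=True,
    )
    rc = store.put('localfile.grib', dict(path='/scratch/testfile.grib'), options)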
+     def delete(self, remote, options=None):
+         """Proxy method to dedicated delete method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug('Store delete from %s', remote)
+         self.enforce_readonly()
+         if not self._incache_inarchive_check(options):
+             return True
+         rc = getattr(self, self.scheme + 'delete', self.notyet)(remote, options)
+         self._observer_notify('del', rc, remote, options=options)
+         return rc
+
+
+ class MultiStore(footprints.FootprintBase):
+     """Aggregate various :class:`Store` items."""
+
+     _abstract = True
+     _collector = ('store',)
+     _footprint = [
+         compressionpipeline,  # Not used by cache stores but ok, just in case...
+         hashalgo,
+         dict(
+             info = 'Multi store',
+             attr = dict(
+                 scheme = dict(
+                     alias = ('protocol',)
+                 ),
+                 netloc = dict(
+                     type = Namespace,
+                     alias = ('domain', 'namespace')
+                 ),
+                 refillstore = dict(
+                     type = bool,
+                     optional = True,
+                     default = False,
+                 ),
+                 storehash = dict(
+                     values = hashalgo_avail_list,
+                 ),
+                 # ArchiveStores only, but harmless for others...
+                 storage = dict(
+                     optional = True,
+                     default = None,
+                 ),
+                 storetube = dict(
+                     optional = True,
+                 ),
+                 storeroot = dict(
+                     optional = True,
+                 )
+             ),
+         ),
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug('Abstract multi store init %s', self.__class__)
+         sh = kw.pop('system', sessions.system())
+         super().__init__(*args, **kw)
+         self._sh = sh
+         self._openedstores = self.loadstores()
+         self.delayed = False
+
+     @property
+     def realkind(self):
+         return 'multistore'
+
+     @property
+     def system(self):
+         """Shortcut to current system interface."""
+         return self._sh
+
+     @staticmethod
+     def _verbose_log(options, level, *kargs, **kwargs):
+         slevel = kwargs.pop('slevel', 'debug')
+         if options is not None and options.get('silent', False):
+             level = slevel
+         getattr(logger, level)(*kargs, **kwargs)
+
+     def loadstores(self):
+         """
+         Load default stores during the initialisation of the current object.
+         Stores could be reloaded at any time. The current method provides
+         a default loading mechanism through the actual module :func:`load` function
+         and an alternate list of footprint descriptors as returned by method
+         :func:`alternates_fp`.
+         """
+         activestores = list()
+         for desc in self.alternates_fp():
+             xstore = footprints.proxy.store(**desc)
+             if xstore:
+                 activestores.append(xstore)
+         logger.debug('Multistore %s includes active stores %s', self, activestores)
+         return activestores
+
+     @property
+     def openedstores(self):
+         return self._openedstores
+
+     def filtered_readable_openedstores(self, remote):  # @UnusedVariable
+         return self._openedstores
+
+     def filtered_writeable_openedstores(self, remote):  # @UnusedVariable
+         return self._openedstores
+
+     def alternates_scheme(self):
+         """Default method returns actual scheme in a tuple."""
+         return (self.scheme,)
+
+     def alternates_netloc(self):
+         """Abstract method."""
+         pass
+
+     def alternates_fpextras(self):
+         """Abstract method."""
+         return dict()
+
+     def alternates_fp(self):
+         """
+         Returns a list of anonymous descriptions to be used as footprint entries
+         while loading alternate stores.
+         """
+         return [
+             dict(system=self.system,
+                  storehash=self.storehash, store_compressed=self.store_compressed,
+                  storage=self.storage, storetube=self.storetube,
+                  storeroot=self.storeroot,
+                  scheme=x, netloc=y, **self.alternates_fpextras())
+             for x in self.alternates_scheme()
+             for y in self.alternates_netloc()
+         ]
+
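In practice a concrete MultiStore mostly overrides alternates_netloc() (and possibly alternates_fpextras()); loadstores() then instantiates one child store per scheme/netloc combination returned by alternates_fp(). A hypothetical sketch (all names made up for illustration):

    class DemoMultiStore(MultiStore):
        _footprint = dict(
            info = 'Hypothetical multi store',
            attr = dict(
                scheme = dict(values = ['demo']),
                netloc = dict(values = ['demo.multi.fr']),
            )
        )

        def alternates_netloc(self):
            # Cache first, archive as a last resort
            return ('demo.cache.fr', 'demo.archive.fr')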
+     def use_cache(self):
+         """Boolean function to check if any included store uses a local cache."""
+         return any([x.use_cache() for x in self.openedstores])
+
+     def use_archive(self):
+         """Boolean function to check if any included store uses a remote archive."""
+         return any([x.use_archive() for x in self.openedstores])
+
+     def has_fast_check(self):
+         """How fast and reliable is a check call?"""
+         return all([x.has_fast_check() for x in self.openedstores])
+
+     @property
+     def readonly(self):
+         return all([x.readonly for x in self.openedstores])
+
+     @property
+     def writeable(self):
+         return not self.readonly
+
+     @staticmethod
+     def _options_fixup(options):
+         return dict() if options is None else options
+
+     def check(self, remote, options=None):
+         """Go through internal opened stores and check for the resource."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore check from %s', remote)
+         rc = False
+         for sto in self.filtered_readable_openedstores(remote):
+             rc = sto.check(remote.copy(), options)
+             if rc:
+                 break
+         return rc
+
+     def locate(self, remote, options=None):
+         """Go through internal opened stores and locate the expected resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore locate %s', remote)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         if not f_ostores:
+             return False
+         rloc = list()
+         for sto in f_ostores:
+             logger.debug('Multistore locate at %s', sto)
+             tmp_rloc = sto.locate(remote.copy(), options)
+             if tmp_rloc:
+                 rloc.append(tmp_rloc)
+         return ';'.join(rloc)
+
+     def list(self, remote, options=None):
+         """Go through internal opened stores and list the expected resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore list %s', remote)
+         rlist = set()
+         for sto in self.filtered_readable_openedstores(remote):
+             logger.debug('Multistore list at %s', sto)
+             tmp_rloc = sto.list(remote.copy(), options)
+             if isinstance(tmp_rloc, (list, tuple, set)):
+                 rlist.update(tmp_rloc)
+             elif tmp_rloc is True:
+                 return True
+         return sorted(rlist)
+
+     def prestage(self, remote, options=None):
+         """Go through internal opened stores and prestage the resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore prestage %s', remote)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         if not f_ostores:
+             return False
+         if len(f_ostores) == 1:
+             logger.debug('Multistore prestage at %s', f_ostores[0])
+             rc = f_ostores[0].prestage(remote.copy(), options)
+         else:
+             rc = True
+             for sto in f_ostores:
+                 if sto.check(remote.copy(), options):
+                     logger.debug('Multistore prestage at %s', sto)
+                     rc = sto.prestage(remote.copy(), options)
+                     break
+         return rc
+
+     def _refilling_get(self, remote, local, options, result_id=None):
+         """Go through internal opened stores for the first available resource."""
+         rc = False
+         refill_in_progress = True
+         f_rd_ostores = self.filtered_readable_openedstores(remote)
+         if self.refillstore:
+             f_wr_ostores = self.filtered_writeable_openedstores(remote)
+         get_options = copy.copy(options)
+         get_options['silent'] = True
+         while refill_in_progress:
+             for num, sto in enumerate(f_rd_ostores):
+                 logger.debug('Multistore get at %s', sto)
+                 if result_id and num == len(f_rd_ostores) - 1:
+                     rc = sto.finaliseget(result_id, remote.copy(), local, get_options)
+                     result_id = None  # result_ids cannot be re-used during refills
+                 else:
+                     rc = sto.get(remote.copy(), local, get_options)
+                     if rc:
+                         result_id = None  # result_ids cannot be re-used during refills
+                 # Are we trying a refill? -> find the previous writeable stores
+                 restores = []
+                 if rc and self.refillstore and num > 0:
+                     restores = [ostore for ostore in f_rd_ostores[:num]
+                                 if (ostore.writeable and ostore in f_wr_ostores and
+                                     ostore.use_cache())]
+                 # Do the refills and check if one of them succeeded
+                 refill_in_progress = False
+                 for restore in restores:
+                     # Another refill may have filled the gap...
+                     if not restore.check(remote.copy(), options):
+                         logger.info('Refill back in writeable store [%s].', restore)
+                         try:
+                             refill_in_progress = ((restore.put(local, remote.copy(), options) and
+                                                    (options.get('intent', CACHE_GET_INTENT_DEFAULT) !=
+                                                     CACHE_PUT_INTENT)) or
+                                                   refill_in_progress)
+                         except (ExecutionError, OSError) as e:
+                             logger.error("An error happened during the refill: %s", str(e))
+                             logger.error("This error is ignored... but that's ugly!")
+                 if refill_in_progress:
+                     logger.info("Starting another round because at least one refill succeeded.")
+                 # Whatever the refill's outcome, that's fine
+                 if rc:
+                     break
+         if not rc:
+             self._verbose_log(options, 'warning',
+                               "Multistore get {:s}://{:s}: none of the opened stores succeeded."
+                               .format(self.scheme, self.netloc), slevel='info')
+         return rc
+
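_refilling_get implements a cascade with write-back: the opened stores are tried in order and, on success, the resource is copied back ("refilled") into every earlier writeable cache store that misses it; a successful refill may trigger one more round so the data is finally served from the closest cache. A deliberately simplified sketch of that idea (no retry round, no early-get result ids; `stores` stands for any list of Store-like objects):

    def cascade_get(stores, remote, local):
        """Simplified view of the refill logic above."""
        for num, sto in enumerate(stores):
            if sto.get(remote.copy(), local, dict(silent=True)):
                # Success: replay the resource into every earlier cache store,
                # so that the next request is served closer to the user.
                for earlier in stores[:num]:
                    if (earlier.writeable and earlier.use_cache() and
                            not earlier.check(remote.copy())):
                        earlier.put(local, remote.copy())
                return True
        return False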
+     def get(self, remote, local, options=None):
+         """Go through internal opened stores for the first available resource."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore get from %s to %s', remote, local)
+         return self._refilling_get(remote, local, options)
+
+     def earlyget(self, remote, local, options=None):
+         """Attempt an early get on the relevant opened store."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore earlyget from %s to %s', remote, local)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         get_options = copy.copy(options)
+         if len(f_ostores) > 1:
+             first_checkable = all([s.has_fast_check() for s in f_ostores[:-1]])
+             # Early-fetch is only available on the last resort store...
+             if first_checkable and all([not s.check(remote.copy(), get_options)
+                                         for s in f_ostores[:-1]]):
+                 return f_ostores[-1].earlyget(remote.copy(), local, get_options)
+             else:
+                 return None
+         elif len(f_ostores) == 1:
+             return f_ostores[0].earlyget(remote.copy(), local, get_options)
+         else:
+             return None
+
+     def finaliseget(self, result_id, remote, local, options=None):
+         """Finalise a previous early get request."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore finaliseget from %s to %s', remote, local)
+         return self._refilling_get(remote, local, options, result_id=result_id)
+
+     def put(self, local, remote, options=None):
+         """Go through internal opened stores and put the resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore put from %s to %s', local, remote)
+         f_ostores = self.filtered_writeable_openedstores(remote)
+         if not f_ostores:
+             logger.warning('Funny attempt to put on an empty multistore...')
+             return False
+         rc = True
+         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
+             logger.debug('Multistore put at %s', sto)
+             rcloc = sto.put(local, remote.copy(), options)
+             logger.debug('Multistore out = %s', rcloc)
+             rc = rc and rcloc
+         return rc
+
+     def delete(self, remote, options=None):
+         """Go through internal opened stores and delete the resource."""
+         options = self._options_fixup(options)
+         logger.debug('Multistore delete from %s', remote)
+         f_ostores = self.filtered_writeable_openedstores(remote)
+         rc = False
+         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
+             logger.debug('Multistore delete at %s', sto)
+             rc = sto.delete(remote.copy(), options)
+             if not rc:
+                 break
+         return rc
+
+
+ class ArchiveStore(Store):
+     """Generic Archive Store."""
+
+     _archives_object_stack = set()
+
+     _abstract = True
+     _footprint = [
+         compressionpipeline,
+         dict(
+             info = 'Generic archive store',
+             attr = dict(
+                 scheme = dict(
+                     values = ['inarchive', ],
+                 ),
+                 netloc = dict(
+                     values = ['open.archive.fr'],
+                 ),
+                 storehash = dict(
+                     values = hashalgo_avail_list,
+                 ),
+                 storage = dict(
+                     optional = True,
+                 ),
+                 storetube = dict(
+                     optional = True,
+                 ),
+                 storeroot = dict(
+                     optional = True,
+                 ),
+                 storehead = dict(
+                     optional = True,
+                 ),
+                 storetrue = dict(
+                     type = bool,
+                     optional = True,
+                     default = True,
+                 ),
+             )
+         ),
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug('Archive store init %s', self.__class__)
+         self._archive = None
+         self._actual_storage = None
+         self._actual_storetube = None
+         super().__init__(*args, **kw)
+         self._actual_storage = self.storage
+         self._actual_storetube = self.storetube
+
+     @property
+     def realkind(self):
+         return 'archivestore'
+
+     @property
+     def tracking_extraargs(self):
+         tea = super().tracking_extraargs
+         if self.storage:
+             tea['storage'] = self.storage
+         return tea
+
+     def _str_more(self):
+         return 'archive={!r}'.format(self.archive)
+
+     @property
+     def underlying_archive_kind(self):
+         return 'std'
+
+     @property
+     def actual_storage(self):
+         """The archive network name (potentially read from the configuration file)."""
+         if self._actual_storage is None:
+             self._actual_storage = (
+                 self.system.env.VORTEX_DEFAULT_STORAGE or
+                 self.system.glove.default_fthost or
+                 from_config(section="storage", key="address")
+             )
+         if self._actual_storage is None:
+             raise ValueError('Unable to find the archive network name.')
+         return self._actual_storage
+
+     @property
+     def actual_storetube(self):
+         """The archive access method (potentially read from the configuration file)."""
+         if self._actual_storetube is None:
+             self._actual_storetube = from_config(
+                 section="storage", key="protocol",
+             )
+         if self._actual_storetube is None:
+             raise ValueError('Unable to find the archive access method.')
+         return self._actual_storetube
+
+     def _get_archive(self):
+         """Create a new Archive object only if needed."""
+         if not self._archive:
+             self._archive = footprints.proxy.archives.default(
+                 kind=self.underlying_archive_kind,
+                 storage=self.actual_storage,
+                 tube=self.actual_storetube,
+                 readonly=self.readonly,
+             )
+             self._archives_object_stack.add(self._archive)
+         return self._archive
+
+     def _set_archive(self, newarchive):
+         """Set a new archive reference."""
+         if isinstance(newarchive, storage.Archive):
+             self._archive = newarchive
+
+     def _del_archive(self):
+         """Invalidate internal archive reference."""
+         self._archive = None
+
+     archive = property(_get_archive, _set_archive, _del_archive)
+
+     def _inarchiveformatpath(self, remote):
+         # Remove extra slashes
+         formatted = remote['path'].lstrip(self.system.path.sep)
+         # Store head?
+         if self.storehead:
+             formatted = self.system.path.join(self.storehead, formatted)
+         # Store root (if specified)
+         pathroot = remote.get('root', self.storeroot)
+         if pathroot is not None:
+             formatted = self.system.path.join(pathroot, formatted)
+         return formatted
+
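A worked example of the path composition above (all values hypothetical):

    # remote['path'] = '/arpege/4dvar/analysis.fa'  -> 'arpege/4dvar/analysis.fa'
    # storehead = 'vortex'                          -> 'vortex/arpege/4dvar/analysis.fa'
    # storeroot = '/home/m/marp/marp999'            ->
    #     '/home/m/marp/marp999/vortex/arpege/4dvar/analysis.fa'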
+     def inarchivecheck(self, remote, options):
+         """Use the archive object to check if **remote** exists."""
+         # Check the hash file as well (if relevant)...
+         if self._hash_check_or_delete(self.inarchivecheck, remote, options):
+             return self.archive.check(self._inarchiveformatpath(remote),
+                                       username=remote.get('username', None),
+                                       fmt=options.get('fmt', 'foo'),
+                                       compressionpipeline=self._actual_cpipeline)
+         else:
+             return False
+
+     def inarchivelocate(self, remote, options):
+         """Use the archive object to obtain **remote**'s physical location."""
+         return self.archive.fullpath(self._inarchiveformatpath(remote),
+                                      username=remote.get('username', None),
+                                      fmt=options.get('fmt', 'foo'),
+                                      compressionpipeline=self._actual_cpipeline)
+
+     def inarchivelist(self, remote, options):
+         """Use the archive object to list available files."""
+         return self.archive.list(self._inarchiveformatpath(remote),
+                                  username=remote.get('username', None))
+
+     def inarchiveprestageinfo(self, remote, options):
+         """Return the prestaging information."""
+         return self.archive.prestageinfo(self._inarchiveformatpath(remote),
+                                          username=remote.get('username', None),
+                                          fmt=options.get('fmt', 'foo'),
+                                          compressionpipeline=self._actual_cpipeline)
+
+     def inarchiveget(self, remote, local, options):
+         """Use the archive object to retrieve **remote** in **local**."""
+         logger.info('inarchiveget on %s://%s/%s (to: %s)',
+                     self.scheme, self.netloc, self._inarchiveformatpath(remote), local)
+         rc = self.archive.retrieve(
+             self._inarchiveformatpath(remote), local,
+             intent=options.get('intent', ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get('fmt', 'foo'),
+             info=options.get('rhandler', None),
+             username=remote['username'],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc and self._hash_get_check(self.inarchiveget, remote, local, options)
+
+     def inarchiveearlyget(self, remote, local, options):
+         """Use the archive object to initiate an early get request on **remote**."""
+         logger.debug('inarchiveearlyget on %s://%s/%s (to: %s)',
+                      self.scheme, self.netloc, self._inarchiveformatpath(remote), local)
+         rc = self.archive.earlyretrieve(
+             self._inarchiveformatpath(remote), local,
+             intent=options.get('intent', ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get('fmt', 'foo'),
+             info=options.get('rhandler', None),
+             username=remote['username'],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc
+
+     def inarchivefinaliseget(self, result_id, remote, local, options):
+         """Use the archive object to finalise the **result_id** early get request."""
+         logger.info('inarchivefinaliseget on %s://%s/%s (to: %s)',
+                     self.scheme, self.netloc, self._inarchiveformatpath(remote), local)
+         rc = self.archive.finaliseretrieve(
+             result_id,
+             self._inarchiveformatpath(remote), local,
+             intent=options.get('intent', ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get('fmt', 'foo'),
+             info=options.get('rhandler', None),
+             username=remote['username'],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc and self._hash_get_check(self.inarchiveget, remote, local, options)
+
+     def inarchiveput(self, local, remote, options):
+         """Use the archive object to put **local** to **remote**."""
+         logger.info('inarchiveput to %s://%s/%s (from: %s)',
+                     self.scheme, self.netloc, self._inarchiveformatpath(remote), local)
+         rc = self.archive.insert(
+             self._inarchiveformatpath(remote), local,
+             intent=ARCHIVE_PUT_INTENT,
+             fmt=options.get('fmt', 'foo'),
+             info=options.get('rhandler'),
+             username=remote['username'],
+             compressionpipeline=self._actual_cpipeline,
+             enforcesync=options.get('enforcesync', False),
+             usejeeves=options.get('delayed', None),
+         )
+         return rc and self._hash_put(self.inarchiveput, local, remote, options)
+
+     def inarchivedelete(self, remote, options):
+         """Use the archive object to delete **remote**."""
+         logger.info('inarchivedelete on %s://%s/%s',
+                     self.scheme, self.netloc, self._inarchiveformatpath(remote))
+         # Try to delete the md5 file but ignore errors...
+         self._hash_check_or_delete(self.inarchivedelete, remote, options)
+         return self.archive.delete(
+             self._inarchiveformatpath(remote),
+             fmt=options.get('fmt', 'foo'),
+             info=options.get('rhandler', None),
+             username=remote['username'],
+             compressionpipeline=self._actual_cpipeline,
+         )
+
+
+ def _default_remoteconfig_dict():
+     """Just a utility function for :class:`ConfigurableArchiveStore`."""
+     return dict(restrict=None, seen=False)
+
+
+ class ConfigurableArchiveStore:
+     """Generic Archive Store with the ability to read a configuration file.
+
+     This is a mixin class...
+     """
+
+     #: Path to the Store configuration file (please overwrite!)
+     _store_global_config = None
+     _datastore_id = None
+     _re_subhosting = re.compile(r'(.*)\s+hosted\s+by\s+([-\w]+)$')
+
+     @staticmethod
+     def _get_remote_config(store, url, container):
+         """Fetch a configuration file from **url** using **store**."""
+         rc = store.get(url, container.iotarget(), dict(fmt='ascii'))
+         if rc:
+             return config.GenericConfigParser(inifile=container.iotarget())
+         else:
+             return None
+
+     @staticmethod
+     def _please_fix(what):
+         logger.error('Please fix that quickly... Meanwhile, "%s" is ignored!', what)
+
+     def _process_location_section(self, section, section_items):
+         section_data = dict()
+         m_section = self._re_subhosting.match(section)
+         if m_section:
+             # A "hosted by" section
+             section_data['idrestricts'] = list()
+             for k, v in section_items:
+                 if k.endswith('_idrestrict'):
+                     try:
+                         compiled_re = re.compile(v)
+                         section_data['idrestricts'].append(compiled_re)
+                     except re.error as e:
+                         logger.error('The regex provided for "%s" in section "%s" does not compile: "%s".',
+                                      k, section, str(e))
+                         self._please_fix(k)
+                 elif k == 'idrestricts':
+                     logger.error('An "%s" entry was found in section "%s". This is not ok.', k, section)
+                     self._please_fix(k)
+                 else:
+                     section_data[k] = v
+             if section_data['idrestricts']:
+                 return m_section.group(1), m_section.group(2), section_data
+             else:
+                 logger.error('No acceptable "_idrestrict" entry was found in section "%s".', section)
+                 self._please_fix(section)
+                 return None, None, None
+         else:
+             # The usual/generic section
+             for k, v in section_items:
+                 if k.endswith('_idrestrict') or k == 'idrestricts':
+                     logger.error('A "*idrestrict*" entry was found in section "%s". This is not ok.', section)
+                     self._please_fix(section)
+                     return None, None, None
+                 section_data[k] = v
+             return section, None, section_data
+
+     def _ingest_remote_config(self, r_id, r_confdict, global_confdict):
+         logger.info("Reading config file: %s (id=%s)", r_confdict['uri'], r_id)
+         url = net.uriparse(r_confdict['uri'])
+         tempstore = footprints.proxy.store(
+             scheme=url['scheme'],
+             netloc=url['netloc'],
+             storetrack=False,
+         )
+         retry = False
+         # First, try with a temporary ShouldFly container
+         try:
+             tempcontainer = footprints.proxy.container(shouldfly=True)
+             remotecfg_parser = self._get_remote_config(tempstore, url, tempcontainer)
+         except OSError:
+             # This may happen if the user has insufficient rights on
+             # the current directory
+             retry = True
+         finally:
+             self.system.remove(tempcontainer.filename)
+         # Is a retry needed? This time a completely virtual file is used.
+         if retry:
+             remotecfg_parser = self._get_remote_config(tempstore, url,
+                                                        footprints.proxy.container(incore=True))
+         # Update the configuration using the parser
+         if remotecfg_parser is not None:
+             for section in remotecfg_parser.sections():
+                 s_loc, s_entry, s_data = self._process_location_section(
+                     section,
+                     remotecfg_parser.items(section)
+                 )
+                 if s_loc is not None:
+                     logger.debug("New location entry found: %s (subentry: %s)", s_loc, s_entry)
+                     # Filtering based on the regex: no collisions allowed!
+                     if r_confdict['restrict'] is not None:
+                         if r_confdict['restrict'].search(s_loc):
+                             global_confdict['locations'][s_loc][s_entry] = s_data
+                         else:
+                             logger.error('According to the "restrict" clause, ' +
+                                          'you are not allowed to define the "%s" location!', s_loc)
+                             self._please_fix(section)
+                     else:
+                         global_confdict['locations'][s_loc][s_entry] = s_data
+             r_confdict['seen'] = True
+         else:
+             raise OSError("The remote configuration {:s} couldn't be found."
+                           .format(r_confdict['uri']))
+
+     def _load_config(self, conf, tlocation):
+         """Load the store configuration.
+
+         1. The global store's configuration file is read (see
+            ``self._store_global_config``)
+         2. Given ``self.storage``, the proper section of the global configuration
+            file is read: it may contain localconf or remoteconfXXX options that
+            describe additional configuration files
+         3. First, the local configuration file is read
+         4. Then, the remote configuration files are read
+
+         The relevant content of the configuration files is stored in the ``conf``
+         dictionary.
+         """
+         # Because _store_global_config and _datastore_id must be overwritten...
+         assert self._store_global_config is not None
+         assert self._datastore_id is not None
+
+         if not conf:
+             # This is the first call to this method
+             logger.info("Some store configuration data is needed (for %s://%s)",
+                         self.scheme, self.netloc)
+
+             # Global configuration file
+             logger.info("Reading config file: %s", self._store_global_config)
+             maincfg = config.GenericConfigParser(inifile=self._store_global_config)
+             if self.actual_storage in maincfg.sections():
+                 conf['host'] = dict(maincfg.items(self.actual_storage))
+             else:
+                 conf['host'] = dict(maincfg.defaults())
+
+             conf['locations'] = defaultdict(functools.partial(defaultdict, dict))
+             conf['remoteconfigs'] = defaultdict(_default_remoteconfig_dict)
+             conf['uuids_cache'] = dict()
+
+             # Look for a local configuration file
+             localcfg = conf['host'].get('localconf', None)
+             if localcfg is not None:
+                 logger.info("Reading config file: %s", localcfg)
+                 localcfg = config.GenericConfigParser(inifile=localcfg)
+                 conf['locations']['generic'][None] = localcfg.defaults()
+                 for section in localcfg.sections():
+                     s_loc, s_entry, s_data = self._process_location_section(
+                         section,
+                         localcfg.items(section)
+                     )
+                     if s_loc is not None:
+                         logger.debug("New location entry found: %s (subentry: %s)", s_loc, s_entry)
+                         conf['locations'][s_loc][s_entry] = s_data
+
+             # Look for remote configurations
+             tg_inet = self.system.default_target.inetname
+             for key in conf['host'].keys():
+                 k_match = re.match(r'generic_(remoteconf\w*)_uri$', key)
+                 if k_match:
+                     r_id = k_match.group(1)
+                     g_uri_key = key
+                     i_uri_key = '{:s}_{:s}_uri'.format(tg_inet, r_id)
+                     g_restrict_key = 'generic_{:s}_restrict'.format(r_id)
+                     i_restrict_key = '{:s}_{:s}_restrict'.format(tg_inet, r_id)
+                     if i_uri_key in conf['host'].keys():
+                         conf['remoteconfigs'][r_id]['uri'] = conf['host'][i_uri_key]
+                     else:
+                         conf['remoteconfigs'][r_id]['uri'] = conf['host'][g_uri_key]
+                     if i_restrict_key in conf['host'].keys():
+                         conf['remoteconfigs'][r_id]['restrict'] = conf['host'][i_restrict_key]
+                     elif g_restrict_key in conf['host'].keys():
+                         conf['remoteconfigs'][r_id]['restrict'] = conf['host'][g_restrict_key]
+                     # Try to compile the regex!
+                     if conf['remoteconfigs'][r_id]['restrict'] is not None:
+                         try:
+                             compiled_re = re.compile(conf['remoteconfigs'][r_id]['restrict'])
+                             conf['remoteconfigs'][r_id]['restrict'] = compiled_re
+                         except re.error as e:
+                             logger.error('The regex provided for "%s" does not compile: "%s".',
+                                          r_id, str(e))
+                             self._please_fix(r_id)
+                             del conf['remoteconfigs'][r_id]
+
+         for r_confk, r_conf in conf['remoteconfigs'].items():
+             if r_conf['restrict'] is None:
+                 self._ingest_remote_config(r_confk, r_conf, conf)
+
+         for r_confk, r_conf in conf['remoteconfigs'].items():
+             if ((not r_conf['seen']) and r_conf['restrict'] is not None and
+                     r_conf['restrict'].search(tlocation)):
+                 self._ingest_remote_config(r_confk, r_conf, conf)
+
+     def _actual_fromconf(self, uuid, item):
+         """For a given **uuid**, find the corresponding value of the **item** key
+         in the configuration data.
+
+         Access the session's datastore to get the configuration data. If
+         necessary, configuration data are read in using the :meth:`_load_config`
+         method.
+         """
+         ds = sessions.current().datastore
+         conf = ds.get(self._datastore_id, dict(storage=self.actual_storage),
+                       default_payload=dict(), readonly=True)
+         if (uuid, item) in conf.get('uuids_cache', dict()):
+             return conf['uuids_cache'][(uuid, item)]
+         else:
+             logger.debug("Looking for %s's '%s' in config.", uuid, item)
+             mylocation = uuid.location
+             self._load_config(conf, mylocation)
+             st_item = None
+             if mylocation in conf['locations']:
+                 # The default
+                 if None in conf['locations'][mylocation]:
+                     st_item = conf['locations'][mylocation][None].get(item, None)
+                 # Id based
+                 for s_entry, s_entry_d in conf['locations'][mylocation].items():
+                     if s_entry is not None:
+                         if any([idrestrict.search(uuid.id)
+                                 for idrestrict in s_entry_d['idrestricts']]):
+                             st_item = s_entry_d.get(item, None)
+             st_item = st_item or conf['locations']['generic'][None].get(item, None)
+             conf['uuids_cache'][(uuid, item)] = st_item
+             return st_item
+
+     def _actual_storeroot(self, uuid):
+         """For a given **uuid**, determine the proper storeroot."""
+         if self.storeroot is None:
+             # Read the storeroot from the configuration data
+             st_root = self._actual_fromconf(uuid, 'storeroot')
+             if st_root is None:
+                 raise OSError("No valid storeroot could be found.")
+             # The location may be an alias: find the real username
+             realname = self._actual_fromconf(uuid, 'realname')
+             if realname is None:
+                 mylocation = uuid.location
+             else:
+                 mylocation = realname
+             return st_root.format(location=mylocation)
+         else:
+             return self.storeroot
+
+
+ class CacheStore(Store):
+     """Generic Cache Store."""
+
+     # Each Cache object created by a CacheStore will be stored here:
+     # this way it won't be garbage collected and can be re-used later on
+     _caches_object_stack = set()
+
+     _abstract = True
+     _footprint = dict(
+         info = 'Generic cache store',
+         attr = dict(
+             scheme = dict(
+                 values = ['incache'],
+             ),
+             netloc = dict(
+                 values = ['open.cache.fr'],
+             ),
+             storehash = dict(
+                 values = hashalgo_avail_list,
+             ),
+             strategy = dict(
+                 optional = True,
+                 default = 'std',
+             ),
+             headdir = dict(
+                 optional = True,
+                 default = 'conf',
+             ),
+             rtouch = dict(
+                 type = bool,
+                 optional = True,
+                 default = False,
+             ),
+             rtouchskip = dict(
+                 type = int,
+                 optional = True,
+                 default = 0,
+             ),
+         )
+     )
+
+     def __init__(self, *args, **kw):
+         del self.cache
+         logger.debug('Generic cache store init %s', self.__class__)
+         super().__init__(*args, **kw)
+
+     @property
+     def realkind(self):
+         return 'cachestore'
+
+     def use_cache(self):
+         """Boolean value to ensure that this store is using a cache."""
+         return True
+
+     def has_fast_check(self):
+         """Because that's why caching is used!"""
+         return True
+
+     @property
+     def underlying_cache_kind(self):
+         """The kind of cache that will be used."""
+         return self.strategy
+
+     def _get_cache(self):
+         if not self._cache:
+             self._cache = footprints.proxy.caches.default(
+                 kind=self.underlying_cache_kind,
+                 headdir=self.headdir,
+                 rtouch=self.rtouch,
+                 rtouchskip=self.rtouchskip,
+                 readonly=self.readonly
+             )
+             self._caches_object_stack.add(self._cache)
+         return self._cache
+
+     def _set_cache(self, newcache):
+         """Set a new cache reference."""
+         if isinstance(newcache, storage.Cache):
+             self._cache = newcache
+
+     def _del_cache(self):
+         """Invalidate the internal cache reference."""
+         self._cache = None
+
+     cache = property(_get_cache, _set_cache, _del_cache)
+
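The getter/setter/deleter triplet above gives a lazily-built, re-usable Cache object; deleting the attribute only invalidates the reference (the old object stays alive in _caches_object_stack), so the next access rebuilds it. A hedged usage sketch, assuming `store` is a concrete CacheStore instance:

    c1 = store.cache          # lazily builds the Cache object via footprints
    c2 = store.cache          # same object, no rebuild
    assert c1 is c2
    del store.cache           # invalidate the internal reference only
    c3 = store.cache          # next access builds a fresh Cache object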
1260
+ def _str_more(self):
1261
+ return 'entry={:s}'.format(self.cache.entry)
1262
+
1263
+ def incachecheck(self, remote, options):
1264
+ """Returns a stat-like object if the ``remote`` exists in the current cache."""
1265
+ if self._hash_check_or_delete(self.incachecheck, remote, options):
1266
+ st = self.cache.check(remote['path'])
1267
+ if options.get('isfile', False) and st:
1268
+ st = self.system.path.isfile(self.incachelocate(remote, options))
1269
+ return st
1270
+ else:
1271
+ return False
1272
+
1273
+ def incachelocate(self, remote, options):
1274
+ """Agregates cache to remote subpath."""
1275
+ return self.cache.fullpath(remote['path'])
1276
+
1277
+ def incachelist(self, remote, options):
1278
+ """List the content of a remote path."""
1279
+ return self.cache.list(remote['path'])
1280
+
1281
+ def incacheprestageinfo(self, remote, options):
1282
+ """Returns pre-staging informations."""
1283
+ return self.cache.prestageinfo(remote['path'])
1284
+
1285
+ def incacheget(self, remote, local, options):
1286
+ """Simple copy from current cache cache to ``local``."""
1287
+ logger.info('incacheget on %s://%s/%s (to: %s)',
1288
+ self.scheme, self.netloc, remote['path'], local)
1289
+ rc = self.cache.retrieve(
1290
+ remote['path'],
1291
+ local,
1292
+ intent=options.get('intent', CACHE_GET_INTENT_DEFAULT),
1293
+ fmt=options.get('fmt'),
1294
+ info=options.get('rhandler', None),
1295
+ tarextract=options.get('auto_tarextract', False),
1296
+ dirextract=options.get('auto_dirextract', False),
1297
+ uniquelevel_ignore=options.get('uniquelevel_ignore', True),
1298
+ silent=options.get('silent', False),
1299
+ )
1300
+ if rc or not options.get('silent', False):
1301
+ logger.info('incacheget retrieve rc=%s location=%s', str(rc),
1302
+ str(self.incachelocate(remote, options)))
1303
+ return rc and self._hash_get_check(self.incacheget, remote, local, options)
1304
+
1305
+ def incacheput(self, local, remote, options):
1306
+ """Simple copy from ``local`` to the current cache in readonly mode."""
1307
+ logger.info('incacheput to %s://%s/%s (from: %s)',
1308
+ self.scheme, self.netloc, remote['path'], local)
1309
+ rc = self.cache.insert(
1310
+ remote['path'],
1311
+ local,
1312
+ intent=CACHE_PUT_INTENT,
1313
+ fmt=options.get('fmt'),
1314
+ info=options.get('rhandler', None),
1315
+ )
1316
+ logger.info('incacheput insert rc=%s location=%s', str(rc),
1317
+ str(self.incachelocate(remote, options)))
1318
+ return rc and self._hash_put(self.incacheput, local, remote, options)
1319
+
1320
+ def incachedelete(self, remote, options):
1321
+ """Simple removing of the remote resource in cache."""
1322
+ logger.info('incachedelete on %s://%s/%s',
1323
+ self.scheme, self.netloc, remote['path'])
1324
+ self._hash_check_or_delete(self.incachedelete, remote, options)
1325
+ return self.cache.delete(
1326
+ remote['path'],
1327
+ fmt=options.get('fmt'),
1328
+ info=options.get('rhandler', None),
1329
+ )
1330
+
1331
+
1332
+ class PromiseStore(footprints.FootprintBase):
1333
+ """Combined a Promise Store for expected resources and any other matching Store."""
1334
+
1335
+ _abstract = True
1336
+ _collector = ('store',)
1337
+ _footprint = dict(
1338
+ info = 'Promise store',
1339
+ attr = dict(
1340
+ scheme = dict(
1341
+ alias = ('protocol',)
1342
+ ),
1343
+ netloc = dict(
1344
+ type = Namespace,
1345
+ alias = ('domain', 'namespace')
1346
+ ),
1347
+ storetrack = dict(
1348
+ type = bool,
1349
+ default = True,
1350
+ optional = True,
1351
+ ),
1352
+ prstorename = dict(
1353
+ type = Namespace,
1354
+ optional = True,
1355
+ default = 'promise.cache.fr',
1356
+ ),
1357
+ ),
1358
+ )
1359
+
+    def __init__(self, *args, **kw):
+        logger.debug('Abstract promise store init %s', self.__class__)
+        sh = kw.pop('system', sessions.system())
+        super().__init__(*args, **kw)
+        self._sh = sh
+
+        # Assume that the actual scheme is the current scheme without the "x" prefix
+        self.proxyscheme = self.scheme.lstrip('x')
+
+        # Find a store for the promised resources
+        self.promise = footprints.proxy.store(
+            scheme=self.proxyscheme,
+            netloc=self.prstorename,
+            storetrack=self.storetrack,
+        )
+        if self.promise is None:
+            logger.critical('Could not find store scheme <%s> netloc <%s>',
+                            self.proxyscheme, self.prstorename)
+            raise ValueError('Could not get a Promise Store')
+
+        # Find the other "real" store (could be a multi-store)
+        self.other = footprints.proxy.store(
+            scheme=self.proxyscheme,
+            netloc=self.netloc,
+            storetrack=self.storetrack,
+        )
+        if self.other is None:
+            logger.critical('Could not find store scheme <%s> netloc <%s>',
+                            self.proxyscheme, self.netloc)
+            raise ValueError('Could not get an Other Store')
+
+        self.openedstores = (self.promise, self.other)
+        self.delayed = False
+
+    @property
+    def realkind(self):
+        return 'promisestore'
+
+    @property
+    def system(self):
+        """Shortcut to the current system interface."""
+        return self._sh
+
+    def has_fast_check(self):
+        """Depends on the other (target) store."""
+        return self.other.has_fast_check()
+
+    def mkpromise_info(self, remote, options):
+        """Build a dictionary with the relevant information for the promise."""
+        return dict(
+            promise=True,
+            stamp=date.stamp(),
+            itself=self.promise.locate(remote, options),
+            locate=self.other.locate(remote, options),
+            datafmt=options.get('fmt', None),
+            rhandler=options.get('rhandler', None),
+        )
+
+    def mkpromise_file(self, info, local):
+        """Dump the promise information as a JSON file next to ``local``."""
+        pfile = local + '.pr'
+        self.system.json_dump(info, pfile, sort_keys=True, indent=4)
+        return pfile
+
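Concretely, a promise file is nothing more than the JSON serialisation of the `mkpromise_info()` dictionary, written next to the expected local name with a `.pr` suffix. A minimal stdlib re-enactment, with invented path and format values standing in for what the stores would report:

```python
import json
import time

# Invented values standing in for mkpromise_info() output.
info = {
    'promise': True,
    'stamp': time.strftime('%Y%m%dT%H%M%S'),   # stand-in for date.stamp()
    'itself': '/promise/cache/play/mslp.grib',  # promise store location
    'locate': '/real/cache/play/mslp.grib',     # target store location
    'datafmt': 'grib',
    'rhandler': None,
}
local = 'mslp.grib'
pfile = local + '.pr'  # same naming rule as mkpromise_file()
with open(pfile, 'w') as fh:
    json.dump(info, fh, sort_keys=True, indent=4)
```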
+    @staticmethod
+    def _options_fixup(options):
+        return dict() if options is None else options
+
+    def check(self, remote, options=None):
+        """Go through internal opened stores and check for the resource."""
+        options = self._options_fixup(options)
+        logger.debug('Promise check from %s', remote)
+        return self.other.check(remote.copy(), options) or self.promise.check(remote.copy(), options)
+
+    def locate(self, remote, options=None):
+        """Go through internal opened stores and locate the expected resource for each of them."""
+        options = self._options_fixup(options)
+        logger.debug('Promise locate %s', remote)
+        inpromise = options.get('inpromise', True)
+
+        locate_other = self.other.locate(remote.copy(), options)
+        if inpromise:
+            locate_promised = self.promise.locate(remote.copy(), options)
+            return locate_promised + ';' + locate_other
+        return locate_other
+
+    def get(self, remote, local, options=None):
+        """Go through internal opened stores and return the first available resource."""
+        options = self._options_fixup(options)
+        logger.debug('Promise get %s', remote)
+        self.delayed = False
+        logger.info('Try promise from store %s', self.promise)
+        try:
+            rc = self.promise.get(remote.copy(), local, options)
+        except OSError as e:
+            # If something goes wrong, assume that the promise file was
+            # deleted while being fetched (which may cause an OSError to
+            # be raised).
+            logger.info('An error occurred while fetching the promise file: %s', str(e))
+            logger.info('Assuming this is a negative result...')
+            rc = False
+        if rc:
+            self.delayed = True
+        else:
+            logger.info('Try to get from the other store %s', self.other)
+            rc = self.other.get(remote.copy(), local, options)
+        if not rc and options.get('pretend', False):
+            logger.warning('Pretending to get a promise for <%s>', local)
+            pr_info = self.mkpromise_info(remote, options)
+            pr_file = self.mkpromise_file(pr_info, local)
+            self.system.move(pr_file, local)
+            rc = self.delayed = True
+        return rc
+
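From the caller's side the important flag is `delayed`: `get()` may succeed by fetching only the promise file, in which case the real resource is still pending. A hedged usage sketch; the scheme/netloc/path values are illustrative only (the "x"-prefixed scheme is what maps back to the real scheme in `PromiseStore.__init__`):

```python
import footprints

# Illustrative values: 'xvortex' is stripped to 'vortex' for the underlying stores.
store = footprints.proxy.store(scheme='xvortex', netloc='vortex.cache.fr')
if store.get({'path': 'play/sandbox/mslp.grib'}, 'mslp.grib'):
    if store.delayed:
        # Only a promise file was fetched: the actual resource is still expected.
        print('Got a promise only')
    else:
        print('Got the actual resource')
```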
+    def earlyget(self, remote, local, options=None):
+        """Possible early-get on the target store."""
+        options = self._options_fixup(options)
+        logger.debug('Promise early-get %s', remote)
+        result_id = None
+        try:
+            # NB: has_fast_check is a method and must be called
+            rc = (self.promise.has_fast_check() and
+                  self.promise.check(remote.copy(), options))
+        except OSError as e:
+            logger.debug('An error occurred while checking for the promise file: %s', str(e))
+            logger.debug('Assuming this is a negative result...')
+            rc = False
+        if not rc:
+            result_id = self.other.earlyget(remote.copy(), local, options)
+        return result_id
+
+    def finaliseget(self, result_id, remote, local, options=None):
+        """Finalise a previously started early-get, unless a promise shows up first."""
+        options = self._options_fixup(options)
+        logger.debug('Promise finalise-get %s', remote)
+        self.delayed = False
+        logger.info('Try promise from store %s', self.promise)
+        try:
+            rc = self.promise.get(remote.copy(), local, options)
+        except OSError as e:
+            logger.debug('An error occurred while fetching the promise file: %s', str(e))
+            logger.debug('Assuming this is a negative result...')
+            rc = False
+        if rc:
+            self.delayed = True
+        else:
+            logger.info('Try to finalise the get from the other store %s', self.other)
+            rc = self.other.finaliseget(result_id, remote.copy(), local, options)
+        return rc
+
+    @staticmethod
+    def _clean_pr_json(prjson):
+        """Strip volatile entries (timestamp, tracking flag) before comparison."""
+        del prjson['stamp']
+        if 'options' in prjson['rhandler']:
+            prjson['rhandler']['options'].pop('storetrack', False)
+        return prjson
+
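The stripping matters in `put()` below: two promise files for the same resource, written at different times, must compare equal once their `stamp` and `storetrack` entries are ignored. A standalone illustration of that equality check (payloads invented):

```python
# Two promise payloads differing only by stamp and storetrack.
a = {'stamp': 'T1', 'locate': '/cache/play/mslp.grib',
     'rhandler': {'options': {'storetrack': True, 'fmt': 'grib'}}}
b = {'stamp': 'T2', 'locate': '/cache/play/mslp.grib',
     'rhandler': {'options': {'fmt': 'grib'}}}


def clean(prjson):
    # Same idea as _clean_pr_json, on copies so the inputs survive.
    out = {k: v for k, v in prjson.items() if k != 'stamp'}
    opts = dict(out['rhandler'].get('options', {}))
    opts.pop('storetrack', None)
    out['rhandler'] = dict(out['rhandler'], options=opts)
    return out


assert clean(a) == clean(b)
```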
+    def put(self, local, remote, options=None):
+        """Put a promise, or the actual resource if it is available."""
+        options = self._options_fixup(options)
+        logger.debug('Promise put from %s to %s', local, remote)
+        if options.get('force', False) or not self.system.path.exists(local):
+            options = options.copy()
+            if not self.other.use_cache():
+                logger.critical('Could not promise a resource without a cache in the other store <%s>',
+                                self.other)
+                raise ValueError('Could not promise: the other store does not use a cache')
+            pr_info = self.mkpromise_info(remote, options)
+            pr_file = self.mkpromise_file(pr_info, local)
+            # Check whether a previous promise with the same description exists
+            preexisting = self.promise.check(remote.copy(), options)
+            if preexisting:
+                pr_old_file = self.promise.locate(remote.copy())
+                prcheck = self._clean_pr_json(self.system.json_load(pr_old_file))
+                prnew = self._clean_pr_json(self.system.json_load(pr_file))
+                preexisting = prcheck == prnew
+                if preexisting:
+                    logger.info("The promise file <%s> pre-existed and is compatible",
+                                pr_old_file)
+                    rc = True
+                else:
+                    logger.warning("The promise file <%s> already exists but doesn't match",
+                                   pr_old_file)
+
+            # Put the new promise file in the promise cache
+            options['obs_overridelocal'] = local  # Pretty nasty :-(
+            if not preexisting:
+                logger.warning('Logging a promise instead of the missing resource <%s>', local)
+                rc = self.promise.put(pr_file, remote.copy(), options)
+                if rc:
+                    del options['obs_overridelocal']
+                    self.other.delete(remote.copy(), options)
+            else:
+                options['dryrun'] = True  # Just update the tracker
+                rc = self.promise.put(pr_file, remote.copy(), options)
+            self.system.remove(pr_file)
+
+        else:
+            logger.info('The actual resource exists, no promise needed <%s>', local)
+            rc = self.other.put(local, remote.copy(), options)
+            if rc:
+                self.promise.delete(remote.copy(), options)
+        return rc
+
+    def delete(self, remote, options=None):
+        """Go through internal opened stores and delete the resource."""
+        options = self._options_fixup(options)
+        logger.debug('Promise delete from %s', remote)
+        return self.promise.delete(remote.copy(), options) and self.other.delete(remote.copy(), options)
+
+
+# Activate the footprints' fasttrack on the stores collector
+fcollect = footprints.collectors.get(tag='store')
+fcollect.fasttrack = ('netloc', 'scheme')
+del fcollect
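Roughly speaking, the fasttrack tuple tells the footprints collector to index candidate store classes by their `netloc` and `scheme` attribute values, so that resolving a store only considers classes whose footprint can actually match those two attributes instead of scanning the whole collector.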