vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/data/abstractstores.py (new file, matching entry 12 above)
@@ -0,0 +1,1510 @@
+ # pylint: disable=unused-argument
+
+ """
+ This module handles store objects in charge of physically accessing resources.
+ Store objects use the :mod:`footprints` mechanism.
+ """
+
+ import copy
+
+ from bronx.fancies import loggers
+ from bronx.patterns import observer
+ from bronx.stdtypes import date
+ from bronx.system import hash as hashutils
+ import footprints
+
+ from vortex import sessions
+ from vortex.config import from_config, ConfigurationError
+ from vortex.syntax.stdattrs import (
+     hashalgo,
+     hashalgo_avail_list,
+     compressionpipeline,
+ )
+ from vortex.tools import storage
+ from vortex.tools import compression
+ from vortex.tools.systems import ExecutionError
+ from vortex.syntax.stdattrs import Namespace
+
+ #: Export base class
+ __all__ = ["Store"]
+
+ logger = loggers.getLogger(__name__)
+
+ OBSERVER_TAG = "Stores-Activity"
+
+ CACHE_PUT_INTENT = "in"
+ CACHE_GET_INTENT_DEFAULT = "in"
+
+ ARCHIVE_PUT_INTENT = "in"
+ ARCHIVE_GET_INTENT_DEFAULT = "in"
+
+
+ def observer_board(obsname=None):
+     """Proxy to :func:`footprints.observers.get`."""
+     if obsname is None:
+         obsname = OBSERVER_TAG
+     return observer.get(tag=obsname)
+
+
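# --- Editor's sketch (illustration only, not part of this file) ---
# Anything registered on the "Stores-Activity" board is notified of the
# check/get/put/delete events emitted by _observer_notify() below. A minimal
# listener, assuming the usual bronx observer interface (updobsitem() is
# called with the info dict built by the stores):

class DemoStoreActivityListener(observer.Observer):
    def updobsitem(self, item, info):
        # info typically carries 'action', 'status', 'remote' and 'local'
        logger.info("Store event: %s -> %s", info.get("action"), info.get("status"))

observer_board().register(DemoStoreActivityListener())
# --- end of sketch ---
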
+ class Store(footprints.FootprintBase):
+     """Root class for any :class:`Store` subclasses."""
+
+     _abstract = True
+     _collector = ("store",)
+     _footprint = [
+         hashalgo,
+         dict(
+             info="Default store",
+             attr=dict(
+                 scheme=dict(alias=("protocol",)),
+                 netloc=dict(type=Namespace, alias=("domain", "namespace")),
+                 storetrack=dict(
+                     type=bool,
+                     default=True,
+                     optional=True,
+                 ),
+                 readonly=dict(
+                     type=bool,
+                     optional=True,
+                     default=False,
+                 ),
+             ),
+         ),
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug("Abstract store init %s", self.__class__)
+         sh = kw.pop("system", sessions.system())
+         super().__init__(*args, **kw)
+         self._sh = sh
+         self._observer = observer_board()
+         self._observer.notify_new(self, dict())
+         self._cpipeline = False
+         self.delayed = False
+
+     @property
+     def realkind(self):
+         return "store"
+
+     @property
+     def system(self):
+         """Shortcut to current system interface."""
+         return self._sh
+
+     def use_cache(self):
+         """Boolean function to check if the current store uses a local cache."""
+         return False
+
+     def use_archive(self):
+         """Boolean function to check if the current store uses a remote archive."""
+         return not self.use_cache()
+
+     def has_fast_check(self):
+         """How fast and reliable is a check call?"""
+         return False
+
+     def _observer_notify(self, action, rc, remote, local=None, options=None):
+         strack = options is None or options.get("obs_notify", True)
+         if self.storetrack and strack:
+             infos = dict(action=action, status=rc, remote=remote)
+             # Is a localpath provided?
+             if local is not None:
+                 infos["local"] = local
+             # We may want to cheat on the localpath...
+             if options is not None and "obs_overridelocal" in options:
+                 infos["local"] = options["obs_overridelocal"]
+             self._observer.notify_upd(self, infos)
+
+     def notyet(self, *args):
+         """
+         Internal method to be used as a critical fallback
+         when a scheme-specific method is not yet defined.
+         """
+         logger.critical("Scheme %s not yet implemented", self.scheme)
+
+     @property
+     def writeable(self):
+         return not self.readonly
+
+     def enforce_readonly(self):
+         if self.readonly:
+             raise OSError("This store is in readonly mode")
+
+     @staticmethod
+     def _verbose_log(options, level, *kargs, **kwargs):
+         slevel = kwargs.pop("slevel", "debug")
+         if options is not None and options.get("silent", False):
+             level = slevel
+         getattr(logger, level)(*kargs, **kwargs)
+
+     @property
+     def _actual_cpipeline(self):
+         """Check if the current store has a CompressionPipeline."""
+         if self._cpipeline is False:
+             cpipeline_desc = getattr(self, "store_compressed", None)
+             if cpipeline_desc is not None:
+                 self._cpipeline = compression.CompressionPipeline(
+                     self.system, cpipeline_desc
+                 )
+             else:
+                 self._cpipeline = None
+         return self._cpipeline
+
+     @property
+     def tracking_extraargs(self):
+         """When tracking get/put requests: extra args that will be added to the URI query."""
+         return dict()
+
+     def _incache_inarchive_check(self, options):
+         rc = True
+         incache = options.get("incache", False)
+         inarchive = options.get("inarchive", False)
+         if incache and inarchive:
+             raise ValueError(
+                 "'incache=True' and 'inarchive=True' are mutually exclusive"
+             )
+         if incache and not self.use_cache():
+             self._verbose_log(
+                 options,
+                 "info",
+                 'Skip this "%s" store because a cache is requested',
+                 self.__class__,
+             )
+             rc = False
+         if inarchive and not self.use_archive():
+             self._verbose_log(
+                 options,
+                 "info",
+                 'Skip this "%s" store because an archive is requested',
+                 self.__class__,
+             )
+             rc = False
+         return rc
+
+     def _hash_check_or_delete(self, callback, remote, options):
+         """Check or delete a hash file."""
+         if (self.storehash is None) or (
+             remote["path"].endswith("." + self.storehash)
+         ):
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote["path"] = remote["path"] + "." + self.storehash
+         return callback(remote, options)
+
+     @staticmethod
+     def _options_fixup(options):
+         return dict() if options is None else options
+
+     def check(self, remote, options=None):
+         """Proxy method to dedicated check method according to scheme."""
+         logger.debug("Store check from %s", remote)
+         options = self._options_fixup(options)
+         if not self._incache_inarchive_check(options):
+             return False
+         rc = getattr(self, self.scheme + "check", self.notyet)(remote, options)
+         self._observer_notify("check", rc, remote, options=options)
+         return rc
+
+     def locate(self, remote, options=None):
+         """Proxy method to dedicated locate method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store locate %s", remote)
+         if not self._incache_inarchive_check(options):
+             return None
+         return getattr(self, self.scheme + "locate", self.notyet)(
+             remote, options
+         )
+
+     def list(self, remote, options=None):
+         """Proxy method to dedicated list method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store list %s", remote)
+         if not self._incache_inarchive_check(options):
+             return None
+         return getattr(self, self.scheme + "list", self.notyet)(
+             remote, options
+         )
+
+     def prestage_advertise(self, remote, options=None):
+         """Use the Stores-Activity observer board to advertise the prestaging request.
+
+         Hopefully, something will have registered on the observer board in order
+         to process the request.
+         """
+         options = self._options_fixup(options)
+         logger.debug("Store prestage through hub %s", remote)
+         infos_cb = getattr(self, self.scheme + "prestageinfo", None)
+         if infos_cb:
+             infodict = infos_cb(remote, options)
+             infodict.setdefault("issuerkind", self.realkind)
+             infodict.setdefault("scheme", self.scheme)
+             if options and "priority" in options:
+                 infodict["priority"] = options["priority"]
+             infodict["action"] = "prestage_req"
+             self._observer.notify_upd(self, infodict)
+         else:
+             logger.info(
+                 "Prestaging is not supported for scheme: %s", self.scheme
+             )
+         return True
+
+     def prestage(self, remote, options=None):
+         """Proxy method to dedicated prestage method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store prestage %s", remote)
+         if not self._incache_inarchive_check(options):
+             return True
+         return getattr(
+             self, self.scheme + "prestage", self.prestage_advertise
+         )(remote, options)
+
+     @staticmethod
+     def _hash_store_defaults(options):
+         """Update default options when fetching hash files."""
+         options = options.copy()
+         options["obs_notify"] = False
+         options["fmt"] = "ascii"
+         options["intent"] = CACHE_GET_INTENT_DEFAULT
+         options["auto_tarextract"] = False
+         options["auto_dirextract"] = False
+         return options
+
+     def _hash_get_check(self, callback, remote, local, options):
+         """Fetch the hash file and check **local** against it."""
+         if (self.storehash is None) or (
+             remote["path"].endswith("." + self.storehash)
+         ):
+             return True
+         if isinstance(local, str) and not self.system.path.isfile(local):
+             logger.info(
+                 "< %s > is not a plain file. The checksum can't be verified.",
+                 local,
+             )
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote["path"] = (
+             remote["path"] + "." + self.storehash
+         )  # Name of the hash file
+         remote["query"].pop("extract", None)  # Ignore any extract request
+         try:
+             tempcontainer = None
+             try:
+                 # First, try to fetch the sum in a real file
+                 # (in order to potentially use ftserv...)
+                 tempcontainer = footprints.proxy.container(
+                     shouldfly=True, mode="rb"
+                 )
+                 try:
+                     rc = callback(remote, tempcontainer.iotarget(), options)
+                 except (OSError, ExecutionError):
+                     # This may happen if the user has insufficient rights on
+                     # the current directory
+                     tempcontainer = footprints.proxy.container(
+                         incore=True, mode="w+b"
+                     )
+                     rc = callback(remote, tempcontainer.iotarget(), options)
+             except (OSError, ExecutionError):
+                 logger.warning(
+                     "Something went very wrong when fetching the hash file! (assuming rc=False)"
+                 )
+                 rc = False
+             # check the hash key
+             hadapt = hashutils.HashAdapter(self.storehash)
+             rc = rc and hadapt.filecheck(local, tempcontainer)
+             if rc:
+                 logger.info("%s hash sanity check succeeded.", self.storehash)
+             else:
+                 logger.warning("%s hash sanity check failed.", self.storehash)
+         finally:
+             if tempcontainer is not None:
+                 tempcontainer.clear()
+         return rc
+
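# --- Editor's sketch (illustration only, not part of this file) ---
# With storehash="md5", every resource travels with a small companion file:
# a put of "a/b/file.grib" also writes "a/b/file.grib.md5", and a get
# re-checks the fetched data against it. The early "endswith" test in the
# helpers above is what stops the recursion after one level:

def demo_companion_path(path, storehash="md5"):
    # mirrors the remote["path"] + "." + storehash convention used above
    return None if path.endswith("." + storehash) else path + "." + storehash

assert demo_companion_path("a/b/file.grib") == "a/b/file.grib.md5"
assert demo_companion_path("a/b/file.grib.md5") is None
# --- end of sketch ---
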
+     def _actual_get(self, action, remote, local, options, result_id=None):
+         """Proxy method to dedicated get method according to scheme."""
+         logger.debug("Store %s from %s to %s", action, remote, local)
+         if not self._incache_inarchive_check(options):
+             return False
+         if not options.get("insitu", False) or self.use_cache():
+             if result_id:
+                 rc = getattr(self, self.scheme + action, self.notyet)(
+                     result_id, remote, local, options
+                 )
+             else:
+                 rc = getattr(self, self.scheme + action, self.notyet)(
+                     remote, local, options
+                 )
+             self._observer_notify(
+                 "get", rc, remote, local=local, options=options
+             )
+             return rc
+         else:
+             logger.error("Only cache stores can be used when insitu is True.")
+             return False
+
+     def get(self, remote, local, options=None):
+         """Proxy method to dedicated get method according to scheme."""
+         options = self._options_fixup(options)
+         return self._actual_get("get", remote, local, options)
+
+     def earlyget(self, remote, local, options=None):
+         """Proxy method to dedicated earlyget method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store earlyget from %s to %s", remote, local)
+         if not self._incache_inarchive_check(options):
+             return None
+         rc = None
+         if not options.get("insitu", False) or self.use_cache():
+             available_dget = getattr(self, self.scheme + "earlyget", None)
+             if available_dget is not None:
+                 rc = available_dget(remote, local, options)
+         return rc
+
+     def finaliseget(self, result_id, remote, local, options=None):
+         """Proxy method to dedicated finaliseget method according to scheme."""
+         options = self._options_fixup(options)
+         return self._actual_get(
+             "finaliseget", remote, local, options, result_id=result_id
+         )
+
+     def _hash_put(self, callback, local, remote, options):
+         """Put a hash file next to the 'real' file."""
+         if (self.storehash is None) or (
+             remote["path"].endswith("." + self.storehash)
+         ):
+             return True
+         options = self._hash_store_defaults(options)
+         remote = remote.copy()
+         remote["path"] = remote["path"] + "." + self.storehash
+         # Generate the hash sum
+         hadapt = hashutils.HashAdapter(self.storehash)
+         tmplocal = hadapt.file2hash_fh(local)
+         # Write it wherever the original store wants to.
+         return callback(tmplocal, remote, options)
+
+     def put(self, local, remote, options=None):
+         """Proxy method to dedicated put method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store put from %s to %s", local, remote)
+         self.enforce_readonly()
+         if not self._incache_inarchive_check(options):
+             return True
+         filtered = False
+         if options is not None and "urifilter" in options:
+             filtered = options["urifilter"](self, remote)
+         if filtered:
+             rc = True
+             logger.info(
+                 "This remote URI has been filtered out: we are skipping it."
+             )
+         else:
+             dryrun = False
+             if options is not None and "dryrun" in options:
+                 dryrun = options["dryrun"]
+             rc = dryrun or getattr(self, self.scheme + "put", self.notyet)(
+                 local, remote, options
+             )
+             self._observer_notify(
+                 "put", rc, remote, local=local, options=options
+             )
+         return rc
+
+     def delete(self, remote, options=None):
+         """Proxy method to dedicated delete method according to scheme."""
+         options = self._options_fixup(options)
+         logger.debug("Store delete from %s", remote)
+         self.enforce_readonly()
+         if not self._incache_inarchive_check(options):
+             return True
+         rc = getattr(self, self.scheme + "delete", self.notyet)(
+             remote, options
+         )
+         self._observer_notify("del", rc, remote, options=options)
+         return rc
+
+
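# --- Editor's sketch (illustration only, not part of this file) ---
# Concrete stores do not override get/put/check directly: the proxy methods
# above dispatch on getattr(self, self.scheme + "get"), etc. A hypothetical
# file-based store for a "demofile" scheme (all names invented) would
# therefore provide demofilecheck/demofileget/demofileput:

class DemoFileStore(Store):
    _footprint = dict(
        info="Hypothetical store accessing plain local files",
        attr=dict(
            scheme=dict(values=["demofile"]),
            netloc=dict(values=["demo.store.fr"]),
        ),
    )

    def demofilecheck(self, remote, options):
        return self.system.path.exists(remote["path"])

    def demofileget(self, remote, local, options):
        return self.system.cp(remote["path"], local)

    def demofileput(self, local, remote, options):
        return self.system.cp(local, remote["path"])
# --- end of sketch ---
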
+ class MultiStore(footprints.FootprintBase):
+     """Aggregate various :class:`Store` items."""
+
+     _abstract = True
+     _collector = ("store",)
+     _footprint = [
+         compressionpipeline,  # Not used by cache stores but ok, just in case...
+         hashalgo,
+         dict(
+             info="Multi store",
+             attr=dict(
+                 scheme=dict(alias=("protocol",)),
+                 netloc=dict(type=Namespace, alias=("domain", "namespace")),
+                 refillstore=dict(
+                     type=bool,
+                     optional=True,
+                     default=False,
+                 ),
+                 storehash=dict(
+                     values=hashalgo_avail_list,
+                 ),
+                 # Only relevant for ArchiveStores, but harmless for the others...
+                 storage=dict(
+                     optional=True,
+                     default=None,
+                 ),
+                 storetube=dict(
+                     optional=True,
+                 ),
+                 storeroot=dict(
+                     optional=True,
+                 ),
+             ),
+         ),
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug("Abstract multi store init %s", self.__class__)
+         sh = kw.pop("system", sessions.system())
+         super().__init__(*args, **kw)
+         self._sh = sh
+         self._openedstores = self.loadstores()
+         self.delayed = False
+
+     @property
+     def realkind(self):
+         return "multistore"
+
+     @property
+     def system(self):
+         """Shortcut to current system interface."""
+         return self._sh
+
+     @staticmethod
+     def _verbose_log(options, level, *kargs, **kwargs):
+         slevel = kwargs.pop("slevel", "debug")
+         if options is not None and options.get("silent", False):
+             level = slevel
+         getattr(logger, level)(*kargs, **kwargs)
+
+     def loadstores(self):
+         """
+         Load default stores during the initialisation of the current object.
+         Stores could be reloaded at any time. The current method provides
+         a default loading mechanism through the actual module :func:`load` function
+         and an alternate list of footprint descriptors as returned by method
+         :func:`alternates_fp`.
+         """
+         activestores = list()
+         for desc in self.alternates_fp():
+             xstore = footprints.proxy.store(**desc)
+             if xstore:
+                 activestores.append(xstore)
+         logger.debug(
+             "Multistore %s includes active stores %s", self, activestores
+         )
+         return activestores
+
+     @property
+     def openedstores(self):
+         return self._openedstores
+
+     def filtered_readable_openedstores(self, remote):  # @UnusedVariable
+         return self._openedstores
+
+     def filtered_writeable_openedstores(self, remote):  # @UnusedVariable
+         return self._openedstores
+
+     def alternates_scheme(self):
+         """Default method returns actual scheme in a tuple."""
+         return (self.scheme,)
+
+     def alternates_netloc(self):
+         """Abstract method."""
+         pass
+
+     def alternates_fpextras(self):
+         """Abstract method."""
+         return dict()
+
+     def alternates_fp(self):
+         """
+         Returns a list of anonymous descriptions to be used as footprint entries
+         while loading alternates stores.
+         """
+         return [
+             dict(
+                 system=self.system,
+                 storehash=self.storehash,
+                 store_compressed=self.store_compressed,
+                 storage=self.storage,
+                 storetube=self.storetube,
+                 storeroot=self.storeroot,
+                 scheme=x,
+                 netloc=y,
+                 **self.alternates_fpextras(),
+             )
+             for x in self.alternates_scheme()
+             for y in self.alternates_netloc()
+         ]
+
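# --- Editor's sketch (illustration only, not part of this file) ---
# A concrete MultiStore mostly lists the netlocs it should try, in order.
# A hypothetical cache-then-archive aggregate could look like this (the
# scheme/netloc values are invented; real ones live in the concrete vortex
# stores):

class DemoMultiStore(MultiStore):
    _footprint = dict(
        info="Hypothetical cache+archive multi store",
        attr=dict(
            scheme=dict(values=["demofile"]),
            netloc=dict(values=["multi.store.fr"]),
            refillstore=dict(optional=True, default=True),
        ),
    )

    def alternates_netloc(self):
        # Tried in order by get(): fast cache first, archive as a fallback;
        # with refillstore=True an archive hit is copied back into the cache.
        return ("demo.cache.fr", "demo.archive.fr")
# --- end of sketch ---
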
+     def use_cache(self):
+         """Boolean function to check if any included store uses a local cache."""
+         return any([x.use_cache() for x in self.openedstores])
+
+     def use_archive(self):
+         """Boolean function to check if any included store uses a remote archive."""
+         return any([x.use_archive() for x in self.openedstores])
+
+     def has_fast_check(self):
+         """How fast and reliable is a check call?"""
+         return all([x.has_fast_check() for x in self.openedstores])
+
+     @property
+     def readonly(self):
+         return all([x.readonly for x in self.openedstores])
+
+     @property
+     def writeable(self):
+         return not self.readonly
+
+     @staticmethod
+     def _options_fixup(options):
+         return dict() if options is None else options
+
+     def check(self, remote, options=None):
+         """Go through internal opened stores and check for the resource."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore check from %s", remote)
+         rc = False
+         for sto in self.filtered_readable_openedstores(remote):
+             rc = sto.check(remote.copy(), options)
+             if rc:
+                 break
+         return rc
+
+     def locate(self, remote, options=None):
+         """Go through internal opened stores and locate the expected resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore locate %s", remote)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         if not f_ostores:
+             return False
+         rloc = list()
+         for sto in f_ostores:
+             logger.debug("Multistore locate at %s", sto)
+             tmp_rloc = sto.locate(remote.copy(), options)
+             if tmp_rloc:
+                 rloc.append(tmp_rloc)
+         return ";".join(rloc)
+
+     def list(self, remote, options=None):
+         """Go through internal opened stores and list the expected resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore list %s", remote)
+         rlist = set()
+         for sto in self.filtered_readable_openedstores(remote):
+             logger.debug("Multistore list at %s", sto)
+             tmp_rloc = sto.list(remote.copy(), options)
+             if isinstance(tmp_rloc, (list, tuple, set)):
+                 rlist.update(tmp_rloc)
+             elif tmp_rloc is True:
+                 return True
+         return sorted(rlist)
+
+     def prestage(self, remote, options=None):
+         """Go through internal opened stores and prestage the resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore prestage %s", remote)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         if not f_ostores:
+             return False
+         if len(f_ostores) == 1:
+             logger.debug("Multistore prestage at %s", f_ostores[0])
+             rc = f_ostores[0].prestage(remote.copy(), options)
+         else:
+             rc = True
+             for sto in f_ostores:
+                 if sto.check(remote.copy(), options):
+                     logger.debug("Multistore prestage at %s", sto)
+                     rc = sto.prestage(remote.copy(), options)
+                     break
+         return rc
+
+     def _refilling_get(self, remote, local, options, result_id=None):
+         """Go through internal opened stores for the first available resource."""
+         rc = False
+         refill_in_progress = True
+         f_rd_ostores = self.filtered_readable_openedstores(remote)
+         if self.refillstore:
+             f_wr_ostores = self.filtered_writeable_openedstores(remote)
+         get_options = copy.copy(options)
+         get_options["silent"] = True
+         while refill_in_progress:
+             for num, sto in enumerate(f_rd_ostores):
+                 logger.debug("Multistore get at %s", sto)
+                 if result_id and num == len(f_rd_ostores) - 1:
+                     rc = sto.finaliseget(
+                         result_id, remote.copy(), local, get_options
+                     )
+                     result_id = (
+                         None  # result_ids cannot be re-used during refills
+                     )
+                 else:
+                     rc = sto.get(remote.copy(), local, get_options)
+                     if rc:
+                         result_id = None  # result_ids cannot be re-used during refills
+                 # Are we trying a refill? -> find the previous writeable stores
+                 restores = []
+                 if rc and self.refillstore and num > 0:
+                     restores = [
+                         ostore
+                         for ostore in f_rd_ostores[:num]
+                         if (
+                             ostore.writeable
+                             and ostore in f_wr_ostores
+                             and ostore.use_cache()
+                         )
+                     ]
+                 # Do the refills and check if one of them succeeded
+                 refill_in_progress = False
+                 for restore in restores:
+                     # Another refill may have filled the gap...
+                     if not restore.check(remote.copy(), options):
+                         logger.info(
+                             "Refill back in writeable store [%s].", restore
+                         )
+                         try:
+                             refill_in_progress = (
+                                 restore.put(local, remote.copy(), options)
+                                 and (
+                                     options.get(
+                                         "intent", CACHE_GET_INTENT_DEFAULT
+                                     )
+                                     != CACHE_PUT_INTENT
+                                 )
+                             ) or refill_in_progress
+                         except (ExecutionError, OSError) as e:
+                             logger.error(
+                                 "An error happened during the refill: %s",
+                                 str(e),
+                             )
+                             logger.error(
+                                 "This error is ignored... but that's ugly!"
+                             )
+                 if refill_in_progress:
+                     logger.info(
+                         "Starting another round because at least one refill succeeded."
+                     )
+                 # Whatever the refill's outcome, that's fine
+                 if rc:
+                     break
+         if not rc:
+             self._verbose_log(
+                 options,
+                 "warning",
+                 "Multistore get {:s}://{:s}: none of the opened stores succeeded.".format(
+                     self.scheme, self.netloc
+                 ),
+                 slevel="info",
+             )
+         return rc
+
+     def get(self, remote, local, options=None):
+         """Go through internal opened stores for the first available resource."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore get from %s to %s", remote, local)
+         return self._refilling_get(remote, local, options)
+
+     def earlyget(self, remote, local, options=None):
+         """Early-get is only attempted on the last-resort opened store."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore earlyget from %s to %s", remote, local)
+         f_ostores = self.filtered_readable_openedstores(remote)
+         get_options = copy.copy(options)
+         if len(f_ostores) > 1:
+             first_checkable = all([s.has_fast_check() for s in f_ostores[:-1]])
+             # Early-fetch is only available on the last resort store...
+             if first_checkable and all(
+                 [
+                     not s.check(remote.copy(), get_options)
+                     for s in f_ostores[:-1]
+                 ]
+             ):
+                 return f_ostores[-1].earlyget(
+                     remote.copy(), local, get_options
+                 )
+             else:
+                 return None
+         elif len(f_ostores) == 1:
+             return f_ostores[0].earlyget(remote.copy(), local, get_options)
+         else:
+             return None
+
+     def finaliseget(self, result_id, remote, local, options=None):
+         """Finalise a previous early-get request (possibly with refills)."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore finaliseget from %s to %s", remote, local)
+         return self._refilling_get(remote, local, options, result_id=result_id)
+
+     def put(self, local, remote, options=None):
+         """Go through internal opened stores and put resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore put from %s to %s", local, remote)
+         f_ostores = self.filtered_writeable_openedstores(remote)
+         if not f_ostores:
+             logger.warning("Funny attempt to put on an empty multistore...")
+             return False
+         rc = True
+         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
+             logger.debug("Multistore put at %s", sto)
+             rcloc = sto.put(local, remote.copy(), options)
+             logger.debug("Multistore out = %s", rcloc)
+             rc = rc and rcloc
+         return rc
+
+     def delete(self, remote, options=None):
+         """Go through internal opened stores and delete the resource."""
+         options = self._options_fixup(options)
+         logger.debug("Multistore delete from %s", remote)
+         f_ostores = self.filtered_writeable_openedstores(remote)
+         rc = False
+         for sto in [ostore for ostore in f_ostores if ostore.writeable]:
+             logger.debug("Multistore delete at %s", sto)
+             rc = sto.delete(remote.copy(), options)
+             if not rc:
+                 break
+         return rc
+
+
+ class ArchiveStore(Store):
+     """Generic Archive Store."""
+
+     _archives_object_stack = set()
+
+     _abstract = True
+     _footprint = [
+         compressionpipeline,
+         dict(
+             info="Generic archive store",
+             attr=dict(
+                 scheme=dict(
+                     values=[
+                         "inarchive",
+                     ],
+                 ),
+                 netloc=dict(
+                     values=["open.archive.fr"],
+                 ),
+                 storehash=dict(
+                     values=hashalgo_avail_list,
+                 ),
+                 storage=dict(
+                     optional=True,
+                 ),
+                 storetube=dict(
+                     optional=True,
+                 ),
+                 storeroot=dict(
+                     optional=True,
+                 ),
+                 storehead=dict(
+                     optional=True,
+                 ),
+                 storetrue=dict(
+                     type=bool,
+                     optional=True,
+                     default=True,
+                 ),
+             ),
+         ),
+     ]
+
+     def __init__(self, *args, **kw):
+         logger.debug("Archive store init %s", self.__class__)
+         self._archive = None
+         self._actual_storage = None
+         self._actual_storetube = None
+         super().__init__(*args, **kw)
+         self._actual_storage = self.storage
+         self._actual_storetube = self.storetube
+
+     @property
+     def realkind(self):
+         return "archivestore"
+
+     @property
+     def tracking_extraargs(self):
+         tea = super().tracking_extraargs
+         if self.storage:
+             tea["storage"] = self.storage
+         return tea
+
+     def _str_more(self):
+         return "archive={!r}".format(self.archive)
+
+     @property
+     def underlying_archive_kind(self):
+         return "std"
+
+     @property
+     def actual_storage(self):
+         """The archive network name (potentially read from the configuration file)."""
+         if self._actual_storage is None:
+             try:
+                 self._actual_storage = (
+                     self.system.env.VORTEX_DEFAULT_STORAGE
+                     or self.system.glove.default_fthost
+                     or from_config(section="storage", key="address")
+                 )
+             except ConfigurationError as e:
+                 msg = (
+                     "Trying to access the storage archive but no "
+                     "storage location is configured.\n"
+                     'Make sure configuration section "storage" and key '
+                     '"address" exist.\n'
+                     "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
+                 )
+                 logger.error(msg)
+                 raise e
+             if self._actual_storage is None:
+                 raise ValueError("Unable to find the archive network name.")
+         return self._actual_storage
+
+     @property
+     def actual_storetube(self):
+         """The archive access method (potentially read from the configuration file)."""
+         if self._actual_storetube is None:
+             self._actual_storetube = from_config(
+                 section="storage",
+                 key="protocol",
+             )
+             if self._actual_storetube is None:
+                 raise ValueError("Unable to find the archive access method.")
+         return self._actual_storetube
+
+     def _get_archive(self):
+         """Create a new Archive object only if needed."""
+         if not self._archive:
+             self._archive = footprints.proxy.archives.default(
+                 kind=self.underlying_archive_kind,
+                 storage=self.actual_storage,
+                 tube=self.actual_storetube,
+                 readonly=self.readonly,
+             )
+             self._archives_object_stack.add(self._archive)
+         return self._archive
+
+     def _set_archive(self, newarchive):
+         """Set a new archive reference."""
+         if isinstance(newarchive, storage.Archive):
+             self._archive = newarchive
+
+     def _del_archive(self):
+         """Invalidate internal archive reference."""
+         self._archive = None
+
+     archive = property(_get_archive, _set_archive, _del_archive)
+
+     def _inarchiveformatpath(self, remote):
+         # Remove extra slashes
+         formatted = remote["path"].lstrip(self.system.path.sep)
+         # Store head?
+         if self.storehead:
+             formatted = self.system.path.join(self.storehead, formatted)
+         # Store root (if specified)
+         pathroot = remote.get("root", self.storeroot)
+         if pathroot is not None:
+             formatted = self.system.path.join(pathroot, formatted)
+         return formatted
+
+     def inarchivecheck(self, remote, options):
+         """Use the archive object to check if **remote** exists."""
+         # First make sure that the companion hash file exists (if any)...
+         if self._hash_check_or_delete(self.inarchivecheck, remote, options):
+             return self.archive.check(
+                 self._inarchiveformatpath(remote),
+                 username=remote.get("username", None),
+                 fmt=options.get("fmt", "foo"),
+                 compressionpipeline=self._actual_cpipeline,
+             )
+         else:
+             return False
+
+     def inarchivelocate(self, remote, options):
+         """Use the archive object to obtain **remote**'s physical location."""
+         return self.archive.fullpath(
+             self._inarchiveformatpath(remote),
+             username=remote.get("username", None),
+             fmt=options.get("fmt", "foo"),
+             compressionpipeline=self._actual_cpipeline,
+         )
+
+     def inarchivelist(self, remote, options):
+         """Use the archive object to list available files."""
+         return self.archive.list(
+             self._inarchiveformatpath(remote),
+             username=remote.get("username", None),
+         )
+
+     def inarchiveprestageinfo(self, remote, options):
+         """Return the prestaging information."""
+         return self.archive.prestageinfo(
+             self._inarchiveformatpath(remote),
+             username=remote.get("username", None),
+             fmt=options.get("fmt", "foo"),
+             compressionpipeline=self._actual_cpipeline,
+         )
+
+     def inarchiveget(self, remote, local, options):
+         """Use the archive object to retrieve **remote** in **local**."""
+         logger.info(
+             "inarchiveget on %s://%s/%s (to: %s)",
+             self.scheme,
+             self.netloc,
+             self._inarchiveformatpath(remote),
+             local,
+         )
+         rc = self.archive.retrieve(
+             self._inarchiveformatpath(remote),
+             local,
+             intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get("fmt", "foo"),
+             info=options.get("rhandler", None),
+             username=remote["username"],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc and self._hash_get_check(
+             self.inarchiveget, remote, local, options
+         )
+
+     def inarchiveearlyget(self, remote, local, options):
+         """Use the archive object to initiate an early get request on **remote**."""
+         logger.debug(
+             "inarchiveearlyget on %s://%s/%s (to: %s)",
+             self.scheme,
+             self.netloc,
+             self._inarchiveformatpath(remote),
+             local,
+         )
+         rc = self.archive.earlyretrieve(
+             self._inarchiveformatpath(remote),
+             local,
+             intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get("fmt", "foo"),
+             info=options.get("rhandler", None),
+             username=remote["username"],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc
+
+     def inarchivefinaliseget(self, result_id, remote, local, options):
+         """Use the archive object to finalise the **result_id** early get request."""
+         logger.info(
+             "inarchivefinaliseget on %s://%s/%s (to: %s)",
+             self.scheme,
+             self.netloc,
+             self._inarchiveformatpath(remote),
+             local,
+         )
+         rc = self.archive.finaliseretrieve(
+             result_id,
+             self._inarchiveformatpath(remote),
+             local,
+             intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
+             fmt=options.get("fmt", "foo"),
+             info=options.get("rhandler", None),
+             username=remote["username"],
+             compressionpipeline=self._actual_cpipeline,
+         )
+         return rc and self._hash_get_check(
+             self.inarchiveget, remote, local, options
+         )
+
+     def inarchiveput(self, local, remote, options):
+         """Use the archive object to put **local** to **remote**."""
+         logger.info(
+             "inarchiveput to %s://%s/%s (from: %s)",
+             self.scheme,
+             self.netloc,
+             self._inarchiveformatpath(remote),
+             local,
+         )
+         rc = self.archive.insert(
+             self._inarchiveformatpath(remote),
+             local,
+             intent=ARCHIVE_PUT_INTENT,
+             fmt=options.get("fmt", "foo"),
+             info=options.get("rhandler"),
+             username=remote["username"],
+             compressionpipeline=self._actual_cpipeline,
+             enforcesync=options.get("enforcesync", False),
+             usejeeves=options.get("delayed", None),
+         )
+         return rc and self._hash_put(self.inarchiveput, local, remote, options)
+
+     def inarchivedelete(self, remote, options):
+         """Use the archive object to delete **remote**."""
+         logger.info(
+             "inarchivedelete on %s://%s/%s",
+             self.scheme,
+             self.netloc,
+             self._inarchiveformatpath(remote),
+         )
+         # Try to delete the md5 file but ignore errors...
+         self._hash_check_or_delete(self.inarchivedelete, remote, options)
+         return self.archive.delete(
+             self._inarchiveformatpath(remote),
+             fmt=options.get("fmt", "foo"),
+             info=options.get("rhandler", None),
+             username=remote["username"],
+             compressionpipeline=self._actual_cpipeline,
+         )
+
+
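# --- Editor's sketch (illustration only, not part of this file) ---
# _inarchiveformatpath() strips leading separators, then prepends storehead
# and finally storeroot. A self-contained imitation (all values invented):

import posixpath

def demo_format(path, storehead=None, storeroot=None):
    # mirrors _inarchiveformatpath(): strip leading "/", join head, then root
    formatted = path.lstrip("/")
    if storehead:
        formatted = posixpath.join(storehead, formatted)
    if storeroot is not None:
        formatted = posixpath.join(storeroot, formatted)
    return formatted

formatted = demo_format("/arome/file.fa", storehead="vortex", storeroot="/home/marp999")
assert formatted == "/home/marp999/vortex/arome/file.fa"
# --- end of sketch ---
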
+ class CacheStore(Store):
+     """Generic Cache Store."""
+
+     # Each Cache object created by a CacheStore will be stored here:
+     # this way it won't be garbage collected and can be re-used later on
+     _caches_object_stack = set()
+
+     _abstract = True
+     _footprint = dict(
+         info="Generic cache store",
+         attr=dict(
+             scheme=dict(
+                 values=["incache"],
+             ),
+             netloc=dict(
+                 values=["open.cache.fr"],
+             ),
+             storehash=dict(
+                 values=hashalgo_avail_list,
+             ),
+             strategy=dict(
+                 optional=True,
+                 default="std",
+             ),
+             headdir=dict(
+                 optional=True,
+                 default="conf",
+             ),
+             rtouch=dict(
+                 type=bool,
+                 optional=True,
+                 default=False,
+             ),
+             rtouchskip=dict(
+                 type=int,
+                 optional=True,
+                 default=0,
+             ),
+         ),
+     )
+
+     def __init__(self, *args, **kw):
+         del self.cache
+         logger.debug("Generic cache store init %s", self.__class__)
+         super().__init__(*args, **kw)
+
+     @property
+     def realkind(self):
+         return "cachestore"
+
+     def use_cache(self):
+         """Boolean value to ensure that this store is using a cache."""
+         return True
+
+     def has_fast_check(self):
+         """Because that's why caching is used!"""
+         return True
+
+     @property
+     def underlying_cache_kind(self):
+         """The kind of cache that will be used."""
+         return self.strategy
+
+     def _get_cache(self):
+         if not self._cache:
+             self._cache = footprints.proxy.caches.default(
+                 entry=self.location,
+                 rtouch=self.rtouch,
+                 rtouchskip=self.rtouchskip,
+                 readonly=self.readonly,
+             )
+             self._caches_object_stack.add(self._cache)
+         return self._cache
+
+     def _set_cache(self, newcache):
+         """Set a new cache reference."""
+         if isinstance(newcache, storage.Cache):
+             self._cache = newcache
+
+     def _del_cache(self):
+         """Invalidate internal cache reference."""
+         self._cache = None
+
+     cache = property(_get_cache, _set_cache, _del_cache)
+
+     def _str_more(self):
+         return "entry={:s}".format(self.cache.entry)
+
+     def incachecheck(self, remote, options):
+         """Return a stat-like object if ``remote`` exists in the current cache."""
+         if self._hash_check_or_delete(self.incachecheck, remote, options):
+             st = self.cache.check(remote["path"])
+             if options.get("isfile", False) and st:
+                 st = self.system.path.isfile(
+                     self.incachelocate(remote, options)
+                 )
+             return st
+         else:
+             return False
+
+     def incachelocate(self, remote, options):
+         """Aggregate the cache entry and the remote subpath."""
+         return self.cache.fullpath(remote["path"])
+
+     def incachelist(self, remote, options):
+         """List the content of a remote path."""
+         return self.cache.list(remote["path"])
+
+     def incacheprestageinfo(self, remote, options):
+         """Return pre-staging information."""
+         return self.cache.prestageinfo(remote["path"])
+
+     def incacheget(self, remote, local, options):
+         """Simple copy from the current cache to ``local``."""
+         logger.info(
+             "incacheget on %s://%s/%s (to: %s)",
+             self.scheme,
+             self.netloc,
+             remote["path"],
+             local,
+         )
+         rc = self.cache.retrieve(
+             remote["path"],
+             local,
+             intent=options.get("intent", CACHE_GET_INTENT_DEFAULT),
+             fmt=options.get("fmt"),
+             info=options.get("rhandler", None),
+             tarextract=options.get("auto_tarextract", False),
+             dirextract=options.get("auto_dirextract", False),
+             uniquelevel_ignore=options.get("uniquelevel_ignore", True),
+             silent=options.get("silent", False),
+         )
+         if rc or not options.get("silent", False):
+             logger.info(
+                 "incacheget retrieve rc=%s location=%s",
+                 str(rc),
+                 str(self.incachelocate(remote, options)),
+             )
+         return rc and self._hash_get_check(
+             self.incacheget, remote, local, options
+         )
+
+     def incacheput(self, local, remote, options):
+         """Simple copy from ``local`` into the current cache."""
+         logger.info(
+             "incacheput to %s://%s/%s (from: %s)",
+             self.scheme,
+             self.netloc,
+             remote["path"],
+             local,
+         )
+         rc = self.cache.insert(
+             remote["path"],
+             local,
+             intent=CACHE_PUT_INTENT,
+             fmt=options.get("fmt"),
+             info=options.get("rhandler", None),
+         )
+         logger.info(
+             "incacheput insert rc=%s location=%s",
+             str(rc),
+             str(self.incachelocate(remote, options)),
+         )
+         return rc and self._hash_put(self.incacheput, local, remote, options)
+
+     def incachedelete(self, remote, options):
+         """Simple removal of the remote resource from the cache."""
+         logger.info(
+             "incachedelete on %s://%s/%s",
+             self.scheme,
+             self.netloc,
+             remote["path"],
+         )
+         # Try to delete the companion hash file too, but ignore errors...
+         self._hash_check_or_delete(self.incachedelete, remote, options)
+         return self.cache.delete(
+             remote["path"],
+             fmt=options.get("fmt"),
+             info=options.get("rhandler", None),
+         )
+
+
+ class PromiseStore(footprints.FootprintBase):
+     """Combine a Promise Store for expected resources with any other matching Store."""
+
+     _abstract = True
+     _collector = ("store",)
+     _footprint = dict(
+         info="Promise store",
+         attr=dict(
+             scheme=dict(alias=("protocol",)),
+             netloc=dict(type=Namespace, alias=("domain", "namespace")),
+             storetrack=dict(
+                 type=bool,
+                 default=True,
+                 optional=True,
+             ),
+             prstorename=dict(
+                 type=Namespace,
+                 optional=True,
+                 default="promise.cache.fr",
+             ),
+         ),
+     )
+
+     def __init__(self, *args, **kw):
+         logger.debug("Abstract promise store init %s", self.__class__)
+         sh = kw.pop("system", sessions.system())
+         super().__init__(*args, **kw)
+         self._sh = sh
+
+         # Assume that the actual scheme is the current scheme without the "x" prefix
+         self.proxyscheme = self.scheme.lstrip("x")
+
+         # Find a store for the promised resources
+         self.promise = footprints.proxy.store(
+             scheme=self.proxyscheme,
+             netloc=self.prstorename,
+             storetrack=self.storetrack,
+         )
+         if self.promise is None:
+             logger.critical(
+                 "Could not find store scheme <%s> netloc <%s>",
+                 self.proxyscheme,
+                 self.prstorename,
+             )
+             raise ValueError("Could not get a Promise Store")
+
+         # Find the other "real" store (could be a multi-store)
+         self.other = footprints.proxy.store(
+             scheme=self.proxyscheme,
+             netloc=self.netloc,
+             storetrack=self.storetrack,
+         )
+         if self.other is None:
+             logger.critical(
+                 "Could not find store scheme <%s> netloc <%s>",
+                 self.proxyscheme,
+                 self.netloc,
+             )
+             raise ValueError("Could not get an Other Store")
+
+         self.openedstores = (self.promise, self.other)
+         self.delayed = False
+
+     @property
+     def realkind(self):
+         return "promisestore"
+
+     @property
+     def system(self):
+         """Shortcut to current system interface."""
+         return self._sh
+
+     def has_fast_check(self):
+         """It depends..."""
+         return self.other.has_fast_check()
+
+     def mkpromise_info(self, remote, options):
+         """Build a dictionary with relevant information for the promise."""
+         return dict(
+             promise=True,
+             stamp=date.stamp(),
+             itself=self.promise.locate(remote, options),
+             locate=self.other.locate(remote, options),
+             datafmt=options.get("fmt", None),
+             rhandler=options.get("rhandler", None),
+         )
+
+     def mkpromise_file(self, info, local):
+         """Dump the promise description **info** into a JSON file next to **local**."""
+         pfile = local + ".pr"
+         self.system.json_dump(info, pfile, sort_keys=True, indent=4)
+         return pfile
+
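# --- Editor's sketch (illustration only, not part of this file) ---
# A promise file is just the JSON dump of the mkpromise_info() dict above.
# For a resource expected at "local.grib", the "local.grib.pr" companion
# would contain something like this (all values invented for illustration):

example_promise = {
    "promise": True,
    "stamp": "20240101T000000.000000",          # date.stamp() at promise time
    "itself": "/promise/cache/path/to/resource.pr",  # promise.locate(...)
    "locate": "/real/cache/path/to/resource",        # other.locate(...)
    "datafmt": "grib",
    "rhandler": None,
}
# --- end of sketch ---
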
+     @staticmethod
+     def _options_fixup(options):
+         return dict() if options is None else options
+
+     def check(self, remote, options=None):
+         """Go through internal opened stores and check for the resource."""
+         options = self._options_fixup(options)
+         logger.debug("Promise check from %s", remote)
+         return self.other.check(remote.copy(), options) or self.promise.check(
+             remote.copy(), options
+         )
+
+     def locate(self, remote, options=None):
+         """Go through internal opened stores and locate the expected resource for each of them."""
+         options = self._options_fixup(options)
+         logger.debug("Promise locate %s", remote)
+         inpromise = True
+         if options:
+             inpromise = options.get("inpromise", True)
+
+         locate_other = self.other.locate(remote.copy(), options)
+         if inpromise:
+             locate_promised = self.promise.locate(remote.copy(), options)
+             return locate_promised + ";" + locate_other
+         return locate_other
+
+     def get(self, remote, local, options=None):
+         """Go through internal opened stores for the first available resource."""
+         options = self._options_fixup(options)
+         logger.debug("Promise get %s", remote)
+         self.delayed = False
+         logger.info("Try promise from store %s", self.promise)
+         try:
+             rc = self.promise.get(remote.copy(), local, options)
+         except OSError as e:
+             # If something goes wrong, assume that the promise file has been
+             # deleted during the execution of self.promise.get (which can cause
+             # IOError or OSError to be raised).
+             logger.info(
+                 "An error occurred while fetching the promise file: %s", str(e)
+             )
+             logger.info("Assuming this is a negative result...")
+             rc = False
+         if rc:
+             self.delayed = True
+         else:
+             logger.info("Try promise from store %s", self.other)
+             rc = self.other.get(remote.copy(), local, options)
+         if not rc and options.get("pretend", False):
+             logger.warning("Pretending to get a promise for <%s>", local)
+             pr_info = self.mkpromise_info(remote, options)
+             pr_file = self.mkpromise_file(pr_info, local)
+             self.system.move(pr_file, local)
+             rc = self.delayed = True
+         return rc
+
+     def earlyget(self, remote, local, options=None):
+         """Possible early-get on the target store."""
+         options = self._options_fixup(options)
+         logger.debug("Promise early-get %s", remote)
+         result_id = None
+         try:
+             rc = self.promise.has_fast_check() and self.promise.check(
+                 remote.copy(), options
+             )
+         except OSError as e:
+             logger.debug(
+                 "An error occurred while checking for the promise file: %s",
+                 str(e),
+             )
+             logger.debug("Assuming this is a negative result...")
+             rc = False
+         if not rc:
+             result_id = self.other.earlyget(remote.copy(), local, options)
+         return result_id
+
+     def finaliseget(self, result_id, remote, local, options=None):
+         """Finalise a previous early-get request."""
+         options = self._options_fixup(options)
+         logger.debug("Promise finalise-get %s", remote)
+         self.delayed = False
+         logger.info("Try promise from store %s", self.promise)
+         try:
+             rc = self.promise.get(remote.copy(), local, options)
+         except OSError as e:
+             logger.debug(
+                 "An error occurred while fetching the promise file: %s", str(e)
+             )
+             logger.debug("Assuming this is a negative result...")
+             rc = False
+         if rc:
+             self.delayed = True
+         else:
+             logger.info("Try promise from store %s", self.other)
+             rc = self.other.finaliseget(
+                 result_id, remote.copy(), local, options
+             )
+         return rc
+
+     @staticmethod
+     def _clean_pr_json(prjson):
+         del prjson["stamp"]
+         if "options" in prjson["rhandler"]:
+             prjson["rhandler"]["options"].pop("storetrack", False)
+         return prjson
+
+     def put(self, local, remote, options=None):
+         """Put a promise or the actual resource if available."""
+         options = self._options_fixup(options)
+         logger.debug("Promise put from %s to %s", local, remote)
+         if options.get("force", False) or not self.system.path.exists(local):
+             options = options.copy()
+             if not self.other.use_cache():
+                 logger.critical(
+                     "Could not promise resource without other cache <%s>",
+                     self.other,
+                 )
+                 raise ValueError(
+                     "Could not promise: other store does not use cache"
+                 )
+             pr_info = self.mkpromise_info(remote, options)
+             pr_file = self.mkpromise_file(pr_info, local)
+             # Check if a previous promise with the same description exists
+             preexisting = self.promise.check(remote.copy(), options)
+             if preexisting:
+                 pr_old_file = self.promise.locate(remote.copy())
+                 prcheck = self._clean_pr_json(
+                     self.system.json_load(pr_old_file)
+                 )
+                 prnew = self._clean_pr_json(self.system.json_load(pr_file))
+                 preexisting = prcheck == prnew
+                 if preexisting:
+                     logger.info(
+                         "The promise file <%s> preexisted and is compatible",
+                         pr_old_file,
+                     )
+                     rc = True
+                 else:
+                     logger.warning(
+                         "The promise file <%s> already exists but doesn't match",
+                         pr_old_file,
+                     )
+
+             # Put the new promise file in the PromiseCache
+             options["obs_overridelocal"] = local  # Pretty nasty :-(
+             if not preexisting:
+                 logger.warning(
+                     "Logging a promise instead of the missing resource <%s>",
+                     local,
+                 )
+                 rc = self.promise.put(pr_file, remote.copy(), options)
+                 if rc:
+                     del options["obs_overridelocal"]
+                     self.other.delete(remote.copy(), options)
+             else:
+                 options["dryrun"] = True  # Just update the tracker
+                 rc = self.promise.put(pr_file, remote.copy(), options)
+             self.system.remove(pr_file)
+
+         else:
+             logger.info("Actual resource does exist <%s>", local)
+             rc = self.other.put(local, remote.copy(), options)
+             if rc:
+                 self.promise.delete(remote.copy(), options)
+         return rc
+
+     def delete(self, remote, options=None):
+         """Go through internal opened stores and delete the resource."""
+         options = self._options_fixup(options)
+         logger.debug("Promise delete from %s", remote)
+         return self.promise.delete(
+             remote.copy(), options
+         ) and self.other.delete(remote.copy(), options)
+
+
+ # Activate the footprints fasttrack on the stores collector
+ fcollect = footprints.collectors.get(tag="store")
+ fcollect.fasttrack = ("netloc", "scheme")
+ del fcollect
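
# --- Editor's sketch (illustration only, not part of this file) ---
# Thanks to the fasttrack above, footprints can resolve a store directly from
# the (netloc, scheme) pair of a URI. Assuming a concrete class such as the
# hypothetical DemoFileStore sketched earlier has been loaded:

demo_store = footprints.proxy.store(scheme="demofile", netloc="demo.store.fr")
if demo_store is not None:
    print(demo_store.realkind, demo_store.use_cache())  # -> "store", False
# --- end of sketch ---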