vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/data/stores.py ADDED
@@ -0,0 +1,1390 @@
1
+ # pylint: disable=unused-argument
2
+
3
+ """
4
+ This module handles store objects in charge of physically accessing resources.
5
+ Store objects use the :mod:`footprints` mechanism.
6
+ """
7
+
8
+ import copy
9
+ import ftplib
10
+ import io
11
+ import os
12
+ import re
13
+
14
+ from bronx.fancies import loggers
15
+ import footprints
16
+
17
+ from vortex import sessions
18
+ from vortex import config
19
+ from vortex.data.abstractstores import (
20
+ Store,
21
+ ArchiveStore,
22
+ CacheStore,
23
+ )
24
+ from vortex.data.abstractstores import MultiStore, PromiseStore
25
+ from vortex.data.abstractstores import ARCHIVE_GET_INTENT_DEFAULT
26
+ from vortex.layout import dataflow
27
+ from vortex.syntax.stdattrs import hashalgo_avail_list
28
+ from vortex.syntax.stdattrs import DelayedEnvValue
29
+ from vortex.tools.systems import ExecutionError
30
+
31
+ #: Export base class
32
+ __all__ = []
33
+
34
+ logger = loggers.getLogger(__name__)
35
+
36
+
37
def get_cache_location():
    """Return the root directory of the local Vortex data tree (cache).

    The location is read from the ``data-tree/rootdir`` configuration key.
    When that key is not configured, fall back to ``~/.vortex.d``.

    :return: the cache root directory path (str).
    """
    try:
        cacheloc = config.from_config(
            section="data-tree",
            key="rootdir",
        )
    except config.ConfigurationError:
        # BUGFIX: os.environ["HOME"] raises KeyError when HOME is unset
        # (e.g. some batch/daemon environments); expanduser falls back to
        # the password database in that case.
        cacheloc = os.path.join(os.path.expanduser("~"), ".vortex.d")
    return cacheloc
46
+
47
+
48
class MagicPlace(Store):
    """Somewhere, over the rainbow!

    A no-op ("evanescent") store: get/put always pretend to succeed
    without touching anything, while check/delete always fail.
    """

    _footprint = dict(
        info="Evanescent physical store",
        attr=dict(
            scheme=dict(
                values=["magic"],
            ),
        ),
        priority=dict(
            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
        ),
    )

    @property
    def realkind(self):
        # Identifier advertised to the rest of the toolbox/logging.
        return "magicstore"

    def has_fast_check(self):
        """A void check is very fast !"""
        return True

    def magiccheck(self, remote, options):
        """Void - Always False."""
        return False

    def magiclocate(self, remote, options):
        """Void - Empty string returned."""
        return ""

    def magicget(self, remote, local, options):
        """Void - Always True."""
        return True

    def magicput(self, local, remote, options):
        """Void - Always True."""
        return True

    def magicdelete(self, remote, options):
        """Void - Always False."""
        return False
90
+
91
+
92
class FunctionStoreCallbackError(Exception):
    """Error raised by a callback function to signal a get failure.

    Caught (and logged) by :meth:`FunctionStore.functionget`, which then
    returns ``False`` instead of propagating the exception.
    """

    pass
94
+
95
+
96
class FunctionStore(Store):
    """Calls a function that returns a File like object (get only).

    This store is only able to perform the get action: it imports and calls
    the function specified in the URI path. This function should return a
    file like object that will be written in the local container.

    The function is given an option dictionary that contains all of the
    options provided to the store's get function, plus any additional
    information specified in the 'query' part of the URI.

    :Example:

    Lets consider the following URI:

    ``function:///sandbox.util.storefunctions.echofunction?msg=toto&msg=titi``

    It will be seen as follows:

    * scheme: ``'function'``
    * netloc: ``''``
    * path: ``'/sandbox.utils.storefunctions.echofunction'``
    * query: ``dict(msg=['toto', 'titi'])``

    As a result, the :func:`sandbox.utils.storefunctions.echofunction` will be
    called with an option dictionary that contains ['toto', 'titi'] for the
    'msg' key (plus any other options passed to the store's get method).
    """

    _footprint = dict(
        info="Dummy store that calls a function",
        attr=dict(
            scheme=dict(
                values=["function"],
            ),
            netloc=dict(
                values=[""],
            ),
        ),
        priority=dict(
            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
        ),
    )

    @property
    def realkind(self):
        return "functionstore"

    def has_fast_check(self):
        """A void check is very fast !"""
        return True

    def functioncheck(self, remote, options):
        """Void - Always False."""
        return False

    def functionlocate(self, remote, options):
        """The name of the function that will be called."""
        # Drop the leading '/' of the URI path and a possible trailing one.
        cleanname = remote["path"][1:]
        if cleanname.endswith("/"):
            cleanname = cleanname[:-1]
        return cleanname

    def functionget(self, remote, local, options):
        """Calls the appropriate function and writes the result.

        :param remote: parsed URI parts (path gives the function name,
            query gives extra keyword data).
        :param local: the local container the result is copied into.
        :param options: store options, merged into the callback's opts.
        :return: True on success, False when the callback failed.
        """
        # Find the appropriate function
        cbfunc = self.system.import_function(
            self.functionlocate(remote, options)
        )
        # ... and call it
        opts = dict()
        opts.update(options)
        opts.update(remote["query"])
        try:
            fres = cbfunc(opts)
        except FunctionStoreCallbackError as e:
            # A failing callback is not fatal: log and return False below.
            logger.error("An exception was raised in the Callback function")
            logger.error("Here is the exception: %s", str(e))
            fres = None
        if fres is not None:
            if "intent" in options and options["intent"] == dataflow.intent.IN:
                logger.info("Ignore intent <in> for function input.")
            # Handle StringIO objects, by changing them to ByteIOs...
            if isinstance(fres, io.StringIO):
                s_fres = fres
                s_fres.seek(0)
                fres = io.BytesIO()
                for l in s_fres:
                    fres.write(l.encode(encoding="utf-8"))
                fres.seek(0)
            # NB: fres should be a file like object (BytesIO will do the trick)
            return self.system.cp(fres, local)
        else:
            return False

    def functionput(self, local, remote, options):
        """This should not happen - Always False."""
        logger.error("The function store is not able to perform PUTs.")
        return False

    def functiondelete(self, remote, options):
        """This should not happen - Always False."""
        logger.error("The function store is not able to perform Deletes.")
        return False
200
+
201
+
202
class Finder(Store):
    """The most usual store: your current filesystem!

    Handles local file access (``file``/``symlink`` schemes) as well as
    remote access through ftp-like schemes (``ftp``, ``rcp``, ``scp``),
    all delegated to the session's ``system`` object.
    """

    _footprint = dict(
        info="Miscellaneous file access",
        attr=dict(
            scheme=dict(
                values=["file", "ftp", "symlink", "rcp", "scp"],
            ),
            netloc=dict(
                outcast=["oper.inline.fr"],
            ),
            storehash=dict(
                values=hashalgo_avail_list,
            ),
        ),
        priority=dict(
            level=footprints.priorities.top.DEFAULT  # @UndefinedVariable
        ),
    )

    def __init__(self, *args, **kw):
        logger.debug("Finder store init %s", self.__class__)
        super().__init__(*args, **kw)

    @property
    def realkind(self):
        return "finder"

    def hostname(self):
        """Returns the current :attr:`netloc`."""
        return self.netloc

    def fullpath(self, remote):
        """Return actual path unless explicitly defined as relative path."""
        if remote["query"].get("relative", False):
            return remote["path"].lstrip("/")
        else:
            return remote["path"]

    def _localtarfix(self, local):
        # Best-effort: if the freshly fetched local file looks like a tar
        # archive, expand it next to itself.
        if (
            isinstance(local, str)
            and self.system.path.isfile(local)
            and self.system.is_tarfile(local)
        ):
            destdir = self.system.path.dirname(
                self.system.path.realpath(local)
            )
            try:
                self.system.smartuntar(local, destdir)
            except ExecutionError:
                # Tolerate false positives from is_tarfile: only re-raise
                # when the file name itself claims to be a tar archive.
                if not self.system.is_tarname(local):
                    logger.warning(
                        "An automatic untar was attempted but it failed. "
                        + "Maybe the system's is_tarfile got it wrong ?"
                    )
                else:
                    raise

    def filecheck(self, remote, options):
        """Returns a stat-like object if the ``remote`` exists on the ``system`` provided."""
        try:
            st = self.system.stat(self.fullpath(remote))
        except OSError:
            # Missing file: report None rather than raising.
            st = None
        return st

    def filelocate(self, remote, options):
        """Returns the real path."""
        return self.fullpath(remote)

    def fileget(self, remote, local, options):
        """Delegates to ``system`` the copy of ``remote`` to ``local``."""
        rpath = self.fullpath(remote)
        logger.info("fileget on %s (to: %s)", rpath, local)
        if "intent" in options and options["intent"] == dataflow.intent.IN:
            logger.info("Ignore intent <in> for remote input %s", rpath)
        # Copy is always performed with intent=inout (see log message above).
        rc = self.system.cp(
            rpath, local, fmt=options.get("fmt"), intent=dataflow.intent.INOUT
        )
        rc = rc and self._hash_get_check(self.fileget, remote, local, options)
        if rc:
            self._localtarfix(local)
        return rc

    def fileput(self, local, remote, options):
        """Delegates to ``system`` the copy of ``local`` to ``remote``."""
        rpath = self.fullpath(remote)
        logger.info("fileput to %s (from: %s)", rpath, local)
        rc = self.system.cp(local, rpath, fmt=options.get("fmt"))
        return rc and self._hash_put(self.fileput, local, remote, options)

    def filedelete(self, remote, options):
        """Delegates to ``system`` the removing of ``remote``."""
        rc = None
        if self.filecheck(remote, options):
            rpath = self.fullpath(remote)
            logger.info("filedelete on %s", rpath)
            rc = self.system.remove(rpath, fmt=options.get("fmt"))
        else:
            logger.error(
                "Try to remove a non-existing resource <%s>",
                self.fullpath(remote),
            )
        return rc

    # The symlink scheme shares check/locate with the file scheme.
    symlinkcheck = filecheck
    symlinklocate = filelocate

    def symlinkget(self, remote, local, options):
        # Replace any existing local entry with a symlink to the remote path.
        rpath = self.fullpath(remote)
        if "intent" in options and options["intent"] == dataflow.intent.INOUT:
            logger.error(
                "It is unsafe to have a symlink with intent=inout: %s", rpath
            )
            return False
        rc = self.system.remove(local)
        self.system.symlink(rpath, local)
        return rc and self.system.path.exists(local)

    def symlinkput(self, local, remote, options):
        logger.error(
            "The Finder store with scheme:symlink is not able to perform Puts."
        )
        return False

    def symlinkdelete(self, remote, options):
        logger.error(
            "The Finder store with scheme:symlink is not able to perform Deletes."
        )
        return False

    def _ftpinfos(self, remote, **kwargs):
        # Build the keyword arguments for system.ftp/smartftget/smartftput.
        args = kwargs.copy()
        args["hostname"] = self.hostname()
        args["logname"] = remote["username"]
        # NOTE(review): netloc appears to expose a ``netport`` attribute
        # (custom str-like type) — TODO confirm against vortex.tools.net.
        port = self.hostname().netport
        if port is not None:
            args["port"] = port
        return args

    def ftpcheck(self, remote, options):
        """Delegates to ``system.ftp`` a distant check."""
        rc = None
        ftp = self.system.ftp(**self._ftpinfos(remote))
        if ftp:
            try:
                rc = ftp.size(self.fullpath(remote))
            except (ValueError, TypeError):
                # size() may fail on odd server replies: treat as "unknown".
                pass
            except ftplib.all_errors:
                # Missing file or protocol error: treat as "not there".
                pass
            finally:
                ftp.close()
        return rc

    def ftplocate(self, remote, options):
        """Delegates to ``system`` qualified name creation."""
        ftp = self.system.ftp(**self._ftpinfos(remote, delayed=True))
        if ftp:
            rloc = ftp.netpath(self.fullpath(remote))
            ftp.close()
            return rloc
        else:
            return None

    def ftpget(self, remote, local, options):
        """Delegates to ``system`` the file transfer of ``remote`` to ``local``."""
        rpath = self.fullpath(remote)
        logger.info(
            "ftpget on ftp://%s/%s (to: %s)", self.hostname(), rpath, local
        )
        rc = self.system.smartftget(
            rpath,
            local,
            fmt=options.get("fmt"),
            # ftp control
            **self._ftpinfos(remote),
        )
        rc = rc and self._hash_get_check(self.ftpget, remote, local, options)
        if rc:
            self._localtarfix(local)
        return rc

    def ftpput(self, local, remote, options):
        """Delegates to ``system`` the file transfer of ``local`` to ``remote``."""
        rpath = self.fullpath(remote)
        put_opts = dict()
        put_opts["fmt"] = options.get("fmt")
        put_opts["sync"] = options.get("enforcesync", False)
        logger.info(
            "ftpput to ftp://%s/%s (from: %s)", self.hostname(), rpath, local
        )
        rc = self.system.smartftput(
            local,
            rpath,
            # ftp control
            **self._ftpinfos(remote, **put_opts),
        )
        return rc and self._hash_put(self.ftpput, local, remote, options)

    def ftpdelete(self, remote, options):
        """Delegates to ``system`` a distant remove."""
        rc = None
        actualpath = self.fullpath(remote)
        if self.ftpcheck(remote, options):
            logger.info(
                "ftpdelete on ftp://%s/%s", self.hostname(), actualpath
            )
            ftp = self.system.ftp(**self._ftpinfos(remote))
            if ftp:
                try:
                    rc = ftp.delete(actualpath)
                finally:
                    ftp.close()
        else:
            logger.error(
                "Try to remove a non-existing resource <%s>", actualpath
            )
        return rc
423
+
424
+
425
class _VortexStackedStorageMixin:
    """Mixin class that adds utility functions to work with stacked data.

    Stacked resources live inside a "stack" container; the stack's own
    path/format travel in the URI query (``stackpath``/``stackfmt``).
    """

    _STACKED_RE = re.compile("stacked-")

    @property
    def stackedstore(self):
        """Tell if the present store is looking into a stack of resources."""
        return self._STACKED_RE.search(self.netloc)

    def _stacked_remainder(self, remote, stackpath):
        """Path of **remote** relative to **stackpath**.

        Leading path components common with **stackpath** are removed.
        """
        path_remainder = remote["path"].strip("/").split("/")
        for a_spath in stackpath.split("/"):
            if path_remainder and path_remainder[0] == a_spath:
                del path_remainder[0]
            else:
                break
        return "/".join(path_remainder)

    def _stacked_xremote(self, remote):
        """The path to **remote** with its stack.

        :raises ValueError: when ``stackpath``/``stackfmt`` are missing
            from the query (only for stacked stores).
        """
        if self.stackedstore:
            # Work on shallow copies: the caller's dict stays untouched.
            remote = remote.copy()
            remote["query"] = remote["query"].copy()
            stackpath = remote["query"].pop("stackpath", (None,))[0]
            stackfmt = remote["query"].pop("stackfmt", (None,))[0]
            if stackpath is None or stackfmt is None:
                raise ValueError(
                    '"stackpath" and "stackfmt" are not available in the query.'
                )
            else:
                remote["path"] = (
                    stackpath
                    + "/"
                    + self._stacked_remainder(remote, stackpath)
                )
        return remote

    def _stacked_xegglocate(self, remote):
        """Return various informations about the stack associated with **remote**.

        It returns a 3 elements tuple:

        * The remote-like dictionary to the stack resource
        * The format of the stack resource
        * The path to **remote** within the stacked resource

        :raises ValueError: when ``stackpath``/``stackfmt`` are missing
            from the query.
        """
        remote = remote.copy()
        remote["query"] = remote["query"].copy()
        stackpath = remote["query"].pop("stackpath", (None,))[0]
        stackfmt = remote["query"].pop("stackfmt", (None,))[0]
        if stackpath is None or stackfmt is None:
            raise ValueError(
                '"stackpath" and "stackfmt" are not available in the query.'
            )
        # BUGFIX: strip only *after* the None check.  Previously
        # ``.strip("/")`` was applied directly to the popped value, so a
        # missing "stackpath" raised AttributeError (None has no strip)
        # instead of the intended ValueError above.
        stackpath = stackpath.strip("/")
        resource_remainder = self._stacked_remainder(remote, stackpath)
        remote["path"] = "/" + stackpath
        return remote, stackfmt, resource_remainder
485
+
486
+
487
# Footprint fragment mixed into stacked-archive store classes below to
# force them read-only (readonly is optional but can only be True).
_vortex_readonly_store = footprints.Footprint(
    info="Abstract store' readonly=True attribute",
    attr=dict(
        readonly=dict(
            values=[
                True,
            ],
            optional=True,
            default=True,
        )
    ),
)
499
+
500
+
501
class _VortexBaseArchiveStore(ArchiveStore, _VortexStackedStorageMixin):
    """Some kind of archive for VORTEX experiments.

    Abstract base: subclasses provide :meth:`remap_read` (and possibly
    ``remap_write``) to translate a vortex URI path into an actual
    archive location.  The ``vortex*`` methods route each action either
    through the stack machinery (for ``stacked-`` netlocs) or straight
    to the in-archive primitives.
    """

    _abstract = True
    _footprint = dict(
        info="VORTEX archive access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            netloc=dict(),
            storehead=dict(
                optional=True,
                default="vortex",
                outcast=["xp"],
            ),
        ),
    )

    # Netloc marker enabling the automatic cache refill of whole stacks.
    _STACKS_AUTOREFILL_CRIT = "stacked-archive-smart"

    def __init__(self, *args, **kw):
        logger.debug("Vortex archive store init %s", self.__class__)
        super().__init__(*args, **kw)

    def remap_read(self, remote, options):
        """Remap actual remote path to distant store path for intrusive actions."""
        raise NotImplementedError

    def remap_list(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace."""
        # Listing only makes sense deep enough in the experiment tree.
        if len(remote["path"].split("/")) >= 4:
            return self.remap_read(remote, options)
        else:
            logger.critical(
                "The << %s >> path is not listable.", remote["path"]
            )
            return None

    # NOTE(review): this binds remap_write to the *abstract* remap_read
    # above; subclasses must re-alias after overriding remap_read.
    remap_write = remap_read

    @property
    def stacks_autorefill(self):
        """Where to refill a stack retrieved from the archive."""
        if self._STACKS_AUTOREFILL_CRIT in self.netloc:
            return self.netloc.replace(self._STACKS_AUTOREFILL_CRIT, "cache")
        else:
            return None

    def _vortex_stacked_egg_retrieve(self, remote, result_id=None):
        """Retrieve the stack associated with **remote**.

        The stack is fetched once into a ``vortex_stacks_xeggs``
        directory under the current rundir and reused afterwards.

        :return: ``(rc, target, remainder)`` — success flag, local stack
            path, and path of the resource within the stack.
        """
        remote, remotefmt, remainder = self._stacked_xegglocate(remote)
        rundir = sessions.current().context.rundir
        if not rundir:
            rundir = self.system.pwd()
        rundir = self.system.path.join(rundir, "vortex_stacks_xeggs")
        target = self.system.path.join(
            rundir, *remote["path"].strip("/").split("/")
        )
        targetopts = dict(fmt=remotefmt, intent=dataflow.intent.IN)
        if self.system.path.exists(target):
            logger.info(
                "Stack previously retrieved (in %s). Using it.", target
            )
            rc = True
        else:
            if result_id:
                rc = self._vortexfinaliseget(
                    result_id, remote, target, targetopts
                )
            else:
                rc = self._vortexget(remote, target, targetopts)
            if rc and self.stacks_autorefill:
                # '-smart' variant: push the whole stack into the cache
                # so that subsequent accesses are faster.
                rstore = footprints.proxy.store(
                    scheme=self.scheme, netloc=self.stacks_autorefill
                )
                logger.info("Refilling the stack egg to [%s]", rstore)
                try:
                    rstore.put(target, remote.copy(), targetopts)
                except (ExecutionError, OSError) as e:
                    # Refill is best-effort only: never fail the get.
                    logger.error(
                        "An ExecutionError happened during the refill: %s", str(e)
                    )
                    logger.error("This error is ignored... but that's ugly !")
        return rc, target, remainder

    def vortexcheck(self, remote, options):
        """Vortex' archive check sequence."""
        if self.stackedstore:
            s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options["fmt"] = s_remotefmt
            # First check that the stack itself exists, then that the
            # resource is present inside the retrieved stack.
            rc = self._vortexcheck(s_remote, options)
            if rc:
                rc, target, remainder = self._vortex_stacked_egg_retrieve(
                    remote
                )
                rc = rc and self.system.path.exists(
                    self.system.path.join(target, remainder)
                )
            return rc
        else:
            return self._vortexcheck(remote, options)

    def _vortexcheck(self, remote, options):
        """Remap and ftpcheck sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivecheck(remote, options)

    def vortexlocate(self, remote, options):
        """Vortex' archive locate sequence."""
        if self.stackedstore:
            remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options["fmt"] = s_remotefmt
        return self._vortexlocate(remote, options)

    def _vortexlocate(self, remote, options):
        """Remap and ftplocate sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivelocate(remote, options)

    def vortexlist(self, remote, options):
        """Vortex' archive list sequence."""
        if self.stackedstore:
            # Stacked stores cannot be listed.
            return None
        else:
            return self._vortexlist(remote, options)

    def _vortexlist(self, remote, options):
        """Remap and ftplist sequence."""
        remote = self.remap_list(remote, options)
        if remote:
            return self.inarchivelist(remote, options)
        else:
            return None

    def vortexprestageinfo(self, remote, options):
        """Vortex' archive prestageinfo sequence."""
        if self.stackedstore:
            remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            options = options.copy()
            options["fmt"] = s_remotefmt
        return self._vortexprestageinfo(remote, options)

    def _vortexprestageinfo(self, remote, options):
        """Remap and ftpprestageinfo sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveprestageinfo(remote, options)

    def vortexget(self, remote, local, options):
        """Vortex' archive get sequence."""
        if self.stackedstore:
            # Fetch (or reuse) the stack, then copy the resource out of it.
            rc, target, remainder = self._vortex_stacked_egg_retrieve(remote)
            rc = rc and self.system.cp(
                self.system.path.join(target, remainder),
                local,
                fmt=options.get("fmt"),
                intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
            )
            return rc
        else:
            return self._vortexget(remote, local, options)

    def _vortexget(self, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveget(remote, local, options)

    def vortexearlyget(self, remote, local, options):
        """Vortex' archive earlyget sequence."""
        if self.stackedstore:
            s_remote, s_remotefmt, _ = self._stacked_xegglocate(remote)
            targetopts = dict(fmt=s_remotefmt, intent=dataflow.intent.IN)
            return self._vortexearlyget(s_remote, "somelocalfile", targetopts)
        else:
            return self._vortexearlyget(remote, local, options)

    def _vortexearlyget(self, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchiveearlyget(remote, local, options)

    def vortexfinaliseget(self, result_id, remote, local, options):
        """Vortex' archive finaliseget sequence."""
        if self.stackedstore:
            rc, target, remainder = self._vortex_stacked_egg_retrieve(
                remote, result_id=result_id
            )
            rc = rc and self.system.cp(
                self.system.path.join(target, remainder),
                local,
                fmt=options.get("fmt"),
                intent=options.get("intent", ARCHIVE_GET_INTENT_DEFAULT),
            )
            return rc
        else:
            return self._vortexfinaliseget(result_id, remote, local, options)

    def _vortexfinaliseget(self, result_id, remote, local, options):
        """Remap and ftpget sequence."""
        remote = self.remap_read(remote, options)
        return self.inarchivefinaliseget(result_id, remote, local, options)

    def vortexput(self, local, remote, options):
        """Remap root dir and ftpput sequence."""
        if self.stackedstore:
            raise RuntimeError("stacked archive stores are never writable.")
        if not self.storetrue:
            logger.info("put deactivated for %s", str(local))
            return True
        remote = self.remap_write(remote, options)
        return self.inarchiveput(local, remote, options)

    def vortexdelete(self, remote, options):
        """Remap root dir and ftpdelete sequence."""
        if self.stackedstore:
            raise RuntimeError("stacked archive stores are never writable.")
        remote = self.remap_write(remote, options)
        return self.inarchivedelete(remote, options)
721
+
722
+
723
class VortexStdBaseArchiveStore(_VortexBaseArchiveStore):
    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.

    This 'archive-legacy' store looks into the resource 'main' location not
    into a potential stack.
    """

    _footprint = dict(
        info="VORTEX archive access for casual experiments",
        attr=dict(
            netloc=dict(
                values=["vortex.archive-legacy.fr"],
            ),
        ),
    )

    def remap_read(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace.

        Injects the archive root (storage/rootdir configuration key) into
        a shallow copy of **remote**.

        :raises config.ConfigurationError: when storage/rootdir is unset.
        """
        remote = copy.copy(remote)
        try:
            remote["root"] = config.from_config(
                section="storage",
                key="rootdir",
            )
        except config.ConfigurationError as e:
            msg = (
                "Trying to write to archive but location is not configured.\n"
                'Make sure key "rootdir" is defined in storage section of '
                "the configuration.\n"
                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
            )
            logger.error(msg)
            raise e
        return remote

    # BUGFIX: re-alias remap_write to the overridden remap_read.  The base
    # class binds ``remap_write`` to its own abstract ``remap_read`` (which
    # raises NotImplementedError); without this line, class-level name
    # binding means vortexput/vortexdelete still hit the abstract version.
    # The op variant (VortexOpBaseArchiveStore) already does this.
    remap_write = remap_read
757
+
758
+
759
class VortexStdStackedArchiveStore(VortexStdBaseArchiveStore):
    """Archive for casual VORTEX experiments: Support for legacy/Olive XPIDs.

    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
    the stack associated to the resource. The '-smart' variant, has the ability
    to refill the whole stack into local cache (to be faster in the future).
    """

    # Mix in the readonly=True footprint: stacked archives are never written.
    _footprint = [
        _vortex_readonly_store,
        dict(
            attr=dict(
                netloc=dict(
                    values=[
                        "vortex.stacked-archive-legacy.fr",
                        "vortex.stacked-archive-smart.fr",
                    ],
                ),
            )
        ),
    ]
780
+
781
+
782
class VortexOpBaseArchiveStore(_VortexBaseArchiveStore):
    """Archive for op VORTEX experiments.

    This 'archive-legacy' store looks into the resource 'main' location not
    into a potential stack.
    """

    _footprint = dict(
        info="VORTEX archive access for op experiments",
        attr=dict(
            netloc=dict(
                values=["vsop.archive-legacy.fr"],
            ),
            storetrue=dict(
                default=DelayedEnvValue("op_archive", True),
            ),
        ),
    )

    def remap_read(self, remote, options):
        """Reformulates the remote path to compatible vortex namespace.

        The archive root is read from the storage/op_rootdir configuration
        key.  When the fifth path component looks like a date
        (``YYYYMMDDThh[mm]``), it is expanded into date sub-directories
        (e.g. ``20200101T0000`` -> ``2020/01/01/T0000``).

        :raises config.ConfigurationError: when storage/op_rootdir is unset.
        """
        remote = copy.copy(remote)
        try:
            remote["root"] = config.from_config(
                section="storage",
                key="op_rootdir",
            )
        except config.ConfigurationError as e:
            # BUGFIX: the original message ran "location" and "is not
            # configured" together (missing space) and told the user to
            # define "rootdir" while the key actually read is "op_rootdir".
            msg = (
                "Trying to write to operational data archive but location "
                'is not configured.\nMake sure key "op_rootdir" is defined in '
                "the storage section of the configuration.\n"
                "See https://vortex-nwp.readthedocs.io/en/latest/user-guide/configuration.html#storage"
            )
            logger.error(msg)
            raise e
        xpath = remote["path"].split("/")
        if len(xpath) >= 5 and re.match(r"^\d{8}T\d{2,4}", xpath[4]):
            # If a date is detected
            vxdate = list(xpath[4])
            vxdate.insert(4, "/")
            vxdate.insert(7, "/")
            vxdate.insert(10, "/")
            xpath[4] = "".join(vxdate)
            remote["path"] = self.system.path.join(*xpath)
        return remote

    remap_write = remap_read
830
+
831
+
832
class VortexOpStackedArchiveStore(VortexOpBaseArchiveStore):
    """Archive for op VORTEX experiments.

    This 'stacked-archive-legacy' or 'stacked-archive-smart' store looks into
    the stack associated to the resource. The '-smart' variant, has the ability
    to refill the whole stack into local cache (to be faster in the future).
    """

    # Footprint: read-only base plus the two vsop stacked-archive netlocs.
    _footprint = [
        _vortex_readonly_store,
        {
            "attr": {
                "netloc": {
                    "values": [
                        "vsop.stacked-archive-legacy.fr",
                        "vsop.stacked-archive-smart.fr",
                    ],
                },
            },
        },
    ]
853
+
854
+
855
class VortexArchiveStore(MultiStore):
    """Archive store for any Vortex experiments.

    Depending on the netloc, legacy/Olive XPIDs ('vortex'), free XPIDs
    ('vortex-free') or operational experiments ('vsop') will be dealt with.

    First, this multi store will look onto the resource 'main' location. In a
    second phase, if sensible, il will also dig into the stack associated with
    the resource.
    """

    _footprint = dict(
        info="VORTEX archive access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            netloc=dict(
                values=[
                    "vortex.archive.fr",
                    "vortex-free.archive.fr",
                    "vsop.archive.fr",
                ],
            ),
            refillstore=dict(
                default=False,
            ),
            storehead=dict(
                optional=True,
            ),
            storesync=dict(
                alias=("archsync", "synchro"),
                type=bool,
                optional=True,
            ),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Only use the stacked store if sensible."""
        head, *others = self.openedstores
        stack_requested = "stackpath" in remote["query"]
        return [head] + [
            sto for sto in others if stack_requested or not sto.stackedstore
        ]

    def alternates_netloc(self):
        """Return netlocs describing both base and stacked archives."""
        netloc_m = re.match(
            r"(?P<base>v.*)\.archive\.(?P<country>\w+)", self.netloc
        )
        base = netloc_m.group("base")
        country = netloc_m.group("country")
        return [
            f"{base}.archive-legacy.{country}",
            f"{base}.stacked-archive-legacy.{country}",
        ]

    def alternates_fpextras(self):
        """Deal with some ArchiveStores' specific attributes."""
        return {"storehead": self.storehead, "storesync": self.storesync}
924
+
925
+
926
class _VortexCacheBaseStore(CacheStore, _VortexStackedStorageMixin):
    """Some kind of cache for VORTEX experiments: one still needs to choose the cache strategy."""

    _abstract = True
    _footprint = dict(
        info="VORTEX cache access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            headdir=dict(
                default="",
                outcast=[
                    "xp",
                ],
            ),
            rtouch=dict(
                default=True,
            ),
            rtouchskip=dict(
                default=3,
            ),
        ),
    )

    def __init__(self, *args, **kwargs):
        logger.debug("Vortex cache store init %s", self.__class__)
        # Drop the 'cache' attribute before the parent initialiser runs
        # (presumably to force its lazy re-creation — TODO confirm).
        del self.cache
        super().__init__(*args, **kwargs)

    def vortexcheck(self, remote, options):
        """Proxy to :meth:`incachecheck`."""
        xremote = self._stacked_xremote(remote)
        return self.incachecheck(xremote, options)

    def vortexlocate(self, remote, options):
        """Proxy to :meth:`incachelocate`."""
        xremote = self._stacked_xremote(remote)
        return self.incachelocate(xremote, options)

    def vortexlist(self, remote, options):
        """Proxy to :meth:`incachelist` (no stack remapping here)."""
        return self.incachelist(remote, options)

    def vortexprestageinfo(self, remote, options):
        """Proxy to :meth:`incacheprestageinfo`."""
        xremote = self._stacked_xremote(remote)
        return self.incacheprestageinfo(xremote, options)

    def vortexget(self, remote, local, options):
        """Proxy to :meth:`incacheget`."""
        xremote = self._stacked_xremote(remote)
        return self.incacheget(xremote, local, options)

    def vortexput(self, local, remote, options):
        """Proxy to :meth:`incacheput`."""
        xremote = self._stacked_xremote(remote)
        return self.incacheput(local, xremote, options)

    def vortexdelete(self, remote, options):
        """Proxy to :meth:`incachedelete`."""
        xremote = self._stacked_xremote(remote)
        return self.incachedelete(xremote, options)
983
+
984
+
985
class VortexCacheMtStore(_VortexCacheBaseStore):
    """Some kind of MTOOL cache for VORTEX experiments."""

    _footprint = dict(
        info="VORTEX MTOOL like Cache access",
        attr=dict(
            netloc=dict(
                # Plain and stacked MTOOL cache netlocs for every vapp.
                values=[
                    f"{vapp}.{prefix}cache-mt.fr"
                    for vapp in ("vortex", "vortex-free", "vsop")
                    for prefix in ("", "stacked-")
                ]
            ),
        ),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.location = get_cache_location()
1004
+
1005
+
1006
class VortexCacheOp2ResearchStore(_VortexCacheBaseStore):
    """The DSI/OP VORTEX cache where researchers can get the freshest data."""

    _footprint = dict(
        info="VORTEX Mtool cache access",
        attr=dict(
            netloc=dict(
                values=[
                    "vsop.{:s}cache-op2r.fr".format(s)
                    for s in ("", "stacked-")
                ],
            ),
            readonly=dict(
                default=True,
            ),
        ),
    )

    def __init__(self, *args, **kw):
        """Resolve the op cache location from the configuration.

        :raises config.ConfigurationError: when ``data-tree/op_rootdir`` is
            not defined in the configuration.
        """
        super().__init__(*args, **kw)
        try:
            cachepath = config.from_config(
                section="data-tree",
                key="op_rootdir",
            )
        except config.ConfigurationError as e:
            # BUG FIX: the message used to be passed as two positional
            # arguments; logging treated the second one as a %-format
            # argument, so it was never part of the message.
            logger.error(
                "Cannot use special experiment cache without providing "
                "cache location"
            )
            raise e

        self.location = os.path.join(cachepath, "vortex")
1039
+
1040
+
1041
class _AbstractVortexCacheMultiStore(MultiStore):
    """Any Cache based Vortex multi store."""

    _abstract = True
    _footprint = dict(
        info="VORTEX cache access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            refillstore=dict(
                default=False,
            ),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Deals with stacked stores that are not always active."""
        head, *others = self.openedstores
        selected = [head]
        # TODO is the call to cache.allow_reads still required without
        # marketplace stores?
        for sto in others:
            stack_ok = not sto.stackedstore or "stackpath" in remote["query"]
            if stack_ok and sto.cache.allow_reads(remote["path"]):
                selected.append(sto)
        return selected

    def filtered_writeable_openedstores(self, remote):
        """Never writes into stack stores."""
        head, *others = self.openedstores
        selected = [head]
        for sto in others:
            if not sto.stackedstore and sto.cache.allow_writes(
                remote["path"]
            ):
                selected.append(sto)
        return selected
1090
+
1091
+
1092
class VortexCacheStore(_AbstractVortexCacheMultiStore):
    """The go to store for data cached by VORTEX R&D experiments."""

    _footprint = dict(
        attr=dict(
            netloc=dict(
                values=["vortex.cache.fr", "vortex-free.cache.fr"],
            ),
        )
    )

    def alternates_netloc(self):
        """Return the plain and stacked MTOOL cache netlocs for this vapp."""
        base = self.netloc.firstname
        return [
            base + ".cache-mt.fr",
            base + ".stacked-cache-mt.fr",
        ]
1112
+
1113
+
1114
class VortexVsopCacheStore(_AbstractVortexCacheMultiStore):
    """The go to store for data cached by VORTEX operational experiments.

    It behaves differently depending on the profile of the user running the
    code (see the **glovekind** attribute).
    """

    _footprint = dict(
        info="VORTEX vsop magic cache access",
        attr=dict(
            netloc=dict(
                values=[
                    "vsop.cache.fr",
                ],
            ),
            glovekind=dict(
                optional=True,
                default="[glove::realkind]",
            ),
        ),
    )

    def alternates_netloc(self):
        """For Non-Op users, Op caches may be accessed in read-only mode."""
        netlocs = [
            "vsop.cache-mt.fr",
            "vsop.stacked-cache-mt.fr",
        ]
        # Only set up op2r cache if the associated filepath is configured
        op2r_usable = self.glovekind != "opuser" and config.is_defined(
            section="data-tree",
            key="op_rootdir",
        )
        if op2r_usable:
            netlocs.extend(
                [
                    "vsop.cache-op2r.fr",
                    "vsop.stacked-cache-op2r.fr",
                ]
            )
        return netlocs
1154
+
1155
+
1156
class _AbstractVortexStackMultiStore(MultiStore):
    """Any stack based Vortex multi store."""

    _abstract = True
    _footprint = dict(
        info="VORTEX stack access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            refillstore=dict(
                default=False,
            ),
        ),
    )

    # TODO is this still needed without marketplace stores?
    def filtered_readable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
        head, *others = self.openedstores
        selected = [head]
        for sto in others:
            if sto.cache.allow_reads(remote["path"]):
                selected.append(sto)
        return selected

    def filtered_writeable_openedstores(self, remote):
        """Deals with marketplace stores that are not always active."""
        head, *others = self.openedstores
        selected = [head]
        for sto in others:
            if sto.cache.allow_writes(remote["path"]):
                selected.append(sto)
        return selected
1200
+
1201
+
1202
class VortexStackStore(_AbstractVortexStackMultiStore):
    """Store intended to read and write data into VORTEX R&D stacks."""

    _footprint = dict(
        info="VORTEX stack access",
        attr=dict(
            netloc=dict(
                values=["vortex.stack.fr", "vortex-free.stack.fr"],
            ),
        ),
    )

    def alternates_netloc(self):
        """Go through the various stacked stores."""
        return [self.netloc.firstname + ".stacked-cache-mt.fr"]
1217
+
1218
+
1219
class VortexVsopStackStore(_AbstractVortexStackMultiStore):
    """Store intended to read and write data into VORTEX vsop stacks."""

    _footprint = dict(
        info="VORTEX stack access",
        attr=dict(
            netloc=dict(
                values=["vsop.stack.fr"],
            ),
            glovekind=dict(
                optional=True,
                default="[glove::realkind]",
            ),
        ),
    )

    def alternates_netloc(self):
        """For Non-Op users, Op caches may be accessed in read-only mode."""
        netlocs = ["vsop.stacked-cache-mt.fr"]
        if self.glovekind != "opuser":
            netlocs.append("vsop.stacked-cache-op2r.fr")
        return netlocs
1243
+
1244
+
1245
class VortexStoreLegacy(MultiStore):
    """Combined cache and archive legacy VORTEX stores.

    By '-legacy' we mean that stack resources are ignored.
    """

    _footprint = dict(
        info="VORTEX multi access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            netloc=dict(
                values=[
                    "vortex.multi-legacy.fr",
                    "vortex-free.multi-legacy.fr",
                    "vsop.multi-legacy.fr",
                ],
            ),
            refillstore=dict(
                default=True,
            ),
        ),
    )

    def alternates_netloc(self):
        """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
        base = self.netloc.firstname
        return [base + ".cache.fr", base + ".archive-legacy.fr"]
1276
+
1277
+
1278
class VortexStore(MultiStore):
    """Combined cache and archive VORTEX stores.

    If sensible, stack will be explored and might be refilled into cache.
    """

    _footprint = dict(
        info="VORTEX multi access",
        attr=dict(
            scheme=dict(
                values=["vortex"],
            ),
            netloc=dict(
                values=[
                    "vortex.multi.fr",
                    "vortex-free.multi.fr",
                    "vsop.multi.fr",
                ],
            ),
            refillstore=dict(default=False),
        ),
    )

    def filtered_readable_openedstores(self, remote):
        """Deals with stacked stores that are not always active."""
        head, *others = self.openedstores
        stack_requested = "stackpath" in remote["query"]
        return [head] + [
            sto for sto in others if stack_requested or not sto.stackedstore
        ]

    def alternates_netloc(self):
        """Tuple of alternates domains names, e.g. ``cache`` and ``archive``."""
        base = self.netloc.firstname
        return [base + ".multi-legacy.fr", base + ".stacked-archive-smart.fr"]
1324
+
1325
+
1326
class PromiseCacheStore(VortexCacheMtStore):
    """Some kind of vortex cache for EXPECTED resources."""

    _footprint = dict(
        info="EXPECTED cache access",
        attr=dict(
            netloc=dict(
                values=["promise.cache.fr"],
            ),
            headdir=dict(
                default="promise",
                outcast=["xp", "vortex"],
            ),
        ),
    )

    @staticmethod
    def _add_default_options(options):
        """Return a copy of *options* with the promise-specific defaults set."""
        updated = options.copy()
        updated["fmt"] = "ascii"  # Promises are always JSON files
        updated["intent"] = "in"  # Promises are always read-only
        return updated

    def vortexget(self, remote, local, options):
        """Proxy to :meth:`incacheget`."""
        return super().vortexget(
            remote, local, self._add_default_options(options)
        )

    def vortexput(self, local, remote, options):
        """Proxy to :meth:`incacheput`."""
        return super().vortexput(
            local, remote, self._add_default_options(options)
        )

    def vortexdelete(self, remote, options):
        """Proxy to :meth:`incachedelete`."""
        return super().vortexdelete(remote, self._add_default_options(options))
1364
+
1365
+
1366
class VortexPromiseStore(PromiseStore):
    """Combine a Promise Store for expected resources and any VORTEX Store."""

    _footprint = {
        "info": "VORTEX promise store",
        "attr": {
            "scheme": {
                "values": ["xvortex"],
            },
            "netloc": {
                "outcast": [
                    "vortex-demo.cache.fr",
                    "vortex-demo.multi.fr",
                    "vortex.testcache.fr",
                    "vortex.testmulti.fr",
                ],
            },
        },
    }
1385
+
1386
+
1387
# Activate the footprint's fasttrack on the stores collector
_store_collector = footprints.collectors.get(tag="store")
_store_collector.fasttrack = ("netloc", "scheme")
del _store_collector
1390
+ del fcollect