vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/tools/storage.py
@@ -0,0 +1,996 @@
+ """
+ This package handles :class:`Storage` objects that could be in charge of
+ hosting data resources either locally ("Cache") or on a remote host ("Archive").
+
+ * :class:`Storage` is the main abstract class that defines the user interface for
+   every class of this module. :meth:`Storage.fullpath`, :meth:`Storage.check`,
+   :meth:`Storage.insert`, :meth:`Storage.retrieve` and :meth:`Storage.delete` are
+   frequently used from a user point of view.
+ * The :class:`Cache` abstract class is a specialisation of the :class:`Storage`
+   class that handles data resources locally (i.e. data hosted on the same machine,
+   readily accessible at any time). In this module, various concrete
+   implementations are provided for this class in order to support various cache
+   flavors.
+ * The :class:`Archive` class (readily usable) is a specialisation of the
+   :class:`Storage` class dedicated to data resources stored remotely (e.g. on a
+   mass archive system).
+
+ These classes purely focus on the technical aspects (e.g. how to transfer a given
+ filename, directory or file-like object to its storage place). For :class:`Cache`
+ based storage, this means determining the location of the data on the filesystem,
+ in a database, ... For :class:`Archive` based storage, it smoothly handles the
+ communication protocol between the local host and the remote archive.
+
+ These classes are used by :class:`Store` objects to access data. Thus,
+ :class:`Store` objects no longer need to worry about the technical
+ aspects. Using the :mod:`footprints` package, for a given execution target, this
+ makes it possible to customise the way data are accessed while leaving the
+ :class:`Store` objects unchanged.
+ """
+
+ import contextlib
+ import ftplib
+ import re
+ import time
+ from datetime import datetime
+
+ import footprints
+ from bronx.fancies import loggers
+ from bronx.stdtypes.history import History
+ from bronx.syntax.decorators import nicedeco
+ from vortex import sessions
+ from vortex.tools.actions import actiond as ad
+ from vortex.tools.delayedactions import d_action_status
+
+ from vortex import config
+
+ #: No automatic export
+ __all__ = []
+
+ logger = loggers.getLogger(__name__)
+
+ # If the source file size exceeds this threshold, a hard link will be
+ # used (whenever possible). Otherwise a simple copy will be used.
+ HARDLINK_THRESHOLD = 1048576
+
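In other words, with the default setting, a source file larger than 1 MiB (1048576 bytes) is hard-linked into place whenever the filesystem allows it, while smaller files are plainly copied. A tiny paraphrase of that decision rule (illustrative only; the real behaviour lives in the System object's cp implementation, which receives smartcp_threshold below):

    def copy_strategy(size_in_bytes, threshold=1048576):
        """Mirror of the HARDLINK_THRESHOLD rule described in the comment above."""
        return "hardlink" if size_in_bytes > threshold else "copy"

    assert copy_strategy(2 * 1024 * 1024) == "hardlink"   # a 2 MiB file
    assert copy_strategy(4096) == "copy"                  # a 4 KiB file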
+
+ # Decorators: for internal use in the Storage class
+ # -------------------------------------------------
+
+
+ def do_recording(flag):
+     """Add a record line in the History object (if sensible)."""
+
+     @nicedeco
+     def do_flagged_recording(f):
+         def wrapped_action(self, item, *kargs, **kwargs):
+             infos = self._findout_record_infos(kwargs)
+             (rc, extrainfos) = f(self, item, *kargs, **kwargs)
+             infos.update(extrainfos)
+             self.addrecord(flag, item, status=rc, **infos)
+             return rc
+
+         return wrapped_action
+
+     return do_flagged_recording
+
+
+ @nicedeco
+ def enforce_readonly(f):
+     """Check that the current storage object is not readonly."""
+
+     def wrapped_action(self, item, *kargs, **kwargs):
+         if self.readonly:
+             raise OSError("This Storage place is readonly.")
+         return f(self, item, *kargs, **kwargs)
+
+     return wrapped_action
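Taken together, these two decorators shape the public actions of the Storage class defined below: enforce_readonly rejects writes on read-only places, and do_recording appends a history record around the actual operation. An unrolled, purely illustrative equivalent of what the decorated insert() ends up doing:

    def insert_equivalent(store, item, local, **kwargs):
        """What @enforce_readonly + @do_recording("INSERT") amount to (sketch only)."""
        if store.readonly:                                   # enforce_readonly
            raise OSError("This Storage place is readonly.")
        infos = store._findout_record_infos(kwargs)          # do_recording
        rc, extrainfos = store._actual_insert(item, local, **kwargs)
        infos.update(extrainfos)
        store.addrecord("INSERT", item, status=rc, **infos)
        return rc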
+
+
+ # Main Storage abstract class
+ # ---------------------------
+
+
+ class Storage(footprints.FootprintBase):
+     """Root class for any Storage class, e.g. Cache, Archive, ...
+
+     Tips for developers:
+
+     The following methods need to be defined in the child classes:
+
+     * *_actual_fullpath*
+     * *_actual_prestageinfo*
+     * *_actual_check*
+     * *_actual_list*
+     * *_actual_insert*
+     * *_actual_retrieve*
+     * *_actual_delete*
+
+     They must return a two-element tuple consisting of a returncode and a
+     dictionary whose items will be written in the object's record.
+     """
+
+     _abstract = (True,)
+     _footprint = dict(
+         info="Default/Abstract storage place description.",
+         attr=dict(
+             storage=dict(
+                 info="The storage target.",
+             ),
+             record=dict(
+                 info="Record insert, retrieve, delete actions in an History object.",
+                 type=bool,
+                 optional=True,
+                 default=False,
+                 access="rwx",
+             ),
+             readonly=dict(
+                 info="Disallow insert and delete action for this storage place.",
+                 type=bool,
+                 optional=True,
+                 default=False,
+             ),
+         ),
+     )
+
+     def __init__(self, *args, **kw):
+         logger.debug("Abstract storage init %s", self.__class__)
+         super().__init__(*args, **kw)
+         self._history = History(tag=self.tag)
+
+     @property
+     def tag(self):
+         """The identifier of the storage place."""
+         raise NotImplementedError()
+
+     @property
+     def realkind(self):
+         return "storage"
+
+     def _str_more(self):
+         return "tag={:s}".format(self.tag)
+
+     @property
+     def context(self):
+         """Shortcut to the active context object."""
+         return sessions.get().context
+
+     @property
+     def session(self):
+         return sessions.current()
+
+     @property
+     def sh(self):
+         """Shortcut to the active System object."""
+         return sessions.system()
+
+     @property
+     def history(self):
+         """The History object that will be used by this storage place.
+
+         :note: History objects are associated with the self.tag identifier, i.e.
+             all Storage objects with the same tag will use the same History
+             object.
+         """
+         return self._history
+
+     def addrecord(self, action, item, **infos):
+         """Push a new record to the storage place log/history."""
+         if self.record:
+             self.history.append(action, item, infos)
+
+     def flush(self, dumpfile=None):
+         """Flush actual history to the specified ``dumpfile`` if record is on.
+
+         :note: May raise the :class:`NotImplementedError` exception.
+         """
+         raise NotImplementedError()
+
+     def _findout_record_infos(self, kwargs):
+         return dict(info=kwargs.get("info", None))
+
+     def allow_reads(self, item):  # @UnusedVariable
+         """
+         This method can be used to determine whether or not the present object
+         supports reads for **item**.
+
+         :note: This is different from **check** since **item**'s existence is
+             not checked. It just tells if reads to **item** are supported...
+         """
+         return True
+
+     def allow_writes(self, item):  # @UnusedVariable
+         """
+         This method can be used to determine whether or not the present object
+         supports writes for **item**.
+
+         :note: This is different from **check** since **item**'s existence is
+             not checked. It just tells if writes to **item** are supported...
+         """
+         return True
+
+     def fullpath(self, item, **kwargs):
+         """Return the path/URI to the **item**'s storage location."""
+         # Currently no recording is performed for the check action
+         (rc, _) = self._actual_fullpath(item, **kwargs)
+         return rc
+
+     def prestageinfo(self, item, **kwargs):
+         """Return the prestage infos for an **item** in the current storage place."""
+         # Currently no recording is performed for the check action
+         (rc, _) = self._actual_prestageinfo(item, **kwargs)
+         return rc
+
+     def check(self, item, **kwargs):
+         """Check/Stat an **item** from the current storage place."""
+         # Currently no recording is performed for the check action
+         (rc, _) = self._actual_check(item, **kwargs)
+         return rc
+
+     def list(self, item, **kwargs):
+         """List all data resources available in the **item** directory."""
+         # Currently no recording is performed for the check action
+         (rc, _) = self._actual_list(item, **kwargs)
+         return rc
+
+     @enforce_readonly
+     @do_recording("INSERT")
+     def insert(self, item, local, **kwargs):
+         """Insert an **item** in the current storage place.
+
+         :note: **local** may be a path to a file or any kind of file-like object.
+         """
+         return self._actual_insert(item, local, **kwargs)
+
+     @do_recording("RETRIEVE")
+     def retrieve(self, item, local, **kwargs):
+         """Retrieve an **item** from the current storage place.
+
+         :note: **local** may be a path to a file or any kind of file-like object.
+         """
+         return self._actual_retrieve(item, local, **kwargs)
+
+     def earlyretrieve(self, item, local, **kwargs):
+         """Trigger a delayed retrieve of **item** from the current storage place.
+
+         :note: **local** may be a path to a file or any kind of file-like object.
+         """
+         return self._actual_earlyretrieve(item, local, **kwargs)
+
+     def _actual_earlyretrieve(self, item, local, **kwargs):  # @UnusedVariable
+         """No earlyretrieve implemented by default."""
+         return None
+
+     def finaliseretrieve(self, retrieve_id, item, local, **kwargs):
+         """Finalise a delayed retrieve from the current storage place.
+
+         :note: **local** may be a path to a file or any kind of file-like object.
+         """
+         rc, idict = self._actual_finaliseretrieve(
+             retrieve_id, item, local, **kwargs
+         )
+         if rc is not None:
+             infos = self._findout_record_infos(kwargs)
+             infos.update(idict)
+             self.addrecord("RETRIEVE", item, status=rc, **infos)
+         return rc
+
+     def _actual_finaliseretrieve(
+         self, retrieve_id, item, local, **kwargs
+     ):  # @UnusedVariable
+         """No delayedretrieve implemented by default."""
+         return None, dict()
+
+     @enforce_readonly
+     @do_recording("DELETE")
+     def delete(self, item, **kwargs):
+         """Delete an **item** from the current storage place."""
+         return self._actual_delete(item, **kwargs)
+
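The "Tips for developers" note above translates into subclasses such as the following minimal, in-memory sketch (illustrative only, not part of the package; a real subclass would also declare its footprint and the remaining _actual_* methods):

    class DictStorage(Storage):
        """Toy in-memory storage demonstrating the (returncode, infos) contract."""

        def __init__(self, *args, **kw):
            super().__init__(*args, **kw)
            self._data = dict()

        @property
        def tag(self):
            return "memory_{:s}".format(self.storage)

        def _actual_check(self, item, **kwargs):
            return item in self._data, dict()

        def _actual_insert(self, item, local, **kwargs):
            self._data[item] = local
            return True, dict(fmt=kwargs.get("fmt", "foo"))

        def _actual_retrieve(self, item, local, **kwargs):
            return self._data.get(item) is not None, dict()

        def _actual_delete(self, item, **kwargs):
            return self._data.pop(item, None) is not None, dict()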
+
+ # Defining the two main flavours of storage places
+ # -----------------------------------------------
+
+
+ class Cache(Storage):
+     """Root class for any :class:`Cache` subclasses."""
+
+     _collector = ("cache",)
+     _footprint = dict(
+         info="Default cache description",
+         attr=dict(
+             entry=dict(
+                 optional=False,
+                 type=str,
+                 info="The absolute path to the cache space",
+             ),
+             # TODO is 'storage' used in any way?
+             storage=dict(
+                 optional=True,
+                 default="localhost",
+             ),
+             rtouch=dict(
+                 info="Perform the recursive touch command on the directory structure.",
+                 type=bool,
+                 optional=True,
+                 default=False,
+             ),
+             rtouchskip=dict(
+                 info="Do not 'touch' the first **rtouchskip** directories.",
+                 type=int,
+                 optional=True,
+                 default=0,
+             ),
+             rtouchdelay=dict(
+                 info=(
+                     "Do not perform a touch if it has already been done in "
+                     + "the last X seconds."
+                 ),
+                 type=float,
+                 optional=True,
+                 default=600.0,  # 10 minutes
+             ),
+         ),
+     )
+
+     def __init__(self, *kargs, **kwargs):
+         super().__init__(*kargs, **kwargs)
+         self._touch_tracker = dict()
+
+     @property
+     def realkind(self):
+         return "cache"
+
+     @property
+     def tag(self):
+         """The identifier of this cache place."""
+         return "{:s}_{:s}".format(self.realkind, self.entry)
+
+     def _formatted_path(self, subpath, **kwargs):  # @UnusedVariable
+         return self.sh.path.join(self.entry, subpath.lstrip("/"))
+
+     def catalog(self):
+         """List all files present in this cache.
+
+         :note: It might be quite slow...
+         """
+         entry = self.sh.path.expanduser(self.entry)
+         files = self.sh.ffind(entry)
+         return [f[len(entry) :] for f in files]
+
+     def _xtouch(self, path):
+         """
+         Perform a touch operation only if the last one, on the same path, was
+         less than `self.rtouchdelay` seconds ago.
+         """
+         ts = time.time()
+         ts_delay = ts - self._touch_tracker.get(path, 0)
+         if ts_delay > self.rtouchdelay:
+             logger.debug("Touching: %s (delay was %.2f)", path, ts_delay)
+             self.sh.touch(path)
+             self._touch_tracker[path] = ts
+         else:
+             logger.debug("Skipping touch: %s (delay was %.2f)", path, ts_delay)
+
+     def _recursive_touch(self, rc, item, writing=False):
+         """Make recursive touches on parent directories.
+
+         It might be useful for cleaning scripts.
+         """
+         if self.rtouch and (not self.readonly) and rc:
+             items = item.lstrip("/").split("/")
+             items = items[:-1]
+             if writing:
+                 # It's useless to touch the rightmost directory
+                 items = items[:-1] if len(items) > 1 else []
+             for index in range(len(items), self.rtouchskip, -1):
+                 self._xtouch(
+                     self._formatted_path(self.sh.path.join(*items[:index]))
+                 )
+
+     def _actual_fullpath(self, item, **kwargs):
+         """Return the path/URI to the **item**'s storage location."""
+         return self._formatted_path(item, **kwargs), dict()
+
+     def _actual_prestageinfo(self, item, **kwargs):
+         """Return pre-staging information."""
+         return dict(
+             strategy="std", location=self.fullpath(item, **kwargs)
+         ), dict()
+
+     def _actual_check(self, item, **kwargs):
+         """Check/Stat an **item** from the current storage place."""
+         path = self._formatted_path(item, **kwargs)
+         if path is None:
+             return None, dict()
+         try:
+             st = self.sh.stat(path)
+         except OSError:
+             st = None
+         return st, dict()
+
+     def _actual_list(self, item, **kwargs):
+         """List all data resources available in the **item** directory."""
+         path = self.fullpath(item, **kwargs)
+         if path is not None and self.sh.path.exists(path):
+             if self.sh.path.isdir(path):
+                 return self.sh.listdir(path), dict()
+             else:
+                 return True, dict()
+         else:
+             return None, dict()
+
+     def _actual_insert(self, item, local, **kwargs):
+         """Insert an **item** in the current storage place."""
+         # Get the relevant options
+         intent = kwargs.get("intent", "in")
+         fmt = kwargs.get("fmt", "foo")
+         # Insert the element
+         tpath = self._formatted_path(item)
+         if not self.sh.path.exists(self.entry):
+             self.sh.mkdir(self.entry)
+         if tpath is not None:
+             rc = self.sh.cp(
+                 local,
+                 tpath,
+                 intent=intent,
+                 fmt=fmt,
+                 smartcp_threshold=HARDLINK_THRESHOLD,
+             )
+         else:
+             logger.warning("No target location for < %s >", item)
+             rc = False
+         self._recursive_touch(rc, item, writing=True)
+         return rc, dict(intent=intent, fmt=fmt)
+
+     def _actual_retrieve(self, item, local, **kwargs):
+         """Retrieve an **item** from the current storage place."""
+         # Get the relevant options
+         intent = kwargs.get("intent", "in")
+         fmt = kwargs.get("fmt", "foo")
+         silent = kwargs.get("silent", False)
+         dirextract = kwargs.get("dirextract", False)
+         tarextract = kwargs.get("tarextract", False)
+         uniquelevel_ignore = kwargs.get("uniquelevel_ignore", True)
+         source = self._formatted_path(item)
+         if source is not None:
+             # If auto_dirextract, copy recursively each file contained in source
+             if (
+                 dirextract
+                 and self.sh.path.isdir(source)
+                 and self.sh.is_tarname(local)
+             ):
+                 rc = True
+                 destdir = self.sh.path.dirname(self.sh.path.realpath(local))
+                 logger.info("Automatic directory extract to: %s", destdir)
+                 for subpath in self.sh.glob(source + "/*"):
+                     rc = rc and self.sh.cp(
+                         subpath,
+                         self.sh.path.join(
+                             destdir, self.sh.path.basename(subpath)
+                         ),
+                         intent=intent,
+                         fmt=fmt,
+                         smartcp_threshold=HARDLINK_THRESHOLD,
+                     )
+                 # For the insitu feature to work...
+                 rc = rc and self.sh.touch(local)
+             # The usual case: just copy source
+             else:
+                 rc = self.sh.cp(
+                     source,
+                     local,
+                     intent=intent,
+                     fmt=fmt,
+                     silent=silent,
+                     smartcp_threshold=HARDLINK_THRESHOLD,
+                 )
+                 # If auto_tarextract, a potential tar file is extracted
+                 if (
+                     rc
+                     and tarextract
+                     and not self.sh.path.isdir(local)
+                     and self.sh.is_tarname(local)
+                     and self.sh.is_tarfile(local)
+                 ):
+                     destdir = self.sh.path.dirname(
+                         self.sh.path.realpath(local)
+                     )
+                     logger.info("Automatic Tar extract to: %s", destdir)
+                     rc = rc and self.sh.smartuntar(
+                         local, destdir, uniquelevel_ignore=uniquelevel_ignore
+                     )
+         else:
+             getattr(logger, "info" if silent else "warning")(
+                 "No readable source for < %s >", item
+             )
+             rc = False
+         self._recursive_touch(rc, item)
+         return rc, dict(intent=intent, fmt=fmt)
+
+     def _actual_delete(self, item, **kwargs):
+         """Delete an **item** from the current storage place."""
+         # Get the relevant options
+         fmt = kwargs.get("fmt", "foo")
+         # Delete the element
+         tpath = self._formatted_path(item)
+         if tpath is not None:
+             rc = self.sh.remove(tpath, fmt=fmt)
+         else:
+             logger.warning("No target location for < %s >", item)
+             rc = False
+         return rc, dict(fmt=fmt)
+
+     def flush(self, dumpfile=None):
+         """Flush actual history to the specified ``dumpfile`` if record is on."""
+         if dumpfile is None:
+             logfile = ".".join(
+                 (
+                     "HISTORY",
+                     datetime.now().strftime("%Y%m%d%H%M%S.%f"),
+                     "P{:06d}".format(self.sh.getpid()),
+                     self.sh.getlogname(),
+                 )
+             )
+             dumpfile = self.sh.path.join(self.entry, ".history", logfile)
+         if self.record:
+             self.sh.pickle_dump(self.history, dumpfile)
+
+
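For orientation, the _formatted_path logic above simply anchors every item below the cache entry. A standalone paraphrase (the entry and item names are made up):

    import os.path

    entry = "/scratch/work/mtool/cache"                  # hypothetical cache root
    item = "/vortex/arome/3dvarfr/FORECAST/listing.01"   # hypothetical item name
    print(os.path.join(entry, item.lstrip("/")))
    # -> /scratch/work/mtool/cache/vortex/arome/3dvarfr/FORECAST/listing.01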
+ class AbstractArchive(Storage):
+     """The default class to handle storage to some kind of Archive."""
+
+     _abstract = True
+     _collector = ("archive",)
+     _footprint = dict(
+         info="Default archive description",
+         attr=dict(
+             tube=dict(
+                 info="How to communicate with the archive?",
+             ),
+         ),
+     )
+
+     @property
+     def tag(self):
+         """The identifier of this archive place."""
+         return "{:s}_{:s}".format(self.realkind, self.storage)
+
+     @property
+     def realkind(self):
+         return "archive"
+
+     def _formatted_path(self, rawpath, **kwargs):
+         root = kwargs.get("root", None)
+         if root is not None:
+             rawpath = self.sh.path.join(root, rawpath.lstrip("/"))
+         # Deal with compression
+         compressionpipeline = kwargs.get("compressionpipeline", None)
+         if compressionpipeline is not None:
+             rawpath += compressionpipeline.suffix
+         return self.sh.anyft_remote_rewrite(
+             rawpath, fmt=kwargs.get("fmt", "foo")
+         )
+
+     def _actual_proxy_method(self, pmethod):
+         """Create a proxy method based on the **pmethod** actual method."""
+
+         def actual_proxy(item, *kargs, **kwargs):
+             path = self._formatted_path(item, **kwargs)
+             if path is None:
+                 raise ValueError("The archive's path is void.")
+             return pmethod(path, *kargs, **kwargs)
+
+         actual_proxy.__name__ = pmethod.__name__
+         actual_proxy.__doc__ = pmethod.__doc__
+         return actual_proxy
+
+     def __getattr__(self, attr):
+         """Provide proxy methods for _actual_* methods."""
+         methods = r"fullpath|prestageinfo|check|list|insert|retrieve|delete"
+         mattr = re.match(r"_actual_(?P<action>" + methods + r")", attr)
+         if mattr:
+             pmethod = getattr(
+                 self, "_{:s}{:s}".format(self.tube, mattr.group("action"))
+             )
+             return self._actual_proxy_method(pmethod)
+         else:
+             raise AttributeError(
+                 "The {:s} attribute was not found in this object".format(attr)
+             )
+
+     def _actual_earlyretrieve(self, item, local, **kwargs):
+         """Proxy to the appropriate tube dependent earlyretrieve method (if available)."""
+         pmethod = getattr(
+             self, "_{:s}{:s}".format(self.tube, "earlyretrieve"), None
+         )
+         if pmethod:
+             return self._actual_proxy_method(pmethod)(item, local, **kwargs)
+         else:
+             return None
+
+     def _actual_finaliseretrieve(self, retrieve_id, item, local, **kwargs):
+         """Proxy to the appropriate tube dependent finaliseretrieve method (if available)."""
+         pmethod = getattr(
+             self, "_{:s}{:s}".format(self.tube, "finaliseretrieve"), None
+         )
+         if pmethod:
+             return self._actual_proxy_method(pmethod)(
+                 item, local, retrieve_id, **kwargs
+             )
+         else:
+             return None, dict()
+
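Thanks to the __getattr__ dispatch above, supporting a new communication "tube" only requires a subclass whose footprint accepts the new tube value and which provides the matching _<tube><action> methods; _actual_retrieve then resolves to _scpretrieve (for instance) at call time. A hypothetical sketch, not part of the package:

    class ScpArchive(AbstractArchive):
        """Hypothetical archive flavour for an imaginary "scp" tube."""

        _footprint = dict(attr=dict(tube=dict(values=["scp"])))

        def _scpcheck(self, item, **kwargs):
            # A real implementation would stat the remote path over SSH.
            return None, dict()

        def _scpretrieve(self, item, local, **kwargs):
            # A real implementation would copy the remote item into "local".
            return False, dict(fmt=kwargs.get("fmt", "foo"))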
+
+ class Archive(AbstractArchive):
+     """The default class to handle storage to a remote location."""
+
+     _footprint = dict(
+         info="Default archive description",
+         attr=dict(
+             tube=dict(
+                 values=["ftp"],
+             ),
+         ),
+     )
+
+     def __init__(self, *kargs, **kwargs):
+         super().__init__(*kargs, **kwargs)
+         self.default_usejeeves = config.from_config(
+             section="storage",
+             key="usejeeves",
+         )
+
+     @property
+     def _ftp_hostinfos(self):
+         """Return the FTP hostname and port number."""
+         s_storage = self.storage.split(":", 1)
+         hostname = s_storage[0]
+         port = None
+         if len(s_storage) > 1:
+             try:
+                 port = int(s_storage[1])
+             except ValueError:
+                 logger.error(
+                     "Invalid port number < %s >. Ignoring it", s_storage[1]
+                 )
+         return hostname, port
+
+     def _ftp_client(self, logname=None, delayed=False):
+         """Return a FTP client object."""
+         hostname, port = self._ftp_hostinfos
+         return self.sh.ftp(
+             hostname, logname=logname, delayed=delayed, port=port
+         )
+
+     def _ftpfullpath(self, item, **kwargs):
+         """Actual _fullpath using ftp."""
+         username = kwargs.get("username", None)
+         rc = None
+         ftp = self._ftp_client(logname=username, delayed=True)
+         if ftp:
+             try:
+                 rc = ftp.netpath(item)
+             finally:
+                 ftp.close()
+         return rc, dict()
+
+     def _ftpprestageinfo(self, item, **kwargs):
+         """Actual _prestageinfo using ftp."""
+         username = kwargs.get("username", None)
+         if username is None:
+             ftp = self._ftp_client(logname=username, delayed=True)
+             if ftp:
+                 try:
+                     username = ftp.logname
+                 finally:
+                     ftp.close()
+         baseinfo = dict(
+             storage=self.storage,
+             logname=username,
+             location=item,
+         )
+         return baseinfo, dict()
+
+     def _ftpcheck(self, item, **kwargs):
+         """Actual _check using ftp."""
+         rc = None
+         ftp = self._ftp_client(logname=kwargs.get("username", None))
+         if ftp:
+             try:
+                 rc = ftp.size(item)
+             except (ValueError, TypeError):
+                 pass
+             except ftplib.all_errors:
+                 pass
+             finally:
+                 ftp.close()
+         return rc, dict()
+
+     def _ftplist(self, item, **kwargs):
+         """Actual _list using ftp."""
+         ftp = self._ftp_client(logname=kwargs.get("username", None))
+         rc = None
+         if ftp:
+             try:
+                 # Is this a directory?
+                 rc = ftp.cd(item)
+             except ftplib.all_errors:
+                 # Apparently not...
+                 try:
+                     # Is it a file?
+                     if ftp.size(item) is not None:
+                         rc = True
+                 except (ValueError, TypeError):
+                     pass
+                 except ftplib.all_errors:
+                     pass
+             else:
+                 # Content of the directory...
+                 if rc:
+                     rc = ftp.nlst(".")
+             finally:
+                 ftp.close()
+         return rc, dict()
+
+     def _ftpretrieve(self, item, local, **kwargs):
+         """Actual _retrieve using ftp."""
+         logger.info(
+             "ftpget on ftp://%s/%s (to: %s)", self.storage, item, local
+         )
+         extras = dict(
+             fmt=kwargs.get("fmt", "foo"),
+             cpipeline=kwargs.get("compressionpipeline", None),
+         )
+         hostname, port = self._ftp_hostinfos
+         if port is not None:
+             extras["port"] = port
+         rc = self.sh.smartftget(
+             item,
+             local,
+             # Ftp control
+             hostname=hostname,
+             logname=kwargs.get("username", None),
+             **extras,
+         )
+         return rc, extras
+
+     def _ftpearlyretrieve(self, item, local, **kwargs):
+         """
+         If FtServ/ftraw is used, trigger a delayed action in order to fetch
+         several files at once.
+         """
+         cpipeline = kwargs.get("compressionpipeline", None)
+         if self.sh.rawftget_worthy(item, local, cpipeline):
+             return self.context.delayedactions_hub.register(
+                 (item, kwargs.get("fmt", "foo")),
+                 kind="archive",
+                 storage=self.storage,
+                 goal="get",
+                 tube="ftp",
+                 raw=True,
+                 logname=kwargs.get("username", None),
+             )
+         else:
+             return None
+
+     def _ftpfinaliseretrieve(
+         self, item, local, retrieve_id, **kwargs
+     ):  # @UnusedVariable
+         """
+         Get the resource given the **retrieve_id** identifier returned by the
+         :meth:`_ftpearlyretrieve` method.
+         """
+         extras = dict(
+             fmt=kwargs.get("fmt", "foo"),
+         )
+         d_action = self.context.delayedactions_hub.retrieve(
+             retrieve_id, bareobject=True
+         )
+         if d_action.status == d_action_status.done:
+             if self.sh.filecocoon(local):
+                 rc = self.sh.mv(d_action.result, local, **extras)
+             else:
+                 raise OSError("Could not cocoon: {!s}".format(local))
+         elif d_action.status == d_action_status.failed:
+             logger.info(
+                 "The earlyretrieve failed (retrieve_id=%s)", retrieve_id
+             )
+             rc = False
+         else:
+             rc = None
+         return rc, extras
+
+     def _ftpinsert(self, item, local, **kwargs):
+         """Actual _insert using ftp."""
+         usejeeves = kwargs.get("usejeeves", None)
+         if usejeeves is None:
+             usejeeves = self.default_usejeeves
+         hostname, port = self._ftp_hostinfos
+         if not usejeeves:
+             logger.info(
+                 "ftpput to ftp://%s/%s (from: %s)", self.storage, item, local
+             )
+             extras = dict(
+                 fmt=kwargs.get("fmt", "foo"),
+                 cpipeline=kwargs.get("compressionpipeline", None),
+             )
+             if port is not None:
+                 extras["port"] = port
+             rc = self.sh.smartftput(
+                 local,
+                 item,
+                 # Ftp control
+                 hostname=hostname,
+                 logname=kwargs.get("username", None),
+                 sync=kwargs.get("enforcesync", False),
+                 **extras,
+             )
+         else:
+             logger.info(
+                 "delayed ftpput to ftp://%s/%s (from: %s)",
+                 self.storage,
+                 item,
+                 local,
+             )
+             tempo = footprints.proxy.service(
+                 kind="hiddencache", asfmt=kwargs.get("fmt")
+             )
+             compressionpipeline = kwargs.get("compressionpipeline", "")
+             if compressionpipeline:
+                 compressionpipeline = compressionpipeline.description_string
+             extras = dict(
+                 fmt=kwargs.get("fmt", "foo"), cpipeline=compressionpipeline
+             )
+             if port is not None:
+                 extras["port"] = port
+
+             rc = ad.jeeves(
+                 hostname=hostname,
+                 # Explicitly resolve the logname (because the jeeves FTP client
+                 # is not running with the same glove, i.e. the Jeeves ftuser
+                 # configuration may be different).
+                 logname=self.sh.fix_ftuser(
+                     hostname, kwargs.get("username", None)
+                 ),
+                 todo="ftput",
+                 rhandler=kwargs.get("info", None),
+                 source=tempo(local),
+                 destination=item,
+                 original=self.sh.path.abspath(local),
+                 **extras,
+             )
+         return rc, extras
+
+     def _ftpdelete(self, item, **kwargs):
+         """Actual _delete using ftp."""
+         rc = None
+         ftp = self._ftp_client(logname=kwargs.get("username", None))
+         if ftp:
+             if self._ftpcheck(item, **kwargs)[0]:
+                 logger.info("ftpdelete on ftp://%s/%s", self.storage, item)
+                 rc = ftp.delete(item)
+                 ftp.close()
+             else:
+                 logger.error(
+                     "Try to remove a non-existing resource <%s>", item
+                 )
+         return rc, dict()
+
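The _ftpearlyretrieve/_ftpfinaliseretrieve pair above is what backs the two-phase retrieval API defined on :class:`Storage`. A hedged sketch of how a caller might exploit it to batch several fetches (the helper and variable names are made up):

    def fetch_later(place, requests):
        """Start delayed retrieves where possible, then finalise them (sketch only)."""
        pending = []
        for item, local in requests:
            rid = place.earlyretrieve(item, local)
            if rid is None:
                place.retrieve(item, local)          # immediate fallback
            else:
                pending.append((rid, item, local))
        for rid, item, local in pending:
            place.finaliseretrieve(rid, item, local)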
+
+ class AbstractLocalArchive(AbstractArchive):
+     """The default class to handle storage to the same host."""
+
+     _abstract = True
+     _footprint = dict(
+         info="Generic local archive description",
+         attr=dict(
+             tube=dict(
+                 values=[
+                     "inplace",
+                 ],
+             ),
+         ),
+     )
+
+     def _inplacefullpath(self, item, **kwargs):
+         """Actual _fullpath."""
+         return item, dict()
+
+     def _inplacecheck(self, item, **kwargs):
+         """Actual _check."""
+         try:
+             st = self.sh.stat(item)
+         except OSError:
+             rc = None
+         else:
+             rc = st.st_size
+         return rc, dict()
+
+     def _inplacelist(self, item, **kwargs):
+         """Actual _list."""
+         if self.sh.path.exists(item):
+             if self.sh.path.isdir(item):
+                 return self.sh.listdir(item), dict()
+             else:
+                 return True, dict()
+         else:
+             return None, dict()
+
+     def _inplaceretrieve(self, item, local, **kwargs):
+         """Actual _retrieve for an in-place (local) archive."""
+         logger.info("inplaceget on file:///%s (to: %s)", item, local)
+         fmt = kwargs.get("fmt", "foo")
+         cpipeline = kwargs.get("compressionpipeline", None)
+         if cpipeline:
+             rc = cpipeline.file2uncompress(item, local)
+         else:
+             # Do not use fmt=... on purpose (otherwise "forceunpack" may be called twice)
+             rc = self.sh.cp(item, local, intent="in")
+         rc = rc and self.sh.forceunpack(local, fmt=fmt)
+         return rc, dict(fmt=fmt, cpipeline=cpipeline)
+
+     @contextlib.contextmanager
+     def _inplaceinsert_pack(self, local, fmt):
+         """Temporarily pack **local** (according to **fmt**) before it is archived."""
+         local_packed = self.sh.forcepack(local, fmt=fmt)
+         if local_packed != local:
+             try:
+                 yield local_packed
+             finally:
+                 self.sh.rm(local_packed, fmt=fmt)
+         else:
+             yield local
+
+     def _inplaceinsert(self, item, local, **kwargs):
+         """Actual _insert for an in-place (local) archive."""
+         logger.info("inplaceput to file:///%s (from: %s)", item, local)
+         cpipeline = kwargs.get("compressionpipeline", None)
+         fmt = kwargs.get("fmt", "foo")
+         with self._inplaceinsert_pack(local, fmt) as local_packed:
+             if cpipeline:
+                 rc = cpipeline.compress2file(local_packed, item)
+             else:
+                 # Do not use fmt=... on purpose (otherwise "forcepack" may be called twice)
+                 rc = self.sh.cp(local_packed, item, intent="in")
+         return rc, dict(fmt=fmt, cpipeline=cpipeline)
+
+     def _inplacedelete(self, item, **kwargs):
+         """Actual _delete for an in-place (local) archive."""
+         fmt = kwargs.get("fmt", "foo")
+         rc = None
+         if self._inplacecheck(item, **kwargs)[0]:
+             rc = self.sh.rm(item, fmt=fmt)
+         return rc, dict(fmt=fmt)
+
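In this module, the "compressionpipeline" objects are used through a small surface: _formatted_path appends their suffix, the in-place methods call compress2file()/file2uncompress(), the delayed FTP put reads description_string, and smartftget/smartftput receive the object as cpipeline. A purely illustrative stand-in covering that small surface (not part of the package):

    import gzip
    import shutil

    class ToyGzipPipeline:
        """Illustrative stand-in for a compression pipeline object."""

        suffix = ".gz"
        description_string = "gzip"

        def compress2file(self, source, destination):
            with open(source, "rb") as fin, gzip.open(destination, "wb") as fout:
                shutil.copyfileobj(fin, fout)
            return True

        def file2uncompress(self, source, destination):
            with gzip.open(source, "rb") as fin, open(destination, "wb") as fout:
                shutil.copyfileobj(fin, fout)
            return True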
966
+ class LocalArchive(AbstractLocalArchive):
967
+ """The default class to handle storage to the same host."""
968
+
969
+ _footprint = dict(
970
+ info="Default local archive description",
971
+ attr=dict(
972
+ storage=dict(
973
+ values=[
974
+ "localhost",
975
+ ],
976
+ ),
977
+ auto_self_expand=dict(
978
+ info=(
979
+ "Automatically expand the current user home if "
980
+ + "a relative path is given (should always be True "
981
+ + "except during unit-testing)"
982
+ ),
983
+ type=bool,
984
+ default=True,
985
+ optional=True,
986
+ ),
987
+ ),
988
+ )
989
+
990
+ def _formatted_path(self, rawpath, **kwargs):
991
+ rawpath = self.sh.path.expanduser(rawpath)
992
+ if "~" in rawpath:
993
+ raise OSError('User expansion failed for "{:s}"'.format(rawpath))
994
+ if self.auto_self_expand and not self.sh.path.isabs(rawpath):
995
+ rawpath = self.sh.path.expanduser(self.sh.path.join("~", rawpath))
996
+ return super()._formatted_path(rawpath, **kwargs)
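For illustration, the expansion rule implemented by _formatted_path above can be paraphrased with the standard library: a relative item is re-rooted under the current user's home directory before being handed to the parent class (the item name below is made up).

    import os.path

    raw = "arch/vortex/demo/file.grib"     # hypothetical relative item
    expanded = os.path.expanduser(os.path.join("~", raw))
    print(expanded)                        # e.g. /home/<user>/arch/vortex/demo/file.grib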