vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/data/handlers.py
CHANGED
Added and updated lines in 2.1.0, by hunk:

```diff
@@ -4,8 +4,8 @@ objects are in charge of manipulating data between the working directory and
+import importlib
@@ -28,11 +28,12 @@ __all__ = []
+OBSERVER_TAG = "Resources-Handlers"
+
@@ -65,27 +66,31 @@ class IdCardAttrDumper(bronx.fancies.dump.TxtDumper):
+                self._indent(level, self.break_after_dict_end),
+            items = [
+                "{}{} = {}{},".format(
+                    self._indent(level + 1, self.break_before_dict_key),
+                    str(k),
+                    self._indent(level + 2, self.break_before_dict_value),
+                    self._recursive_dump(v, level + 1),
+                )
+                for k, v in sorted(fpobj.footprint_as_shallow_dict().items())
+            ]
+            return " ".join(items)
+            if hasattr(obj, "tag"):
+                parent_dump = super(
+                    bronx.fancies.dump.TxtDumper, self
+                ).dump_default(obj, level, nextline and self.break_default)
@@ -98,32 +103,32 @@ class Handler:
+        if "glove" in rd:
+            del rd["glove"]
+        self._resource = rd.pop("resource", None)
+        self._provider = rd.pop("provider", None)
+        self._container = rd.pop("container", None)
+        self._empty = rd.pop("empty", False)
+        self._observer = observer_board(obsname=kw.pop("observer", None))
+        self._mdcheck = self._options.pop("metadatacheck", False)
+        self._mddelta = self._options.pop("metadatadelta", dict())
+        self._ghost = self._options.pop("ghost", False)
+        hook_names = [x for x in self._options.keys() if x.startswith("hook_")]
+        self._delayhooks = self._options.pop("delayhooks", False)
+        self._history = History(tag="data-handler")
+        self._history.append(self.__class__.__name__, "init", True)
+        self._stage = ["load"]
+        self._observer.notify_new(self, dict(stage="load"))
+        logger.debug("New resource handler %s", self.__dict__)
@@ -140,7 +145,9 @@ class Handler:
+            raise ValueError(
+                "This value is not a plain Resource <{!s}>".format(value)
+            )
@@ -156,7 +163,9 @@ class Handler:
+            raise ValueError(
+                "This value is not a plain Provider <{!s}>".format(value)
+            )
@@ -171,7 +180,9 @@ class Handler:
+            raise ValueError(
+                "This value is not a plain Container <{!s}>".format(value)
+            )
@@ -206,11 +217,11 @@ class Handler:
+            rkind = getattr(self.resource, "kind", None)
+            rfile = getattr(self.container, "filename", None)
+            return ("incomplete",)
@@ -228,7 +239,7 @@ class Handler:
+        if newstage in ("get",):
@@ -242,15 +253,25 @@ class Handler:
+        self._observer.notify_upd(
+            self,
+            dict(
+                clear=True,
+            ),
+        )
+        self._observer.notify_upd(
+            self,
+            dict(
+                oldhash=oldhash,
+            ),
+        )
+        return self.stage.startswith("expect")
@@ -259,19 +280,27 @@ class Handler:
+            self.container.write("")
+            if self.container.filled or self.stage == "put":
+                    self._contents = self.resource.contents_handler(
+                        datafmt=self.container.actualfmt
+                    )
+                logger.warning(
+                    "Contents requested on an empty container [%s]",
+                    self.container,
+                )
+            logger.warning(
+                "Contents requested for an uncomplete handler [%s]",
+                self.container,
+            )
@@ -316,10 +345,12 @@ class Handler:
+                    return "OOPS: {!s} (but fatal is False)".format(e)
+            logger.warning(
+                "Resource handler %s could not build location", self
+            )
@@ -327,39 +358,45 @@ class Handler:
+        tab = " " * indent
+        card = "\n".join(
+            (
+                "{0}Handler {1!r}",
+                "{0}{0}Complete : {2}",
+                "{0}{0}Options : {3}",
+                "{0}{0}Location : {4}",
+            )
+        ).format(tab, self, self.complete, self.options, self.location())
+            card += "\n{0}{0}Hooks : {1}".format(
+                tab, ",".join(list(self.hooks.keys()))
+            )
+        d = IdCardAttrDumper(tag="idcarddumper")
+        for subobj in ("resource", "provider", "container"):
+                thisdoc = "{0}{0}{1:s} {2!r}".format(
+                    tab, subobj.capitalize(), obj
+                )
+                thisdoc = "{0}{0}{1:s} undefined".format(
+                    tab, subobj.capitalize()
+                )
+        tab = " " * indent
+        print("{}{:02d}. {:s}".format(tab, nb, repr(self)))
+        print("{} Complete : {!s}".format(tab, self.complete))
+        for subobj in ("container", "provider", "resource"):
+                print("{} {:10s}: {!s}".format(tab, subobj.capitalize(), obj))
@@ -368,11 +405,11 @@ class Handler:
+            if key == "safeblock":
+                a_value = getattr(self.provider, "block")
+                a_value = re.sub(r"(member|fc)_?\d+/", "", a_value)
@@ -380,13 +417,17 @@ class Handler:
+                    raise AttributeError(
+                        "The {:s} attribute could not be found in {!r}".format(
+                            key, self
+                        )
+                    )
+            if hasattr(a_value, "footprint_export"):
+            elif hasattr(a_value, "export_dict"):
@@ -398,8 +439,8 @@ class Handler:
+            rhd["options"][k] = v
+        for subobj in ("resource", "provider", "container"):
@@ -419,11 +460,13 @@ class Handler:
+            stopts = {
+                k: v for k, v in self.options.items() if k.startswith("stor")
+            }
+                scheme=self._uridata.pop("scheme"),
+                netloc=self._uridata.pop("netloc"),
+                **stopts,
@@ -434,46 +477,60 @@ class Handler:
+                logger.debug(
+                    "Check resource %s at %s from %s",
+                    self,
+                    self.lasturl,
+                    store,
+                )
+                rst = store.check(self.uridata, self.mkopts(extras))
+                    logger.info(
+                        "metadatacheck is on: we are forcing a real get()..."
+                    )
+                    mycontainer = footprints.proxy.container(
+                        shouldfly=True, actualfmt=self.container.actualfmt
+                    )
+                        tmp_options["obs_notify"] = False
+                            self.uridata, mycontainer.iotarget(), tmp_options
+                                logger.warning(
+                                    "The resource is expected... let's say that's fine."
+                                )
+                                contents = self.resource.contents_handler(
+                                    datafmt=mycontainer.actualfmt
+                                )
+                                rst = contents.metadata_check(
+                                    self.resource, delta=self._mddelta
+                                )
+                self.history.append(store.fullname(), "check", rst)
+                if rst and self.stage == "load":
+                    self._updstage("checked")
+                    self._updstage("void")
+                logger.error(
+                    "Could not find any store to check %s", self.lasturl
+                )
+            logger.error(
+                "Could not check a rh without defined resource and provider %s",
+                self,
+            )
@@ -482,16 +539,20 @@ class Handler:
+                logger.debug(
+                    "Locate resource %s at %s from %s",
+                    self,
+                    self.lasturl,
+                    store,
+                )
+                rst = store.locate(self.uridata, self.mkopts(extras))
+                self.history.append(store.fullname(), "locate", rst)
+                logger.error(
+                    "Could not find any store to locate %s", self.lasturl
+                )
+            logger.error("Could not locate an incomplete rh %s", self)
@@ -500,62 +561,73 @@ class Handler:
+                logger.debug(
+                    "Prestage resource %s at %s from %s",
+                    self,
+                    self.lasturl,
+                    store,
+                )
+                rst = store.prestage(self.uridata, self.mkopts(extras))
+                self.history.append(store.fullname(), "prestage", rst)
+                logger.error(
+                    "Could not find any store to prestage %s", self.lasturl
+                )
+            logger.error("Could not prestage an incomplete rh %s", self)
+            mytracker = extras.get("mytracker", None)
+                    logger.info(
+                        "Hook already executed <hook_name:%s>", hook_name
+                    )
+                    logger.info("Executing Hook <hook_name:%s>", hook_name)
+        self._generic_apply_hooks(action="get", **extras)
+        self._generic_apply_hooks(action="put", **extras)
+            rst = self.contents.metadata_check(
+                self.resource, delta=self._mddelta
+            )
+                logger.info(
+                    "We are now cleaning up the container and data contents."
+                )
+        self.history.append(store.fullname(), "get", rst)
+                self._updstage("expected")
+                logger.info(
+                    "Resource <%s> is expected", self.container.iotarget()
+                )
+                self._updstage("get")
@@ -563,7 +635,7 @@ class Handler:
+            self._updstage("void")
@@ -575,7 +647,9 @@ class Handler:
+            logger.debug(
+                "Get resource %s at %s from %s", self, self.lasturl, store
+            )
@@ -590,7 +664,7 @@ class Handler:
+            logger.error("Could not find any store to get %s", self.lasturl)
@@ -608,11 +682,18 @@ class Handler:
+            logger.error(
+                "The Resource handler was unable to create a store object (%s).",
+                str(e),
+            )
+            logger.debug(
+                "Early-Get resource %s at %s from %s",
+                self,
+                self.lasturl,
+                store,
+            )
@@ -622,11 +703,13 @@ class Handler:
+                logger.error(
+                    "The store's earlyget method did not return (%s): it should never append!",
+                    str(e),
+                )
+            logger.error("Could not find any store to get %s", self.lasturl)
@@ -636,66 +719,98 @@ class Handler:
+            if self.options.get(
+                "insitu", False
+            ):  # This a second pass (or third, forth, ...)
+                if self.container.exists() and cur_tracker.is_tracked_input(
+                    iotarget
+                ):
+                    if cur_tracker[iotarget].match_rh("get", self):
+                        if not (
+                            alternate
+                            and iotarget
+                            in [
+                                s.rh.container.iotarget()
+                                for s in cur_seq.effective_inputs()
+                            ]
+                        ):
+                            self._updstage("get", insitu=True)
+                                "The <%s> resource is already here and matches the RH description :-)",
+                                self.container.iotarget(),
+                            )
+                                for (
+                                    isec
+                                ) in self._cur_context.sequence.rinputs():
+                                    if (
+                                        isec.stage in ("get" or "expected")
+                                        and not isec.rh.container.is_virtual()
+                                        and isec.rh.container.localpath()
+                                        == lpath
+                                    ):
+                                    logger.info(
+                                        "Alternate is on and the local file exists."
+                                    )
+                                    logger.info(
+                                        "Alternate is on but the local file is not yet matched."
+                                    )
+                                    self._updstage("void", insitu=True)
+                                logger.info(
+                                    "Alternate is on. The local file exists. The container is virtual."
+                                )
+                            logger.info(
+                                "The resource is already here but doesn't match the RH description :-("
+                            )
+                            cur_tracker[iotarget].match_rh(
+                                "get", self, verbose=True
+                            )
+                            self._updstage("void", insitu=True)
+                    rst = callback(
+                        **extras
+                    )  # This might be an expected resource...
+                        logger.info(
+                            "The resource was successfully fetched :-)"
+                        )
+                    logger.info("Alternate <%s> exists", alternate)
+                        logger.warning(
+                            "The resource is already here: that should not happen at this stage !"
+                        )
+            logger.error("Could not get an incomplete rh %s", self)
@@ -752,8 +867,10 @@ class Handler:
+        self._latest_earlyget_opts["alternate"] = alternate
+        self._latest_earlyget_id = self._get_proxy(
+            self._actual_earlyget, alternate=alternate, **extras
+        )
@@ -767,8 +884,13 @@ class Handler:
+        if (
+            self._latest_earlyget_id is None
+            and self._latest_earlyget_opts is None
+        ):
+            raise HandlerError(
+                "earlyget was not called yet. Calling finaliseget is not Allowed !"
+            )
@@ -776,20 +898,28 @@ class Handler:
+                e_opts["insitu"] = False
+                alternate = self._latest_earlyget_opts.get("alternate", False)
+                    logger.info("Alternate <%s> exists", alternate)
+                        logger.debug(
+                            "Finalise-Get resource %s at %s from %s",
+                            self,
+                            self.lasturl,
+                            store,
+                        )
+                        st_options = self.mkopts(
+                            dict(rhandler=self.as_dict()),
+                            self._latest_earlyget_opts,
+                        )
@@ -799,17 +929,25 @@ class Handler:
+                            logger.warning(
+                                "Delayed get result was unclear ! Reverting to the usual get."
+                            )
+                            e_opts["insitu"] = False
+                            rst = self._postproc_get(
+                                store, rst, self._latest_earlyget_opts
+                            )
+                        logger.error(
+                            "Could not find any store to get %s", self.lasturl
+                        )
+                self._localpr_cache = (
+                    None  # To cache the promise dictionary
+                )
@@ -823,30 +961,44 @@ class Handler:
+            if self.options.get(
+                "insitu", False
+            ):  # This a second pass (or third, forth, ...)
+                if self.container.exists() and cur_tracker.is_tracked_input(
+                    iotarget
+                ):
+                    if cur_tracker[iotarget].match_rh("get", self):
+                        if not (
+                            alternate
+                            and iotarget
+                            in [
+                                s.rh.container.iotarget()
+                                for s in cur_seq.effective_inputs()
+                            ]
+                        ):
+                            self._updstage("get", insitu=True)
+                        if iotarget in [
+                            s.rh.container.iotarget()
+                            for s in cur_seq.effective_inputs()
+                        ]:
+                logger.error(
+                    "This method should not be called with insitu=False (rh %s)",
+                    self,
+                )
@@ -864,40 +1016,60 @@ class Handler:
+                logger.debug(
+                    "Put resource %s as io %s at store %s",
+                    self,
+                    iotarget,
+                    store,
+                )
+                if iotarget is not None and (
+                    self.container.exists() or self.provider.expected
+                ):
+                                self.apply_put_hooks(
+                                    mytracker=mytracker, **extras
+                                )
+                    extras_ext["urifilter"] = functools.partial(
+                        mytracker.redundant_uri, "put"
+                    )
+                    logger.debug(
+                        "Put resource %s at %s from %s",
+                        self,
+                        self.lasturl,
+                        store,
+                    )
+                        self.mkopts(dict(rhandler=self.as_dict()), extras_ext),
+                    self.history.append(store.fullname(), "put", rst)
+                    self._updstage("put")
+                    self.history.append(store.fullname(), "put", False)
+                    self._updstage("ghost")
+                    logger.error(
+                        "Could not find any source to put [%s]", iotarget
+                    )
+                logger.error(
+                    "Could not find any store to put [%s]", self.lasturl
+                )
+            logger.error("Could not put an incomplete rh [%s]", self)
@@ -906,46 +1078,70 @@ class Handler:
+                logger.debug(
+                    "Delete resource %s at %s from %s",
+                    self,
+                    self.lasturl,
+                    store,
+                )
+                    self.mkopts(dict(rhandler=self.as_dict()), extras),
+                self.history.append(store.fullname(), "delete", rst)
+                logger.error(
+                    "Could not find any store to delete %s", self.lasturl
+                )
+            logger.error(
+                "Could not delete a rh without defined resource and provider %s",
+                self,
+            )
+            logger.debug("Remove resource container %s", self.container)
+            self.history.append(self.container.actualpath(), "clear", rst)
+            stage_clear_mapping = dict(expected="checked", get="checked")
+    def mkgetpr(
+        self,
+        pr_getter=None,
+        tplfile=None,
+        py_exec=sys.executable,
+        py_opts="",
+    ):
+            tplfile = (
+                "sync-" + ("fetch" if self.is_expected() else "skip") + ".tpl"
+            )
+        with importlib.resources.path(
+            "vortex.data.sync_templates",
+            tplfile,
+        ) as tplpath:
+            tpl = config.load_template(tplpath)
+            pr_getter = self.container.localpath() + ".getpr"
+
+        with open(pr_getter, "w", encoding="utf-8") as fd:
+            fd.write(
+                tpl.substitute(
+                    python=py_exec,
+                    pyopts=py_opts,
+                    promise=self.container.localpath(),
+                )
+            )
@@ -953,7 +1149,9 @@ class Handler:
+                self._localpr_cache = self._cur_session.sh.json_load(
+                    self.container.localpath()
+                )
@@ -966,10 +1164,10 @@ class Handler:
+            itself = pr.get("itself")
+                remote = pr.get("locate").split(";")[0]
@@ -981,28 +1179,39 @@ class Handler:
+            itself = pr.get("itself")
+            logger.info(
+                "Waiting %d x %d s. for expected resource <%s>",
+                nbtries,
+                sleep,
+                local,
+            )
+                    logger.error("Could not wait anymore <%d>", nb)
+                        logger.critical(
+                            "Missing expected resource is fatal <%s>", local
+                        )
+                        raise HandlerError("Expected resource missing")
+                remote = pr.get("locate").split(";")[0]
+                    logger.info(
+                        "Keeping promise for remote resource <%s>", remote
+                    )
+                    logger.warning(
+                        "Empty promise for remote resource <%s>", remote
+                    )
+            logger.info("Resource <%s> not expected", local)
@@ -1013,9 +1222,9 @@ class Handler:
+            logger.warning("Try to save undefined contents %s", self)
+        return " ".join([str(x) for x in self.history.last])
```