vortex-nwp 2.0.0b1__py3-none-any.whl → 2.0.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +59 -45
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +940 -614
- vortex/algo/mpitools.py +802 -497
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +428 -225
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +544 -413
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +420 -271
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +210 -122
- vortex/nwp/algo/monitoring.py +132 -76
- vortex/nwp/algo/mpitools.py +321 -191
- vortex/nwp/algo/odbtools.py +617 -353
- vortex/nwp/algo/oopsroot.py +449 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +125 -59
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +311 -149
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +632 -298
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +402 -333
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1211 -631
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +377 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.0.0b2.dist-info/METADATA +66 -0
- vortex_nwp-2.0.0b2.dist-info/RECORD +142 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.0.0b2.dist-info}/top_level.txt +0 -0
vortex/data/handlers.py
CHANGED
Every hunk in this file follows the same pattern: string literals move from single quotes to double quotes, and long statements (logger calls, raise statements, format and dict expressions, call signatures) are re-wrapped over several lines with trailing commas. This is consistent with an automatic code formatter (Black-style) having been applied between 2.0.0b1 and 2.0.0b2; the pre-change lines are truncated in this rendering, so only the post-change side is fully visible. The only other visible edits are whitespace: a duplicate blank line between the module docstring and the imports is dropped, and a blank line is added inside the HandlerError class body. The affected regions of vortex/data/handlers.py are:

@@ -4,7 +4,6 @@ objects are in charge of manipulating data between the working directory and
@@ -28,11 +27,12 @@ __all__ = []
@@ -65,27 +65,31 @@ class IdCardAttrDumper(bronx.fancies.dump.TxtDumper):
@@ -98,32 +102,32 @@ class Handler:
@@ -140,7 +144,9 @@ class Handler:
@@ -156,7 +162,9 @@ class Handler:
@@ -171,7 +179,9 @@ class Handler:
@@ -206,11 +216,11 @@ class Handler:
@@ -228,7 +238,7 @@ class Handler:
@@ -242,15 +252,25 @@ class Handler:
@@ -259,19 +279,27 @@ class Handler:
@@ -316,10 +344,12 @@ class Handler:
@@ -327,39 +357,45 @@ class Handler:
@@ -368,11 +404,11 @@ class Handler:
@@ -380,13 +416,17 @@ class Handler:
@@ -398,8 +438,8 @@ class Handler:
@@ -419,11 +459,13 @@ class Handler:
@@ -434,46 +476,60 @@ class Handler:
@@ -482,16 +538,20 @@ class Handler:
@@ -500,62 +560,73 @@ class Handler:
@@ -563,7 +634,7 @@ class Handler:
@@ -575,7 +646,9 @@ class Handler:
@@ -590,7 +663,7 @@ class Handler:
@@ -608,11 +681,18 @@ class Handler:
@@ -622,11 +702,13 @@ class Handler:
@@ -636,66 +718,98 @@ class Handler:
@@ -752,8 +866,10 @@ class Handler:
@@ -767,8 +883,13 @@ class Handler:
@@ -776,20 +897,28 @@ class Handler:
@@ -799,17 +928,25 @@ class Handler:
@@ -823,30 +960,44 @@ class Handler:
@@ -864,40 +1015,60 @@ class Handler:
@@ -906,46 +1077,65 @@ class Handler:
@@ -953,7 +1143,9 @@ class Handler:
@@ -966,10 +1158,10 @@ class Handler:
@@ -981,28 +1173,39 @@ class Handler:
@@ -1013,9 +1216,9 @@ class Handler: