vortex-nwp 2.0.0b1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +75 -47
- vortex/algo/__init__.py +3 -2
- vortex/algo/components.py +944 -618
- vortex/algo/mpitools.py +802 -497
- vortex/algo/mpitools_templates/__init__.py +1 -0
- vortex/algo/serversynctools.py +34 -33
- vortex/config.py +19 -22
- vortex/data/__init__.py +9 -3
- vortex/data/abstractstores.py +593 -655
- vortex/data/containers.py +217 -162
- vortex/data/contents.py +65 -39
- vortex/data/executables.py +93 -102
- vortex/data/flow.py +40 -34
- vortex/data/geometries.py +228 -132
- vortex/data/handlers.py +436 -227
- vortex/data/outflow.py +15 -15
- vortex/data/providers.py +185 -163
- vortex/data/resources.py +48 -42
- vortex/data/stores.py +540 -417
- vortex/data/sync_templates/__init__.py +0 -0
- vortex/gloves.py +114 -87
- vortex/layout/__init__.py +1 -8
- vortex/layout/contexts.py +150 -84
- vortex/layout/dataflow.py +353 -202
- vortex/layout/monitor.py +264 -128
- vortex/nwp/__init__.py +5 -2
- vortex/nwp/algo/__init__.py +14 -5
- vortex/nwp/algo/assim.py +205 -151
- vortex/nwp/algo/clim.py +683 -517
- vortex/nwp/algo/coupling.py +447 -225
- vortex/nwp/algo/eda.py +437 -229
- vortex/nwp/algo/eps.py +403 -231
- vortex/nwp/algo/forecasts.py +416 -275
- vortex/nwp/algo/fpserver.py +683 -307
- vortex/nwp/algo/ifsnaming.py +205 -145
- vortex/nwp/algo/ifsroot.py +215 -122
- vortex/nwp/algo/monitoring.py +137 -76
- vortex/nwp/algo/mpitools.py +330 -190
- vortex/nwp/algo/odbtools.py +637 -353
- vortex/nwp/algo/oopsroot.py +454 -273
- vortex/nwp/algo/oopstests.py +90 -56
- vortex/nwp/algo/request.py +287 -206
- vortex/nwp/algo/stdpost.py +878 -522
- vortex/nwp/data/__init__.py +22 -4
- vortex/nwp/data/assim.py +125 -137
- vortex/nwp/data/boundaries.py +121 -68
- vortex/nwp/data/climfiles.py +193 -211
- vortex/nwp/data/configfiles.py +73 -69
- vortex/nwp/data/consts.py +426 -401
- vortex/nwp/data/ctpini.py +59 -43
- vortex/nwp/data/diagnostics.py +94 -66
- vortex/nwp/data/eda.py +50 -51
- vortex/nwp/data/eps.py +195 -146
- vortex/nwp/data/executables.py +440 -434
- vortex/nwp/data/fields.py +63 -48
- vortex/nwp/data/gridfiles.py +183 -111
- vortex/nwp/data/logs.py +250 -217
- vortex/nwp/data/modelstates.py +180 -151
- vortex/nwp/data/monitoring.py +72 -99
- vortex/nwp/data/namelists.py +254 -202
- vortex/nwp/data/obs.py +400 -308
- vortex/nwp/data/oopsexec.py +22 -20
- vortex/nwp/data/providers.py +90 -65
- vortex/nwp/data/query.py +71 -82
- vortex/nwp/data/stores.py +49 -36
- vortex/nwp/data/surfex.py +136 -137
- vortex/nwp/syntax/__init__.py +1 -1
- vortex/nwp/syntax/stdattrs.py +173 -111
- vortex/nwp/tools/__init__.py +2 -2
- vortex/nwp/tools/addons.py +22 -17
- vortex/nwp/tools/agt.py +24 -12
- vortex/nwp/tools/bdap.py +16 -5
- vortex/nwp/tools/bdcp.py +4 -1
- vortex/nwp/tools/bdm.py +3 -0
- vortex/nwp/tools/bdmp.py +14 -9
- vortex/nwp/tools/conftools.py +728 -378
- vortex/nwp/tools/drhook.py +12 -8
- vortex/nwp/tools/grib.py +65 -39
- vortex/nwp/tools/gribdiff.py +22 -17
- vortex/nwp/tools/ifstools.py +82 -42
- vortex/nwp/tools/igastuff.py +167 -143
- vortex/nwp/tools/mars.py +14 -2
- vortex/nwp/tools/odb.py +234 -125
- vortex/nwp/tools/partitioning.py +61 -37
- vortex/nwp/tools/satrad.py +27 -12
- vortex/nwp/util/async.py +83 -55
- vortex/nwp/util/beacon.py +10 -10
- vortex/nwp/util/diffpygram.py +174 -86
- vortex/nwp/util/ens.py +144 -63
- vortex/nwp/util/hooks.py +30 -19
- vortex/nwp/util/taskdeco.py +28 -24
- vortex/nwp/util/usepygram.py +278 -172
- vortex/nwp/util/usetnt.py +31 -17
- vortex/sessions.py +72 -39
- vortex/syntax/__init__.py +1 -1
- vortex/syntax/stdattrs.py +410 -171
- vortex/syntax/stddeco.py +31 -22
- vortex/toolbox.py +327 -192
- vortex/tools/__init__.py +11 -2
- vortex/tools/actions.py +110 -121
- vortex/tools/addons.py +111 -92
- vortex/tools/arm.py +42 -22
- vortex/tools/compression.py +72 -69
- vortex/tools/date.py +11 -4
- vortex/tools/delayedactions.py +242 -132
- vortex/tools/env.py +75 -47
- vortex/tools/folder.py +342 -171
- vortex/tools/grib.py +341 -162
- vortex/tools/lfi.py +423 -216
- vortex/tools/listings.py +109 -40
- vortex/tools/names.py +218 -156
- vortex/tools/net.py +655 -299
- vortex/tools/parallelism.py +93 -61
- vortex/tools/prestaging.py +55 -31
- vortex/tools/schedulers.py +172 -105
- vortex/tools/services.py +403 -334
- vortex/tools/storage.py +293 -358
- vortex/tools/surfex.py +24 -24
- vortex/tools/systems.py +1234 -643
- vortex/tools/targets.py +156 -100
- vortex/util/__init__.py +1 -1
- vortex/util/config.py +378 -327
- vortex/util/empty.py +2 -2
- vortex/util/helpers.py +56 -24
- vortex/util/introspection.py +18 -12
- vortex/util/iosponge.py +8 -4
- vortex/util/roles.py +4 -6
- vortex/util/storefunctions.py +39 -13
- vortex/util/structs.py +3 -3
- vortex/util/worker.py +29 -17
- vortex_nwp-2.1.0.dist-info/METADATA +67 -0
- vortex_nwp-2.1.0.dist-info/RECORD +144 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/WHEEL +1 -1
- vortex/layout/appconf.py +0 -109
- vortex/layout/jobs.py +0 -1276
- vortex/layout/nodes.py +0 -1424
- vortex/layout/subjobs.py +0 -464
- vortex_nwp-2.0.0b1.dist-info/METADATA +0 -50
- vortex_nwp-2.0.0b1.dist-info/RECORD +0 -146
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info/licenses}/LICENSE +0 -0
- {vortex_nwp-2.0.0b1.dist-info → vortex_nwp-2.1.0.dist-info}/top_level.txt +0 -0
vortex/tools/parallelism.py
CHANGED
@@ -34,13 +34,13 @@ class TaylorVortexWorker(taylorism.Worker):
 
     _abstract = True
     _footprint = dict(
-        attr
-            kind
-            taskdebug
-                info
-                type
-                default
-                optional
+        attr=dict(
+            kind=dict(),
+            taskdebug=dict(
+                info="Dump all stdout/stderr to a file (in real live !)",
+                type=bool,
+                default=False,
+                optional=True,
             ),
         )
     )
@@ -62,7 +62,9 @@ class TaylorVortexWorker(taylorism.Worker):
     def _task(self, **kwargs):
         """Should not be overridden anymore: see :meth:`vortex_task`."""
         self._vortex_shortcuts()
-        with ParallelSilencer(
+        with ParallelSilencer(
+            self.context, self.name, debug=self.taskdebug
+        ) as psi:
             rc = self.vortex_task(**kwargs)
             psi_rc = psi.export_result()
             return self._vortex_rc_wrapup(rc, psi_rc)
@@ -77,30 +79,29 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
 
     _abstract = True
     _footprint = dict(
-        attr
-            progname
+        attr=dict(
+            progname=dict(),
+            progargs=dict(
+                type=footprints.FPList,
+                default=footprints.FPList(),
+                optional=True,
             ),
-
-
-                default
-                optional
+            progtaskset=dict(
+                info="Topology/Method to set up the CPU affinity of the child task.",
+                default=None,
+                optional=True,
             ),
-
-                info
-
-
+            progtaskset_bsize=dict(
+                info="The number of threads used by one task",
+                type=int,
+                default=1,
+                optional=True,
             ),
-
-                info
-                type
-                default
-                optional
-            ),
-            progenvdelta = dict(
-                info = 'Any alteration to environment variables',
-                type = footprints.FPDict,
-                default = footprints.FPDict({}),
-                optional = True
+            progenvdelta=dict(
+                info="Any alteration to environment variables",
+                type=footprints.FPDict,
+                default=footprints.FPDict({}),
+                optional=True,
             ),
         )
     )
@@ -115,22 +116,37 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
         :param stdoutfile: Path to the file where the standard/error output will
             be saved.
         """
-        tmpio = open(stdoutfile,
+        tmpio = open(stdoutfile, "wb")
         try:
-            self.system.softlink(
+            self.system.softlink("/dev/null", "core")
         except FileExistsError:
             pass
         self.local_spawn_hook()
         self.system.default_target.spawn_hook(self.system)
         logger.info("The program stdout/err will be saved to %s", stdoutfile)
-        logger.info(
-
-
-
-
-
-
-
+        logger.info(
+            "Starting the following command: %s (taskset=%s, id=%d)",
+            " ".join(
+                [
+                    self.progname,
+                ]
+                + self.progargs
+            ),
+            str(self.progtaskset),
+            self.scheduler_ticket,
+        )
+        with self.system.env.delta_context(**self.progenvdelta):
+            self.system.spawn(
+                [
+                    self.progname,
+                ]
+                + self.progargs,
+                output=tmpio,
+                fatal=True,
+                taskset=self.progtaskset,
+                taskset_id=self.scheduler_ticket,
+                taskset_bsize=self.progtaskset_bsize,
+            )
 
     def delayed_error_local_spawn(self, stdoutfile, rcdict):
         """local_spawn wrapped in a try/except in order to trigger delayed exceptions."""
@@ -138,13 +154,16 @@ class VortexWorkerBlindRun(TaylorVortexWorker):
             self.local_spawn(stdoutfile)
         except ExecutionError as e:
             logger.error("The execution failed.")
-            rcdict[
+            rcdict["rc"] = e
         return rcdict
 
     def find_namelists(self, opts=None): # @UnusedVariable
         """Find any namelists candidates in actual context inputs."""
-        namcandidates = [
-
+        namcandidates = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelist")
+        ]
+        self.system.subtitle("Namelist candidates")
         for nam in namcandidates:
             nam.quickview()
 
@@ -158,9 +177,11 @@ class TeeLikeStringIO(io.StringIO):
         super().__init__()
         self._tees = set()
 
-    def record_teefile(self, filename, mode=
+    def record_teefile(self, filename, mode="w", line_buffering=True):
         """Add **filename** to the set of extra logfiles."""
-        self._tees.add(
+        self._tees.add(
+            open(filename, mode=mode, buffering=int(line_buffering))
+        )
 
     def discard_tees(self):
         """Dismiss all of the extra logfiles."""
@@ -174,7 +195,7 @@ class TeeLikeStringIO(io.StringIO):
             teeio.write(t)
         super().write(t)
 
-    def filedump(self, filename, mode=
+    def filedump(self, filename, mode="w"):
         """Dump all of the captured data to **filename**."""
         with open(filename, mode=mode) as fhdump:
             self.seek(0)
@@ -207,8 +228,9 @@ class ParallelSilencer:
         """
         self._ctx = context
         self._taskdebug = debug
-        self._debugfile =
-
+        self._debugfile = "{:s}_{:s}_stdeo.txt".format(
+            taskname, date.now().ymdhms
+        )
         self._ctx_r = None
         self._io_r = io.StringIO()
         # Other temporary stuff
@@ -240,7 +262,9 @@ class ParallelSilencer:
             r_logger.addHandler(self._stream_h)
             for a_handler in self._removed_h[r_logger]:
                 r_logger.removeHandler(a_handler)
-        for a_logger in [
+        for a_logger in [
+            logging.getLogger(x) for x in loggers.lognames | loggers.roots
+        ]:
             self._removed_h[a_logger] = list(a_logger.handlers)
             for a_handler in self._removed_h[a_logger]:
                 a_logger.removeHandler(a_handler)
@@ -254,8 +278,11 @@ class ParallelSilencer:
     def __exit__(self, exctype, excvalue, exctb): # @UnusedVariable
         """The end of a context."""
         self._stop_recording()
-        if (
-
+        if (
+            exctype is not None
+            and not self._taskdebug
+            and self._io_r is not None
+        ):
             # Emergency dump of the outputs (even with debug=False) !
             self._io_r.filedump(self._debugfile)
 
@@ -269,7 +296,9 @@ class ParallelSilencer:
             for a_handler in self._removed_h[r_logger]:
                 r_logger.addHandler(a_handler)
             r_logger.removeHandler(self._stream_h)
-        for a_logger in [
+        for a_logger in [
+            logging.getLogger(x) for x in loggers.roots | loggers.lognames
+        ]:
             for a_handler in self._removed_h.get(a_logger, ()):
                 a_logger.addHandler(a_handler)
         # flush
@@ -289,8 +318,9 @@ class ParallelSilencer:
         """
        self._stop_recording()
         self._io_r.seek(0)
-        return dict(
-
+        return dict(
+            context_record=self._ctx_r, stdoe_record=self._io_r.readlines()
+        )
 
 
 class ParallelResultParser:
@@ -319,18 +349,20 @@ class ParallelResultParser:
             raise res
         else:
             sys.stdout.flush()
-            logger.info(
+            logger.info("Parallel processing results for %s", res["name"])
             # Update the context
-            logger.info(
-            res[
+            logger.info("... Updating the current context ...")
+            res["report"]["context_record"].replay_in(self.context)
             # Display the stdout
-            if res[
-                logger.info(
-
+            if res["report"]["stdoe_record"]:
+                logger.info(
+                    "... Dump of the mixed standard/error output generated by the subprocess ..."
+                )
+                for l in res["report"]["stdoe_record"]:
                     sys.stdout.write(l)
-            logger.info("... That's all for all for %s ...", res[
+            logger.info("... That's all for all for %s ...", res["name"])
 
-        return res[
+        return res["report"].get("rc", True)
 
     def __call__(self, res):
         return self.slurp(res)
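
For orientation, the reformatted footprints above spell out the attributes a blind-run worker exposes (progname, progargs, progenvdelta, taskdebug, the taskset settings) and how local_spawn/delayed_error_local_spawn launch the external program. The following is a minimal, illustrative sketch of a concrete subclass built on that pattern; the class name, the "demo_blindrun" kind value, the output file name and the task body are hypothetical and not part of the package, only the attributes and methods visible in the diff are taken from the source.

    # Illustrative sketch only: names marked as hypothetical above are not in the wheel.
    from vortex.tools.parallelism import VortexWorkerBlindRun


    class DemoBlindRunWorker(VortexWorkerBlindRun):
        """Run an external program and tee its stdout/stderr (sketch)."""

        _footprint = dict(
            attr=dict(
                kind=dict(values=["demo_blindrun"]),
            ),
        )

        def vortex_task(self, **kwargs):
            # local_spawn() runs [self.progname] + self.progargs, applies
            # progenvdelta and the taskset settings, and saves the mixed
            # stdout/stderr to the given file (see the diff above).
            rcdict = dict(rc=True)
            return self.delayed_error_local_spawn("demo_stdeo.txt", rcdict)

The delayed_error_local_spawn variant is used here so that an ExecutionError ends up in the returned rc dictionary instead of aborting the worker immediately, matching the behaviour shown in the hunk at lines 138-160.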
vortex/tools/prestaging.py
CHANGED
@@ -20,7 +20,9 @@ __all__ = []
 logger = loggers.getLogger(__name__)
 
 #: Definition of a named tuple PrestagingPriorityTuple
-PrestagingPriorityTuple = namedtuple(
+PrestagingPriorityTuple = namedtuple(
+    "PrestagingPriorityTuple", ["urgent", "normal", "low"]
+)
 
 #: Predefined PrestagingPriorities values for urgent, normal and low
 prestaging_p = PrestagingPriorityTuple(urgent=99, normal=50, low=0)
@@ -36,23 +38,20 @@ class PrestagingTool(footprints.FootprintBase, Catalog):
     """Abstract class that deal with pre-staging for a given storage target."""
 
     _abstract = True
-    _collector = (
+    _collector = ("prestagingtool",)
     _footprint = dict(
-        info
-        attr
-            system
-
-
+        info="Abstract class that deal with pre-staging for a given storage target.",
+        attr=dict(
+            system=dict(info="The current system object", type=OSExtended),
+            issuerkind=dict(
+                info="The kind of store issuing the prestaging request"
             ),
-
-                info
+            priority=dict(
+                info="The prestaging request priority",
+                type=int,
+                values=list(prestaging_p),
             ),
-
-                info = 'The prestaging request priority',
-                type = int,
-                values = list(prestaging_p)
-            )
-        )
+        ),
     )
 
     def __init__(self, *kargs, **kwargs):
@@ -66,12 +65,14 @@ class PrestagingTool(footprints.FootprintBase, Catalog):
 
     def describe(self, fulldump=False):
         """Print the object's characteristics and content."""
-        res =
+        res = "PrestagingTool object of class: {!s}\n".format(self.__class__)
         for k, v in self.footprint_as_shallow_dict().items():
-            res +=
+            res += " * {:s}: {!s}\n".format(k, v)
         if fulldump:
-            res +=
-            res +=
+            res += "\n * Todo list:\n"
+            res += "\n".join(
+                [" - {:s}".format(item) for item in sorted(self.items())]
+            )
         return res
 
     def flush(self, email=None):
@@ -113,24 +114,35 @@ class PrivatePrestagingHub:
         # Prestaging tool descriptions
         myptool_desc = self.prestagingtools_default_opts.copy()
         myptool_desc.update(kwargs)
-        myptool_desc[
-        myptool_desc[
+        myptool_desc["priority"] = priority
+        myptool_desc["system"] = self._sh
         myptool = None
         # Scan pre-existing prestaging tools to find a suitable one
         for ptool in self._prestagingtools:
-            if ptool.footprint_reusable() and ptool.footprint_compatible(
-
+            if ptool.footprint_reusable() and ptool.footprint_compatible(
+                myptool_desc
+            ):
+                logger.debug(
+                    "Re-usable prestaging tool found: %s",
+                    lightdump(myptool_desc),
+                )
                 myptool = ptool
                 break
         # If necessary, create a new one
         if myptool is None:
             myptool = fpx.prestagingtool(_emptywarning=False, **myptool_desc)
             if myptool is not None:
-                logger.debug(
+                logger.debug(
+                    "Fresh prestaging tool created: %s",
+                    lightdump(myptool_desc),
+                )
                 self._prestagingtools.add(myptool)
         # Let's role
         if myptool is None:
-            logger.debug(
+            logger.debug(
+                "Unable to perform prestaging with: %s",
+                lightdump(myptool_desc),
+            )
         else:
             logger.debug("Prestaging requested accepted for: %s", location)
             myptool.add(location)
@@ -143,13 +155,21 @@ class PrivatePrestagingHub:
         return todo
 
     def __repr__(self, *args, **kwargs):
-        return
-
-
+        return "{:s} | n_prestagingtools={:d}>".format(
+            super().__repr__().rstrip(">"), len(self._prestagingtools)
+        )
 
     def __str__(self):
-        return (
-
+        return (
+            repr(self)
+            + "\n\n"
+            + "\n\n".join(
+                [
+                    ptool.describe(fulldump=True)
+                    for ptool in self._prestagingtools
+                ]
+            )
+        )
 
     def flush(self, priority_threshold=prestaging_p.low):
         """Actually send the pre-staging request to the appropriate location.
@@ -163,7 +183,10 @@ class PrivatePrestagingHub:
             if rc:
                 self._prestagingtools.discard(ptool)
             else:
-                logger.error(
+                logger.error(
+                    "Something went wrong when flushing the %s prestaging tool",
+                    ptool,
+                )
 
     def clear(self, priority_threshold=prestaging_p.low):
         """Erase the pre-staging requests list.
@@ -183,4 +206,5 @@ class PrestagingHub(PrivatePrestagingHub, getbytag.GetByTag):
     Therefore, a *tag* attribute needs to be specified when building/retrieving
     an object of this class.
     """
+
     pass