vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/tools/odb.py ADDED
@@ -0,0 +1,10 @@
1
+ """
2
+ Backward compatibility module.
3
+
4
+ Please use :mod:`vortex.tools.folder` and :mod:`common.tools.odb` instead.
5
+ """
6
+
7
+ # Backward compatibility...
8
+ from . import folder
9
+
10
+ OdbShell = folder.OdbShell
@@ -0,0 +1,368 @@
1
+ """
2
+ Abstract classes for :mod:`taylorism` workers to be used in conjunction with
3
+ AlgoComponents based on the :class:`~vortex.algo.components.TaylorRun` class.
4
+ """
5
+
6
+ import io
7
+ import logging
8
+ import sys
9
+
10
+ from bronx.fancies import loggers
11
+ from bronx.stdtypes import date
12
+ import footprints
13
+ import taylorism
14
+ import vortex
15
+ from vortex.tools.systems import ExecutionError
16
+
17
+ #: No automatic export
18
+ __all__ = []
19
+
20
+ logger = loggers.getLogger(__name__)
21
+
22
+
23
class TaylorVortexWorker(taylorism.Worker):
    """Vortex version of the :class:`taylorism.Worker` class.

    This class provides additional features:

    * Useful shortcuts (system, context, ...)
    * Setup a Context recorder to track changes in the Context (and replay them later)
    * Setup necessary hooks to record the logging messages and standard output. They
      are sent back to the main process where they are displayed using the
      :class:`ParallelResultParser` class.
    """

    # Abstract footprint: concrete workers must at least define a *kind*.
    _abstract = True
    _footprint = dict(
        attr=dict(
            kind=dict(),
            taskdebug=dict(
                info="Dump all stdout/stderr to a file (in real live !)",
                type=bool,
                default=False,
                optional=True,
            ),
        )
    )

    def _vortex_shortcuts(self):
        """Setup a few shortcuts (current ticket, context and system objects)."""
        self.ticket = vortex.sessions.current()
        self.context = self.ticket.context
        self.system = self.context.system

    def _vortex_rc_wrapup(self, rc, psi_rc):
        """Complement the return code with the ParallelSilencer recording.

        :param rc: the raw return value of :meth:`vortex_task` (wrapped in a
            dict under the ``msg`` key when it is not already a dict)
        :param dict psi_rc: the record exported by the :class:`ParallelSilencer`
        :return: a dict merging both
        """
        # Update the return values
        if not isinstance(rc, dict):
            rc = dict(msg=rc)
        rc.update(psi_rc)
        return rc

    def _task(self, **kwargs):
        """Should not be overridden anymore: see :meth:`vortex_task`.

        Runs the real work inside a :class:`ParallelSilencer` so that every
        output (stdout, stderr, logging) and every Context change is recorded
        and shipped back to the main process with the return value.
        """
        self._vortex_shortcuts()
        with ParallelSilencer(
            self.context, self.name, debug=self.taskdebug
        ) as psi:
            rc = self.vortex_task(**kwargs)
            # export_result() also stops the recording (the with-block exit is
            # then a no-op)
            psi_rc = psi.export_result()
        return self._vortex_rc_wrapup(rc, psi_rc)

    def vortex_task(self, **kwargs):
        """This method is to be implemented through inheritance: the real work happens here!"""
        raise NotImplementedError()
75
+
76
+
77
class VortexWorkerBlindRun(TaylorVortexWorker):
    """Include utility methods to run a basic program (i.e no MPI)."""

    _abstract = True
    _footprint = dict(
        attr=dict(
            progname=dict(),
            progargs=dict(
                type=footprints.FPList,
                default=footprints.FPList(),
                optional=True,
            ),
            progtaskset=dict(
                info="Topology/Method to set up the CPU affinity of the child task.",
                default=None,
                optional=True,
            ),
            progtaskset_bsize=dict(
                info="The number of threads used by one task",
                type=int,
                default=1,
                optional=True,
            ),
            progenvdelta=dict(
                info="Any alteration to environment variables",
                type=footprints.FPDict,
                default=footprints.FPDict({}),
                optional=True,
            ),
        )
    )

    def local_spawn_hook(self):
        """Last chance to say something before execution."""
        pass

    def local_spawn(self, stdoutfile):
        """Execute the command specified in the **progname** attributes.

        :param stdoutfile: Path to the file where the standard/error output will
            be saved.
        :raises: whatever :meth:`system.spawn` raises (``fatal=True``),
            typically an :class:`ExecutionError`.
        """
        try:
            # Redirect any core file to /dev/null (ignore a pre-existing link)
            self.system.softlink("/dev/null", "core")
        except FileExistsError:
            pass
        self.local_spawn_hook()
        self.system.default_target.spawn_hook(self.system)
        logger.info("The program stdout/err will be saved to %s", stdoutfile)
        logger.info(
            "Starting the following command: %s (taskset=%s, id=%d)",
            " ".join(
                [
                    self.progname,
                ]
                + self.progargs
            ),
            str(self.progtaskset),
            self.scheduler_ticket,
        )
        # Fixed: the output file was previously opened without ever being
        # closed; the context manager guarantees it is flushed and closed
        # even when spawn() fails.
        with open(stdoutfile, "wb") as tmpio:
            with self.system.env.delta_context(**self.progenvdelta):
                self.system.spawn(
                    [
                        self.progname,
                    ]
                    + self.progargs,
                    output=tmpio,
                    fatal=True,
                    taskset=self.progtaskset,
                    taskset_id=self.scheduler_ticket,
                    taskset_bsize=self.progtaskset_bsize,
                )

    def delayed_error_local_spawn(self, stdoutfile, rcdict):
        """local_spawn wrapped in a try/except in order to trigger delayed exceptions.

        :param stdoutfile: Path to the file where the outputs will be saved
        :param dict rcdict: the return-code dictionary updated in place
            (``rcdict['rc']`` is set to the exception on failure)
        :return: the (possibly updated) *rcdict*
        """
        try:
            self.local_spawn(stdoutfile)
        except ExecutionError as e:
            logger.error("The execution failed.")
            rcdict["rc"] = e
        return rcdict

    def find_namelists(self, opts=None):  # @UnusedVariable
        """Find any namelists candidates in actual context inputs.

        :param opts: unused (kept for interface compatibility)
        :return: the list of namelist resource handlers found in the sequence
        """
        namcandidates = [
            x.rh
            for x in self.context.sequence.effective_inputs(kind="namelist")
        ]
        self.system.subtitle("Namelist candidates")
        for nam in namcandidates:
            nam.quickview()
        return namcandidates
171
+
172
+
173
class TeeLikeStringIO(io.StringIO):
    """A StringIO variant that can also write to several files."""

    def __init__(self):
        super().__init__()
        # Extra file objects that mirror everything written to this stream
        self._tees = set()

    def record_teefile(self, filename, mode="w", line_buffering=True):
        """Add **filename** to the set of extra logfiles.

        :param filename: path of the extra logfile
        :param mode: open mode for the extra logfile
        :param bool line_buffering: if True, the file is opened line-buffered
        """
        self._tees.add(
            open(filename, mode=mode, buffering=int(line_buffering))
        )

    def discard_tees(self):
        """Dismiss (and close) all of the extra logfiles."""
        for teeio in self._tees:
            teeio.close()
        self._tees = set()

    def write(self, t):
        """Write in the present StringIO but also in the extra logfiles.

        :return: the number of characters written
        """
        for teeio in self._tees:
            teeio.write(t)
        # Fixed: the original returned None, breaking the io.TextIOBase
        # contract that write() reports the number of characters written.
        return super().write(t)

    def filedump(self, filename, mode="w"):
        """Dump all of the captured data to **filename**."""
        with open(filename, mode=mode) as fhdump:
            self.seek(0)
            for line in self:
                fhdump.write(line)
204
+
205
+
206
class ParallelSilencer:
    """Record everything and suppress all outputs (stdout, loggers, ...).

    The record is kept within the object: the *export_result* method returns
    the record as a dictionary that can be processed using the
    :class:`ParallelResultParser` class.

    :note: This object is designed to be used as a Context manager.

    :example:
    .. code-block:: python

        with ParallelSilencer(context) as psi:
            # do a lot of stuff here
            psi_record = psi.export_result()
        # do whatever you need with the psi_record
    """

    def __init__(self, context, taskname, debug=False):
        """
        :param vortex.layout.contexts.Context context: The context we will record.
        :param str taskname: used to build the name of the debug/emergency
            dump file (suffixed with a timestamp).
        :param bool debug: if True, everything is also written to the debug
            file in real time (see :class:`TeeLikeStringIO`).
        """
        self._ctx = context
        self._taskdebug = debug
        self._debugfile = "{:s}_{:s}_stdeo.txt".format(
            taskname, date.now().ymdhms
        )
        self._ctx_r = None
        # Placeholder stream; replaced by a TeeLikeStringIO in _reset_records
        self._io_r = io.StringIO()
        # Other temporary stuff
        self._reset_temporary()

    def _reset_records(self):
        """Reset variables where the records are stored."""
        self._io_r = TeeLikeStringIO()
        if self._taskdebug:
            # In debug mode, mirror everything to the debug file in real time
            self._io_r.record_teefile(self._debugfile)
        # A logging handler that funnels every log record into self._io_r
        self._stream_h = logging.StreamHandler(self._io_r)
        self._stream_h.setLevel(logging.DEBUG)
        self._stream_h.setFormatter(loggers.default_console.formatter)

    def _reset_temporary(self):
        """Reset other temporary stuff (saved handlers and stdout/stderr)."""
        self._removed_h = dict()
        # _prev_stdo is also used as the "currently recording" flag
        # (see _stop_recording)
        (self._prev_stdo, self._prev_stde) = (None, None)

    def __enter__(self):
        """The beginning of a new context."""
        # Reset all
        self._reset_records()
        # Start the recording of the context (to be replayed in the main process)
        self._ctx_r = self._ctx.get_recorder()
        # Reset all the log handlers and slurp everything
        r_logger = logging.getLogger()
        self._removed_h[r_logger] = list(r_logger.handlers)
        r_logger.addHandler(self._stream_h)
        for a_handler in self._removed_h[r_logger]:
            r_logger.removeHandler(a_handler)
        # Also strip handlers from every logger known to bronx' loggers
        for a_logger in [
            logging.getLogger(x) for x in loggers.lognames | loggers.roots
        ]:
            self._removed_h[a_logger] = list(a_logger.handlers)
            for a_handler in self._removed_h[a_logger]:
                a_logger.removeHandler(a_handler)
        # Do not speak on stdout/err
        self._prev_stdo = sys.stdout
        self._prev_stde = sys.stderr
        sys.stdout = self._io_r
        sys.stderr = self._io_r
        return self

    def __exit__(self, exctype, excvalue, exctb):  # @UnusedVariable
        """The end of a context."""
        self._stop_recording()
        if (
            exctype is not None
            and not self._taskdebug
            and self._io_r is not None
        ):
            # Emergency dump of the outputs (even with debug=False) !
            self._io_r.filedump(self._debugfile)

    def _stop_recording(self):
        """Stop recording and restore everything.

        Idempotent: a no-op when recording is not active (e.g. when called
        from both :meth:`export_result` and :meth:`__exit__`).
        """
        if self._prev_stdo is not None:
            # Stop recording the context
            self._ctx_r.unregister()
            # Restore the loggers
            r_logger = logging.getLogger()
            for a_handler in self._removed_h[r_logger]:
                r_logger.addHandler(a_handler)
            r_logger.removeHandler(self._stream_h)
            for a_logger in [
                logging.getLogger(x) for x in loggers.roots | loggers.lognames
            ]:
                # .get(): some loggers may not have been seen in __enter__
                for a_handler in self._removed_h.get(a_logger, ()):
                    a_logger.addHandler(a_handler)
            # flush
            self._stream_h.flush()
            # Restore stdout/err
            sys.stdout = self._prev_stdo
            sys.stderr = self._prev_stde
            # Remove all tees
            self._io_r.discard_tees()
            # Cleanup (also resets _prev_stdo, making this method idempotent)
            self._reset_temporary()

    def export_result(self):
        """Return everything that has been recorded.

        :return: A dictionary that can be processed with the :class:`ParallelResultParser` class.
        """
        self._stop_recording()
        self._io_r.seek(0)
        return dict(
            context_record=self._ctx_r, stdoe_record=self._io_r.readlines()
        )
324
+
325
+
326
class ParallelResultParser:
    """Summarise the results of a parallel execution.

    Just pass to this object the `rc` of a `taylorism` worker based on
    :class:`TaylorVortexWorker`. It will:

    * update the context with the changes made by the worker ;
    * display the standard output/error of the worker
    """

    def __init__(self, context):
        """
        :param vortex.layout.contexts.Context context: The context where the
            results will be replayed.
        """
        self.context = context

    def slurp(self, res):
        """Summarise the results of a parallel execution.

        :param dict res: A result record
        :raises Exception: *res* itself, when the worker sent back an exception
        :return: the worker's return code (``True`` when none was recorded)
        """
        # An exception coming back from the worker is re-raised as-is
        if isinstance(res, Exception):
            raise res
        sys.stdout.flush()
        report = res["report"]
        logger.info("Parallel processing results for %s", res["name"])
        # Replay the worker's context changes in the local context
        logger.info("... Updating the current context ...")
        report["context_record"].replay_in(self.context)
        # Echo whatever the worker wrote on stdout/stderr
        captured = report["stdoe_record"]
        if captured:
            logger.info(
                "... Dump of the mixed standard/error output generated by the subprocess ..."
            )
            for outline in captured:
                sys.stdout.write(outline)
        logger.info("... That's all for all for %s ...", res["name"])
        return report.get("rc", True)

    def __call__(self, res):
        """Shortcut for :meth:`slurp`."""
        return self.slurp(res)
@@ -0,0 +1,210 @@
1
+ """
2
+ Advanced tools that deal with resources pre-staging.
3
+ """
4
+
5
+ from collections import namedtuple
6
+
7
+ from bronx.fancies import loggers
8
+ from bronx.fancies.dump import lightdump
9
+ from bronx.patterns import getbytag
10
+ from bronx.stdtypes.catalog import Catalog
11
+
12
+ import footprints
13
+ from footprints import proxy as fpx
14
+
15
+ from vortex.tools.systems import OSExtended
16
+
17
+ #: No automatic export
18
+ __all__ = []
19
+
20
+ logger = loggers.getLogger(__name__)
21
+
22
#: Named tuple type holding the three pre-staging priority levels.
PrestagingPriorityTuple = namedtuple(
    "PrestagingPriorityTuple", "urgent normal low"
)

#: Predefined priority values: urgent=99, normal=50, low=0.
prestaging_p = PrestagingPriorityTuple(99, 50, 0)
29
+
30
+
31
+ # Module Interface
32
def get_hub(**kw):
    """Return the actual PrestagingHub object matching the *tag* (or create one).

    :param kw: keyword arguments forwarded to the :class:`PrestagingHub`
        constructor (most notably the *tag* used by
        :class:`~bronx.patterns.getbytag.GetByTag` to look up or persist the hub).
    :return: a (possibly pre-existing) :class:`PrestagingHub` instance
    """
    return PrestagingHub(**kw)
35
+
36
+
37
class PrestagingTool(footprints.FootprintBase, Catalog):
    """Abstract class that deal with pre-staging for a given storage target.

    A PrestagingTool is both a footprint-resolved object (selected upon
    *system*, *issuerkind* and *priority*) and a :class:`Catalog` that
    accumulates the locations to be pre-staged (see :meth:`flush`).
    """

    _abstract = True
    _collector = ("prestagingtool",)
    _footprint = dict(
        info="Abstract class that deal with pre-staging for a given storage target.",
        attr=dict(
            system=dict(info="The current system object", type=OSExtended),
            issuerkind=dict(
                info="The kind of store issuing the prestaging request"
            ),
            priority=dict(
                info="The prestaging request priority",
                type=int,
                # Only the predefined urgent/normal/low values are accepted
                values=list(prestaging_p),
            ),
        ),
    )

    def __init__(self, *kargs, **kwargs):
        """Abstract PrestagingTools init."""
        # Call both inits
        Catalog.__init__(self)
        footprints.FootprintBase.__init__(self, *kargs, **kwargs)

    def __str__(self):
        return self.describe(fulldump=False)

    def describe(self, fulldump=False):
        """Print the object's characteristics and content.

        :param bool fulldump: if True, also list the recorded locations
        :return: a printable, multi-line description string
        """
        res = "PrestagingTool object of class: {!s}\n".format(self.__class__)
        for k, v in self.footprint_as_shallow_dict().items():
            res += " * {:s}: {!s}\n".format(k, v)
        if fulldump:
            res += "\n * Todo list:\n"
            res += "\n".join(
                [" - {:s}".format(item) for item in sorted(self.items())]
            )
        return res

    def flush(self, email=None):
        """Send the prestaging request to the appropriate location.

        To be implemented by concrete subclasses.

        :param email: optional e-mail address to notify about the request
        """
        raise NotImplementedError()
81
+
82
+
83
class PrivatePrestagingHub:
    """
    Manages pre-staging request by forwarding them to the appropriate
    :class:`PrestagingTool` object.

    If no :class:`PrestagingTool` class is able to handle the pre-staging
    request, just do nothing.

    :note: When calling the :meth:`record` method, the pre-staging request is
        just stored away. To actually request the pre-staging, one must call the
        :meth:`flush` method.
    """

    def __init__(self, sh, email=None):
        """
        :param sh: the current system object (passed to PrestagingTool objects)
        :param email: optional e-mail address to notify when flushing
        """
        self._email = email
        self._sh = sh
        self._prestagingtools_default_opts = dict()
        # The set of live PrestagingTool objects (one per storage target)
        self._prestagingtools = set()

    @property
    def prestagingtools_default_opts(self):
        """The dictionary of defaults that will be used when creating prestagingtool objects."""
        return self._prestagingtools_default_opts

    def record(self, location, priority=prestaging_p.normal, **kwargs):
        """Take into consideration a pre-staging request.

        :param str location: The location of the requested data
        :param int priority: The prestaging request priority
        :param dict kwargs: Any argument that will be used to create the :class:`PrestagingTool` object
        """
        # Prestaging tool descriptions
        myptool_desc = self.prestagingtools_default_opts.copy()
        myptool_desc.update(kwargs)
        myptool_desc["priority"] = priority
        myptool_desc["system"] = self._sh
        myptool = None
        # Scan pre-existing prestaging tools to find a suitable one
        for ptool in self._prestagingtools:
            if ptool.footprint_reusable() and ptool.footprint_compatible(
                myptool_desc
            ):
                logger.debug(
                    "Re-usable prestaging tool found: %s",
                    lightdump(myptool_desc),
                )
                myptool = ptool
                break
        # If necessary, create a new one through the footprints collector
        # (may legitimately resolve to None when no class matches)
        if myptool is None:
            myptool = fpx.prestagingtool(_emptywarning=False, **myptool_desc)
            if myptool is not None:
                logger.debug(
                    "Fresh prestaging tool created: %s",
                    lightdump(myptool_desc),
                )
                self._prestagingtools.add(myptool)
        # Let's roll
        if myptool is None:
            # No tool can handle this request: silently ignore it (by design)
            logger.debug(
                "Unable to perform prestaging with: %s",
                lightdump(myptool_desc),
            )
        else:
            logger.debug("Prestaging requested accepted for: %s", location)
            myptool.add(location)

    def _get_ptools(self, priority_threshold=prestaging_p.low):
        # Return the set of tools whose priority reaches the threshold
        todo = set()
        for ptool in self._prestagingtools:
            if ptool.priority >= priority_threshold:
                todo.add(ptool)
        return todo

    def __repr__(self, *args, **kwargs):
        return "{:s} | n_prestagingtools={:d}>".format(
            super().__repr__().rstrip(">"), len(self._prestagingtools)
        )

    def __str__(self):
        return (
            repr(self)
            + "\n\n"
            + "\n\n".join(
                [
                    ptool.describe(fulldump=True)
                    for ptool in self._prestagingtools
                ]
            )
        )

    def flush(self, priority_threshold=prestaging_p.low):
        """Actually send the pre-staging request to the appropriate location.

        :param int priority_threshold: Only requests with a priority >= *priority_threshold*
            will be sent.
        """
        for ptool in self._get_ptools(priority_threshold):
            print()
            rc = ptool.flush(email=self._email)
            # A successful flush retires the tool; otherwise keep it for retry
            if rc:
                self._prestagingtools.discard(ptool)
            else:
                logger.error(
                    "Something went wrong when flushing the %s prestaging tool",
                    ptool,
                )

    def clear(self, priority_threshold=prestaging_p.low):
        """Erase the pre-staging requests list.

        :param int priority_threshold: Only requests with a priority >= *priority_threshold*
            will be deleted.
        """
        for ptool in self._get_ptools(priority_threshold):
            self._prestagingtools.discard(ptool)
199
+
200
+
201
class PrestagingHub(PrivatePrestagingHub, getbytag.GetByTag):
    """
    A subclass of :class:`PrivatePrestagingHub` that uses :class:`GetByTag`
    to remain persistent in memory.

    Therefore, a *tag* attribute needs to be specified when building/retrieving
    an object of this class.
    """

    pass
@@ -0,0 +1,10 @@
1
+ """
2
+ Module needed to interact with subdirectories of AEARP's rawfiles.
3
+
4
+ :warning: Do not use this module. It is deprecated
5
+ """
6
+
7
+ # Backward compatibility...
8
+ from . import folder
9
+
10
+ RawFilesShell = folder.RawFilesShell