vortex-nwp 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144)
  1. vortex/__init__.py +159 -0
  2. vortex/algo/__init__.py +13 -0
  3. vortex/algo/components.py +2462 -0
  4. vortex/algo/mpitools.py +1953 -0
  5. vortex/algo/mpitools_templates/__init__.py +1 -0
  6. vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
  7. vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
  8. vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
  9. vortex/algo/serversynctools.py +171 -0
  10. vortex/config.py +112 -0
  11. vortex/data/__init__.py +19 -0
  12. vortex/data/abstractstores.py +1510 -0
  13. vortex/data/containers.py +835 -0
  14. vortex/data/contents.py +622 -0
  15. vortex/data/executables.py +275 -0
  16. vortex/data/flow.py +119 -0
  17. vortex/data/geometries.ini +2689 -0
  18. vortex/data/geometries.py +799 -0
  19. vortex/data/handlers.py +1230 -0
  20. vortex/data/outflow.py +67 -0
  21. vortex/data/providers.py +487 -0
  22. vortex/data/resources.py +207 -0
  23. vortex/data/stores.py +1390 -0
  24. vortex/data/sync_templates/__init__.py +0 -0
  25. vortex/gloves.py +309 -0
  26. vortex/layout/__init__.py +20 -0
  27. vortex/layout/contexts.py +577 -0
  28. vortex/layout/dataflow.py +1220 -0
  29. vortex/layout/monitor.py +969 -0
  30. vortex/nwp/__init__.py +14 -0
  31. vortex/nwp/algo/__init__.py +21 -0
  32. vortex/nwp/algo/assim.py +537 -0
  33. vortex/nwp/algo/clim.py +1086 -0
  34. vortex/nwp/algo/coupling.py +831 -0
  35. vortex/nwp/algo/eda.py +840 -0
  36. vortex/nwp/algo/eps.py +785 -0
  37. vortex/nwp/algo/forecasts.py +886 -0
  38. vortex/nwp/algo/fpserver.py +1303 -0
  39. vortex/nwp/algo/ifsnaming.py +463 -0
  40. vortex/nwp/algo/ifsroot.py +404 -0
  41. vortex/nwp/algo/monitoring.py +263 -0
  42. vortex/nwp/algo/mpitools.py +694 -0
  43. vortex/nwp/algo/odbtools.py +1258 -0
  44. vortex/nwp/algo/oopsroot.py +916 -0
  45. vortex/nwp/algo/oopstests.py +220 -0
  46. vortex/nwp/algo/request.py +660 -0
  47. vortex/nwp/algo/stdpost.py +1641 -0
  48. vortex/nwp/data/__init__.py +30 -0
  49. vortex/nwp/data/assim.py +380 -0
  50. vortex/nwp/data/boundaries.py +314 -0
  51. vortex/nwp/data/climfiles.py +521 -0
  52. vortex/nwp/data/configfiles.py +153 -0
  53. vortex/nwp/data/consts.py +954 -0
  54. vortex/nwp/data/ctpini.py +149 -0
  55. vortex/nwp/data/diagnostics.py +209 -0
  56. vortex/nwp/data/eda.py +147 -0
  57. vortex/nwp/data/eps.py +432 -0
  58. vortex/nwp/data/executables.py +1045 -0
  59. vortex/nwp/data/fields.py +111 -0
  60. vortex/nwp/data/gridfiles.py +380 -0
  61. vortex/nwp/data/logs.py +584 -0
  62. vortex/nwp/data/modelstates.py +363 -0
  63. vortex/nwp/data/monitoring.py +193 -0
  64. vortex/nwp/data/namelists.py +696 -0
  65. vortex/nwp/data/obs.py +840 -0
  66. vortex/nwp/data/oopsexec.py +74 -0
  67. vortex/nwp/data/providers.py +207 -0
  68. vortex/nwp/data/query.py +206 -0
  69. vortex/nwp/data/stores.py +160 -0
  70. vortex/nwp/data/surfex.py +337 -0
  71. vortex/nwp/syntax/__init__.py +9 -0
  72. vortex/nwp/syntax/stdattrs.py +437 -0
  73. vortex/nwp/tools/__init__.py +10 -0
  74. vortex/nwp/tools/addons.py +40 -0
  75. vortex/nwp/tools/agt.py +67 -0
  76. vortex/nwp/tools/bdap.py +59 -0
  77. vortex/nwp/tools/bdcp.py +41 -0
  78. vortex/nwp/tools/bdm.py +24 -0
  79. vortex/nwp/tools/bdmp.py +54 -0
  80. vortex/nwp/tools/conftools.py +1661 -0
  81. vortex/nwp/tools/drhook.py +66 -0
  82. vortex/nwp/tools/grib.py +294 -0
  83. vortex/nwp/tools/gribdiff.py +104 -0
  84. vortex/nwp/tools/ifstools.py +203 -0
  85. vortex/nwp/tools/igastuff.py +273 -0
  86. vortex/nwp/tools/mars.py +68 -0
  87. vortex/nwp/tools/odb.py +657 -0
  88. vortex/nwp/tools/partitioning.py +258 -0
  89. vortex/nwp/tools/satrad.py +71 -0
  90. vortex/nwp/util/__init__.py +6 -0
  91. vortex/nwp/util/async.py +212 -0
  92. vortex/nwp/util/beacon.py +40 -0
  93. vortex/nwp/util/diffpygram.py +447 -0
  94. vortex/nwp/util/ens.py +279 -0
  95. vortex/nwp/util/hooks.py +139 -0
  96. vortex/nwp/util/taskdeco.py +85 -0
  97. vortex/nwp/util/usepygram.py +697 -0
  98. vortex/nwp/util/usetnt.py +101 -0
  99. vortex/proxy.py +6 -0
  100. vortex/sessions.py +374 -0
  101. vortex/syntax/__init__.py +9 -0
  102. vortex/syntax/stdattrs.py +867 -0
  103. vortex/syntax/stddeco.py +185 -0
  104. vortex/toolbox.py +1117 -0
  105. vortex/tools/__init__.py +20 -0
  106. vortex/tools/actions.py +523 -0
  107. vortex/tools/addons.py +316 -0
  108. vortex/tools/arm.py +96 -0
  109. vortex/tools/compression.py +325 -0
  110. vortex/tools/date.py +27 -0
  111. vortex/tools/ddhpack.py +10 -0
  112. vortex/tools/delayedactions.py +782 -0
  113. vortex/tools/env.py +541 -0
  114. vortex/tools/folder.py +834 -0
  115. vortex/tools/grib.py +738 -0
  116. vortex/tools/lfi.py +953 -0
  117. vortex/tools/listings.py +423 -0
  118. vortex/tools/names.py +637 -0
  119. vortex/tools/net.py +2124 -0
  120. vortex/tools/odb.py +10 -0
  121. vortex/tools/parallelism.py +368 -0
  122. vortex/tools/prestaging.py +210 -0
  123. vortex/tools/rawfiles.py +10 -0
  124. vortex/tools/schedulers.py +480 -0
  125. vortex/tools/services.py +940 -0
  126. vortex/tools/storage.py +996 -0
  127. vortex/tools/surfex.py +61 -0
  128. vortex/tools/systems.py +3976 -0
  129. vortex/tools/targets.py +440 -0
  130. vortex/util/__init__.py +9 -0
  131. vortex/util/config.py +1122 -0
  132. vortex/util/empty.py +24 -0
  133. vortex/util/helpers.py +216 -0
  134. vortex/util/introspection.py +69 -0
  135. vortex/util/iosponge.py +80 -0
  136. vortex/util/roles.py +49 -0
  137. vortex/util/storefunctions.py +129 -0
  138. vortex/util/structs.py +26 -0
  139. vortex/util/worker.py +162 -0
  140. vortex_nwp-2.0.0.dist-info/METADATA +67 -0
  141. vortex_nwp-2.0.0.dist-info/RECORD +144 -0
  142. vortex_nwp-2.0.0.dist-info/WHEEL +5 -0
  143. vortex_nwp-2.0.0.dist-info/licenses/LICENSE +517 -0
  144. vortex_nwp-2.0.0.dist-info/top_level.txt +1 -0
vortex/nwp/tools/partitioning.py
@@ -0,0 +1,258 @@
+"""
+Compute simple domain partitionings.
+
+The partitioning classes can be used on their own. Alternatively, the
+:func:`setup_partitioning_in_namelist` function can be used to interact with
+namelist Content objects.
+"""
+
+import functools
+import math
+import re
+
+from bronx.syntax import iterators as b_iter
+from bronx.fancies import loggers
+
+
+logger = loggers.getLogger(__name__)
+
+__all__ = [
+    "PartitioningError",
+    "Rectangular2DPartitioner",
+    "setup_partitioning_in_namelist",
+]
+
+
+class PartitioningError(ValueError):
+    """Any error raised during domain partitioning."""
+
+    pass
+
+
+class AbstratctPartitioner:
+    """The base class for any concrete partitioning class.
+
+    Provides the mechanism to filter the partitioning methods and
+    cache the results.
+    """
+
+    _REGISTERED_METHODS = ()
+
+    def __init__(self, p_method_specification):
+        """
+        :param p_method_specification: The partitioning method definition
+        """
+        # Process the partitioning method specification string
+        p_method_parts = p_method_specification.lower().split("_")
+        self.p_method_name = None
+        self.p_method_args = ()
+        for a_method, a_method_n_args in self._REGISTERED_METHODS:
+            if p_method_parts[0] == a_method:
+                self.p_method_name = p_method_parts[0]
+                if len(p_method_parts) - 1 != a_method_n_args:
+                    raise ValueError(
+                        "Erroneous number of integer args "
+                        + "for the {:s} p_method ({:d} required)".format(
+                            a_method, a_method_n_args
+                        )
+                    )
+                self.p_method_args = tuple(
+                    [int(s) for s in p_method_parts[1:]]
+                )
+        # Unknown method -> crash
+        if self.p_method_name is None:
+            raise PartitioningError(
+                "Unknown partitioning method ({:s}).".format(
+                    p_method_specification
+                )
+            )
+        # The actual class method that will be used to compute x and y
+        self.p_method = functools.partial(
+            getattr(self, "_" + self.p_method_name), *self.p_method_args
+        )
+        # Implement a caching mechanism
+        self._p_cache = dict()
+
+    def __call__(self, ntasks):
+        """Return the appropriate partitioning given **ntasks**."""
+        if ntasks not in self._p_cache:
+            self._p_cache[ntasks] = self.p_method(ntasks)
+        return self._p_cache[ntasks]
+
+
+class Rectangular2DPartitioner(AbstratctPartitioner):
+    """Find an adequate simple 2D partitioning given the number of tasks.
+
+    Here is a description of the problem:
+
+    * Let ``D`` be a 2D array/field;
+    * Let ``N`` be the number of partitions you want to create;
+    * What are the ``x`` and ``y`` that satisfy ``x * y = N`` (so that the
+      ``D`` array can be partitioned in ``x`` (resp. ``y``) chunks
+      in its first (resp. second) dimension)?
+
+    For example, with N=16, an array can be split in 4 chunks in each
+    dimension. It can also be partitioned in 2 chunks in the first
+    dimension and 8 in the second... There is no unique solution.
+    Consequently, the user needs to provide a partitioning method.
+
+    Example::
+
+        # Look for a partitioning around a given fixed value
+        # e.g. with xcloseto_16, the x value will be close to 16
+        >>> Rectangular2DPartitioner('xcloseto_16')(128)
+        (16, 8)
+        >>> Rectangular2DPartitioner('xcloseto_16')(990)
+        (15, 66)
+        >>> Rectangular2DPartitioner('xcloseto_16')(500)
+        (20, 25)
+
+        # e.g. with ycloseto_16, the y value will be close to 16
+        >>> Rectangular2DPartitioner('ycloseto_16')(128)
+        (8, 16)
+        >>> Rectangular2DPartitioner('ycloseto_16')(990)
+        (66, 15)
+        >>> Rectangular2DPartitioner('ycloseto_16')(500)
+        (25, 20)
+
+        # Squarest partitioning of the domain: x and y as close
+        # as possible
+        >>> Rectangular2DPartitioner('square')(16)
+        (4, 4)
+        >>> Rectangular2DPartitioner('square')(12)
+        (3, 4)
+        >>> Rectangular2DPartitioner('square')(7)
+        (1, 7)
+
+        # Try to find x and y so that a given aspect ratio is preserved
+        # e.g. with aspect_16_9, x / y should roughly be equal to 16 / 9
+        >>> Rectangular2DPartitioner('aspect_2_1')(32)
+        (8, 4)
+        >>> Rectangular2DPartitioner('aspect_2_1')(27)
+        (9, 3)
+        >>> Rectangular2DPartitioner('aspect_16_9')(28)  # roughly 16/9, like a TV...
+        (7, 4)
+
+    """
+
+    _REGISTERED_METHODS = (
+        ("xcloseto", 1),
+        ("ycloseto", 1),
+        ("square", 0),
+        ("aspect", 2),
+    )
+
+    @staticmethod
+    def _test_and_return(ntasks, guesses):
+        found = 1
+        for i_guess in guesses:
+            if ntasks % i_guess == 0:
+                found = i_guess
+                break
+        return found, ntasks // found
+
+    def _xcloseto(self, close_to_what, ntasks):
+        """Find ``x`` as the closest possible value to **close_to_what**."""
+        guesses = b_iter.interleave(
+            range(close_to_what, 0, -1),
+            range(close_to_what + 1, min(close_to_what * 2, ntasks)),
+        )
+        return self._test_and_return(ntasks, guesses)
+
+    def _ycloseto(self, close_to_what, ntasks):
+        """Find ``y`` as the closest possible value to **close_to_what**."""
+        y_value, x_value = self._xcloseto(close_to_what, ntasks)
+        return x_value, y_value
+
+    def _square(self, ntasks):
+        """Find ``x`` and ``y`` so that they are close to the square root of ``N``.
+
+        With this method, ``x`` is always the smallest value.
+        """
+        guesses = range(int(math.sqrt(ntasks)), 0, -1)
+        return self._test_and_return(ntasks, guesses)
+
+    def _aspect(self, x_spec, y_spec, ntasks):
+        """Find ``x`` and ``y`` so that ``x / y =~ x_spec / y_spec``."""
+        aspect_ratio = x_spec / y_spec
+        return self._xcloseto(int(math.sqrt(ntasks * aspect_ratio)), ntasks)


+_PARTITIONERS_CACHE = dict()
+
+
+def setup_partitioning_in_namelist(
+    namcontents, effective_tasks, effective_threads, namlocal=None
+):
+    """Look in a namelist Content object and replace the macros related to partitioning.
+
+    :param nwp.data.namelists.NamelistContent namcontents: The namelist's Content
+        object to work with
+    :param int effective_tasks: The number of tasks that will be used when computing
+        the partitioning
+    :param int effective_threads: The number of threads that will be used when computing
+        the partitioning
+    :param str namlocal: The namelist's file name
+    :return: ``True`` if the namelist's Content object has been modified
+    :rtype: bool
+
+    This function will detect namelist macros like ``PART_TASKS2D_X_SQUARE`` where:
+
+    * ``TASKS`` tells that **effective_tasks** will be used to compute the
+      decomposition (alternatively, ``THREADS`` can be used);
+    * ``2D`` tells that the :class:`Rectangular2DPartitioner` class will be used
+      to compute the partitioning. For now, ``2D`` is the only available option;
+    * ``X`` tells that the user wants to get the X value of the computed partitioning.
+      Alternatively, ``Y`` can be used;
+    * ``SQUARE`` refers to the partitioning method that will be used by the
+      partitioning class. Any value that is accepted by the partitioning class is
+      fine.
+    """
+    macrovalid = re.compile(
+        "PART_"
+        + "(?P<what>TASKS|THREADS)(?P<cls>2D)_"
+        + "(?P<dim>[XY])_(?P<def>.*)$"
+    )
+    partitioning_classes = {"2D": Rectangular2DPartitioner}
+    namw = False
+    # Find the list of existing macros
+    all_macros = set()
+    for nam in namcontents.values():
+        all_macros.update(nam.macros())
+    # Consider only relevant macros
+    for macroname in all_macros:
+        macroname_re = macrovalid.match(macroname)
+        if macroname_re:
+            cache_key = (macroname_re.group("cls"), macroname_re.group("def"))
+            if cache_key not in _PARTITIONERS_CACHE:
+                partitioning_class = partitioning_classes[
+                    macroname_re.group("cls")
+                ]
+                _PARTITIONERS_CACHE[cache_key] = partitioning_class(
+                    macroname_re.group("def")
+                )
+            effective_n = dict(
+                TASKS=effective_tasks, THREADS=effective_threads
+            )[macroname_re.group("what")]
+            part_x, part_y = _PARTITIONERS_CACHE[cache_key](effective_n)
+            final_result = (
+                part_x if macroname_re.group("dim") == "X" else part_y
+            )
+            if namlocal:
+                logger.info(
+                    "Setup macro %s=%s in %s",
+                    macroname,
+                    final_result,
+                    namlocal,
+                )
+            else:
+                logger.info("Setup macro %s=%s", macroname, final_result)
+            namcontents.setmacro(macroname, final_result)
+            namw = True
+    return namw
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
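
Editorial note (not part of the packaged diff): the doctests above fully specify the partitioners' behaviour, and the short sketch below simply exercises Rectangular2DPartitioner directly. The import path follows the file location listed above and the expected tuples are copied from the module's own doctests. In a namelist, the same machinery is reached by leaving a macro such as PART_TASKS2D_X_SQUARE in place and letting setup_partitioning_in_namelist resolve it:

    from vortex.nwp.tools.partitioning import Rectangular2DPartitioner

    # Keep the first dimension close to 16 when splitting 128 tasks
    assert Rectangular2DPartitioner("xcloseto_16")(128) == (16, 8)
    # Squarest decomposition of 12 tasks
    assert Rectangular2DPartitioner("square")(12) == (3, 4)
    # Preserve a 2:1 aspect ratio for 32 tasks
    assert Rectangular2DPartitioner("aspect_2_1")(32) == (8, 4)
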
vortex/nwp/tools/satrad.py
@@ -0,0 +1,71 @@
+"""
+Classes of common interest to help set up the RTTOV/IFS environment.
+"""
+
+import re
+
+from bronx.fancies import loggers
+
+from vortex.algo.components import AlgoComponentDecoMixin, AlgoComponentError
+from vortex.algo.components import algo_component_deco_mixin_autodoc
+
+#: No automatic export
+__all__ = []
+
+logger = loggers.getLogger(__name__)
+
+
+@algo_component_deco_mixin_autodoc
+class SatRadDecoMixin(AlgoComponentDecoMixin):
+    """RTTOV settings and satellite-related helpers.
+
+    This mixin class is intended to be used with AlgoComponent classes. It will
+    automatically set up the path to RTTOV coefficient files
+    (:meth:`_satrad_coeffdir_setup`).
+
+    In addition it provides the :meth:`setchannels` utility method (that has to
+    be called manually if needed).
+    """
+
+    def _satrad_coeffdir_setup(self, rh, opts):  # @UnusedVariable
+        """Look for RTTOV coefficient files and act on them."""
+        rtcoefs = self.context.sequence.effective_inputs(
+            role="RtCoef", kind="rtcoef"
+        )
+        if rtcoefs:
+            sh = self.system
+            rtpaths = {
+                sh.path.dirname(
+                    sh.path.realpath(rtcoef.rh.container.localpath())
+                )
+                for rtcoef in rtcoefs
+            }
+            if len(rtpaths) != 1:
+                raise AlgoComponentError(
+                    "The Radiative Transfer Coefficients are scattered in "
+                    + "several directories: {!s}".format(rtpaths)
+                )
+            rtpath = rtpaths.pop()
+            logger.info("Setting %s = %s", "RTTOV_COEFDIR", rtpath)
+            self.env["RTTOV_COEFDIR"] = rtpath
+
+    _MIXIN_PREPARE_HOOKS = (_satrad_coeffdir_setup,)
+
+    def setchannels(self):
+        """Look for channel namelists among the effective inputs."""
+        namchan = [
+            x.rh
+            for x in self.context.sequence.effective_inputs(kind="namelist")
+            if "channel" in x.rh.options
+        ]
+        for thisnam in namchan:
+            thisloc = (
+                re.sub(r"\d+$", "", thisnam.options["channel"]) + "channels"
+            )
+            if thisloc != thisnam.container.localpath():
+                logger.info(
+                    "Linking < %s > to < %s >",
+                    thisnam.container.localpath(),
+                    thisloc,
+                )
+                self.system.softlink(thisnam.container.localpath(), thisloc)
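
Editorial sketch only (not part of the diff): the mixin is meant to be combined with an AlgoComponent subclass, so that the _satrad_coeffdir_setup hook runs automatically at prepare time while setchannels remains an explicit call. The class name MyRadianceAlgo is hypothetical, the AlgoComponent import location is assumed, and the usual footprint attributes of a real vortex component are omitted:

    from vortex.algo.components import AlgoComponent  # assumed base class location
    from vortex.nwp.tools.satrad import SatRadDecoMixin


    class MyRadianceAlgo(AlgoComponent, SatRadDecoMixin):
        """Hypothetical component: RTTOV_COEFDIR is exported by the mixin's
        prepare hook; call self.setchannels() explicitly when channel
        namelists must be linked to their '<sensor>channels' names."""
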
vortex/nwp/util/__init__.py
@@ -0,0 +1,6 @@
+"""
+Utilities mostly for NWP.
+"""
+
+#: No automatic export
+__all__ = []
vortex/nwp/util/async.py
@@ -0,0 +1,212 @@
+"""
+Callback functions for Jeeves.
+If needed, VORTEX must be loaded via a VortexWorker in this context.
+"""
+
+from vortex.tools import compression, systems
+from vortex.util.worker import VortexWorker
+
+#: No automatic export
+__all__ = []
+
+
+def _double_ssh(sh, loginnode, transfernode):
+    """Applies a double ssh to retrieve the effective name of a machine.
+
+    This trick enables the load balancing and node crash recovery
+    capabilities handled by the network teams through DNS names.
+
+    May return None when network problems occur.
+    """
+    cmd = ["ssh", "-x", loginnode, "ssh", "-x", transfernode, "hostname", "-s"]
+    rc = sh.spawn(cmd, shell=False, output=True, fatal=False)
+    if not rc:
+        return None
+    return rc[0]
+
+
+def system_ftput(pnum, ask, config, logger, **opts):
+    """Ftp transfer to some archive host.
+
+    Removes the source on success.
+    In phase mode, raw ftp is not allowed, and the hostname is dynamically
+    obtained by a double ssh.
+    """
+
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
+
+    phasemode = opts.get("phasemode", False)
+    nbtries = opts.get("attempts", 1)
+    if phasemode:
+        rawftput = False
+    else:
+        rawftput = opts.get("rawftput", False)
+    trynum = 0
+
+    profile = config["driver"].get("profile", None)
+    with VortexWorker(logger=logger, profile=profile) as vwork:
+        sh = vwork.session.sh
+        sh.trace = "log"
+        sh.ftpflavour = (
+            systems.FTP_FLAVOUR.STD
+        )  # Because errors are handled directly by jeeves
+
+        data = vwork.get_dataset(ask)
+        logger.info("ftput", source=data.source, destination=data.destination)
+        if not sh.path.exists(data.source):
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
+
+        if phasemode:
+            data.hostname = _double_ssh(
+                sh, data.phase_loginnode, data.phase_transfernode
+            )
+            if data.hostname is None:
+                return pnum, False, dict(rpool="retry")
+
+        cpipeline = (
+            None
+            if not hasattr(data, "cpipeline") or not data.cpipeline
+            else compression.CompressionPipeline(sh, data.cpipeline)
+        )
+
+        logger.info("FTPut host", hostname=data.hostname, logname=data.logname)
+        logger.info(
+            "FTPut data", source=data.source, destination=data.destination
+        )
+        while trynum < nbtries:
+            trynum += 1
+            if nbtries > 1:
+                logger.info("FTPut loop", attempt=trynum)
+            try:
+                if rawftput:
+                    putrc = sh.rawftput(
+                        data.source,
+                        data.destination,
+                        hostname=data.hostname,
+                        logname=data.logname,
+                        cpipeline=cpipeline,
+                        fmt=data.fmt,
+                    )
+                else:
+                    putrc = sh.ftput(
+                        data.source,
+                        data.destination,
+                        hostname=data.hostname,
+                        logname=data.logname,
+                        cpipeline=cpipeline,
+                        fmt=data.fmt,
+                    )
+            except Exception as e:
+                logger.warning("FTPut failed", attempt=trynum, error=e)
+                putrc = False
+            if putrc:
+                value = dict(clear=sh.rm(data.source, fmt=data.fmt))
+                break
+
+    return pnum, putrc and vwork.rc, value
+
+
+def system_cp(pnum, ask, config, logger, **opts):
+    """Local transfers (between filesystems) on a given host."""
+
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
+
+    profile = config["driver"].get("profile", None)
+    with VortexWorker(logger=logger, profile=profile) as vwork:
+        sh = vwork.session.sh
+        sh.trace = "log"
+        data = vwork.get_dataset(ask)
+        logger.info("cp", source=data.source, destination=data.destination)
+        if not sh.path.exists(data.source):
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
+
+        try:
+            rc = sh.cp(data.source, data.destination, fmt=data.fmt)
+        except Exception as e:
+            logger.warning("cp failed", error=e)
+            rc = False
+        if rc:
+            value = dict(clear=sh.rm(data.source, fmt=data.fmt))
+
+    return pnum, rc and vwork.rc, value
+
+
+def system_scp(pnum, ask, config, logger, **opts):
+    """Scp transfer to some archive host.
+
+    Removes the source on success.
+    In phase mode, the hostname is dynamically
+    obtained by a double ssh.
+    """
+    logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
+    value = dict(rpool="retry")
+
+    phasemode = opts.get("phasemode", False)
+
+    profile = config["driver"].get("profile", None)
+    with VortexWorker(logger=logger, profile=profile) as vwork:
+        sh = vwork.session.sh
+        sh.trace = "log"
+
+        data = vwork.get_dataset(ask)
+        logger.info("scp", source=data.source, destination=data.destination)
+        if not sh.path.exists(data.source):
+            logger.error("The source file is missing - sorry")
+            return pnum, False, dict(rpool="error")
+
+        if phasemode:
+            data.hostname = _double_ssh(
+                sh, data.phase_loginnode, data.phase_transfernode
+            )
+            if data.hostname is None:
+                return pnum, False, value
+        logger.info("scp host", hostname=data.hostname, logname=data.logname)
+        logger.info(
+            "scp data", source=data.source, destination=data.destination
+        )
+        try:
+            putrc = sh.scpput(
+                data.source,
+                data.destination,
+                hostname=data.hostname,
+                logname=data.logname,
+                fmt=data.fmt,
+            )
+        except Exception as e:
+            logger.warning("scp failed", error=e)
+            putrc = False
+        if putrc:
+            value = dict(clear=sh.rm(data.source, fmt=data.fmt))
+
+    return pnum, putrc and vwork.rc, value
+
+
+def system_noop(pnum, ask, config, logger, **opts):
+    """A callback able to do nothing, but cleanly.
+
+    Used to deactivate jeeves when mirroring the operational suite.
+    """
+    logger.info("Noop", todo=ask.todo, pnum=pnum, opts=opts)
+
+    profile = config["driver"].get("profile", None)
+    with VortexWorker(logger=logger, profile=profile) as vwork:
+        sh = vwork.session.sh
+        sh.trace = "log"
+        data = vwork.get_dataset(ask)
+        value = dict(clear=sh.rm(data.source, fmt=data.fmt))
+
+    return pnum, vwork.rc, value
+
+
+if __name__ == "__main__":
+    import vortex
+
+    t = vortex.ticket()
+    main_sh = t.sh
+    main_sh.trace = True
+    main_sh.verbose = True
+    print(_double_ssh(main_sh, "beaufixoper", "beaufixtransfert-agt"))
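
Editorial note (not part of the diff): all of the transfer callbacks above follow the same Jeeves contract. They receive (pnum, ask, config, logger, **opts), open a VortexWorker, and return (pnum, rc, value) where value carries routing hints such as rpool="retry", rpool="error" or clear=... . The sketch below shows a minimal custom callback built on that pattern; the name system_size, the use of sh.path.getsize and the bare clear=True are assumptions, not existing package code:

    from vortex.util.worker import VortexWorker


    def system_size(pnum, ask, config, logger, **opts):
        """Log the size of the source file; nothing is transferred or removed."""
        logger.info("System", todo=ask.todo, pnum=pnum, opts=opts)
        value = dict(rpool="retry")
        profile = config["driver"].get("profile", None)
        with VortexWorker(logger=logger, profile=profile) as vwork:
            sh = vwork.session.sh
            sh.trace = "log"
            data = vwork.get_dataset(ask)
            if not sh.path.exists(data.source):
                logger.error("The source file is missing - sorry")
                return pnum, False, dict(rpool="error")
            # sh.path proxies os.path in the callbacks above, so getsize is assumed here
            logger.info("size", source=data.source, size=sh.path.getsize(data.source))
            value = dict(clear=True)  # assumed: nothing to retry, the request can be cleared
        return pnum, vwork.rc, value
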
vortex/nwp/util/beacon.py
@@ -0,0 +1,40 @@
+"""
+Functions to create a file and write a few pieces of information into it,
+using Vortex (FunctionStore).
+"""
+
+import io
+import json
+
+#: No automatic export
+__all__ = []
+
+
+def beaconfunction(options):
+    """Function to create a file and write the following information to it:
+
+    - model
+    - date
+    - cutoff
+    - vapp
+    - vconf
+    - member (optional)
+    """
+    rst = dict()
+
+    # Find out if a resource handler is present and load the elements to be written
+    rhdict = options.get("rhandler", None)
+    if rhdict:
+        rst["vapp"] = rhdict.get("provider", {}).get("vapp", "")
+        rst["vconf"] = rhdict.get("provider", {}).get("vconf", "")
+        rst["model"] = rhdict.get("resource", {}).get("model", "")
+        rst["date"] = rhdict.get("resource", {}).get("date", "")
+        rst["cutoff"] = rhdict.get("resource", {}).get("cutoff", "")
+        member = rhdict.get("provider", {}).get("member", None)
+        if member is not None:
+            rst["member"] = member
+    else:
+        rst["error"] = "No resource handler here"
+    outstr = json.dumps(rst)
+    # Return the string, converted to a file-like object
+    return io.BytesIO(outstr.encode(encoding="utf_8"))
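
Editorial sketch (not part of the diff): calling beaconfunction with the kind of "rhandler" dictionary it expects, then reading back the JSON payload. The vapp/vconf/model/date values below are placeholders:

    import json

    from vortex.nwp.util.beacon import beaconfunction

    options = {
        "rhandler": {
            "provider": {"vapp": "arpege", "vconf": "4dvarfr", "member": 3},
            "resource": {"model": "arpege", "date": "2024010100", "cutoff": "production"},
        }
    }
    flow = beaconfunction(options)  # an io.BytesIO ready to be stored as a file
    print(json.loads(flow.read().decode("utf-8")))
    # {'vapp': 'arpege', 'vconf': '4dvarfr', 'model': 'arpege',
    #  'date': '2024010100', 'cutoff': 'production', 'member': 3}
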