vortex_nwp-2.0.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vortex/__init__.py +135 -0
- vortex/algo/__init__.py +12 -0
- vortex/algo/components.py +2136 -0
- vortex/algo/mpitools.py +1648 -0
- vortex/algo/mpitools_templates/envelope_wrapper_default.tpl +27 -0
- vortex/algo/mpitools_templates/envelope_wrapper_mpiauto.tpl +29 -0
- vortex/algo/mpitools_templates/wrapstd_wrapper_default.tpl +18 -0
- vortex/algo/serversynctools.py +170 -0
- vortex/config.py +115 -0
- vortex/data/__init__.py +13 -0
- vortex/data/abstractstores.py +1572 -0
- vortex/data/containers.py +780 -0
- vortex/data/contents.py +596 -0
- vortex/data/executables.py +284 -0
- vortex/data/flow.py +113 -0
- vortex/data/geometries.ini +2689 -0
- vortex/data/geometries.py +703 -0
- vortex/data/handlers.py +1021 -0
- vortex/data/outflow.py +67 -0
- vortex/data/providers.py +465 -0
- vortex/data/resources.py +201 -0
- vortex/data/stores.py +1271 -0
- vortex/gloves.py +282 -0
- vortex/layout/__init__.py +27 -0
- vortex/layout/appconf.py +109 -0
- vortex/layout/contexts.py +511 -0
- vortex/layout/dataflow.py +1069 -0
- vortex/layout/jobs.py +1276 -0
- vortex/layout/monitor.py +833 -0
- vortex/layout/nodes.py +1424 -0
- vortex/layout/subjobs.py +464 -0
- vortex/nwp/__init__.py +11 -0
- vortex/nwp/algo/__init__.py +12 -0
- vortex/nwp/algo/assim.py +483 -0
- vortex/nwp/algo/clim.py +920 -0
- vortex/nwp/algo/coupling.py +609 -0
- vortex/nwp/algo/eda.py +632 -0
- vortex/nwp/algo/eps.py +613 -0
- vortex/nwp/algo/forecasts.py +745 -0
- vortex/nwp/algo/fpserver.py +927 -0
- vortex/nwp/algo/ifsnaming.py +403 -0
- vortex/nwp/algo/ifsroot.py +311 -0
- vortex/nwp/algo/monitoring.py +202 -0
- vortex/nwp/algo/mpitools.py +554 -0
- vortex/nwp/algo/odbtools.py +974 -0
- vortex/nwp/algo/oopsroot.py +735 -0
- vortex/nwp/algo/oopstests.py +186 -0
- vortex/nwp/algo/request.py +579 -0
- vortex/nwp/algo/stdpost.py +1285 -0
- vortex/nwp/data/__init__.py +12 -0
- vortex/nwp/data/assim.py +392 -0
- vortex/nwp/data/boundaries.py +261 -0
- vortex/nwp/data/climfiles.py +539 -0
- vortex/nwp/data/configfiles.py +149 -0
- vortex/nwp/data/consts.py +929 -0
- vortex/nwp/data/ctpini.py +133 -0
- vortex/nwp/data/diagnostics.py +181 -0
- vortex/nwp/data/eda.py +148 -0
- vortex/nwp/data/eps.py +383 -0
- vortex/nwp/data/executables.py +1039 -0
- vortex/nwp/data/fields.py +96 -0
- vortex/nwp/data/gridfiles.py +308 -0
- vortex/nwp/data/logs.py +551 -0
- vortex/nwp/data/modelstates.py +334 -0
- vortex/nwp/data/monitoring.py +220 -0
- vortex/nwp/data/namelists.py +644 -0
- vortex/nwp/data/obs.py +748 -0
- vortex/nwp/data/oopsexec.py +72 -0
- vortex/nwp/data/providers.py +182 -0
- vortex/nwp/data/query.py +217 -0
- vortex/nwp/data/stores.py +147 -0
- vortex/nwp/data/surfex.py +338 -0
- vortex/nwp/syntax/__init__.py +9 -0
- vortex/nwp/syntax/stdattrs.py +375 -0
- vortex/nwp/tools/__init__.py +10 -0
- vortex/nwp/tools/addons.py +35 -0
- vortex/nwp/tools/agt.py +55 -0
- vortex/nwp/tools/bdap.py +48 -0
- vortex/nwp/tools/bdcp.py +38 -0
- vortex/nwp/tools/bdm.py +21 -0
- vortex/nwp/tools/bdmp.py +49 -0
- vortex/nwp/tools/conftools.py +1311 -0
- vortex/nwp/tools/drhook.py +62 -0
- vortex/nwp/tools/grib.py +268 -0
- vortex/nwp/tools/gribdiff.py +99 -0
- vortex/nwp/tools/ifstools.py +163 -0
- vortex/nwp/tools/igastuff.py +249 -0
- vortex/nwp/tools/mars.py +56 -0
- vortex/nwp/tools/odb.py +548 -0
- vortex/nwp/tools/partitioning.py +234 -0
- vortex/nwp/tools/satrad.py +56 -0
- vortex/nwp/util/__init__.py +6 -0
- vortex/nwp/util/async.py +184 -0
- vortex/nwp/util/beacon.py +40 -0
- vortex/nwp/util/diffpygram.py +359 -0
- vortex/nwp/util/ens.py +198 -0
- vortex/nwp/util/hooks.py +128 -0
- vortex/nwp/util/taskdeco.py +81 -0
- vortex/nwp/util/usepygram.py +591 -0
- vortex/nwp/util/usetnt.py +87 -0
- vortex/proxy.py +6 -0
- vortex/sessions.py +341 -0
- vortex/syntax/__init__.py +9 -0
- vortex/syntax/stdattrs.py +628 -0
- vortex/syntax/stddeco.py +176 -0
- vortex/toolbox.py +982 -0
- vortex/tools/__init__.py +11 -0
- vortex/tools/actions.py +457 -0
- vortex/tools/addons.py +297 -0
- vortex/tools/arm.py +76 -0
- vortex/tools/compression.py +322 -0
- vortex/tools/date.py +20 -0
- vortex/tools/ddhpack.py +10 -0
- vortex/tools/delayedactions.py +672 -0
- vortex/tools/env.py +513 -0
- vortex/tools/folder.py +663 -0
- vortex/tools/grib.py +559 -0
- vortex/tools/lfi.py +746 -0
- vortex/tools/listings.py +354 -0
- vortex/tools/names.py +575 -0
- vortex/tools/net.py +1790 -0
- vortex/tools/odb.py +10 -0
- vortex/tools/parallelism.py +336 -0
- vortex/tools/prestaging.py +186 -0
- vortex/tools/rawfiles.py +10 -0
- vortex/tools/schedulers.py +413 -0
- vortex/tools/services.py +871 -0
- vortex/tools/storage.py +1061 -0
- vortex/tools/surfex.py +61 -0
- vortex/tools/systems.py +3396 -0
- vortex/tools/targets.py +384 -0
- vortex/util/__init__.py +9 -0
- vortex/util/config.py +1071 -0
- vortex/util/empty.py +24 -0
- vortex/util/helpers.py +184 -0
- vortex/util/introspection.py +63 -0
- vortex/util/iosponge.py +76 -0
- vortex/util/roles.py +51 -0
- vortex/util/storefunctions.py +103 -0
- vortex/util/structs.py +26 -0
- vortex/util/worker.py +150 -0
- vortex_nwp-2.0.0b1.dist-info/LICENSE +517 -0
- vortex_nwp-2.0.0b1.dist-info/METADATA +50 -0
- vortex_nwp-2.0.0b1.dist-info/RECORD +146 -0
- vortex_nwp-2.0.0b1.dist-info/WHEEL +5 -0
- vortex_nwp-2.0.0b1.dist-info/top_level.txt +1 -0
vortex/tools/folder.py
ADDED
@@ -0,0 +1,663 @@
"""
Various shell addons that handle formats relying on a folder structure.

In any kind of cache directories, the folder structure is kept as is. When
data are sent using FTP or SSH, a tar file is created on the fly.
"""

import contextlib
import ftplib
import tempfile

from bronx.fancies import loggers
from vortex.tools.net import DEFAULT_FTP_PORT
from vortex.util.iosponge import IoSponge
from . import addons

#: No automatic export
__all__ = []

logger = loggers.getLogger(__name__)

_folder_exposed_methods = {'cp', 'mv', 'forcepack', 'forceunpack',
                           'anyft_remote_rewrite',
                           'ftget', 'rawftget', 'batchrawftget', 'ftput', 'rawftput',
                           'scpget', 'scpput',
                           'ecfsget', 'ecfsput', 'ectransget', 'ectransput'}


def folderize(cls):
    """Create the necessary methods in a class that inherits from :class:`FolderShell`."""
    addon_kind = cls.footprint_retrieve().get_values('kind')
    if len(addon_kind) != 1:
        raise SyntaxError("Authorised values for a given Addon's kind must be unique")
    addon_kind = addon_kind[0]
    for basic_mtdname in _folder_exposed_methods:
        expected_mtdname = '{:s}_{:s}'.format(addon_kind, basic_mtdname)
        if not hasattr(cls, expected_mtdname):
            parent_mtd = getattr(cls, '_folder_{:s}'.format(basic_mtdname))
            setattr(cls, expected_mtdname, parent_mtd)
    return cls


class FolderShell(addons.FtrawEnableAddon):
    """
    This abstract class defines methods to manipulate folders.
    """

    _COMPRESSED = 'gz'

    _abstract = True
    _footprint = dict(
        info = 'Tools for manipulating folders',
        attr = dict(
            kind = dict(
                values = ['folder'],
            ),
            tmpname = dict(
                optional = True,
                default = 'folder_tmpunpack',
            ),
            pipeget = dict(
                type = bool,
                optional = True,
                default = False,
            ),
            supportedfmt = dict(
                optional = True,
                default = '[kind]',
            ),
        )
    )

    def _folder_cp(self, source, destination,
                   smartcp_threshold=0, intent='in', silent=False):
        """Extended copy for a folder repository."""
        rc, source, destination = self._folder_tarfix_out(source, destination)
        rc = rc and self.sh.cp(source, destination,
                               smartcp_threshold=smartcp_threshold, intent=intent)
        if rc:
            rc, source, destination = self._folder_tarfix_in(source, destination)
            if rc and intent == 'inout':
                self.sh.stderr('chmod', 0o644, destination)
                with self.sh.mute_stderr():
                    for infile in self.sh.ffind(destination):
                        self.sh.chmod(infile, 0o644)
        return rc

    def _folder_mv(self, source, destination):
        """Shortcut to :meth:`move` method (file or directory)."""
        if not isinstance(source, str) or not isinstance(destination, str):
            rc = self.sh.hybridcp(source, destination)
            if isinstance(source, str):
                rc = rc and self.sh.remove(source)
        else:
            rc, source, destination = self._folder_tarfix_out(source, destination)
            rc = rc and self.sh.move(source, destination)
            if rc:
                rc, source, destination = self._folder_tarfix_in(source, destination)
        return rc

    def _folder_forcepack(self, source, destination=None):
        """Returned a path to a packed data."""
        if not self.sh.is_tarname(source):
            destination = (destination if destination else
                           "{:s}.{:s}".format(self.sh.safe_fileaddsuffix(source),
                                              self._folder_tarfix_extension))
            if not self.sh.path.exists(destination):
                absdestination = self.sh.path.abspath(destination)
                with self.sh.cdcontext(self.sh.path.dirname(source)):
                    self.sh.tar(absdestination, self.sh.path.basename(source))
            return destination
        else:
            return source

    def _folder_forceunpack(self, source):
        """Unpack the data "inplace"."""
        fakesource = '{:s}.{:s}'.format(self.sh.safe_fileaddsuffix(source),
                                        self._folder_tarfix_extension)
        rc, _, _ = self._folder_tarfix_in(fakesource, source)
        return rc

    def _folder_pack_stream(self, source, stdout=True):
        source_name = self.sh.path.basename(source)
        source_dirname = self.sh.path.dirname(source)
        compression_map = {'gz': 'z', 'bz2': 'j'}
        compression_opt = compression_map.get(self._COMPRESSED, '')
        cmd = ['tar', '--directory', source_dirname,
               '-c' + compression_opt, source_name]
        return self.sh.popen(cmd, stdout=stdout, bufsize=8192)

    def _folder_unpack_stream(self, stdin=True, options='xvf'):
        return self.sh.popen(
            # the z option is omitted consequently it also works if the file is not compressed
            ['tar', options, '-'], stdin=stdin, bufsize=8192)

    def _packed_size(self, source):
        """Size of the final file, must be exact or be an overestimation.

        A file 1 byte bigger than this estimation might be rejected,
        hence the conservative options:
          - tar adds 1% with a minimum of 1 Mbytes
          - compression gain is 0%
        """
        dir_size = self.sh.treesize(source)
        tar_mini = 1024 * 1024  # 1 Mbytes
        tar_loss = 1  # 1%
        zip_gain = 0
        tar_size = dir_size + max(tar_mini, (dir_size * tar_loss) // 100)
        return (tar_size * (100 - zip_gain)) // 100

    def _folder_preftget(self, source, destination):
        """Prepare source and destination"""
        destination = self.sh.path.abspath(self.sh.path.expanduser(destination))
        self.sh.rm(destination)
        return source, destination

    def _folder_postftget(self, destination):
        """Move the untared stuff to the destination and clean-up things."""
        try:
            unpacked = self.sh.glob('*')
            if unpacked:
                if (len(unpacked) == 1 and
                        self.sh.path.isdir(self.sh.path.join(unpacked[-1]))):
                    # This is the most usual case... (ODB, DDH packs produced by Vortex)
                    self.sh.wpermtree(unpacked[-1], force=True)
                    if self.sh.path.isdir(unpacked[-1]):
                        with self.sh.secure_directory_move(destination):
                            self.sh.mv(unpacked[-1], destination)
                    else:
                        self.sh.mv(unpacked[-1], destination)
                else:
                    # Old-style DDH packs (produced by Olive)
                    with self.sh.secure_directory_move(destination):
                        self.sh.mkdir(destination)
                        for item in unpacked:
                            self.sh.wpermtree(item, force=True)
                            self.sh.mv(item, self.sh.path.join(destination, item))
            else:
                logger.error('Nothing to unpack')
        except Exception as trouble:
            logger.critical('Unable to proceed folder post-ftget step')
            raise trouble

    @contextlib.contextmanager
    def _folder_postftget_context(self, destination):
        """Move the untared stuff to the destination and clean-up things."""
        with self.sh.temporary_dir_cdcontext(prefix='folder_', dir=self.sh.getcwd()):
            try:
                yield
            finally:
                self._folder_postftget(destination)

    @contextlib.contextmanager
    def _folder_ftget_file_extract(self, source):
        ext_name = self.sh.tarname_splitext(source)[1]
        target = self.tmpname + ext_name
        try:
            yield target
            if self.sh.path.exists(target):
                self.sh.untar(target, autocompress=False)
        finally:
            self.sh.rm(target)

    @contextlib.contextmanager
    def _folder_ftget_pipe_extract(self):
        p = self._folder_unpack_stream()
        yield p
        self.sh.pclose(p)

    @contextlib.contextmanager
    def _folder_ftput_file_compress(self, source):
        c_source = (self.sh.safe_fileaddsuffix(source) +
                    '.' + self._folder_tarfix_extension)
        try:
            self.sh.tar(c_source, source)
            yield c_source
        finally:
            self.sh.rm(c_source)

    @contextlib.contextmanager
    def _folder_ftput_pipe_compress(self, source):
        p = self._folder_pack_stream(source)
        yield p
        self.sh.pclose(p)

    def _folder_anyft_remote_rewrite(self, remote):
        """Add the folder suffix before using file transfert protocols."""
        if not self.sh.is_tarname(self.sh.path.basename(remote)):
            return '{:s}.{:s}'.format(remote, self._folder_tarfix_extension)
        else:
            return remote

    def _folder_ftget(self, source, destination, hostname=None, logname=None,
                      port=DEFAULT_FTP_PORT, cpipeline=None):
        """Proceed direct ftp get on the specified target."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        hostname = self.sh.fix_fthostname(hostname)
        with self.sh.ftpcontext(hostname, logname, port=port) as ftp:
            if ftp:
                source, destination = self._folder_preftget(source, destination)
                with self._folder_postftget_context(destination):
                    try:
                        if self.pipeget:
                            with self._folder_ftget_pipe_extract() as p:
                                rc = ftp.get(source, p.stdin)
                        else:
                            with self._folder_ftget_file_extract(source) as tmp_target:
                                rc = ftp.get(source, tmp_target)
                    except ftplib.all_errors as e:
                        logger.warning('An FTP error occurred: %s', str(e))
                        rc = False
                return rc
            else:
                return False

    def _folder_rawftget(self, source, destination, hostname=None, logname=None,
                         port=None, cpipeline=None):
        """Use ftserv as much as possible."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        if self.sh.ftraw:
            source, destination = self._folder_preftget(source, destination)
            with self._folder_postftget_context(destination):
                with self._folder_ftget_file_extract(source) as tmp_target:
                    rc = self.sh.ftserv_get(source, tmp_target,
                                            hostname=hostname, logname=logname,
                                            port=port)
            return rc
        else:
            if port is None:
                port = DEFAULT_FTP_PORT
            return self._folder_ftget(source, destination, hostname, logname, port=port)

    def _folder_batchrawftget(self, source, destination, hostname=None, logname=None,
                              port=None, cpipeline=None):
        """Use ftserv to fetch several folder-like resources."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        if self.sh.ftraw:
            actualsources = list()
            actualdestinations = list()
            tmpdestinations = list()
            try:
                for s, d in zip(source, destination):
                    actual_s, actual_d = self._folder_preftget(s, d)
                    actualsources.append(actual_s)
                    actualdestinations.append(actual_d)
                    d_dirname = self.sh.path.dirname(actual_d)
                    self.sh.mkdir(d_dirname)
                    d_tmpdir = tempfile.mkdtemp(prefix='folder_', dir=d_dirname)
                    d_extname = self.sh.tarname_splitext(actual_s)[1]
                    tmpdestinations.append(self.sh.path.join(d_tmpdir, self.tmpname + d_extname))

                rc = self.sh.ftserv_batchget(actualsources, tmpdestinations, hostname,
                                             logname, port=port)

                for i, (d, t) in enumerate(zip(actualdestinations, tmpdestinations)):
                    if rc[i]:
                        with self.sh.cdcontext(self.sh.path.dirname(t)):
                            try:
                                try:
                                    rc[i] = rc[i] and bool(self.sh.untar(self.sh.path.basename(t),
                                                                         autocompress=False))
                                finally:
                                    self.sh.rm(t)
                            finally:
                                self._folder_postftget(d)
            finally:
                for t in tmpdestinations:
                    self.sh.rm(self.sh.path.dirname(t))
            return rc
        else:
            raise RuntimeError('You are not supposed to land here !')

    def _folder_ftput(self, source, destination, hostname=None, logname=None,
                      port=DEFAULT_FTP_PORT, cpipeline=None, sync=False):
        """Proceed direct ftp put on the specified target."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        hostname = self.sh.fix_fthostname(hostname)
        source = self.sh.path.abspath(source)
        with self.sh.ftpcontext(hostname, logname, port=port) as ftp:
            if ftp:
                packed_size = self._packed_size(source)
                with self._folder_ftput_pipe_compress(source) as p:
                    sponge = IoSponge(p.stdout, guessed_size=packed_size)
                    try:
                        rc = ftp.put(sponge, destination, size=sponge.size, exact=False)
                    except ftplib.all_errors as e:
                        logger.warning('An FTP error occurred: %s', str(e))
                        rc = False
                return rc
            else:
                return False

    def _folder_rawftput(self, source, destination, hostname=None, logname=None,
                         port=None, cpipeline=None, sync=False):
        """Use ftserv as much as possible."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        if self.sh.ftraw and self.rawftshell is not None:
            newsource = self.sh.copy2ftspool(source, nest=True,
                                             fmt=self.supportedfmt)
            request = self.sh.path.dirname(newsource) + '.request'
            with open(request, 'w') as request_fh:
                request_fh.write(str(self.sh.path.dirname(newsource)))
            self.sh.readonly(request)
            rc = self.sh.ftserv_put(request, destination,
                                    hostname=hostname, logname=logname, port=port,
                                    specialshell=self.rawftshell, sync=sync)
            self.sh.rm(request)
            return rc
        else:
            if port is None:
                port = DEFAULT_FTP_PORT
            return self._folder_ftput(source, destination, hostname, logname,
                                      port=port, sync=sync)

    def _folder_scpget(self, source, destination, hostname, logname=None, cpipeline=None):
        """Retrieve a folder using scp."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        logname = self.sh.fix_ftuser(hostname, logname, fatal=False, defaults_to_user=False)
        ssh = self.sh.ssh(hostname, logname)
        rc = False
        source, destination = self._folder_preftget(source, destination)
        with self._folder_postftget_context(destination):
            with self._folder_ftget_pipe_extract() as p:
                rc = ssh.scpget_stream(source, p.stdin)
        return rc

    def _folder_scpput(self, source, destination, hostname, logname=None, cpipeline=None):
        """Upload a folder using scp."""
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        source = self.sh.path.abspath(source)
        logname = self.sh.fix_ftuser(hostname, logname, fatal=False, defaults_to_user=False)
        ssh = self.sh.ssh(hostname, logname)
        with self._folder_ftput_pipe_compress(source) as p:
            rc = ssh.scpput_stream(p.stdout, destination)
        return rc

    @addons.require_external_addon('ecfs')
    def _folder_ecfsget(self, source, target, cpipeline=None, options=None):
        """Get a folder resource using ECfs.

        :param source: source file
        :param target: target file
        :param cpipeline: compression pipeline to be used, if provided
        :param options: list of options to be used
        :return: return code and additional attributes used
        """
        # The folder must not be compressed
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        source, target = self._folder_preftget(source, target)
        with self._folder_postftget_context(target):
            with self._folder_ftget_file_extract(source) as tmp_target:
                rc = self.sh.ecfsget(source=source, target=tmp_target, options=options)
        return rc

    @addons.require_external_addon('ecfs')
    def _folder_ecfsput(self, source, target, cpipeline=None, options=None):
        """Put a folder resource using ECfs.

        :param source: source file
        :param target: target file
        :param cpipeline: compression pipeline to be used, if provided
        :param options: list of options to be used
        :return: return code and additional attributes used
        """
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        source = self.sh.path.abspath(source)
        with self._folder_ftput_file_compress(source) as c_source:
            rc = self.sh.ecfsput(source=c_source, target=target, options=options)
        return rc

    @addons.require_external_addon('ectrans')
    def _folder_ectransget(self, source, target, gateway=None, remote=None, cpipeline=None):
        """Get a folder resource using ECtrans.

        :param source: source file
        :param target: target file
        :param gateway: gateway used by ECtrans
        :param remote: remote used by ECtrans
        :param cpipeline: compression pipeline to be used, if provided
        :return: return code and additional attributes used
        """
        # The folder must not be compressed
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        source, target = self._folder_preftget(source, target)
        with self._folder_postftget_context(target):
            with self._folder_ftget_file_extract(source) as tmp_target:
                rc = self.sh.raw_ectransget(source=source,
                                            target=tmp_target,
                                            gateway=gateway,
                                            remote=remote)
        return rc

    @addons.require_external_addon('ectrans')
    def _folder_ectransput(self, source, target, gateway=None, remote=None,
                           cpipeline=None, sync=False):
        """Put a folder resource using ECtrans.

        :param source: source file
        :param target: target file
        :param gateway: gateway used by ECtrans
        :param remote: remote used by ECtrans
        :param cpipeline: compression pipeline to be used, if provided
        :param bool sync: If False, allow asynchronous transfers.
        :return: return code and additional attributes used
        """
        if cpipeline is not None:
            raise OSError("It's not allowed to compress folder like data.")
        source = self.sh.path.abspath(source)
        with self._folder_ftput_file_compress(source) as c_source:
            rc = self.sh.raw_ectransput(source=c_source,
                                        target=target,
                                        gateway=gateway,
                                        remote=remote,
                                        sync=sync)
        return rc

    @property
    def _folder_tarfix_extension(self):
        """Return the extension of tar file associated with this extension."""
        if self._COMPRESSED:
            if self._COMPRESSED == 'gz':
                return "tgz"
            elif self._COMPRESSED == 'bz2':
                return "tar.bz2"
            else:
                raise ValueError("Unsupported compression type: {:s}"
                                 .format(self._COMPRESSED))
        else:
            return "tar"

    def _folder_tarfix_in(self, source, destination):
        """Automatically untar **source** if **source** is a tarfile and **destination** is not.

        This is called after a copy was blindly done: a ``source='foo.tgz'`` might have
        been copied to ``destination='bar'``, which must be untarred here.
        """
        ok = True
        sh = self.sh
        if sh.is_tarname(source) and not sh.is_tarname(destination):
            logger.info('tarfix_in: untar from get <%s> to <%s>', source, destination)
            (destdir, destfile) = sh.path.split(sh.path.abspath(destination))
            tar_ext = sh.tarname_splitext(source)[1]
            desttar = sh.path.abspath(destination + tar_ext)
            sh.remove(desttar)
            ok = ok and sh.move(destination, desttar)
            with sh.temporary_dir_cdcontext(prefix='untar_', dir=destdir):
                ok = ok and sh.untar(desttar, output=False)
                unpacked = sh.glob('*')
                ok = ok and len(unpacked) == 1  # Only one element allowed in this kind of tarfiles
                ok = ok and sh.move(unpacked[0], sh.path.join(destdir, destfile))
                ok = ok and sh.remove(desttar)
        return (ok, source, destination)

    def _folder_tarfix_out(self, source, destination):
        """Automatically tar **source** if **destination** is a tarfile and **source** is not.

        This is called after a copy was blindly done: a directory might have been copied
        to ``destination='foo.tgz'`` or ``destination='foo.tar.bz2'``.
        The tar and compression implied by the name must be addressed here.
        """
        ok = True
        sh = self.sh
        if sh.is_tarname(destination) and not sh.is_tarname(source):
            logger.info('tarfix_out: tar before put <%s> to <%s>', source, destination)
            tar_ext = sh.tarname_splitext(destination)[1]
            sourcetar = sh.path.abspath(source + tar_ext)
            source_rel = sh.path.basename(source)
            (sourcedir, sourcefile) = sh.path.split(sourcetar)
            with sh.cdcontext(sourcedir):
                ok = ok and sh.remove(sourcefile)
                ok = ok and sh.tar(sourcefile, source_rel, output=False)
            return (ok, sourcetar, destination)
        else:
            return (ok, source, destination)


@folderize
class OdbShell(FolderShell):
    """
    Default interface to ODB commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default ODB system interface',
        attr = dict(
            kind = dict(
                values = ['odb'],
            ),
        )
    )


@folderize
class DdhPackShell(FolderShell):
    """
    Default interface to DDHpack commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default DDHpack system interface',
        attr = dict(
            kind = dict(
                values = ['ddhpack'],
            ),
        )
    )


@folderize
class RawFilesShell(FolderShell):
    """
    Default interface to rawfiles commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default (g)RRRRawfiles system interface',
        attr = dict(
            kind = dict(
                values = ['rawfiles'],
            ),
        )
    )


@folderize
class ObsLocationPackShell(FolderShell):
    """
    Default interface to Obs Location packs commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default Obs Location packs system interface',
        attr = dict(
            kind = dict(
                values = ['obslocationpack'],
            ),
        )
    )


@folderize
class ObsFirePackShell(FolderShell):
    """
    Default interface to Obs Fire packs commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default Obs Location packs system interface',
        attr = dict(
            kind = dict(
                values = ['obsfirepack'],
            ),
        )
    )


@folderize
class WavesBCShell(FolderShell):
    """
    Default interface to waves bounding conditions commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default waves BC system interface',
        attr = dict(
            kind = dict(
                values = ['wbcpack'],
            ),
        )
    )


@folderize
class FilesPackShell(FolderShell):
    """
    Default interface to files packs commands.
    These commands extend the shell.
    """

    _footprint = dict(
        info = 'Default Files packs system interface',
        attr = dict(
            kind = dict(
                values = ['filespack'],
            ),
        )
    )


available_foldershells = [e.footprint_values('kind')[0] for e in locals().values()
                          if (isinstance(e, type) and issubclass(e, FolderShell) and
                              not e.footprint_abstract())]


class FolderShellsGroup(addons.AddonGroup):
    """The whole bunch of folder shells."""

    _footprint = dict(
        info = 'The whole bunch of folder shells',
        attr = dict(
            kind = dict(
                values = ['allfolders', ],
            ),
        )
    )

    _addonslist = available_foldershells
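Editor's note on the code above: the @folderize decorator does nothing more than alias the generic _folder_* methods under kind-specific names (odb_ftget, ddhpack_cp, ...), so each concrete shell exposes the full set of folder operations without re-implementing them. The standalone sketch below reproduces that aliasing idea with made-up names (demo_folderize, DemoFolderShell, DemoOdbShell); it is only an illustration and is not part of the vortex-nwp package, which relies on the footprints machinery shown in the diff.

# Simplified, self-contained illustration of the aliasing performed by @folderize.
# All names here are hypothetical; the real classes use footprint_retrieve() to
# discover the addon kind instead of a plain "kind" class attribute.
_exposed = {'cp', 'ftget', 'ftput'}


def demo_folderize(cls):
    """Expose the generic _folder_* methods under '<kind>_<name>' aliases."""
    for name in _exposed:
        alias = '{:s}_{:s}'.format(cls.kind, name)
        if not hasattr(cls, alias):
            setattr(cls, alias, getattr(cls, '_folder_{:s}'.format(name)))
    return cls


class DemoFolderShell:
    """Generic folder operations shared by every concrete shell."""
    kind = 'folder'

    def _folder_cp(self, source, destination):
        return 'cp {} -> {}'.format(source, destination)

    def _folder_ftget(self, source, destination):
        return 'ftget {} -> {}'.format(source, destination)

    def _folder_ftput(self, source, destination):
        return 'ftput {} -> {}'.format(source, destination)


@demo_folderize
class DemoOdbShell(DemoFolderShell):
    """Concrete shell: only declares its kind, the decorator adds the aliases."""
    kind = 'odb'


if __name__ == '__main__':
    shell = DemoOdbShell()
    # The decorator created odb_cp, odb_ftget and odb_ftput, all bound to the
    # generic implementations inherited from DemoFolderShell.
    print(shell.odb_ftget('host:/path/pack.tgz', 'localpack'))

Running the sketch prints the kind-prefixed alias resolving to the shared generic implementation, which is exactly how OdbShell, DdhPackShell and the other concrete shells in this file acquire their odb_*, ddhpack_*, etc. methods.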