atex 0.4-py3-none-any.whl → 0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atex/__init__.py +2 -12
- atex/cli/__init__.py +13 -13
- atex/cli/minitmt.py +128 -35
- atex/cli/testingfarm.py +59 -59
- atex/connection/__init__.py +125 -0
- atex/connection/ssh.py +406 -0
- atex/minitmt/__init__.py +17 -109
- atex/minitmt/executor.py +348 -0
- atex/minitmt/fmf.py +87 -53
- atex/minitmt/scripts.py +143 -45
- atex/minitmt/testcontrol.py +354 -0
- atex/{orchestrator.py → orchestrator/__init__.py} +22 -1
- atex/orchestrator/aggregator.py +163 -0
- atex/provision/__init__.py +77 -35
- atex/provision/libvirt/VM_PROVISION +8 -0
- atex/provision/libvirt/__init__.py +4 -4
- atex/provision/nspawn/README +74 -0
- atex/provision/podman/README +59 -0
- atex/provision/podman/host_container.sh +74 -0
- atex/provision/testingfarm/__init__.py +29 -0
- atex/{testingfarm.py → provision/testingfarm/api.py} +116 -93
- atex/provision/testingfarm/foo.py +1 -0
- atex/util/__init__.py +4 -4
- atex/util/dedent.py +1 -1
- atex/util/log.py +12 -12
- atex/util/subprocess.py +14 -13
- {atex-0.4.dist-info → atex-0.7.dist-info}/METADATA +1 -1
- atex-0.7.dist-info/RECORD +32 -0
- atex/minitmt/report.py +0 -174
- atex/minitmt/testme.py +0 -3
- atex/ssh.py +0 -320
- atex/util/lockable_class.py +0 -38
- atex-0.4.dist-info/RECORD +0 -26
- {atex-0.4.dist-info → atex-0.7.dist-info}/WHEEL +0 -0
- {atex-0.4.dist-info → atex-0.7.dist-info}/entry_points.txt +0 -0
- {atex-0.4.dist-info → atex-0.7.dist-info}/licenses/COPYING.txt +0 -0
atex/connection/ssh.py
ADDED
@@ -0,0 +1,406 @@
+"""
+Connection API implementation using the OpenSSH ssh(1) client.
+
+Any SSH options are passed via dictionaries of options, and later translated
+to '-o' client CLI options, incl. Hostname, User, Port, IdentityFile, etc.
+No "typical" ssh CLI switches are used.
+
+This allows for a nice flexibility from Python code - this module provides
+some sensible option defaults (for scripted use), but you are free to
+overwrite any options via class or function arguments (where appropriate).
+
+Note that .cmd() quotes arguments to really execute individual arguments
+as individual arguments in the remote shell, so you need to give it a proper
+iterable (like for other Connections), not a single string with spaces.
+"""
+
+import os
+import time
+import shlex
+import tempfile
+import subprocess
+from pathlib import Path
+
+from .. import util
+from . import Connection
+
+
+DEFAULT_OPTIONS = {
+    "LogLevel": "ERROR",
+    "StrictHostKeyChecking": "no",
+    "UserKnownHostsFile": "/dev/null",
+    "ConnectionAttempts": "3",
+    "ServerAliveCountMax": "4",
+    "ServerAliveInterval": "5",
+    "TCPKeepAlive": "no",
+    "EscapeChar": "none",
+    "ExitOnForwardFailure": "yes",
+}
+
+
+class SSHError(Exception):
+    pass
+
+
+class DisconnectedError(SSHError):
+    """
+    Raised when an already-connected ssh session goes away (breaks connection).
+    """
+    pass
+
+
+class NotConnectedError(SSHError):
+    """
+    Raised when an operation on ssh connection is requested, but the connection
+    is not yet open (or has been closed/disconnected).
+    """
+    pass
+
+
+class ConnectError(SSHError):
+    """
+    Raised when a to-be-opened ssh connection fails to open.
+    """
+    pass
+
+
+def _shell_cmd(command, sudo=None):
+    """
+    Make a command line for running 'command' on the target system.
+    """
+    quoted_args = (shlex.quote(arg) for arg in command)
+    if sudo:
+        return " ".join((
+            "exec", "sudo", "--no-update", "--non-interactive", "--user", sudo, "--", *quoted_args,
+        ))
+    else:
+        return " ".join(("exec", *quoted_args))
+
+
+def _options_to_cli(options):
+    """
+    Assemble an ssh(1) or sshpass(1) command line with -o options.
+    """
+    list_opts = []
+    for key, value in options.items():
+        if isinstance(value, (list, tuple, set)):
+            list_opts += (f"-o{key}={v}" for v in value)
+        else:
+            list_opts.append(f"-o{key}={value}")
+    return list_opts
+
+
+def _options_to_ssh(options, password=None, extra_cli_flags=()):
+    """
+    Assemble an ssh(1) or sshpass(1) command line with -o options.
+    """
+    cli_opts = _options_to_cli(options)
+    if password:
+        return (
+            "sshpass", "-p", password,
+            "ssh", *extra_cli_flags, "-oBatchMode=no", *cli_opts,
+            "ignored_arg",
+        )
+    else:
+        # let cli_opts override BatchMode if specified
+        return ("ssh", *extra_cli_flags, *cli_opts, "-oBatchMode=yes", "ignored_arg")
+
+
+# return a string usable for rsync -e
+def _options_to_rsync_e(options, password=None):
+    """
+    Return a string usable for the rsync -e argument.
+    """
+    cli_opts = _options_to_cli(options)
+    batch_mode = "-oBatchMode=no" if password else "-oBatchMode=yes"
+    return " ".join(("ssh", *cli_opts, batch_mode))  # no ignored_arg inside -e
+
+
+def _rsync_host_cmd(*args, options, password=None, sudo=None):
+    """
+    Assemble a rsync command line, noting that
+    - 'sshpass' must be before 'rsync', not inside the '-e' argument
+    - 'ignored_arg' must be passed by user as destination, not inside '-e'
+    - 'sudo' is part of '--rsync-path', yet another argument
+    """
+    return (
+        *(("sshpass", "-p", password) if password else ()),
+        "rsync",
+        "-e", _options_to_rsync_e(options, password=password),
+        "--rsync-path", _shell_cmd(("rsync",), sudo=sudo),
+        *args,
+    )
+
+
+class StatelessSSHConn(Connection):
+    """
+    Implements the Connection API using a ssh(1) client using "standalone"
+    (stateless) logic - connect() and disconnect() are no-op, .cmd() simply
+    executes the ssh client and .rsync() executes 'rsync -e ssh'.
+
+    Compared to ManagedSSHConn, this may be slow for many .cmd() calls,
+    but every call is stateless, there is no persistent connection.
+
+    If you need only one .cmd(), this will be faster than ManagedSSHConn.
+    """
+
+    def __init__(self, options, *, password=None, sudo=None):
+        """
+        Prepare to connect to an SSH server specified in 'options'.
+
+        If 'password' is given, spawn the ssh(1) command via 'sshpass' and
+        pass the password to it.
+
+        If 'sudo' specifies a username, call sudo(8) on the remote shell
+        to run under a different user on the remote host.
+        """
+        super().__init__()
+        self.options = DEFAULT_OPTIONS.copy()
+        self.options.update(options)
+        self.password = password
+        self.sudo = sudo
+        self._tmpdir = None
+        self._master_proc = None
+
+    def connect(self):
+        """
+        Optional, .cmd() and .rsync() work without it, but it is provided here
+        for compatibility with the Connection API.
+        """
+        # TODO: just wait until .cmd(['true']) starts responding
+        pass
+
+    def disconnect(self):
+        pass
+
+    # def alive(self):
+    #     return True
+
+    # have options as kwarg to be compatible with other functions here
+    def cmd(self, command, options=None, func=util.subprocess_run, **func_args):
+        unified_options = self.options.copy()
+        if options:
+            unified_options.update(options)
+        unified_options["RemoteCommand"] = _shell_cmd(command, sudo=self.sudo)
+        return func(
+            _options_to_ssh(unified_options, password=self.password),
+            skip_frames=1,
+            **func_args,
+        )
+
+    def rsync(self, *args, options=None, func=util.subprocess_run, **func_args):
+        unified_options = self.options.copy()
+        if options:
+            unified_options.update(options)
+        return func(
+            _rsync_host_cmd(
+                *args,
+                options=unified_options,
+                password=self.password,
+                sudo=self.sudo,
+            ),
+            skip_frames=1,
+            check=True,
+            stdin=subprocess.DEVNULL,
+            **func_args,
+        )
+
+
+# Note that when ControlMaster goes away (connection breaks), any ssh clients
+# connected through it will time out after a combination of
+# ServerAliveCountMax + ServerAliveInterval + ConnectionAttempts
+# identical to the ControlMaster process.
+# Specifying different values for the clients, to make them exit faster when
+# the ControlMaster dies, has no effect. They seem to ignore the options.
+#
+# If you need to kill the clients quickly after ControlMaster disconnects,
+# you need to set up an independent polling logic (ie. every 0.1sec) that
+# checks .assert_master() and manually signals the running clients
+# when it gets DisconnectedError from it.
+
+class ManagedSSHConn(Connection):
+    """
+    Implements the Connection API using one persistently-running ssh(1) client
+    started in a 'ControlMaster' mode, with additional ssh clients using that
+    session to execute remote commands. Similarly, .rsync() uses it too.
+
+    This is much faster than StatelessSSHConn when executing multiple commands,
+    but contains a complex internal state (what if ControlMaster disconnects?).
+
+    Hence why this implementation provides extra non-standard-Connection methods
+    to manage this complexity.
+    """
+
+    # TODO: thread safety and locking via self.lock
+
+    def __init__(self, options, *, password=None, sudo=None):
+        """
+        Prepare to connect to an SSH server specified in 'options'.
+
+        If 'password' is given, spawn the ssh(1) command via 'sshpass' and
+        pass the password to it.
+
+        If 'sudo' specifies a username, call sudo(8) on the remote shell
+        to run under a different user on the remote host.
+        """
+        super().__init__()
+        self.options = DEFAULT_OPTIONS.copy()
+        self.options.update(options)
+        self.password = password
+        self.sudo = sudo
+        self._tmpdir = None
+        self._master_proc = None
+
+    # def __copy__(self):
+    #     return type(self)(self.options, password=self.password)
+    #
+    # def copy(self):
+    #     return self.__copy__()
+
+    def assert_master(self):
+        proc = self._master_proc
+        if not proc:
+            raise NotConnectedError("SSH ControlMaster is not running")
+        # we need to consume any potential proc output for the process to
+        # actually terminate (stop being a zombie) if it crashes
+        out = proc.stdout.read()
+        code = proc.poll()
+        if code is not None:
+            self._master_proc = None
+            out = f":\n{out.decode()}" if out else ""
+            raise DisconnectedError(
+                f"SSH ControlMaster on {self._tmpdir} exited with {code}{out}",
+            )
+
+    # def alive(self):
+    #     try:
+    #         self.assert_master()
+    #         return True
+    #     except (NotConnectedError, DisconnectedError):
+    #         return False
+
+    def disconnect(self):
+        proc = self._master_proc
+        if not proc:
+            return
+        proc.kill()
+        # don't zombie forever, return EPIPE on any attempts to write to us
+        proc.stdout.close()
+        proc.wait()
+        (self._tmpdir / "control.sock").unlink(missing_ok=True)
+        self._master_proc = None
+
+    def connect(self, block=True):
+        if not self._tmpdir:
+            # _tmpdir_handle just prevents the TemporaryDirectory instance
+            # from being garbage collected (and removed on disk)
+            # TODO: create/remove it explicitly in connect/disconnect
+            # so the removal happens immediately, even if GC delays cleaning
+            self._tmpdir_handle = tempfile.TemporaryDirectory(prefix="atex-ssh-")
+            self._tmpdir = Path(self._tmpdir_handle.name)
+
+        sock = self._tmpdir / "control.sock"
+
+        if not self._master_proc:
+            options = self.options.copy()
+            options["SessionType"] = "none"
+            options["ControlMaster"] = "yes"
+            options["ControlPath"] = sock
+            self._master_proc = util.subprocess_Popen(
+                _options_to_ssh(options),
+                stdin=subprocess.DEVNULL,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                cwd=str(self._tmpdir),
+            )
+            os.set_blocking(self._master_proc.stdout.fileno(), False)
+
+        proc = self._master_proc
+        if block:
+            while proc.poll() is None:
+                if sock.exists():
+                    break
+                time.sleep(0.1)
+            else:
+                code = proc.poll()
+                out = proc.stdout.read()
+                self._master_proc = None
+                # TODO: ConnectError should probably be generalized for Connection
+                raise ConnectError(
+                    f"SSH ControlMaster failed to start on {self._tmpdir} with {code}:\n{out}",
+                )
+        else:
+            code = proc.poll()
+            if code is not None:
+                out = proc.stdout.read()
+                self._master_proc = None
+                # TODO: ConnectError should probably be generalized for Connection
+                raise ConnectError(
+                    f"SSH ControlMaster failed to start on {self._tmpdir} with {code}:\n{out}",
+                )
+            elif not sock.exists():
+                raise BlockingIOError("SSH ControlMaster not yet ready")
+
+    def add_local_forward(self, *spec):
+        """
+        Add (one or more) ssh forwarding specifications as 'spec' to an
+        already-connected instance. Each specification has to follow the
+        format of ssh client's LocalForward option (see ssh_config(5)).
+        """
+        self.assert_master()
+        options = self.options.copy()
+        options["LocalForward"] = spec
+        options["ControlPath"] = self._tmpdir / "control.sock"
+        util.subprocess_run(
+            _options_to_ssh(options, extra_cli_flags=("-O", "forward")),
+            skip_frames=1,
+            check=True,
+        )
+
+    def add_remote_forward(self, *spec):
+        """
+        Add (one or more) ssh forwarding specifications as 'spec' to an
+        already-connected instance. Each specification has to follow the
+        format of ssh client's RemoteForward option (see ssh_config(5)).
+        """
+        self.assert_master()
+        options = self.options.copy()
+        options["RemoteForward"] = spec
+        options["ControlPath"] = self._tmpdir / "control.sock"
+        util.subprocess_run(
+            _options_to_ssh(options, extra_cli_flags=("-O", "forward")),
+            skip_frames=1,
+            check=True,
+        )
+
+    def cmd(self, command, options=None, func=util.subprocess_run, **func_args):
+        self.assert_master()
+        unified_options = self.options.copy()
+        if options:
+            unified_options.update(options)
+        unified_options["RemoteCommand"] = _shell_cmd(command, sudo=self.sudo)
+        unified_options["ControlPath"] = self._tmpdir / "control.sock"
+        return func(
+            _options_to_ssh(unified_options, password=self.password),
+            skip_frames=1,
+            **func_args,
+        )
+
+    def rsync(self, *args, options=None, func=util.subprocess_run, **func_args):
+        self.assert_master()
+        unified_options = self.options.copy()
+        if options:
+            unified_options.update(options)
+        unified_options["ControlPath"] = self._tmpdir / "control.sock"
+        return func(
+            _rsync_host_cmd(
+                *args,
+                options=unified_options,
+                password=self.password,
+                sudo=self.sudo,
+            ),
+            skip_frames=1,
+            check=True,
+            stdin=subprocess.DEVNULL,
+            **func_args,
+        )
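For orientation, a minimal usage sketch of the stateless variant added above. The host, user, and key path are invented illustration values, and the return value is assumed to be whatever util.subprocess_run produces (likely a CompletedProcess-style object); neither is documented in this diff.

# Hypothetical example only - option values below are placeholders.
from atex.connection.ssh import StatelessSSHConn

conn = StatelessSSHConn({
    "Hostname": "192.0.2.10",       # example address, not from this package
    "User": "root",
    "IdentityFile": "/path/to/key",
})
# .cmd() expects an iterable of arguments (each gets shlex-quoted for the
# remote shell), not a single string containing spaces
result = conn.cmd(("uname", "-r"))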
atex/minitmt/__init__.py
CHANGED
@@ -1,115 +1,23 @@
-
-
+"""
+TODO Minitmt documentation - reference README, etc.
+"""
 
-
+import importlib as _importlib
+import pkgutil as _pkgutil
 
-
+__all__ = [
+    info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
+]
 
-# TODO: install rsync on the guest as part of setup
 
-
-
+def __dir__():
+    return __all__
 
 
-
-
-
-
-
-
-
-"""
-Set of utilities for preparing a newly acquired/reserved machine for
-running tests, by installing global package requirements, copying all
-tests over, executing tmt plan 'prepare' step, etc.
-"""
-    def __init__(self, ssh_conn):
-        self.conn = ssh_conn
-
-    def copy_tests(self):
-        pass
-
-    def run_prepare_scripts(self):
-        pass
-
-    def __enter__(self):
-        self.conn.connect()
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.conn.disconnect()
-
-
-# TODO: have Executor take a finished Preparator instance as input?
-# - for extracting copied tests location
-# - for extracting TMT_PLAN_ENVIRONMENT_FILE location
-# - etc.
-
-
-class Executor:
-    """
-    Helper for running one test on a remote system and processing results
-    and uploaded files by that test.
-    """
-    def __init__(self, fmf_test, ssh_conn):
-        self.fmf_test = fmf_test
-        self.conn = ssh_conn
-        self.remote_socket = self.local_socket = None
-
-    def __enter__(self):
-        # generate a (hopefully) unique test control socket name
-        # and modify the SSHConn instance to use it
-        rand_name = f'atex-control-{_random_string(50)}.sock'
-        self.local_socket = Path(os.environ.get('TMPDIR', '/tmp')) / rand_name
-        self.remote_socket = f'/tmp/{rand_name}'
-        self.conn.options['RemoteForward'] = f'{self.remote_socket} {self.local_socket}'
-        self.conn.connect()
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.conn.ssh(f'rm -f {self.remote_socket}')
-        self.local_socket.unlink()
-        self.remote_socket = self.local_socket = None
-        self.conn.disconnect()
-
-    # execute all prepares (how:install and how:shell) via ssh
-    def prepare(self):
-        # TODO: check via __some_attr (named / prefixed after our class)
-        # whether this reserved system has been prepared already ... ?
-        # ^^^^ in Orchestrator
-        #
-        # TODO: copy root of fmf metadata to some /var/tmp/somedir to run tests from
-        #
-        # TODO: move prepare out, possibly to class-less function,
-        # we don't want it running over an SSHConn that would set up socket forwarding
-        # only to tear it back down, when executed from Orchestrator for setup only
-        #
-        # TODO: install rsync
-        pass
-
-    def run_script(self, script, duration=None, shell='/bin/bash', **kwargs):
-        self.conn.ssh(shell, input=script.encode())
-
-    # run one test via ssh and parse its results on-the-fly,
-    # write out logs
-    def run_test(self, fmf_test, reporter):
-        # TODO: pass environment from test fmf metadata
-        # TODO: watch for test duration, etc. metadata
-        # TODO: logging of stdout+stderr to hidden file, doing 'ln' from it to
-        # test-named 'testout' files
-        # - generate hidden name suffix via:
-        #   ''.join(random.choices('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=20))
-        output_logfile = \
-            reporter.files_dir(fmf_test.name) / f'.test_output_{self._random_string(50)}.log'
-        output_logfile = os.open(reporter.files_dir(fmf_test.name), os.O_WRONLY | os.O_CREAT)
-        try:
-            #self.conn.ssh(
-            pass
-        finally:
-            os.close(output_logfile)
-        # TODO: create temp dir on remote via 'mktemp -d', then call
-        # self.conn.add_remote_forward(...) with socket path inside that tmpdir
-
-        # TODO: run tests by passing stdout/stderr via pre-opened fd so we don't handle it in code
-
-        # TODO: read unix socket as nonblocking, check test subprocess.Popen proc status every 0.1sec
+# lazily import submodules
+def __getattr__(attr):
+    # importing a module known to exist
+    if attr in __all__:
+        return _importlib.import_module(f".{attr}", __name__)
+    else:
+        raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")