atex-0.10-py3-none-any.whl → atex-0.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atex/aggregator/__init__.py +8 -6
- atex/aggregator/json.py +234 -51
- atex/cli/__init__.py +3 -0
- atex/cli/fmf.py +7 -7
- atex/cli/testingfarm.py +95 -45
- atex/executor/__init__.py +23 -2
- atex/executor/executor.py +26 -21
- atex/executor/reporter.py +3 -4
- atex/executor/scripts.py +14 -14
- atex/executor/testcontrol.py +32 -27
- atex/orchestrator/adhoc.py +116 -83
- atex/orchestrator/contest.py +116 -0
- atex/provisioner/__init__.py +0 -16
- atex/provisioner/libvirt/libvirt.py +13 -1
- atex/provisioner/testingfarm/api.py +57 -10
- atex/provisioner/testingfarm/testingfarm.py +25 -21
- atex/util/log.py +1 -1
- atex/util/subprocess.py +6 -6
- {atex-0.10.dist-info → atex-0.12.dist-info}/METADATA +1 -1
- {atex-0.10.dist-info → atex-0.12.dist-info}/RECORD +23 -22
- {atex-0.10.dist-info → atex-0.12.dist-info}/WHEEL +1 -1
- {atex-0.10.dist-info → atex-0.12.dist-info}/entry_points.txt +0 -0
- {atex-0.10.dist-info → atex-0.12.dist-info}/licenses/COPYING.txt +0 -0
atex/orchestrator/contest.py
ADDED
@@ -0,0 +1,116 @@
+import collections
+
+from .. import util
+from .adhoc import AdHocOrchestrator
+
+
+# copy/pasted from the Contest repo, lib/virt.py
+def calculate_guest_tag(tags):
+    if "snapshottable" not in tags:
+        return None
+    name = "default"
+    if "with-gui" in tags:
+        name += "_gui"
+    if "uefi" in tags:
+        name += "_uefi"
+    if "fips" in tags:
+        name += "_fips"
+    return name
+
+
+class ContestOrchestrator(AdHocOrchestrator):
+    """
+    Orchestrator for the Contest test suite:
+    https://github.com/RHSecurityCompliance/contest
+
+    Includes SCAP content upload via rsync and other Contest-specific
+    optimizations (around VM snapshots and scheduling).
+    """
+    content_dir_on_remote = "/root/upstream-content"
+
+    def __init__(self, *args, content_dir, max_reruns=1, **kwargs):
+        """
+        'content_dir' is a filesystem path to ComplianceAsCode/content local
+        directory, to be uploaded to the tested systems.
+
+        'max_reruns' is an integer of how many times to re-try running a failed
+        test (which exited with non-0 or caused an Executor exception).
+        """
+        super().__init__(*args, **kwargs)
+        self.content_dir = content_dir
+        # indexed by test name, value being integer of how many times
+        self.reruns = collections.defaultdict(lambda: max_reruns)
+
+    def run_setup(self, sinfo):
+        super().run_setup(sinfo)
+        # upload pre-built content
+        sinfo.remote.rsync(
+            "-r", "--delete", "--exclude=.git/",
+            f"{self.content_dir}/",
+            f"remote:{self.content_dir_on_remote}",
+            func=util.subprocess_log,
+        )
+
+    @classmethod
+    def next_test(cls, to_run, all_tests, previous):
+        # fresh remote, prefer running destructive tests (which likely need
+        # clean OS) to get them out of the way and prevent them from running
+        # on a tainted OS later
+        if type(previous) is AdHocOrchestrator.SetupInfo:
+            for next_name in to_run:
+                next_tags = all_tests[next_name].get("tag", ())
+                util.debug(f"considering next_test for destructivity: {next_name}")
+                if "destructive" in next_tags:
+                    util.debug(f"chosen next_test: {next_name}")
+                    return next_name
+
+        # previous test was run and finished non-destructively,
+        # try to find a next test with the same Contest lib.virt guest tags
+        # as the previous one, allowing snapshot reuse by Contest
+        elif type(previous) is AdHocOrchestrator.FinishedInfo:
+            finished_tags = all_tests[previous.test_name].get("tag", ())
+            util.debug(f"previous finished test on {previous.remote}: {previous.test_name}")
+            # if Guest tag is None, don't bother searching
+            if finished_guest_tag := calculate_guest_tag(finished_tags):
+                for next_name in to_run:
+                    util.debug(f"considering next_test with tags {finished_tags}: {next_name}")
+                    next_tags = all_tests[next_name].get("tag", ())
+                    next_guest_tag = calculate_guest_tag(next_tags)
+                    if next_guest_tag and finished_guest_tag == next_guest_tag:
+                        util.debug(f"chosen next_test: {next_name}")
+                        return next_name
+
+        # fallback to the default next_test()
+        return super().next_test(to_run, all_tests, previous)
+
+    @staticmethod
+    def destructive(info, test_data):
+        # if Executor ended with an exception (ie. duration exceeded),
+        # consider the test destructive
+        if info.exception:
+            return True
+
+        # if the test returned non-0 exit code, it could have thrown
+        # a python exception of its own, or (if bash) aborted abruptly
+        # due to 'set -e', don't trust the remote, consider it destroyed
+        # (0 = pass, 2 = fail, anything else = bad)
+        if info.exit_code not in [0,2]:
+            return True
+
+        # if the test was destructive, assume the remote is destroyed
+        tags = test_data.get("tag", ())
+        if "destructive" in tags:
+            return True
+
+        return False
+
+    def should_be_rerun(self, info, test_data):  # noqa: ARG004, ARG002
+        remote_with_test = f"{info.remote}: '{info.test_name}'"
+
+        reruns_left = self.reruns[info.test_name]
+        util.info(f"{remote_with_test}: {reruns_left} reruns left")
+        if reruns_left > 0:
+            self.reruns[info.test_name] -= 1
+            return True
+        else:
+            return False
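Editor's note: the snapshot-reuse scheduling in next_test() above hinges on grouping tests by the guest tag that calculate_guest_tag() derives from their fmf tags. The standalone sketch below (not part of the package; the tag combinations are made-up examples) shows what that helper produces.

# Standalone illustration of the guest-tag grouping used by next_test() above;
# calculate_guest_tag() is copied verbatim from the diff, the tag sets below
# are invented examples.
def calculate_guest_tag(tags):
    if "snapshottable" not in tags:
        return None
    name = "default"
    if "with-gui" in tags:
        name += "_gui"
    if "uefi" in tags:
        name += "_uefi"
    if "fips" in tags:
        name += "_fips"
    return name

examples = [
    ("non-snapshottable test", ("destructive",)),
    ("plain snapshottable test", ("snapshottable",)),
    ("GUI + FIPS test", ("snapshottable", "with-gui", "fips")),
    ("UEFI test", ("snapshottable", "uefi")),
]

for label, tags in examples:
    # tests sharing the same (non-None) guest tag can reuse each other's VM snapshots
    print(f"{label:28} -> {calculate_guest_tag(tags)}")
# non-snapshottable test       -> None
# plain snapshottable test     -> default
# GUI + FIPS test              -> default_gui_fips
# UEFI test                    -> default_uefi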
atex/provisioner/__init__.py
CHANGED
@@ -56,10 +56,6 @@ class Provisioner:
     that .get_remote() will ever return a Remote. Ie. the caller can call
     .provision(count=math.inf) to receive as many remotes as the Provisioner
     can possibly supply.
-
-    TODO: remove .defer_stop() (or stop_defer) and mention this below:
-    Note that .stop() or .defer_stop() may be called from a different
-    thread, asynchronously to any other functions.
     """

     def provision(self, count=1):
@@ -93,18 +89,6 @@ class Provisioner:
         """
         raise NotImplementedError(f"'stop' not implemented for {self.__class__.__name__}")

-    def stop_defer(self):
-        """
-        Enable an external caller to stop the Provisioner instance,
-        deferring resource deallocation to the caller.
-
-        Return an iterable of argument-free thread-safe callables that can be
-        called, possibly in parallel, to free up resources.
-        Ie. a list of 200 .release() functions, to be called in a thread pool
-        by the user, speeding up cleanup.
-        """
-        return (self.stop,)
-
     def __enter__(self):
         try:
             self.start()
atex/provisioner/libvirt/libvirt.py
CHANGED
@@ -260,7 +260,19 @@ class LibvirtCloningProvisioner(Provisioner):
             # by libvirt natively (because treating nvram as a storage pool
             # is a user hack)
             for p in conn.listAllStoragePools():
-
+                # retry a few times to work around a libvirt race condition
+                for _ in range(10):
+                    try:
+                        p.refresh()
+                    except libvirt.libvirtError as e:
+                        if "domain is not running" in str(e):
+                            break
+                        elif "has asynchronous jobs running" in str(e):
+                            continue
+                        else:
+                            raise
+                    else:
+                        break
             try:
                 nvram_vol = conn.storageVolLookupByPath(nvram_path)
             except libvirt.libvirtError as e:
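Editor's note: the hunk above bounds the pool refresh with a retry loop that distinguishes "benign, stop trying", "transient, try again", and "real error". A minimal, self-contained sketch of that pattern follows; FlakyPool is a stand-in for the libvirt storage pool object, not real libvirt API.

# Bounded-retry pattern mirroring the p.refresh() change above, with a fake
# object that fails a few times before succeeding.
class FlakyPool:
    def __init__(self, failures_before_success):
        self.remaining = failures_before_success

    def refresh(self):
        if self.remaining > 0:
            self.remaining -= 1
            raise RuntimeError("has asynchronous jobs running")

pool = FlakyPool(failures_before_success=3)

for attempt in range(10):
    try:
        pool.refresh()
    except RuntimeError as e:
        if "domain is not running" in str(e):
            break          # benign: nothing to refresh, give up quietly
        elif "has asynchronous jobs running" in str(e):
            continue       # transient race: try again
        else:
            raise          # anything else is a real error
    else:
        break              # refresh succeeded, stop retrying

print(f"succeeded after {attempt + 1} attempt(s)")  # -> succeeded after 4 attempt(s)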
atex/provisioner/testingfarm/api.py
CHANGED
@@ -2,6 +2,7 @@ import os
 import re
 import time
 import tempfile
+import datetime
 import textwrap
 import threading
 import subprocess
@@ -14,11 +15,11 @@ from ... import util
 import json
 import urllib3

-DEFAULT_API_URL = "https://api.testing-farm.io
+DEFAULT_API_URL = "https://api.testing-farm.io"

 DEFAULT_RESERVE_TEST = {
     "url": "https://github.com/RHSecurityCompliance/atex-reserve",
-    "ref": "
+    "ref": "0.12",
     "path": ".",
     "name": "/plans/reserve",
 }
@@ -34,10 +35,10 @@ _http = urllib3.PoolManager(
     maxsize=10,
     block=True,
     retries=urllib3.Retry(
-        total=
+        total=24,
         # account for API restarts / short outages
-        backoff_factor=
-        backoff_max=
+        backoff_factor=10,
+        backoff_max=3600,
         # retry on API server errors too, not just connection issues
         status=10,
         status_forcelist={403,404,408,429,500,502,503,504},
@@ -85,8 +86,8 @@ class TestingFarmAPI:
         self.api_url = url
         self.api_token = token or os.environ.get("TESTING_FARM_API_TOKEN")

-    def _query(self, method, path, *args, headers=None, auth=True, **kwargs):
-        url = f"{self.api_url}{path}"
+    def _query(self, method, path, *args, headers=None, auth=True, version="v0.1", **kwargs):
+        url = f"{self.api_url}/{version}{path}"
         if self.api_token and auth:
             if headers is not None:
                 headers["Authorization"] = f"Bearer {self.api_token}"
@@ -137,7 +138,7 @@ class TestingFarmAPI:
         if not self.api_token:
             raise ValueError("composes() requires an auth token to identify ranch")
         ranch = self.whoami()["token"]["ranch"]
-        return self._query("GET", f"/composes/{ranch}")
+        return self._query("GET", f"/composes/{ranch}", version="v0.2")

     def search_requests(
         self, *, state, ranch=None,
@@ -180,6 +181,52 @@ class TestingFarmAPI:

         return self._query("GET", "/requests", fields=fields, auth=mine)

+    def search_requests_paged(self, *args, page=43200, **kwargs):
+        """
+        An unofficial wrapper for search_requests() that can search a large
+        interval incrementally (in "pages") and yield batches of results.
+
+        Needs 'created_after', with 'created_before' defaulting to now().
+
+        'page' specifies the time interval of one page, in seconds.
+
+        'args' and 'kwargs' are passed to search_requests().
+        """
+        assert "created_after" in kwargs, "at least 'created_after' is needed for paging"
+
+        def from_iso8601(date):
+            dt = datetime.datetime.fromisoformat(date)
+            # if no TZ is specified, treat it as UTC, not localtime
+            if dt.tzinfo is None:
+                dt = dt.replace(tzinfo=datetime.UTC)
+            # convert to UTC
+            else:
+                dt = dt.astimezone(datetime.UTC)
+            return dt
+
+        after = from_iso8601(kwargs["created_after"])
+        if kwargs.get("created_before"):
+            before = from_iso8601(kwargs["created_before"])
+        else:
+            before = datetime.datetime.now(datetime.UTC)
+
+        # scale down page size to fit between after/before
+        page = min(page, (before - after).total_seconds())
+
+        start = after
+        while start < before:
+            end = start + datetime.timedelta(seconds=page)
+            # clamp to real 'before'
+            end = min(end, before)
+            new_kwargs = kwargs | {
+                "created_after": start.isoformat(),
+                "created_before": end.isoformat(),
+            }
+            found = self.search_requests(*args, **new_kwargs)
+            if found is not None:
+                yield from found
+            start = end
+
     def get_request(self, request_id):
         """
         'request_id' is the UUID (string) of the request.
@@ -438,7 +485,6 @@ class Reserve:
         'api' is a TestingFarmAPI instance - if unspecified, a sensible default
         will be used.
         """
-        util.info(f"will reserve compose:{compose} on arch:{arch} for {timeout}min")
         spec = {
             "test": {
                 "fmf": reserve_test or DEFAULT_RESERVE_TEST,
@@ -571,7 +617,8 @@ class Reserve:
         # installs our ssh pubkey into authorized_keys)
         ssh_attempt_cmd = (
             "ssh", "-q", "-i", ssh_key.absolute(), "-oConnectionAttempts=60",
-
+            "-oStrictHostKeyChecking=no", "-oUserKnownHostsFile=/dev/null",
+            "-oBatchMode=yes",
             f"{ssh_user}@{ssh_host}", "exit 123",
         )
         while True:
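Editor's note: the new search_requests_paged() is mostly datetime arithmetic around repeated search_requests() calls. The standalone sketch below reproduces just that arithmetic (no API calls; the dates are arbitrary examples, Python 3.11+ for datetime.UTC) to show how a window gets sliced into pages.

# Split a [created_after, created_before] window into 'page'-second slices,
# clamping the last slice, as search_requests_paged() above does.
import datetime

created_after = datetime.datetime(2025, 1, 1, tzinfo=datetime.UTC)
created_before = datetime.datetime(2025, 1, 2, 6, 0, tzinfo=datetime.UTC)
page = 43200  # 12 hours, the default page size in the diff

# scale down page size if the whole window is smaller than one page
page = min(page, (created_before - created_after).total_seconds())

start = created_after
while start < created_before:
    end = min(start + datetime.timedelta(seconds=page), created_before)
    # each (start, end) pair becomes one search_requests() call
    print(f"{start.isoformat()} .. {end.isoformat()}")
    start = end
# 2025-01-01T00:00:00+00:00 .. 2025-01-01T12:00:00+00:00
# 2025-01-01T12:00:00+00:00 .. 2025-01-02T00:00:00+00:00
# 2025-01-02T00:00:00+00:00 .. 2025-01-02T06:00:00+00:00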
atex/provisioner/testingfarm/testingfarm.py
CHANGED
@@ -1,6 +1,7 @@
 import time
 import tempfile
 import threading
+import concurrent.futures

 from ... import connection, util
 from .. import Provisioner, Remote
@@ -49,7 +50,12 @@ class TestingFarmRemote(Remote, connection.ssh.ManagedSSHConnection):


 class TestingFarmProvisioner(Provisioner):
+    # maximum number of TF requests the user can .provision(),
+    # as a last safety measure against Orchestrator(remotes=math.inf)
     absolute_max_remotes = 100
+    # number of parallel threads running HTTP DELETE calls to cancel
+    # TF requests on .stop() or Context Manager exit
+    stop_release_workers = 10

     def __init__(self, compose, arch="x86_64", *, max_retries=10, **reserve_kwargs):
         """
@@ -129,6 +135,7 @@ class TestingFarmProvisioner(Provisioner):
         # instantiate a class Reserve from the Testing Farm api module
         # (which typically provides context manager, but we use its .reserve()
         # and .release() functions directly)
+        util.info(f"{repr(self)}: reserving new remote")
         tf_reserve = api.Reserve(
             compose=self.compose,
             arch=self.arch,
@@ -154,29 +161,25 @@ class TestingFarmProvisioner(Provisioner):
         self.ssh_key, self.ssh_pubkey = util.ssh_keygen(self._tmpdir.name)

     def stop(self):
-
-        # abort reservations in progress
-        while self.reserving:
-            # testingfarm api.Reserve instances
-            self.reserving.pop().release()
-        # cancel/release all Remotes ever created by us
-        while self.remotes:
-            # TestingFarmRemote instances
-            self.remotes.pop().release()
-        # explicitly remove the tmpdir rather than relying on destructor
-        self._tmpdir.cleanup()
-        self._tmpdir = None
+        release_funcs = []

-    def stop_defer(self):
-        callables = []
         with self.lock:
-
+            release_funcs += (f.release for f in self.reserving)
             self.reserving = []
-
-            self.remotes = []  # just in case
-
+            release_funcs += (r.release for r in self.remotes)
+            self.remotes = []  # just in case of a later .start()
+
+        # parallelize at most stop_release_workers TF API release (DELETE) calls
+        if release_funcs:
+            workers = min(len(release_funcs), self.stop_release_workers)
+            with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as ex:
+                for func in release_funcs:
+                    ex.submit(func)
+
+        with self.lock:
+            # explicitly remove the tmpdir rather than relying on destructor
+            self._tmpdir.cleanup()
             self._tmpdir = None
-        return callables

     def provision(self, count=1):
         with self.lock:
@@ -198,10 +201,11 @@ class TestingFarmProvisioner(Provisioner):
                 # always non-blocking
                 return None
         except (api.TestingFarmError, connection.ssh.SSHError) as e:
+            exc_str = f"{type(e).__name__}({e})"
            with self.lock:
                if self.retries > 0:
                    util.warning(
-                        f"caught while reserving a TF system: {
+                        f"caught while reserving a TF system: {exc_str}, "
                        f"retrying ({self.retries} left)",
                    )
                    self.retries -= 1
@@ -212,7 +216,7 @@ class TestingFarmProvisioner(Provisioner):
                    return None
                else:
                    util.warning(
-                        f"caught while reserving a TF system: {
+                        f"caught while reserving a TF system: {exc_str}, "
                        "exhausted all retries, giving up",
                    )
                    raise
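Editor's note: the reworked stop() above collects argument-free release callables and drains them through a bounded thread pool. The sketch below isolates that pattern; the sleep-based release() is a stand-in for the real Testing Farm DELETE call, and stop_release_workers mirrors the new class attribute.

# Minimal sketch of the parallel-release pattern used by the new stop().
import time
import concurrent.futures

stop_release_workers = 10  # same cap as the new class attribute


def make_release(name):
    def release():
        time.sleep(0.1)  # pretend this is an HTTP DELETE to the TF API
        print(f"released {name}")
    return release


release_funcs = [make_release(f"request-{i}") for i in range(4)]

if release_funcs:
    workers = min(len(release_funcs), stop_release_workers)
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as ex:
        for func in release_funcs:
            ex.submit(func)
# the 'with' block waits for all submitted releases to finish before
# returning, which is why stop() can safely clean up its tmpdir afterwards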
atex/util/log.py
CHANGED
@@ -30,7 +30,7 @@ skip_levels = {

 def _log_msg(logger_func, *args, stacklevel=1, **kwargs):
     # inspect.stack() is MUCH slower
-    caller = inspect.currentframe().f_back.f_back
+    caller = inspect.currentframe().f_back.f_back  # TODO: sys._getframe(2)
     extra_levels = 2  # skip this func and the debug/info/warning parent
     while caller.f_back:
         code = caller.f_code
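Editor's note: the TODO added above suggests swapping the frame walk for sys._getframe(2). The tiny self-contained check below (an assumption about the intended replacement, and CPython-specific since _getframe is an implementation detail) shows the two expressions return the same frame when called two levels deep.

# Demonstrate that inspect.currentframe().f_back.f_back and sys._getframe(2)
# point at the same frame two levels up the call stack.
import sys
import inspect


def inner():
    via_inspect = inspect.currentframe().f_back.f_back
    via_getframe = sys._getframe(2)
    print(via_inspect is via_getframe)  # True


def outer():
    inner()


outer()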
atex/util/subprocess.py
CHANGED
@@ -1,6 +1,6 @@
 import subprocess

-from .log import
+from .log import extradebug


 def subprocess_run(cmd, **kwargs):
@@ -9,7 +9,7 @@ def subprocess_run(cmd, **kwargs):
     """
     # when logging, skip current stack frame - report the place we were called
     # from, not util.subprocess_run itself
-
+    extradebug(f"running: '{cmd}' with {kwargs=}")
     return subprocess.run(cmd, **kwargs)


@@ -17,7 +17,7 @@ def subprocess_output(cmd, *, check=True, text=True, **kwargs):
     """
     A wrapper simulating subprocess.check_output() via a modern .run() API.
     """
-
+    extradebug(f"running: '{cmd}' with {check=}, {text=} and {kwargs=}")
     proc = subprocess.run(cmd, check=check, text=text, stdout=subprocess.PIPE, **kwargs)
     return proc.stdout.rstrip("\n") if text else proc.stdout

@@ -26,7 +26,7 @@ def subprocess_Popen(cmd, **kwargs): # noqa: N802
     """
     A simple wrapper for the real subprocess.Popen() that logs the command used.
     """
-
+    extradebug(f"running: '{cmd}' with {kwargs=}")
     return subprocess.Popen(cmd, **kwargs)


@@ -56,7 +56,7 @@ def subprocess_stream(cmd, *, stream="stdout", check=False, input=None, **kwargs
         all_kwargs["stdin"] = subprocess.PIPE
     all_kwargs |= kwargs

-
+    extradebug(f"running: '{cmd}' with {all_kwargs=}")
     proc = subprocess.Popen(cmd, **all_kwargs)

     def generate_lines():
@@ -80,7 +80,7 @@ def subprocess_log(cmd, **kwargs):

     Uses subprocess_stream() to gather the lines.
     """
-
+    extradebug(f"running: '{cmd}' with {kwargs=}")
     _, lines = subprocess_stream(cmd, **kwargs)
     for line in lines:
         extradebug(line)
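Editor's note: every extradebug() line added above leans on the f-string '=' self-documenting specifier, so each log record carries both the command and the keyword arguments. A quick illustration (the cmd and kwargs values are arbitrary examples):

cmd = ["ssh", "remote", "exit 123"]
kwargs = {"check": True, "text": True}

print(f"running: '{cmd}' with {kwargs=}")
# running: '['ssh', 'remote', 'exit 123']' with kwargs={'check': True, 'text': True}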
{atex-0.10.dist-info → atex-0.12.dist-info}/RECORD
CHANGED
@@ -1,44 +1,45 @@
 atex/__init__.py,sha256=LdX67gprtHYeAkjLhFPKzpc7ECv2rHxUbHKDGbGXO1c,517
 atex/fmf.py,sha256=gkJXIaRO7_KvwJR-V6Tc1NVn4a9Hq7hoBLQLhxYIdbg,8834
-atex/aggregator/__init__.py,sha256=
-atex/aggregator/json.py,sha256=
-atex/cli/__init__.py,sha256=
-atex/cli/fmf.py,sha256=
+atex/aggregator/__init__.py,sha256=8mN-glHdzR4icKAUGO4JPodsTrLMdJoeuZsO2CTbhyU,1773
+atex/aggregator/json.py,sha256=tpoUZoZM8EMYhZKwVr4LRtgEIDjRxC11BIKVXZKYPOs,10441
+atex/cli/__init__.py,sha256=Ew2z-gC0jvOmU_DqYgXVQla3p1rTnrz64I63q52aHv4,2899
+atex/cli/fmf.py,sha256=pvj_OIp6XT_nVUwziL7-v_HNbyAtuUmb7k_Ey_KkFJc,3616
 atex/cli/libvirt.py,sha256=6tt5ANb8XBBRXOQsYPTWILThKqf-gvt5AZh5Dctg2PA,3782
-atex/cli/testingfarm.py,sha256=
+atex/cli/testingfarm.py,sha256=MXfcKnzbmr71LrTpJcIghC6SfB4EBkKXh3Yy5SZlUUc,10744
 atex/connection/__init__.py,sha256=dj8ZBcEspom7Z_UjecfLGBRNvLZ3dyGR9q19i_B4xpY,3880
 atex/connection/podman.py,sha256=1T56gh1TgbcQWpTIJHL4NaxZOI6aMg7Xp7sn6PQQyBk,1911
 atex/connection/ssh.py,sha256=9A57b9YR_HI-kIu06Asic1y__JPVXEheDZxjbG2Qcsc,13460
-atex/executor/__init__.py,sha256=
+atex/executor/__init__.py,sha256=nmYJCbC36fRGGkjoniFJmsq-sqFw8YS2ndf4q_loVM0,471
 atex/executor/duration.py,sha256=x06sItKOZi6XA8KszQwZGpIb1Z_L-HWqIwZKo2SDo0s,1759
-atex/executor/executor.py,sha256=
-atex/executor/reporter.py,sha256=
-atex/executor/scripts.py,sha256=
-atex/executor/testcontrol.py,sha256=
+atex/executor/executor.py,sha256=WJXPWQo6VQhZgXORVVyvTDAdOQbbZz26E7FpwizbGIk,16126
+atex/executor/reporter.py,sha256=QbzBkaXuhI6lsTYrTlp7O5W9d6etR0KjDdH-J59cXWM,3357
+atex/executor/scripts.py,sha256=1u5ZEGJ7nIvkqbRK3uVusOkineVM8DXo4kAlH2MdQbg,5877
+atex/executor/testcontrol.py,sha256=iju_Cl32D8NHH1ePN1lykR1noP8-0eBDLQ5-V_9DqF0,12834
 atex/orchestrator/__init__.py,sha256=8Q1YknyibilXLjWRYkHm_Mr2HMm0SRw8Zv39KypeASM,2059
-atex/orchestrator/adhoc.py,sha256=
-atex/
+atex/orchestrator/adhoc.py,sha256=VUwHX71Vb6eRLzW3Z3KDZdck7p0PiwzAZrOuUKMkwtM,19667
+atex/orchestrator/contest.py,sha256=SuxT9uZtcs_DEsA3hHyKgrIWNrDeqCCWd3-hy3sHytY,4572
+atex/provisioner/__init__.py,sha256=6hZxQlvTQ0yWWqCRCPqWMoYuim5wDMCcDIYHF-nIfMs,4013
 atex/provisioner/libvirt/VM_PROVISION,sha256=7pkZ-ozgTyK4qNGC-E-HUznr4IhbosWSASbB72Gknl8,2664
 atex/provisioner/libvirt/__init__.py,sha256=pKG5IpZSC2IHs5wL2ecQx_fd9AzAXEbZmDzA7RyZsfM,119
-atex/provisioner/libvirt/libvirt.py,sha256=
+atex/provisioner/libvirt/libvirt.py,sha256=ZKctK2B51olvWvLxz2pZ2s6LtX_7EJ43LvlyJHnI1Ho,18955
 atex/provisioner/libvirt/locking.py,sha256=AXtDyidZNmUoMmrit26g9iTHDqInrzL_RSQEoc_EAXw,5669
 atex/provisioner/libvirt/setup-libvirt.sh,sha256=oCMy9SCnbC_QuAzO2sFwvB5ui1kMQ6uviHsgdXyoFXc,2428
 atex/provisioner/podman/__init__.py,sha256=dM0JzQXWX7edtWSc0KH0cMFXAjArFn2Vme4j_ZMsdYA,138
 atex/provisioner/podman/podman.py,sha256=ztRypoakSf-jF04iER58tEMUZ4Y6AuzIpNpFXp44bB4,4997
 atex/provisioner/testingfarm/__init__.py,sha256=kZncgLGdRCR4FMaRQr2GTwJ8vjlA-24ri8JO2ueZJuw,113
-atex/provisioner/testingfarm/api.py,sha256=
-atex/provisioner/testingfarm/testingfarm.py,sha256=
+atex/provisioner/testingfarm/api.py,sha256=K3s87rWNqZ6q1AMkjYaNTNqIOZR-Bl1JonbKx67O9pQ,23549
+atex/provisioner/testingfarm/testingfarm.py,sha256=yvQzWat92B4UnJNZzCLI8mpAKf_QvHUKyKbjlk5123Q,8573
 atex/util/__init__.py,sha256=cWHFbtQ4mDlKe6lXyPDWRmWJOTcHDGfVuW_-GYa8hB0,1473
 atex/util/dedent.py,sha256=SEuJMtLzqz3dQ7g7qyZzEJ9VYynVlk52tQCJY-FveXo,603
 atex/util/libvirt.py,sha256=kDZmT6xLYEZkQNLZY98gJ2M48DDWXxHF8rQY9PnjB3U,660
-atex/util/log.py,sha256=
+atex/util/log.py,sha256=GfdbLtpRkQoIkRU7AqWDWbJV7yZIpS4MsXhUomZqWjQ,2256
 atex/util/named_mapping.py,sha256=UBMe9TetjV-DGPhjYjJ42YtC40FVPKAAEROXl9MA5fo,4700
 atex/util/path.py,sha256=x-kXqiWCVodfZWbEwtC5A8LFvutpDIPYv2m0boZSlXU,504
 atex/util/ssh_keygen.py,sha256=9yuSl2yBV7pG3Qfsf9tossVC00nbIUrAeLdbwTykpjk,384
-atex/util/subprocess.py,sha256=
+atex/util/subprocess.py,sha256=_oQN8CNgGoH9GAR6nZlpujYe2HjXFBcCuIkLPw-IxJ4,2971
 atex/util/threads.py,sha256=c8hsEc-8SqJGodInorv_6JxpiHiSkGFGob4qbMmOD2M,3531
-atex-0.
-atex-0.
-atex-0.
-atex-0.
-atex-0.
+atex-0.12.dist-info/METADATA,sha256=v2DW_JNAKGa7yF6IVOpICXMant1I2qQNlj69Vxcv3rs,3050
+atex-0.12.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+atex-0.12.dist-info/entry_points.txt,sha256=pLqJdcfeyQTgup2h6dWb6SvkHhtOl-W5Eg9zV8moK0o,39
+atex-0.12.dist-info/licenses/COPYING.txt,sha256=oEuj51jdmbXcCUy7pZ-KE0BNcJTR1okudRp5zQ0yWnU,670
+atex-0.12.dist-info/RECORD,,
{atex-0.10.dist-info → atex-0.12.dist-info}/entry_points.txt
File without changes

{atex-0.10.dist-info → atex-0.12.dist-info}/licenses/COPYING.txt
File without changes