atex: 0.5-py3-none-any.whl → 0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atex/__init__.py +2 -12
- atex/cli/__init__.py +13 -13
- atex/cli/minitmt.py +128 -35
- atex/cli/testingfarm.py +59 -59
- atex/connection/__init__.py +125 -0
- atex/connection/ssh.py +406 -0
- atex/minitmt/__init__.py +17 -109
- atex/minitmt/executor.py +348 -0
- atex/minitmt/fmf.py +87 -53
- atex/minitmt/scripts.py +143 -45
- atex/minitmt/testcontrol.py +354 -0
- atex/{orchestrator.py → orchestrator/__init__.py} +22 -1
- atex/orchestrator/aggregator.py +163 -0
- atex/provision/__init__.py +77 -35
- atex/provision/libvirt/VM_PROVISION +8 -0
- atex/provision/libvirt/__init__.py +4 -4
- atex/provision/nspawn/README +74 -0
- atex/provision/podman/README +59 -0
- atex/provision/podman/host_container.sh +74 -0
- atex/provision/testingfarm/__init__.py +29 -0
- atex/{testingfarm.py → provision/testingfarm/api.py} +116 -93
- atex/provision/testingfarm/foo.py +1 -0
- atex/util/__init__.py +4 -4
- atex/util/dedent.py +1 -1
- atex/util/log.py +12 -12
- atex/util/subprocess.py +14 -13
- {atex-0.5.dist-info → atex-0.7.dist-info}/METADATA +1 -1
- atex-0.7.dist-info/RECORD +32 -0
- atex/minitmt/report.py +0 -174
- atex/minitmt/testme.py +0 -3
- atex/ssh.py +0 -320
- atex/util/lockable_class.py +0 -38
- atex-0.5.dist-info/RECORD +0 -26
- {atex-0.5.dist-info → atex-0.7.dist-info}/WHEEL +0 -0
- {atex-0.5.dist-info → atex-0.7.dist-info}/entry_points.txt +0 -0
- {atex-0.5.dist-info → atex-0.7.dist-info}/licenses/COPYING.txt +0 -0
atex/minitmt/scripts.py
CHANGED

@@ -1,51 +1,149 @@
-
+from pathlib import Path
 
-
-
-
-
-#
-#
-#
-#
-# # wait for result reporting unix socket to be created by sshd
-# socket=$tmpdir/results.sock
-# while [[ ! -e $socket ]]; do sleep 0.1; done
-# echo "socket=$socket"
-#
-# # tell the controller to start logging test output
-# echo ---
-#
-# # install test dependencies
-# rpms=( {' '.join(requires)} )
-# to_install=()
-# for rpm in "${{rpms[@]}}"; do
-# rpm -q --quiet "$rpm" || to_install+=("$rpm")
-# done
-# dnf -y --setopt=install_weak_deps=False install "${{to_install[@]}}"
-#
-# # run the test
-# ...
-# rc=$?
+from .. import util
+
+from . import fmf
+
+# NOTE that we split test execution into 3 scripts:
+# - "setup script" (package installs, etc.)
+# - "wrapper script" (runs test script)
+# - "test script" (exact contents of the 'test:' FMF metadata key)
 #
-#
-#
+# this is to allow interactive test execution - the setup script
+# can run in 'bash' via stdin pipe into 'ssh', creating the wrapper
+# script somewhere on the disk, making it executable,
 #
-#
-#''
+# then the "wrapper" script can run via a separate 'ssh' execution,
+# passed by an argument to 'ssh', leaving stdin/out/err untouched,
+# allowing the user to interact with it (if run interactively)
 
-# TODO: have another version of ^^^^ for re-execution of test after a reboot
-# or disconnect that sets tmpdir= from us (reusing on-disk test CWD)
-# rather than creating a new one
-# - the second script needs to rm -f the unix socket before echoing
-# something back to let us re-create it via a new ssh channel open
-# because StreamLocalBindUnlink doesn't seem to work
 
+def test_wrapper(*, test, tests_dir, test_exec, debug=False):
+    """
+    Generate a bash script that runs a user-specified test, preparing
+    a test control channel for it, and reporting its exit code.
+    The script must be as "transparent" as possible, since any output
+    is considered test output and any unintended environment changes
+    will impact the test itself.
 
-
-
-
-
-
-
-
+    'test' is an atex.minitmt.fmf.FMFTest instance.
+
+    'tests_dir' is a remote directory (repository) of all the tests,
+    a.k.a. the FMF metadata root.
+
+    'test_exec' is a remote path to the actual test to run.
+
+    'debug' specifies whether to include wrapper output inside test output.
+    """
+    out = "#!/bin/bash\n"
+
+    # stdout-over-ssh is used as Test Control (see TEST_CONTROL.md),
+    # so open a new fd pointing to the original stdout, and then
+    # redirect stdout to stderr
+    out += "exec {orig_stdout}>&1 1>&2\n"
+
+    # TODO: if interactive, keep original stdin, else exec 0</dev/null ,
+    # doing it here avoids unnecessary traffic (reading stdin) via ssh,
+    # even if it is fed from subprocess.DEVNULL on the runner
+
+    if debug:
+        out += "set -x\n"
+
+    # use a subshell to limit the scope of the CWD change
+    out += "(\n"
+
+    # if TMT_PLAN_ENVIRONMENT_FILE exists, export everything from it
+    # (limited by the subshell, so it doesn't leak)
+    out += util.dedent("""
+        if [[ -f $TMT_PLAN_ENVIRONMENT_FILE ]]; then
+            set -o allexport
+            . "$TMT_PLAN_ENVIRONMENT_FILE"
+            set +o allexport
+        fi
+    """) + "\n"
+
+    # TODO: custom PATH with tmt-* style commands?
+
+    # join the directory of all tests with the nested path of our test in it
+    test_cwd = Path(tests_dir) / test.dir
+    out += f"cd '{test_cwd}' || exit 1\n"
+
+    # run the test script
+    # - the '-e -o pipefail' is to mimic what full fat tmt uses
+    out += (
+        "ATEX_TEST_CONTROL=$orig_stdout"
+        f" exec -a 'bash: atex running {test.name}'"
+        f" bash -e -o pipefail '{test_exec}'\n"
+    )
+
+    # subshell end
+    out += ")\n"
+
+    # write the test exitcode to the test control stream
+    out += "echo exitcode $? >&$orig_stdout\n"
+
+    # always exit the wrapper with 0 if test execution was normal
+    out += "exit 0\n"
+
+    return out
+
+
+def test_setup(*, test, wrapper_exec, test_exec, debug=False, **kwargs):
+    """
+    Generate a bash script that should prepare the remote end for test
+    execution.
+
+    The bash script itself will (among other things) generate two more bash
+    scripts: a test script (contents of 'test' from FMF) and a wrapper script
+    to run the test script.
+
+    'wrapper_exec' is the remote path where the wrapper script should be put.
+
+    'test_exec' is the remote path where the test script should be put.
+
+    'test' is an atex.minitmt.fmf.FMFTest instance.
+
+    'debug' specifies whether to make the setup script extra verbose.
+
+    Any 'kwargs' are passed to test_wrapper().
+    """
+    out = "#!/bin/bash\n"
+
+    # have deterministic stdin, avoid leaking the parent console
+    # also avoid any accidental stdout output, we use it for the wrapper path
+    if debug:
+        out += "exec {orig_stdout}>&1 1>&2\n"
+        out += "set -xe\n"
+    else:
+        out += "exec {orig_stdout}>&1 2>/dev/null 1>&2\n"
+        out += "set -e\n"
+
+    # install test dependencies
+    # - only strings (package names) in require/recommend are supported
+    if require := [x for x in fmf.listlike(test.data, "require") if isinstance(x, str)]:
+        out += "dnf -y --setopt=install_weak_deps=False install "
+        out += " ".join(f"'{pkg}'" for pkg in require) + "\n"
+    if recommend := [x for x in fmf.listlike(test.data, "recommend") if isinstance(x, str)]:
+        out += "dnf -y --setopt=install_weak_deps=False install --skip-broken "
+        out += " ".join(f"'{pkg}'" for pkg in recommend) + "\n"
+
+    # make the wrapper script
+    out += f"cat > '{wrapper_exec}' <<'ATEX_SETUP_EOF'\n"
+    out += test_wrapper(
+        test=test,
+        test_exec=test_exec,
+        debug=debug,
+        **kwargs,
+    )
+    out += "ATEX_SETUP_EOF\n"
+    # make the test script
+    out += f"cat > '{test_exec}' <<'ATEX_SETUP_EOF'\n"
+    out += test.data["test"]
+    out += "\n"  # for safety, in case 'test' doesn't end with a newline
+    out += "ATEX_SETUP_EOF\n"
+    # make both executable
+    out += f"chmod 0755 '{wrapper_exec}' '{test_exec}'\n"
+
+    out += "exit 0\n"
+
+    return out
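
(For context, not part of the package diff: a minimal runner-side sketch of
how these two generators fit together, per the module comment above. The host
name and remote paths are hypothetical, and 'test' stands for an
atex.minitmt.fmf.FMFTest instance obtained elsewhere.)

import subprocess

from atex.minitmt import scripts

def run_remote(test, host="test-vm.example.com"):
    # generate the setup script and feed it to 'bash' via ssh stdin;
    # it writes the wrapper and test scripts out on the remote end
    setup = scripts.test_setup(
        test=test,
        wrapper_exec="/root/wrapper.sh",
        test_exec="/root/test.sh",
        tests_dir="/root/tests",  # forwarded to test_wrapper() via **kwargs
    )
    subprocess.run(["ssh", host, "bash"], input=setup.encode(), check=True)
    # run the wrapper as a separate ssh execution, leaving stdin/out/err
    # untouched so an interactive run stays interactive
    return subprocess.run(["ssh", host, "/root/wrapper.sh"])
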
atex/minitmt/testcontrol.py
ADDED

@@ -0,0 +1,354 @@
+import os
+import collections
+import json
+
+from .. import util
+
+
+class BufferFullError(Exception):
+    pass
+
+
+class NonblockLineReader:
+    """
+    Kind of like io.BufferedReader, but capable of reading from non-blocking
+    sources (both O_NONBLOCK sockets and os.set_blocking(False) descriptors),
+    re-assembling full lines from (potentially) multiple read() calls.
+
+    It also takes a file descriptor (not a file-like object) and takes extra
+    care to read one byte at a time, to avoid reading (and buffering) more
+    data from the source descriptor than needed, allowing the descriptor to
+    be used for an in-kernel move, such as via os.sendfile() or os.splice().
+    """
+
+    def __init__(self, src, maxlen=4096):
+        """
+        'src' is an opened file descriptor (integer).
+
+        'maxlen' is the maximum potential line length, incl. the newline
+        character - if reached, a BufferFullError is raised.
+        """
+        self.src = src
+        self.eof = False
+        self.buffer = bytearray(maxlen)
+        self.bytes_read = 0
+
+    def readline(self):
+        r"""
+        Read a line and return it, without the terminating '\n' character,
+        clearing the internal buffer upon return.
+
+        Returns None if nothing could be read (BlockingIOError) or if EOF
+        was reached.
+        """
+        while self.bytes_read < len(self.buffer):
+            try:
+                data = os.read(self.src, 1)
+            except BlockingIOError:
+                return None
+
+            # stream EOF
+            if len(data) == 0:
+                self.eof = True
+                return None
+
+            char = data[0]
+
+            if char == 0x0a:  # \n
+                line = self.buffer[0:self.bytes_read]
+                self.bytes_read = 0
+                return line
+            else:
+                self.buffer[self.bytes_read] = char
+                self.bytes_read += 1
+
+        raise BufferFullError(f"line buffer reached {len(self.buffer)} bytes")
+
+    def clear(self):
+        """
+        Clear the internal buffer, discarding any partially-read line data.
+        """
+        self.bytes_read = 0
+
+
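
(Not part of the package diff: a quick sketch of NonblockLineReader's contract
on a non-blocking pipe - partial lines stay buffered across calls, complete
lines come back with their '\n' stripped.)

import os

from atex.minitmt.testcontrol import NonblockLineReader

r, w = os.pipe()
os.set_blocking(r, False)
reader = NonblockLineReader(r)

os.write(w, b"par")                          # partial line, gets buffered
assert reader.readline() is None             # nothing complete yet
os.write(w, b"tial line\nnext")
assert reader.readline() == b"partial line"  # '\n' stripped
assert reader.readline() is None             # "next" still awaits its '\n'
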
+class BadControlError(Exception):
+    """
+    Raised by TestControl when abnormalities are detected in the control
+    stream, such as invalid syntax, an unknown control word, or bad or
+    unexpected data for any given control word.
+    """
+    pass
+
+
+class BadReportJSONError(BadControlError):
+    """
+    Raised on a syntactic or semantic error caused by the test not following
+    the TEST_CONTROL.md specification when passing JSON data to the 'result'
+    control word.
+    """
+    pass
+
+
+class TestControl:
+    """
+    An implementation of the protocol described by TEST_CONTROL.md,
+    processing test-issued commands, results and uploaded files.
+    """
+
+    def __init__(self, *, control_fd, aggregator, duration, testout_fd):
+        """
+        'control_fd' is a non-blocking file descriptor to be read.
+
+        'aggregator' is an instance of a result aggregator (e.g. CSVAggregator)
+        all the results and uploaded files will be written to.
+
+        'duration' is a Duration class instance.
+
+        'testout_fd' is an optional file descriptor handle which the test uses
+        to write its output to - useful here for the 'result' control word and
+        its protocol, which allows "hardlinking" the fd to a real file name.
+        """
+        self.control_fd = control_fd
+        self.stream = NonblockLineReader(control_fd)
+        self.aggregator = aggregator
+        self.duration = duration
+        self.testout_fd = testout_fd
+        self.eof = False
+        self.in_progress = None
+        self.partial_results = collections.defaultdict(dict)
+        self.result_seen = False
+        self.exit_code = None
+        self.reconnect = None
+
+    def process(self):
+        """
+        Read from the control file descriptor and potentially perform any
+        appropriate action based on commands read from the test.
+
+        Sets the 'eof' attribute to True once the control file descriptor
+        reaches EOF and no more data is expected.
+        """
+        # if a parser operation is in progress, continue calling it,
+        # avoid reading a control line
+        if self.in_progress:
+            try:
+                next(self.in_progress)
+                return
+            except StopIteration:
+                # parser is done, continue on to a control line
+                self.in_progress = None
+
+        try:
+            line = self.stream.readline()
+        except BufferFullError as e:
+            raise BadControlError(str(e)) from None
+
+        util.debug(f"got control line: {line}")
+
+        if self.stream.eof:
+            self.eof = True
+            return
+        # partial read or BlockingIOError, try next time
+        if line is None:
+            return
+        elif len(line) == 0:
+            raise BadControlError(r"empty control line (just '\n')")
+
+        line = line.decode()
+        word, _, arg = line.partition(" ")
+
+        if word == "result":
+            parser = self._parser_result(arg)
+        elif word == "duration":
+            parser = self._parser_duration(arg)
+        elif word == "exitcode":
+            parser = self._parser_exitcode(arg)
+        elif word == "reconnect":
+            parser = self._parser_reconnect(arg)
+        else:
+            raise BadControlError(f"unknown control word: {word}")
+
+        try:
+            next(parser)
+            # parser not done parsing, run it next time we're called
+            self.in_progress = parser
+        except StopIteration:
+            pass
+
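
(Not part of the package diff: what a test might write to the control stream,
inferred from the dispatch above - 'result' is a length-prefixed JSON blob;
TEST_CONTROL.md remains the authoritative syntax. 'control_wfd' is a
hypothetical write end of the control channel, and the JSON fields beyond
'name' are illustrative.)

import json
import os

# control_wfd: the write end of the control channel (hypothetical)
payload = json.dumps({"name": "/some/test", "result": "pass"}).encode()
os.write(control_wfd, b"duration +600\n")             # ask for 600 more seconds
os.write(control_wfd, b"result %d\n" % len(payload))  # announce the JSON length
os.write(control_wfd, payload)                        # exactly that many bytes
os.write(control_wfd, b"exitcode 0\n")
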
+    @classmethod
+    def _merge(cls, dst, src):
+        """
+        Merge a 'src' dict into 'dst', using the rules described by
+        TEST_CONTROL.md for 'Partial results'.
+        """
+        for key, value in src.items():
+            # delete existing if new value is None (JSON null)
+            if value is None and key in dst:
+                del dst[key]
+                continue
+            # add new key
+            elif key not in dst:
+                dst[key] = value
+                continue
+
+            orig_value = dst[key]
+            # different type - replace
+            if type(value) is not type(orig_value):
+                dst[key] = value
+                continue
+
+            # nested dict, merge it recursively
+            if isinstance(value, dict):
+                cls._merge(orig_value, value)
+            # extensible list-like iterable, extend it
+            elif isinstance(value, (tuple, list)):
+                dst[key] = orig_value + value
+            # overridable types, doesn't make sense to extend them
+            elif isinstance(value, (str, int, float, bool, bytes, bytearray)):
+                dst[key] = value
+            # set-like, needs unioning
+            elif isinstance(value, set):
+                orig_value.update(value)
+            else:
+                raise BadReportJSONError(f"cannot merge type {type(value)}")
+
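
(Not part of the package diff: how these merge rules compose for partial
results - a JSON null deletes a key, lists extend, new keys are added,
scalars override.)

acc = {"name": "/t1", "logs": ["a.log"], "note": "first"}
TestControl._merge(acc, {"logs": ["b.log"], "note": None, "extra": {"k": 1}})
assert acc == {"name": "/t1", "logs": ["a.log", "b.log"], "extra": {"k": 1}}
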
+    def _parser_result(self, arg):
+        try:
+            json_length = int(arg)
+        except ValueError as e:
+            raise BadControlError(f"reading json length: {str(e)}") from None
+
+        # read the full JSON
+        json_data = bytearray()
+        while json_length > 0:
+            try:
+                chunk = os.read(self.control_fd, json_length)
+            except BlockingIOError:
+                yield
+                continue
+            if chunk == b"":
+                raise BadControlError("EOF when reading data")
+            json_data += chunk
+            json_length -= len(chunk)
+            yield
+
+        # convert to a native python dict
+        try:
+            result = json.loads(json_data)
+        except json.decoder.JSONDecodeError as e:
+            raise BadReportJSONError(f"JSON decode: {str(e)} caused by: {json_data}") from None
+
+        name = result.get("name")
+        if not name:
+            raise BadReportJSONError("'name' not specified, but mandatory")
+
+        # upload files
+        for entry in result.get("files", ()):
+            file_name = entry.get("name")
+            file_length = entry.get("length")
+            if not file_name or file_length is None:
+                raise BadReportJSONError(f"file entry missing 'name' or 'length': {entry}")
+            try:
+                file_length = int(file_length)
+            except ValueError as e:
+                raise BadReportJSONError(f"file entry {file_name} length: {str(e)}") from None
+
+            with self.aggregator.open_tmpfile() as fd:
+                while file_length > 0:
+                    try:
+                        # try a more universal sendfile first, fall back to splice
+                        try:
+                            written = os.sendfile(fd, self.control_fd, None, file_length)
+                        except OSError as e:
+                            if e.errno == 22:  # EINVAL
+                                written = os.splice(self.control_fd, fd, file_length)
+                            else:
+                                raise
+                    except BlockingIOError:
+                        yield
+                        continue
+                    if written == 0:
+                        raise BadControlError("EOF when reading data")
+                    file_length -= written
+                    yield
+                try:
+                    self.aggregator.link_tmpfile_to(name, file_name, fd)
+                except FileExistsError:
+                    raise BadReportJSONError(
+                        f"file '{file_name}' for '{name}' already exists",
+                    ) from None
+
+        # either store a partial result + return,
+        # or load the previous partial result and merge into it
+        partial = result.get("partial", False)
+        if partial:
+            # do not store the 'partial' key in the result
+            del result["partial"]
+            self._merge(self.partial_results[name], result)
+            # partial result stored - nothing more to do now
+            return
+
+        # if a previously-stored partial result exists, merge the current one
+        # into it, but then use the merged result
+        # - avoid .get() or __getitem__() on defaultdict, it would create
+        #   a new key with an empty value if there was no partial result
+        if name in self.partial_results:
+            partial_result = self.partial_results[name]
+            del self.partial_results[name]
+            self._merge(partial_result, result)
+            result = partial_result
+
+        if "testout" in result:
+            testout = result.get("testout")
+            if not testout:
+                raise BadReportJSONError("'testout' specified, but empty")
+            try:
+                self.aggregator.link_tmpfile_to(name, testout, self.testout_fd)
+            except FileExistsError:
+                raise BadReportJSONError(f"file '{testout}' for '{name}' already exists") from None
+
+        self.aggregator.report(result)
+
+        self.result_seen = True
+
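
(Not part of the package diff: the shape of a 'result' JSON that the parser
above accepts - 'name' is mandatory, and each 'files' entry announces raw
bytes that follow the JSON on the control stream; field names beyond the ones
checked here are illustrative, TEST_CONTROL.md is authoritative.)

report = {
    "name": "/some/test",
    "testout": "output.txt",  # link the captured test output fd to this name
    "files": [
        {"name": "debug.log", "length": 1024},  # 1024 raw bytes follow
    ],
    "partial": False,  # True would only merge into partial_results
}
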
+    def _parser_duration(self, arg):
+        if not arg:
+            raise BadControlError("duration argument empty")
+        # increment/decrement
+        if arg[0] == "+":
+            self.duration.increment(arg[1:])
+        elif arg[0] == "-":
+            self.duration.decrement(arg[1:])
+        # save/restore
+        elif arg == "save":
+            self.duration.save()
+        elif arg == "restore":
+            self.duration.restore()
+        else:
+            self.duration.set(arg)
+        # pretend to be a generator
+        if False:
+            yield
+
+    def _parser_exitcode(self, arg):
+        if not arg:
+            raise BadControlError("exitcode argument empty")
+        try:
+            code = int(arg)
+        except ValueError:
+            raise BadControlError(f"'{arg}' is not an integer exit code") from None
+        self.exit_code = code
+        # pretend to be a generator
+        if False:
+            yield
+
+    def _parser_reconnect(self, arg):
+        if not arg:
+            self.reconnect = "once"
+        elif arg == "always":
+            self.reconnect = "always"
+        else:
+            raise BadControlError(f"unknown reconnect arg: {arg}")
+        # pretend to be a generator
+        if False:
+            yield
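
(Not part of the package diff: a runner-side sketch of driving TestControl
until EOF with a poll loop; the actual loop lives in the new
atex/minitmt/executor.py, which is not shown here, so this is an assumption.)

import select

def drive(control):
    # 'control' is a TestControl whose control_fd is non-blocking
    poller = select.poll()
    poller.register(control.control_fd, select.POLLIN)
    while not control.eof:
        poller.poll(1000)  # wait up to 1s for more control data
        control.process()
    return control.exit_code
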
atex/{orchestrator.py → orchestrator/__init__.py}
CHANGED

@@ -1,4 +1,6 @@
-
+import importlib as _importlib
+import pkgutil as _pkgutil
+#import threading as _threading
 
 
 class Orchestrator:
@@ -36,3 +38,22 @@ class Orchestrator:
     # gets return from run
     # writes it out to somewhere else
     ...
+
+
+_submodules = [
+    info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
+]
+
+__all__ = [*_submodules, Orchestrator.__name__]  # noqa: PLE0604
+
+
+def __dir__():
+    return __all__
+
+
+# lazily import submodules
+def __getattr__(attr):
+    if attr in _submodules:
+        return _importlib.import_module(f".{attr}", __name__)
+    else:
+        raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
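
(Not part of the package diff: what the lazy-import hook above enables -
submodules resolve on first attribute access instead of at package import
time.)

import atex.orchestrator as orch

print(orch.__all__)    # discovered submodules plus 'Orchestrator'
agg = orch.aggregator  # atex.orchestrator.aggregator imported only now
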