atex 0.7-py3-none-any.whl → 0.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,106 @@
+import os
+import json
+import ctypes
+import ctypes.util
+import contextlib
+from pathlib import Path
+
+from .. import util
+
+
+libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
+
+# int linkat(int olddirfd, const char *oldpath, int newdirfd, const char *newpath, int flags)
+libc.linkat.argtypes = (
+    ctypes.c_int,
+    ctypes.c_char_p,
+    ctypes.c_int,
+    ctypes.c_char_p,
+    ctypes.c_int,
+)
+libc.linkat.restype = ctypes.c_int
+
+# fcntl.h:#define AT_EMPTY_PATH 0x1000 /* Allow empty relative pathname */
+AT_EMPTY_PATH = 0x1000
+
+# fcntl.h:#define AT_FDCWD -100 /* Special value used to indicate
+AT_FDCWD = -100
+
+
+def linkat(*args):
+    if (ret := libc.linkat(*args)) == -1:
+        errno = ctypes.get_errno()
+        raise OSError(errno, os.strerror(errno))
+    return ret
+
+
+class Reporter:
+    """
+    Collects reported results (in a format specified by RESULTS.md) for
+    a specific test, storing them persistently.
+    """
+
+    def __init__(self, json_file, files_dir):
+        """
+        'json_file' is a destination file (string or Path) for results.
+
+        'files_dir' is a destination dir (string or Path) for uploaded files.
+        """
+        self.json_file = json_file
+        self.files_dir = Path(files_dir)
+        self.json_fobj = None
+
+    def __enter__(self):
+        if self.json_file.exists():
+            raise FileExistsError(f"{self.json_file} already exists")
+        self.json_fobj = open(self.json_file, "w")
+
+        if self.files_dir.exists():
+            raise FileExistsError(f"{self.files_dir} already exists")
+        self.files_dir.mkdir()
+
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if self.json_fobj:
+            self.json_fobj.close()
+            self.json_fobj = None
+
+    def report(self, result_line):
+        """
+        Persistently record a test result.
+
+        'result_line' is a dict in the format specified by RESULTS.md.
+        """
+        json.dump(result_line, self.json_fobj, indent=None)
+        self.json_fobj.write("\n")
+        self.json_fobj.flush()
+
+    @contextlib.contextmanager
+    def open_tmpfile(self, open_mode=os.O_WRONLY):
+        """
+        Open an anonymous (name-less) file for writing and yield its file
+        descriptor (int) as context, closing it when the context is exited.
+        """
+        flags = open_mode | os.O_TMPFILE
+        fd = os.open(self.files_dir, flags, 0o644)
+        try:
+            yield fd
+        finally:
+            os.close(fd)
+
+    def link_tmpfile_to(self, fd, file_name, result_name=None):
+        """
+        Store a file named 'file_name' in a directory relevant to 'result_name'
+        whose 'fd' (a file descriptor) was created by .open_tmpfile().
+
+        This function can be called multiple times with the same 'fd', and
+        does not close or otherwise alter the descriptor.
+
+        If 'result_name' is not given, link files to the test (name) itself.
+        """
+        result_name = util.normalize_path(result_name) if result_name else "."
+        # /path/to/files_dir / path/to/subresult / path/to/file.log
+        file_path = self.files_dir / result_name / util.normalize_path(file_name)
+        file_path.parent.mkdir(parents=True, exist_ok=True)
+        linkat(fd, b"", AT_FDCWD, bytes(file_path), AT_EMPTY_PATH)
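The new module above stores uploaded files via two Linux-specific primitives: `os.O_TMPFILE` creates an anonymous file inside `files_dir`, and `linkat(fd, "", AT_FDCWD, path, AT_EMPTY_PATH)` later gives that file a visible name, so a partially written file never appears under its final path. A minimal usage sketch follows; the module's import path inside atex and the exact result-dict keys (defined in RESULTS.md) are not shown in this diff and are placeholders here.

```python
# Hypothetical usage of the Reporter added above; the import path and the
# result-dict keys are placeholders, not taken from this diff.
import os
from pathlib import Path

with Reporter(Path("results.json"), Path("files")) as reporter:
    # stream content into an anonymous O_TMPFILE under files_dir ...
    with reporter.open_tmpfile() as fd:
        os.write(fd, b"captured test output\n")
        # ... and only then give it a visible name (may be repeated per fd)
        reporter.link_tmpfile_to(fd, "output.log", result_name="sub/result")
    # append one JSON line describing the result
    reporter.report({"name": "sub/result", "status": "pass"})
```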
@@ -1,8 +1,14 @@
+import collections
 from pathlib import Path
 
-from .. import util
+from .. import util, fmf
+
+# name: fmf path to the test as string, ie. /some/test
+# data: dict of the parsed fmf metadata (ie. {'tag': ... , 'environment': ...})
+# dir: relative pathlib.Path of the test .fmf to repo root, ie. some/test
+#      (may be different from name for "virtual" tests that share the same dir)
+Test = collections.namedtuple("Test", ["name", "data", "dir"])
 
-from . import fmf
 
 # NOTE that we split test execution into 3 scripts:
 # - "setup script" (package installs, etc.)
@@ -17,8 +23,7 @@ from . import fmf
 # passed by an argument to 'ssh', leaving stdin/out/err untouched,
 # allowing the user to interact with it (if run interactively)
 
-
-def test_wrapper(*, test, tests_dir, test_exec, debug=False):
+def test_wrapper(*, test, tests_dir, test_exec):
     """
     Generate a bash script that runs a user-specified test, preparing
     a test control channel for it, and reporting its exit code.
@@ -26,14 +31,12 @@ def test_wrapper(*, test, tests_dir, test_exec, debug=False):
     is considered as test output and any unintended environment changes
     will impact the test itself.
 
-    'test' is a atex.minitmt.fmf.FMFTest instance.
+    'test' is a class Test instance.
 
     'test_dir' is a remote directory (repository) of all the tests,
     a.k.a. FMF metadata root.
 
     'test_exec' is a remote path to the actual test to run.
-
-    'debug' specifies whether to include wrapper output inside test output.
     """
 
     out = "#!/bin/bash\n"
@@ -46,7 +49,7 @@ def test_wrapper(*, test, tests_dir, test_exec, debug=False):
     # doing it here avoids unnecessary traffic (reading stdin) via ssh,
     # even if it is fed from subprocess.DEVNULL on the runner
 
-    if debug:
+    if util.in_debug_mode():
         out += "set -x\n"
 
     # use a subshell to limit the scope of the CWD change
@@ -88,7 +91,18 @@ def test_wrapper(*, test, tests_dir, test_exec, debug=False):
     return out
 
 
-def test_setup(*, test, wrapper_exec, test_exec, debug=False, **kwargs):
+def _install_packages(pkgs, extra_opts=None):
+    pkgs_str = " ".join(pkgs)
+    extra_opts = extra_opts or ()
+    dnf = ["dnf", "-y", "--setopt=install_weak_deps=False", "install", *extra_opts]
+    dnf_str = " ".join(dnf)
+    return util.dedent(fr"""
+        not_installed=$(rpm -q --qf '' {pkgs_str} | sed -nr 's/^package ([^ ]+) is not installed$/\1/p')
+        [[ $not_installed ]] && {dnf_str} $not_installed
+    """)  # noqa: E501
+
+
+def test_setup(*, test, wrapper_exec, test_exec, **kwargs):
     """
     Generate a bash script that should prepare the remote end for test
     execution.
@@ -101,38 +115,30 @@ def test_setup(*, test, wrapper_exec, test_exec, debug=False, **kwargs):
 
     'test_exec' is the remote path where the test script should be put.
 
-    'test' is a atex.minitmt.fmf.FMFTest instance.
-
-    'debug' specifies whether to make the setup script extra verbose.
+    'test' is a class Test instance.
 
     Any 'kwargs' are passed to test_wrapper().
     """
     out = "#!/bin/bash\n"
 
-    # have deterministic stdin, avoid leaking parent console
-    # also avoid any accidental stdout output, we use it for wrapper path
-    if debug:
-        out += "exec {orig_stdout}>&1 1>&2\n"
+    if util.in_debug_mode():
         out += "set -xe\n"
     else:
-        out += "exec {orig_stdout}>&1 2>/dev/null 1>&2\n"
+        out += "exec 1>/dev/null\n"
        out += "set -e\n"
 
     # install test dependencies
     # - only strings (package names) in require/recommend are supported
-    if require := [x for x in fmf.listlike(test.data, "require") if isinstance(x, str)]:
-        out += "dnf -y --setopt=install_weak_deps=False install "
-        out += " ".join(f"'{pkg}'" for pkg in require) + "\n"
-    if recommend := [x for x in fmf.listlike(test.data, "recommend") if isinstance(x, str)]:
-        out += "dnf -y --setopt=install_weak_deps=False install --skip-broken "
-        out += " ".join(f"'{pkg}'" for pkg in recommend) + "\n"
+    if require := list(fmf.test_pkg_requires(test.data, "require")):
+        out += _install_packages(require) + "\n"
+    if recommend := list(fmf.test_pkg_requires(test.data, "recommend")):
+        out += _install_packages(recommend, ("--skip-broken",)) + "\n"
 
     # make the wrapper script
     out += f"cat > '{wrapper_exec}' <<'ATEX_SETUP_EOF'\n"
     out += test_wrapper(
         test=test,
         test_exec=test_exec,
-        debug=debug,
         **kwargs,
     )
     out += "ATEX_SETUP_EOF\n"
@@ -95,12 +95,12 @@ class TestControl:
     processing test-issued commands, results and uploaded files.
     """
 
-    def __init__(self, *, control_fd, aggregator, duration, testout_fd):
+    def __init__(self, *, control_fd, reporter, duration, testout_fd):
         """
         'control_fd' is a non-blocking file descriptor to be read.
 
-        'aggregator' is an instance of a result aggregator (ie. CSVAggregator)
-        all the results and uploaded files will be written to.
+        'reporter' is an instance of class Reporter all the results
+        and uploaded files will be written to.
 
         'duration' is a class Duration instance.
 
@@ -110,15 +110,15 @@ class TestControl:
         """
         self.control_fd = control_fd
         self.stream = NonblockLineReader(control_fd)
-        self.aggregator = aggregator
+        self.reporter = reporter
         self.duration = duration
         self.testout_fd = testout_fd
         self.eof = False
         self.in_progress = None
         self.partial_results = collections.defaultdict(dict)
-        self.result_seen = False
         self.exit_code = None
         self.reconnect = None
+        self.nameless_result_seen = False
 
     def process(self):
         """
@@ -238,9 +238,10 @@ class TestControl:
         except json.decoder.JSONDecodeError as e:
             raise BadReportJSONError(f"JSON decode: {str(e)} caused by: {json_data}") from None
 
+        # note that this may be None (result for the test itself)
         name = result.get("name")
         if not name:
-            raise BadReportJSONError("'name' not specified, but mandatory")
+            self.nameless_result_seen = True
 
         # upload files
         for entry in result.get("files", ()):
@@ -253,7 +254,7 @@ class TestControl:
            except ValueError as e:
                raise BadReportJSONError(f"file entry {file_name} length: {str(e)}") from None

-            with self.aggregator.open_tmpfile() as fd:
+            with self.reporter.open_tmpfile() as fd:
                while file_length > 0:
                    try:
                        # try a more universal sendfile first, fall back to splice
@@ -272,11 +273,9 @@ class TestControl:
                    file_length -= written
                    yield
                try:
-                    self.aggregator.link_tmpfile_to(name, file_name, fd)
+                    self.reporter.link_tmpfile_to(fd, file_name, name)
                except FileExistsError:
-                    raise BadReportJSONError(
-                        f"file '{file_name}' for '{name}' already exists",
-                    ) from None
+                    raise BadReportJSONError(f"file '{file_name}' already exists") from None
 
        # either store partial result + return,
        # or load previous partial result and merge into it
@@ -284,6 +283,8 @@ class TestControl:
         if partial:
             # do not store the 'partial' key in the result
             del result["partial"]
+            # note that nameless result will get None as dict key,
+            # which is perfectly fine
             self._merge(self.partial_results[name], result)
             # partial = do nothing
             return
@@ -295,7 +296,7 @@ class TestControl:
         if name in self.partial_results:
             partial_result = self.partial_results[name]
             del self.partial_results[name]
-            self._nested_merge(partial_result, result)
+            self._merge(partial_result, result)
             result = partial_result
 
         if "testout" in result:
@@ -303,13 +304,11 @@ class TestControl:
             if not testout:
                 raise BadReportJSONError("'testout' specified, but empty")
             try:
-                self.aggregator.link_tmpfile_to(name, testout, self.testout_fd)
+                self.reporter.link_tmpfile_to(self.testout_fd, testout, name)
             except FileExistsError:
-                raise BadReportJSONError(f"file '{testout}' for '{name}' already exists") from None
+                raise BadReportJSONError(f"file '{testout}' already exists") from None
 
-        self.aggregator.report(result)
-
-        self.result_seen = True
+        self.reporter.report(result)
 
     def _parser_duration(self, arg):
         if not arg:
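The TestControl hunks above relax the reporting protocol: a result line without a "name" is no longer rejected; it is recorded as a result for the test itself and merely flips the new `nameless_result_seen` flag, and its files are linked by `Reporter.link_tmpfile_to()` directly under the test's own files directory because `result_name` falls back to `"."`. The sketch below mirrors that path computation from the first hunk; `normalize` stands in for `util.normalize_path()`, whose implementation this diff does not show and which is assumed to return a safe relative path.

```python
from pathlib import Path

# Mirrors Reporter.link_tmpfile_to() from the first hunk; 'normalize' is a
# stand-in for util.normalize_path(), which is not shown in this diff.
def target_path(files_dir, file_name, result_name=None, normalize=lambda p: p):
    result_name = normalize(result_name) if result_name else "."
    return files_dir / result_name / normalize(file_name)

print(target_path(Path("files"), "debug.log", "sub/result"))  # files/sub/result/debug.log
print(target_path(Path("files"), "debug.log"))                # files/debug.log (nameless result)
```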
@@ -1,16 +1,9 @@
 import re
-import collections
 from pathlib import Path
 
 # from system-wide sys.path
 import fmf
 
-# name: fmf path to the test as string, ie. /some/test
-# data: dict of the parsed fmf metadata (ie. {'tag': ... , 'environment': ...})
-# dir: relative pathlib.Path of the test .fmf to repo root, ie. some/test
-#      (may be different from name for "virtual" tests that share the same dir)
-FMFTest = collections.namedtuple("FMFTest", ["name", "data", "dir"])
-
 
 def listlike(data, key):
     """
@@ -61,12 +54,14 @@ class FMFTests:
         # dict indexed by test name, value is pathlib.Path of relative path
         # of the fmf metadata root towards the test metadata location
         self.test_dirs = {}
+        # fmf.Context instance, as used for test discovery
+        self.context = fmf.Context(**context) if context else fmf.Context()
 
         tree = fmf_tree.copy() if isinstance(fmf_tree, fmf.Tree) else fmf.Tree(fmf_tree)
-        ctx = fmf.Context(**context) if context else fmf.Context()
-        tree.adjust(context=ctx)
+        tree.adjust(context=self.context)
 
-        self.fmf_root = tree.root
+        # Path of the metadata root
+        self.root = Path(tree.root)
 
         # lookup the plan first
         plan = tree.find(plan_name)
@@ -146,23 +141,43 @@ class FMFTests:
            self.tests[child.name] = child.data
            # child.sources ie. ['/abs/path/to/some.fmf', '/abs/path/to/some/node.fmf']
            self.test_dirs[child.name] = \
-                Path(child.sources[-1]).parent.relative_to(self.fmf_root)
-
-    def as_fmftest(self, name):
-        return FMFTest(name, self.tests[name], self.test_dirs[name])
-
-    def as_fmftests(self):
-        for name, data in self.tests.items():
-            yield FMFTest(name, data, self.test_dirs[name])
+                Path(child.sources[-1]).parent.relative_to(self.root)
 
     def match(self, regex):
         """
-        Return an iterable of FMFTest instances with test names matching the
-        specified regex via re.match(), just like how 'tmt' discovers tests.
+        Yield test names that match 'regex', simulating how tmt discovers tests.
         """
-        for name, data in self.tests.items():
-            if re.match(regex, name):
-                yield FMFTest(name, data, self.test_dirs[name])
+        yield from (name for name in self.tests if re.match(regex, name))
+
+
+def test_pkg_requires(data, key="require"):
+    """
+    Yield RPM package names specified by test 'data' (fmf metadata dict)
+    in the metadata 'key' (require or recommend), ignoring any non-RPM-package
+    requires/recommends.
+    """
+    for entry in listlike(data, key):
+        # skip type:library and type:path
+        if not isinstance(entry, str):
+            continue
+        # skip "fake RPMs" that begin with 'library('
+        if entry.startswith("library("):
+            continue
+        yield entry
+
+
+def all_pkg_requires(fmf_tests, key="require"):
+    """
+    Yield RPM package names from the plan and all tests discovered by
+    a class FMFTests instance 'fmf_tests', ignoring any non-RPM-package
+    requires/recommends.
+    """
+    # use a set to avoid duplicates
+    pkgs = set()
+    pkgs.update(fmf_tests.prepare_pkgs)
+    for data in fmf_tests.tests.values():
+        pkgs.update(test_pkg_requires(data, key))
+    yield from pkgs
 
 
 # Some extra notes for fmf.prune() arguments:
@@ -186,17 +201,17 @@
 # of tree metadata by the adjust expressions. Ie.
 # {'distro': 'rhel-9.6.0', 'arch': 'x86_64'}
 
-Platform = collections.namedtuple("Platform", ["distro", "arch"])
-
-
-def combine_platforms(fmf_path, plan_name, platforms):
-    # TODO: document
-    fmf_tests = {}
-    tree = fmf.Tree(fmf_path)
-    for platform in platforms:
-        context = {"distro": platform.distro, "arch": platform.arch}
-        fmf_tests[platform] = FMFTests(tree, plan_name, context=context)
-    return fmf_tests
+#Platform = collections.namedtuple("Platform", ["distro", "arch"])
+#
+#
+#def combine_platforms(fmf_path, plan_name, platforms):
+#    # TODO: document
+#    fmf_tests = {}
+#    tree = fmf.Tree(fmf_path)
+#    for platform in platforms:
+#        context = {"distro": platform.distro, "arch": platform.arch}
+#        fmf_tests[platform] = FMFTests(tree, plan_name, context=context)
+#    return fmf_tests
 
 # TODO: in Orchestrator, when a Provisioner becomes free, have it pick a test
 # from the appropriate tests[platform] per the Provisioner's platform
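The require/recommend filtering that used to live inline in test_setup() is now the module-level `test_pkg_requires()` above: it keeps only plain string entries and drops `library(...)` pseudo-RPMs, while `all_pkg_requires()` unions the plan's prepare packages with the requires of every discovered test, deduplicated via a set. A small sketch, assuming `listlike()` (unchanged in this diff) yields each entry of a scalar-or-list fmf value:

```python
# The metadata below is hypothetical; only the filtering rules come from the diff.
data = {
    "require": [
        "make",                              # plain RPM name -> kept
        "library(openssl/certgen)",          # "fake RPM" -> skipped
        {"type": "library", "url": "..."},   # non-string entry -> skipped
    ],
}
print(list(test_pkg_requires(data)))  # ['make']
```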
@@ -1,59 +1,2 @@
-import importlib as _importlib
-import pkgutil as _pkgutil
-#import threading as _threading
-
-
-class Orchestrator:
-    """
-    A scheduler for parallel execution on multiple resources (machines/systems).
-
-    Given a list of Provisioner-derived class instances, it attempts to reserve
-    resources and uses them on-demand as they become available, calling run()
-    on each.
-
-    Note that run() and report() always run in a separate threads (are allowed
-    to block), and may access instance attributes, which are transparently
-    guarded by a thread-aware mutex.
-
-    """
-
-    def __init__(self):
-        pass
-        # TODO: configure via args, max workers, etc.
-
-    # def reserve(self, provisioner):
-    #     # call provisioner.reserve(), return its return
-    #     ...
-
-    def add_provisioner(self, provisioner):
-        # add to a self.* list of provisioners to be used for getting machines
-        ...
-
-    def run(self, provisioner):
-        # run tests, if destructive, call provisioner.release()
-        # returns anything
-        ...
-
-    def report(self):
-        # gets return from run
-        # writes it out to somewhere else
-        ...
-
-
-_submodules = [
-    info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
-]
-
-__all__ = [*_submodules, Orchestrator.__name__]  # noqa: PLE0604
-
-
-def __dir__():
-    return __all__
-
-
-# lazily import submodules
-def __getattr__(attr):
-    if attr in _submodules:
-        return _importlib.import_module(f".{attr}", __name__)
-    else:
-        raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
+from .aggregator import CSVAggregator  # noqa: F401
+from .orchestrator import Orchestrator  # noqa: F401
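The package `__init__.py` above drops both the in-place Orchestrator stub and the PEP 562 lazy-import machinery (the module-level `__getattr__` that resolved submodules on first access); the two public names are now plain eager re-exports from the `.aggregator` and `.orchestrator` submodules and resolve at import time. The subpackage's dotted path is not shown in this diff, so the import below uses a placeholder.

```python
# "atex.some_subpackage" is a placeholder for the real (unshown) package path.
from atex.some_subpackage import CSVAggregator, Orchestrator
```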