atex 0.5-py3-none-any.whl → 0.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. atex/__init__.py +2 -12
  2. atex/cli/__init__.py +13 -13
  3. atex/cli/fmf.py +93 -0
  4. atex/cli/testingfarm.py +71 -61
  5. atex/connection/__init__.py +117 -0
  6. atex/connection/ssh.py +390 -0
  7. atex/executor/__init__.py +2 -0
  8. atex/executor/duration.py +60 -0
  9. atex/executor/executor.py +378 -0
  10. atex/executor/reporter.py +106 -0
  11. atex/executor/scripts.py +155 -0
  12. atex/executor/testcontrol.py +353 -0
  13. atex/fmf.py +217 -0
  14. atex/orchestrator/__init__.py +2 -0
  15. atex/orchestrator/aggregator.py +106 -0
  16. atex/orchestrator/orchestrator.py +324 -0
  17. atex/provision/__init__.py +101 -90
  18. atex/provision/libvirt/VM_PROVISION +8 -0
  19. atex/provision/libvirt/__init__.py +4 -4
  20. atex/provision/podman/README +59 -0
  21. atex/provision/podman/host_container.sh +74 -0
  22. atex/provision/testingfarm/__init__.py +2 -0
  23. atex/{testingfarm.py → provision/testingfarm/api.py} +170 -132
  24. atex/provision/testingfarm/testingfarm.py +236 -0
  25. atex/util/__init__.py +5 -10
  26. atex/util/dedent.py +1 -1
  27. atex/util/log.py +20 -12
  28. atex/util/path.py +16 -0
  29. atex/util/ssh_keygen.py +14 -0
  30. atex/util/subprocess.py +14 -13
  31. atex/util/threads.py +55 -0
  32. {atex-0.5.dist-info → atex-0.8.dist-info}/METADATA +97 -2
  33. atex-0.8.dist-info/RECORD +37 -0
  34. atex/cli/minitmt.py +0 -82
  35. atex/minitmt/__init__.py +0 -115
  36. atex/minitmt/fmf.py +0 -168
  37. atex/minitmt/report.py +0 -174
  38. atex/minitmt/scripts.py +0 -51
  39. atex/minitmt/testme.py +0 -3
  40. atex/orchestrator.py +0 -38
  41. atex/ssh.py +0 -320
  42. atex/util/lockable_class.py +0 -38
  43. atex-0.5.dist-info/RECORD +0 -26
  44. {atex-0.5.dist-info → atex-0.8.dist-info}/WHEEL +0 -0
  45. {atex-0.5.dist-info → atex-0.8.dist-info}/entry_points.txt +0 -0
  46. {atex-0.5.dist-info → atex-0.8.dist-info}/licenses/COPYING.txt +0 -0
atex/provision/testingfarm/testingfarm.py ADDED
@@ -0,0 +1,236 @@
+ import time
+ import tempfile
+ import threading
+
+ from ... import connection, util
+ from .. import Provisioner, Remote
+
+ from . import api
+
+
+ class TestingFarmRemote(Remote, connection.ssh.ManagedSSHConn):
+     """
+     Built on the official Remote API, pulling in the Connection API
+     as implemented by ManagedSSHConn.
+     """
+
+     def __init__(self, ssh_options, *, release_hook, provisioner):
+         """
+         'ssh_options' are a dict, passed to ManagedSSHConn __init__().
+
+         'release_hook' is a callable called on .release() in addition
+         to disconnecting the connection.
+         """
+         # start with empty ssh options, we'll fill them in later
+         super().__init__(options=ssh_options)
+         self.release_hook = release_hook
+         self.provisioner = provisioner
+         self.lock = threading.RLock()
+         self.release_called = False
+
+     def release(self):
+         with self.lock:
+             if not self.release_called:
+                 self.release_called = True
+             else:
+                 return
+             self.release_hook(self)
+             self.disconnect()
+
+     # not /technically/ a valid repr(), but meh
+     def __repr__(self):
+         class_name = self.__class__.__name__
+         compose = self.provisioner.compose
+         arch = self.provisioner.arch
+         return f"{class_name}({compose} @ {arch}, {hex(id(self))})"
+
+     # def alive(self):
+     #     return self.valid
+
+     # TODO: def __str__(self): as root@1.2.3.4 and arch, ranch, etc.
+
+
+ class TestingFarmProvisioner(Provisioner):
+     # TODO: have max_systems as (min,default,max) tuple; have an algorithm that
+     # starts at default and scales up/down as needed
+
+     def __init__(self, compose, arch="x86_64", *, max_systems=1, timeout=60, max_retries=10):
+         """
+         'compose' is a Testing Farm compose to prepare.
+
+         'arch' is an architecture associated with the compose.
+
+         'max_systems' is an int of how many systems to reserve (and keep
+         reserved) in an internal pool.
+
+         'timeout' is the maximum Testing Farm pipeline timeout (waiting for
+         a system + OS installation + reservation time).
+
+         'max_retries' is a maximum number of provisioning (Testing Farm) errors
+         that will be reprovisioned before giving up.
+         """
+         super().__init__()
+         self.compose = compose # TODO: translate "centos9" to "CentOS-Stream-9"
+         self.arch = arch
+         self.max_systems = max_systems
+         self.timeout = timeout
+         self.retries = max_retries
+         self._tmpdir = None
+         self.ssh_key = self.ssh_pubkey = None
+         self.queue = util.ThreadQueue(daemon=True)
+         self.tf_api = api.TestingFarmAPI()
+
+         # TF Reserve instances (not Remotes) actively being provisioned,
+         # in case we need to call their .release() on abort
+         self.reserving = []
+
+         # active TestingFarmRemote instances, ready to be handed over to the user,
+         # or already in use by the user
+         self.remotes = []
+
+     def _wait_for_reservation(self, tf_reserve, initial_delay):
+         # assuming this function will be called many times, attempt to
+         # distribute load on TF servers
+         # (we can sleep here as this code is running in a separate thread)
+         if initial_delay:
+             util.debug(f"delaying for {initial_delay}s to distribute load")
+             time.sleep(initial_delay)
+
+         # 'machine' is api.Reserve.ReservedMachine namedtuple
+         machine = tf_reserve.reserve()
+
+         # connect our Remote to the machine via its class Connection API
+         ssh_options = {
+             "Hostname": machine.host,
+             "User": machine.user,
+             "Port": machine.port,
+             "IdentityFile": machine.ssh_key,
+         }
+
+         def release_hook(remote):
+             # remove from the list of remotes inside this Provisioner
+             with self.lock:
+                 try:
+                     self.remotes.remove(remote)
+                 except ValueError:
+                     pass
+             # call TF API, cancel the request, etc.
+             tf_reserve.release()
+
+         remote = TestingFarmRemote(
+             ssh_options,
+             release_hook=release_hook,
+             provisioner=self,
+         )
+         remote.connect()
+
+         # since the system is fully ready, stop tracking its reservation
+         # and return the finished Remote instance
+         with self.lock:
+             self.remotes.append(remote)
+             self.reserving.remove(tf_reserve)
+
+         return remote
+
+     def _schedule_one_reservation(self, initial_delay=None):
+         # instantiate a class Reserve from the Testing Farm api module
+         # (which typically provides context manager, but we use its .reserve()
+         # and .release() functions directly)
+         tf_reserve = api.Reserve(
+             compose=self.compose,
+             arch=self.arch,
+             timeout=self.timeout,
+             ssh_key=self.ssh_key,
+             api=self.tf_api,
+         )
+
+         # add it to self.reserving even before we schedule a provision,
+         # to avoid races on suddent abort
+         with self.lock:
+             self.reserving.append(tf_reserve)
+
+         # start a background wait
+         self.queue.start_thread(
+             target=self._wait_for_reservation,
+             args=(tf_reserve, initial_delay),
+         )
+
+     def start(self):
+         with self.lock:
+             self._tmpdir = tempfile.TemporaryDirectory()
+             self.ssh_key, self.ssh_pubkey = util.ssh_keygen(self._tmpdir.name)
+             # start up all initial reservations
+             for i in range(self.max_systems):
+                 delay = (api.API_QUERY_DELAY / self.max_systems) * i
+                 #self.queue.start_thread(target=self._schedule_one_reservation, args=(delay,))
+                 self._schedule_one_reservation(delay)
+
+     def stop(self):
+         with self.lock:
+             # abort reservations in progress
+             for tf_reserve in self.reserving:
+                 tf_reserve.release()
+             self.reserving = []
+             # cancel/release all Remotes ever created by us
+             for remote in self.remotes:
+                 remote.release()
+             self.remotes = [] # just in case
+             # explicitly remove the tmpdir rather than relying on destructor
+             self._tmpdir.cleanup()
+             self._tmpdir = None
+
+     def stop_defer(self):
+         callables = []
+         with self.lock:
+             callables += (f.release for f in self.reserving)
+             self.reserving = []
+             callables += (r.release for r in self.remotes)
+             self.remotes = [] # just in case
+             callables.append(self._tmpdir.cleanup)
+             self._tmpdir = None
+         return callables
+
+     def get_remote(self, block=True):
+         # fill .release()d remotes back up with reservations
+         with self.lock:
+             deficit = self.max_systems - len(self.remotes) - len(self.reserving)
+             for i in range(deficit):
+                 delay = (api.API_QUERY_DELAY / deficit) * i
+                 self._schedule_one_reservation(delay)
+
+         while True:
+             # otherwise wait on a queue of Remotes being provisioned
+             try:
+                 return self.queue.get(block=block) # thread-safe
+             except util.ThreadQueue.Empty:
+                 # always non-blocking
+                 return None
+             except (api.TestingFarmError, connection.ssh.SSHError) as e:
+                 with self.lock:
+                     if self.retries > 0:
+                         util.warning(
+                             f"caught while reserving a TF system: {repr(e)}, "
+                             f"retrying ({self.retries} left)",
+                         )
+                         self.retries -= 1
+                         self._schedule_one_reservation()
+                         if block:
+                             continue
+                         else:
+                             return None
+                     else:
+                         util.warning(
+                             f"caught while reserving a TF system: {repr(e)}, "
+                             "exhausted all retries, giving up",
+                         )
+                         raise
+
+     # not /technically/ a valid repr(), but meh
+     def __repr__(self):
+         class_name = self.__class__.__name__
+         reserving = len(self.reserving)
+         remotes = len(self.remotes)
+         return (
+             f"{class_name}({self.compose} @ {self.arch}, {reserving} reserving, "
+             f"{remotes} remotes, {hex(id(self))})"
+         )
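For orientation, a minimal usage sketch of the provisioner added above, using only the methods visible in this diff (`start()`, `get_remote()`, `stop()`, `release()`); the compose name is a placeholder and the actual work done over the remote's Connection API is elided:

```python
# Hypothetical sketch, not part of the package: drive TestingFarmProvisioner
# through its lifecycle as the methods above suggest. The compose string is a
# placeholder value.
from atex.provision.testingfarm.testingfarm import TestingFarmProvisioner

provisioner = TestingFarmProvisioner("CentOS-Stream-9", arch="x86_64", max_systems=2)
provisioner.start()                    # kicks off background reservations
try:
    remote = provisioner.get_remote()  # blocks until one reservation completes
    try:
        ...                            # use the Connection API inherited from ManagedSSHConn
    finally:
        remote.release()               # unregisters the remote and cancels the TF request
finally:
    provisioner.stop()                 # aborts pending reservations, releases remotes, removes the tmpdir
```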
atex/util/__init__.py CHANGED
@@ -1,7 +1,3 @@
- """
- TODO some description about utilities
- """
-
  import importlib as _importlib
  import pkgutil as _pkgutil
  import inspect as _inspect
@@ -17,21 +13,21 @@ def __dir__():
  # (function to avoid polluting global namespace with extra variables)
  def _import_submodules():
      for info in _pkgutil.iter_modules(__spec__.submodule_search_locations):
-         mod = _importlib.import_module(f'.{info.name}', __name__)
+         mod = _importlib.import_module(f".{info.name}", __name__)

          # if the module defines __all__, just use it
-         if hasattr(mod, '__all__'):
+         if hasattr(mod, "__all__"):
              keys = mod.__all__
          else:
              # https://docs.python.org/3/reference/executionmodel.html#binding-of-names
-             keys = (x for x in dir(mod) if not x.startswith('_'))
+             keys = (x for x in dir(mod) if not x.startswith("_"))

          for key in keys:
              attr = getattr(mod, key)

              # avoid objects that belong to other known modules
              # (ie. imported function from another util module)
-             if hasattr(attr, '__module__'):
+             if hasattr(attr, "__module__"):
                  if attr.__module__ != mod.__name__:
                      continue
              # avoid some common pollution / imports
@@ -39,8 +35,7 @@ def _import_submodules():
              if _inspect.ismodule(attr):
                  continue
              # do not override already processed objects (avoid duplicates)
-             if key in __all__:
-                 raise AssertionError(f"tried to override already-imported '{key}'")
+             assert key not in __all__, f"tried to override already-imported '{key}'"

              globals()[key] = attr
              __all__.append(key)
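The net effect of the loop above is that public members of every `atex.util` submodule are re-exported at the package level, which is how other modules in this diff can call `util.debug(...)`, `util.ssh_keygen(...)` or `util.ThreadQueue(...)` directly. A small sketch of the resulting behaviour:

```python
# Sketch of the re-export behaviour; the exact set of names depends on the
# submodules shipped under atex/util/.
from atex import util

util.debug("hello")                    # defined in atex/util/log.py
tq = util.ThreadQueue(daemon=True)     # defined in atex/util/threads.py
print(util.normalize_path("../x"))     # defined in atex/util/path.py -> "x"
```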
atex/util/dedent.py CHANGED
@@ -22,4 +22,4 @@ def dedent(text):
      Like textwrap.dedent(), but also strip leading and trailing spaces/newlines
      up to the content.
      """
-     return textwrap.dedent(text.lstrip('\n').rstrip(' \n'))
+     return textwrap.dedent(text.lstrip("\n").rstrip(" \n"))
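A short illustration of the docstring above; the sample string is made up:

```python
from atex import util

text = """
    line one
      line two
    """
# the leading newline and trailing spaces/newline are stripped first,
# then textwrap.dedent() removes the common 4-space indentation
assert util.dedent(text) == "line one\n  line two"
```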
atex/util/log.py CHANGED
@@ -2,7 +2,15 @@ import inspect
  import logging
  from pathlib import Path

- _logger = logging.getLogger('atex')
+ _logger = logging.getLogger("atex")
+
+
+ def in_debug_mode():
+     """
+     Return True if the root logger is using the DEBUG (or more verbose) level.
+     """
+     root_level = logging.getLogger().level
+     return root_level > 0 and root_level <= logging.DEBUG


  def _format_msg(msg, *, skip_frames=0):
@@ -13,7 +21,7 @@ def _format_msg(msg, *, skip_frames=0):

      # bottom of the stack, or runpy executed module
      for frame_info in stack:
-         if frame_info.function == '<module>':
+         if frame_info.function == "<module>":
              break
          module = frame_info

@@ -24,21 +32,21 @@ def _format_msg(msg, *, skip_frames=0):
      # if the function has 'self' and it looks like a class instance,
      # prepend it to the function name
      p_locals = parent.frame.f_locals
-     if 'self' in p_locals:
-         self = p_locals['self']
-         if hasattr(self, '__class__') and inspect.isclass(self.__class__):
-             function = f'{self.__class__.__name__}.{function}'
+     if "self" in p_locals:
+         self = p_locals["self"]
+         if hasattr(self, "__class__") and inspect.isclass(self.__class__):
+             function = f"{self.__class__.__name__}.{function}"

      # don't report module name of a function if it's the same as running module
      if parent.filename != module.filename:
-         parent_modname = parent.frame.f_globals['__name__']
+         parent_modname = parent.frame.f_globals["__name__"]
          # avoid everything having the package name prefixed
-         parent_modname = parent_modname.partition('.')[2] or parent_modname
-         return f'{parent_modname}.{function}:{parent.lineno}: {msg}'
-     elif parent.function != '<module>':
-         return f'{function}:{parent.lineno}: {msg}'
+         parent_modname = parent_modname.partition(".")[2] or parent_modname
+         return f"{parent_modname}.{function}:{parent.lineno}: {msg}"
+     elif parent.function != "<module>":
+         return f"{function}:{parent.lineno}: {msg}"
      else:
-         return f'{Path(parent.filename).name}:{parent.lineno}: {msg}'
+         return f"{Path(parent.filename).name}:{parent.lineno}: {msg}"


  def debug(msg, *, skip_frames=0):
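A quick sketch of the new `in_debug_mode()` helper, assuming it is re-exported at the `atex.util` level like the other helpers in this diff:

```python
import logging

from atex import util

logging.basicConfig(level=logging.INFO)
print(util.in_debug_mode())    # False: root level 20 (INFO) is above DEBUG (10)

logging.getLogger().setLevel(logging.DEBUG)
print(util.in_debug_mode())    # True: 0 < 10 <= logging.DEBUG
```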
atex/util/path.py ADDED
@@ -0,0 +1,16 @@
+ import os
+
+
+ def normalize_path(path):
+     """
+     Transform a potentially dangerous path (leading slash, relative ../../../
+     leading beyond parent, etc.) to a safe one.
+
+     Always returns a relative path.
+     """
+     # the magic here is to treat any dangerous path as starting at /
+     # and resolve any weird constructs relative to /, and then simply
+     # strip off the leading / and use it as a relative path
+     path = path.lstrip("/")
+     path = os.path.normpath(f"/{path}")
+     return path[1:]
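A few illustrative inputs for the normalization above; the results follow directly from the `lstrip()`/`normpath()` steps:

```python
from atex import util

assert util.normalize_path("../../etc/shadow") == "etc/shadow"      # cannot escape upwards
assert util.normalize_path("/var/log/../tmp/x") == "var/tmp/x"      # absolute becomes relative
assert util.normalize_path("plain/relative") == "plain/relative"    # safe paths pass through
```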
atex/util/ssh_keygen.py ADDED
@@ -0,0 +1,14 @@
+ import subprocess
+ from pathlib import Path
+
+ from .subprocess import subprocess_run
+
+
+ def ssh_keygen(dest_dir, key_type="rsa"):
+     dest_dir = Path(dest_dir)
+     subprocess_run(
+         ("ssh-keygen", "-t", key_type, "-N", "", "-f", dest_dir / f"key_{key_type}"),
+         stdout=subprocess.DEVNULL,
+         check=True,
+     )
+     return (dest_dir / "key_rsa", dest_dir / "key_rsa.pub")
atex/util/subprocess.py CHANGED
@@ -3,29 +3,30 @@ import subprocess
  from .log import debug


- def _format_subprocess_cmd(cmd):
-     return cmd
-     # if isinstance(cmd, (list, tuple)):
-     #     return ' '.join(str(x) for x in cmd)
-     # else:
-     #     return cmd
-
-
  def subprocess_run(cmd, *, skip_frames=0, **kwargs):
      """
      A simple wrapper for the real subprocess.run() that logs the command used.
      """
      # when logging, skip current stack frame - report the place we were called
      # from, not util.subprocess_run itself
-     debug(f'running: {_format_subprocess_cmd(cmd)}', skip_frames=skip_frames+1)
+     debug(f"running: {cmd}", skip_frames=skip_frames+1)
      return subprocess.run(cmd, **kwargs)


+ def subprocess_output(cmd, *, skip_frames=0, check=True, text=True, **kwargs):
+     """
+     A wrapper simulating subprocess.check_output() via a modern .run() API.
+     """
+     debug(f"running: {cmd}", skip_frames=skip_frames+1)
+     proc = subprocess.run(cmd, check=check, text=text, stdout=subprocess.PIPE, **kwargs)
+     return proc.stdout.rstrip("\n") if text else proc.stdout
+
+
  def subprocess_Popen(cmd, *, skip_frames=0, **kwargs): # noqa: N802
      """
      A simple wrapper for the real subprocess.Popen() that logs the command used.
      """
-     debug(f'running: {_format_subprocess_cmd(cmd)}', skip_frames=skip_frames+1)
+     debug(f"running: {cmd}", skip_frames=skip_frames+1)
      return subprocess.Popen(cmd, **kwargs)


@@ -38,12 +39,12 @@ def subprocess_stream(cmd, *, check=False, skip_frames=0, **kwargs):

      To capture both stdout and stderr as yielded lines, use subprocess.STDOUT.
      """
-     debug(f'running: {_format_subprocess_cmd(cmd)}', skip_frames=skip_frames+1)
-     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True, **kwargs)
+     debug(f"running: {cmd}", skip_frames=skip_frames+1)
+     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, text=True, **kwargs)

      def generate_lines():
          for line in proc.stdout:
-             yield line.rstrip('\n')
+             yield line.rstrip("\n")
          code = proc.wait()
          if code > 0 and check:
              raise subprocess.CalledProcessError(cmd=cmd, returncode=code)
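A minimal usage sketch of the new `subprocess_output()` wrapper; the command is an arbitrary example:

```python
from atex import util

# runs the command, raises CalledProcessError on failure (check=True by default),
# and returns decoded stdout with the trailing newline stripped
kernel = util.subprocess_output(("uname", "-r"))

# text=False skips decoding and returns raw bytes unmodified
raw = util.subprocess_output(("uname", "-r"), text=False)
```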
atex/util/threads.py ADDED
@@ -0,0 +1,55 @@
+ import collections
+ import queue
+ import threading
+
+ # TODO: documentation; this is like concurrent.futures, but with daemon=True support
+
+
+ class ThreadQueue:
+     ThreadReturn = collections.namedtuple("ThreadReturn", ("thread", "returned", "exception"))
+     Empty = queue.Empty
+
+     def __init__(self, daemon=False):
+         self.queue = queue.SimpleQueue()
+         self.daemon = daemon
+         self.threads = set()
+
+     def _wrapper(self, func, *args, **kwargs):
+         current_thread = threading.current_thread()
+         try:
+             ret = func(*args, **kwargs)
+             result = self.ThreadReturn(current_thread, ret, None)
+         except Exception as e:
+             result = self.ThreadReturn(current_thread, None, e)
+         self.queue.put(result)
+
+     def start_thread(self, target, name=None, args=None, kwargs=None):
+         args = args or ()
+         kwargs = kwargs or {}
+         t = threading.Thread(
+             target=self._wrapper,
+             name=name,
+             args=(target, *args),
+             kwargs=kwargs,
+             daemon=self.daemon,
+         )
+         t.start()
+         self.threads.add(t)
+
+     # get one return value from any thread's function, like .as_completed()
+     # or concurrent.futures.FIRST_COMPLETED
+     def get(self, block=True, timeout=None):
+         if block and timeout is None and not self.threads:
+             raise AssertionError("no threads are running, would block forever")
+         treturn = self.queue.get(block=block, timeout=timeout)
+         self.threads.remove(treturn.thread)
+         if treturn.exception is not None:
+             raise treturn.exception
+         else:
+             return treturn.returned
+
+     # wait for all threads to finish (ignoring queue contents)
+     def join(self):
+         while self.threads:
+             t = self.threads.pop()
+             t.join()
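A minimal usage sketch of `ThreadQueue`; the worker function is a made-up example:

```python
import time

from atex import util

def worker(n):
    # stand-in for a long-running reservation/provisioning task
    time.sleep(n)
    return n * 10

tq = util.ThreadQueue(daemon=True)
for n in (3, 1, 2):
    tq.start_thread(target=worker, args=(n,))

# results arrive in completion order; an exception raised inside a worker
# would be re-raised here by .get()
print(tq.get())    # 10 (the n=1 worker finishes first)
print(tq.get())    # 20
print(tq.get())    # 30
tq.join()          # no threads left at this point, returns immediately
```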
{atex-0.5.dist-info → atex-0.8.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: atex
- Version: 0.5
+ Version: 0.8
  Summary: Ad-hoc Test EXecutor
  Project-URL: Homepage, https://github.com/RHSecurityCompliance/atex
  License-Expression: GPL-3.0-or-later
@@ -8,7 +8,7 @@ License-File: COPYING.txt
  Classifier: Operating System :: POSIX :: Linux
  Classifier: Programming Language :: Python :: 3
  Classifier: Topic :: Software Development :: Testing
- Requires-Python: >=3.9
+ Requires-Python: >=3.11
  Requires-Dist: fmf>=1.6
  Requires-Dist: urllib3<3,>=2
  Description-Content-Type: text/markdown
@@ -45,8 +45,103 @@ BREAK. DO NOT USE IT (for now).
  Unless specified otherwise, any content within this repository is distributed
  under the GNU GPLv3 license, see the [COPYING.txt](COPYING.txt) file for more.

+ ## Parallelism and cleanup
+
+ There are effectively 3 methods of running things in parallel in Python:
+
+ - `threading.Thread` (and related `concurrent.futures` classes)
+ - `multiprocessing.Process` (and related `concurrent.futures` classes)
+ - `asyncio`
+
+ and there is no clear winner (in terms of cleanup on `SIGTERM` or Ctrl-C):
+
+ - `Thread` has signal handlers only in the main thread and is unable to
+   interrupt any running threads without super ugly workarounds like `sleep(1)`
+   in every thread, checking some "pls exit" variable
+ - `Process` is too heavyweight and makes sharing native Python objects hard,
+   but it does handle signals in each process individually
+ - `asyncio` handles interrupting perfectly (every `try`/`except`/`finally`
+   completes just fine, `KeyboardInterrupt` is raised in every async context),
+   but async python is still (3.14) too weird and unsupported
+ - `asyncio` effectively re-implements `subprocess` with a slightly different
+   API, same with `asyncio.Transport` and derivatives reimplementing `socket`
+ - 3rd party libraries like `requests` or `urllib3` don't support it, one needs
+   to resort to spawning these in separate threads anyway
+ - same with `os.*` functions and syscalls
+ - every thing exposed via API needs to have 2 copies - async and non-async,
+   making it unbearable
+ - other stdlib bugs, ie. "large" reads returning BlockingIOError sometimes
+
+ The approach chosen by this project was to use `threading.Thread`, and
+ implement thread safety for classes and their functions that need it.
+ For example:
+
+ ```python
+ class MachineReserver:
+     def __init__(self):
+         self.lock = threading.RLock()
+         self.job = None
+         self.proc = None
+
+     def reserve(self, ...):
+         try:
+             ...
+             job = schedule_new_job_on_external_service()
+             with self.lock:
+                 self.job = job
+             ...
+             while not reserved(self.job):
+                 time.sleep(60)
+             ...
+             with self.lock:
+                 self.proc = subprocess.Popen(["ssh", f"{user}@{host}", ...)
+             ...
+             return machine
+         except Exception:
+             self.abort()
+             raise
+
+     def abort(self):
+         with self.lock:
+             if self.job:
+                 cancel_external_service(self.job)
+                 self.job = None
+             if self.proc:
+                 self.proc.kill()
+                 self.proc = None
+ ```
+
+ Here, it is expected for `.reserve()` to be called in a long-running thread that
+ provisions a new machine on some external service, waits for it to be installed
+ and reserved, connects an ssh session to it and returns it back.
+
+ But equally, `.abort()` can be called from an external thread and clean up any
+ non-pythonic resources (external jobs, processes, temporary files, etc.) at
+ which point **we don't care what happens to .reserve()**, it will probably fail
+ with some exception, but doesn't do any harm.
+
+ Here is where `daemon=True` threads come in handy - we can simply call `.abort()`
+ from a `KeyboardInterrupt` (or `SIGTERM`) handle in the main thread, and just
+ exit, automatically killing any leftover threads that are uselessly sleeping.
+ (Realistically, we might want to spawn new threads to run many `.abort()`s in
+ parallel, but the main thread can wait for those just fine.)
+
+ It is not perfect, but it's probably the best Python can do.
+
+ Note that races can still occur between a resource being reserved and written
+ to `self.*` for `.abort()` to free, so resource de-allocation is not 100%
+ guaranteed, but single-threaded interrupting has the same issue.
+ Do have fallbacks (ie. max reserve times on the external service).
+
+ Also note that `.reserve()` and `.abort()` could be also called by a context
+ manager as `__enter__` and `__exit__`, ie. by a non-threaded caller (running
+ everything in the main thread).
+
+
  ## Unsorted notes

+ TODO: codestyle from contest
+
  ```
  - this is not tmt, the goal is to make a python toolbox *for* making runcontest
    style tools easily, not to replace those tools with tmt-style CLI syntax
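The `daemon=True` cleanup pattern described in the README section above can be sketched as follows; `DummyReserver` is a stand-in for the README's `MachineReserver`, and the surrounding main-thread code is illustrative:

```python
# Illustrative sketch of the cleanup approach from the README: run reservers in
# daemon threads, and on Ctrl-C call .abort() on each from the main thread,
# then exit - leftover daemon threads die with the process.
import threading
import time


class DummyReserver:
    def __init__(self):
        self.lock = threading.RLock()
        self.job = None

    def reserve(self):
        with self.lock:
            self.job = "external-job-id"   # pretend we scheduled something
        time.sleep(3600)                   # pretend we wait for the machine

    def abort(self):
        with self.lock:
            if self.job:
                print(f"cancelling {self.job}")
                self.job = None


reservers = [DummyReserver() for _ in range(3)]
threads = [threading.Thread(target=r.reserve, daemon=True) for r in reservers]
for t in threads:
    t.start()

try:
    for t in threads:
        t.join()
except KeyboardInterrupt:
    # clean up external resources; whatever .reserve() keeps doing in the
    # dying daemon threads no longer matters
    for r in reservers:
        r.abort()
```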
atex-0.8.dist-info/RECORD ADDED
@@ -0,0 +1,37 @@
+ atex/__init__.py,sha256=LdX67gprtHYeAkjLhFPKzpc7ECv2rHxUbHKDGbGXO1c,517
+ atex/fmf.py,sha256=ofbrJx2362qHAxERS-WulK4TMpbp0C4HQ-Js917Ll9w,7871
+ atex/cli/__init__.py,sha256=erHv68SsybRbdgJ60013y9jVqY1ec-cb9T9ThPCJ_HY,2408
+ atex/cli/fmf.py,sha256=5DbA-3rfbFZ41fJ5z7Tiz5FmuZhXNC7gRAQfIGX7pXc,2516
+ atex/cli/testingfarm.py,sha256=wdN26TE9jZ0ozet-JBQQgIcRi0WIV3u_i-7_YYi_SUg,7248
+ atex/connection/__init__.py,sha256=xFwGOvlFez1lIt1AD6WXgEEIbsF22pSpQFv41GEAGAI,3798
+ atex/connection/ssh.py,sha256=vrrSfVdQoz5kWiZbiPuM8KGneMl2Tlb0VeJIHTFSSYs,13626
+ atex/executor/__init__.py,sha256=XCfhi7QDELjey7N1uzhMjc46Kp1Jsd5bOCf52I27SCE,85
+ atex/executor/duration.py,sha256=x06sItKOZi6XA8KszQwZGpIb1Z_L-HWqIwZKo2SDo0s,1759
+ atex/executor/executor.py,sha256=QYYSlEfBZIm95NhM1gwd2ROeshSAavYu2DP_4TTHlQs,14770
+ atex/executor/reporter.py,sha256=nW_Uls3R4Ev80a2ZNJl3nxAYrcYhXk5Cy9nAUMlYPrc,3326
+ atex/executor/scripts.py,sha256=yE4Lbfu-TPkBcB5t15-t-tF79H8pBJWbWP6MKRSvKsw,5356
+ atex/executor/testcontrol.py,sha256=-rfihfE6kryIGurFrHBPSS8ANaIJkzX-zfpOO8To-9o,12204
+ atex/orchestrator/__init__.py,sha256=eF-6ix5rFEu85fBFzgSdTYau7bNTkIQndAU7QqeI-FA,105
+ atex/orchestrator/aggregator.py,sha256=5-8nHVeW6kwImoEYOsQqsx6UBdbKc5xuj6qlg7dtOF8,3642
+ atex/orchestrator/orchestrator.py,sha256=tQu_d8_9y3rOLHskb694NJKNvxplQWAZ2R452Sy3AXw,12056
+ atex/provision/__init__.py,sha256=2d_hRVPxXF5BVbQ_Gn1OR-F2xuqRn8O0yyVbvSrtTIg,4043
+ atex/provision/libvirt/VM_PROVISION,sha256=7pkZ-ozgTyK4qNGC-E-HUznr4IhbosWSASbB72Gknl8,2664
+ atex/provision/libvirt/__init__.py,sha256=mAkGtciZsXdR9MVVrjm3OWNXZqTs_33-J1qAszFA0k4,768
+ atex/provision/libvirt/setup-libvirt.sh,sha256=CXrEFdrj8CSHXQZCd2RWuRvTmw7QYFTVhZeLuhhXooI,1855
+ atex/provision/podman/README,sha256=kgP3vcTfWW9gcQzmXnyucjgWbqjNqm_ZM--pnqNTXRg,1345
+ atex/provision/podman/host_container.sh,sha256=buCNz0BlsHY5I64sMSTGQHkvzEK0aeIhpGJXWCQVMXk,2283
+ atex/provision/testingfarm/__init__.py,sha256=kZncgLGdRCR4FMaRQr2GTwJ8vjlA-24ri8JO2ueZJuw,113
+ atex/provision/testingfarm/api.py,sha256=jiEJhYxMTzRihayceHcnDnGKNZJisYWn2o_TAdCI2Xo,19943
+ atex/provision/testingfarm/testingfarm.py,sha256=wp8W3bwOmQdO-UUOdqu_JLtOZTGaNg-wERFfLySwZmI,8587
+ atex/util/__init__.py,sha256=cWHFbtQ4mDlKe6lXyPDWRmWJOTcHDGfVuW_-GYa8hB0,1473
+ atex/util/dedent.py,sha256=SEuJMtLzqz3dQ7g7qyZzEJ9VYynVlk52tQCJY-FveXo,603
+ atex/util/log.py,sha256=KZkuw4jl8YTUOHZ4wNBrfDeg16VpLa82-IZYFHfqwgk,1995
+ atex/util/path.py,sha256=x-kXqiWCVodfZWbEwtC5A8LFvutpDIPYv2m0boZSlXU,504
+ atex/util/ssh_keygen.py,sha256=9yuSl2yBV7pG3Qfsf9tossVC00nbIUrAeLdbwTykpjk,384
+ atex/util/subprocess.py,sha256=IQT9QHe2kMaaO_XPSry-DwObYstGsq6_QdwdbhYDjko,1826
+ atex/util/threads.py,sha256=bezDIEIMcQinmG7f5E2K6_mHJQOlwx7W3I9CKkCYAYA,1830
+ atex-0.8.dist-info/METADATA,sha256=dvXW146ZvIfyzqPqGbKmhTNScLTZM7C5K0FLrGNGIJ0,8981
+ atex-0.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ atex-0.8.dist-info/entry_points.txt,sha256=pLqJdcfeyQTgup2h6dWb6SvkHhtOl-W5Eg9zV8moK0o,39
+ atex-0.8.dist-info/licenses/COPYING.txt,sha256=oEuj51jdmbXcCUy7pZ-KE0BNcJTR1okudRp5zQ0yWnU,670
+ atex-0.8.dist-info/RECORD,,