atex 0.13-py3-none-any.whl → 0.15-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,7 @@
  import os
  import re
  import time
+ import logging
  import tempfile
  import datetime
  import textwrap
@@ -15,11 +16,13 @@ from ... import util
  import json
  import urllib3

+ logger = logging.getLogger("atex.provisioner.testingfarm")
+
  DEFAULT_API_URL = "https://api.testing-farm.io"

  DEFAULT_RESERVE_TEST = {
  "url": "https://github.com/RHSecurityCompliance/atex-reserve",
- "ref": "main",
+ "ref": "0.12",
  "path": ".",
  "name": "/plans/reserve",
  }
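
This release also switches the module from the internal util.* logging helpers (util.debug, util.info, util.extradebug, util.warning, seen removed throughout the hunks below) to the standard library logging module, under logger names rooted at "atex" (here atex.provisioner.testingfarm, later atex.util.subprocess). A consuming application therefore opts into these messages with ordinary logging configuration; a minimal sketch, where the chosen level and format are arbitrary:

    import logging

    # route all atex loggers (provisioner, util.subprocess, ...) to stderr at INFO
    logging.basicConfig(format="%(asctime)s %(name)s: %(message)s")
    logging.getLogger("atex").setLevel(logging.INFO)
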
@@ -42,6 +45,8 @@ _http = urllib3.PoolManager(
  # retry on API server errors too, not just connection issues
  status=10,
  status_forcelist={403,404,408,429,500,502,503,504},
+ # retry POST as well, even if risky
+ allowed_methods=urllib3.Retry.DEFAULT_ALLOWED_METHODS | {"POST"},
  ),
  )

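
The added line extends urllib3's default set of retryable methods, which deliberately excludes POST because it is not idempotent. Standing alone, an equivalent pool manager would be built roughly like this (a sketch assuming a urllib3 version that exposes Retry.DEFAULT_ALLOWED_METHODS, i.e. 1.26 or newer):

    import urllib3

    retry = urllib3.Retry(
        status=10,  # up to 10 retries triggered by the listed status codes
        status_forcelist={403, 404, 408, 429, 500, 502, 503, 504},
        # POST is excluded from the defaults because retrying it can duplicate requests
        allowed_methods=urllib3.Retry.DEFAULT_ALLOWED_METHODS | {"POST"},
    )
    http = urllib3.PoolManager(retries=retry)
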
@@ -75,11 +80,11 @@ class TestingFarmAPI:

  def __init__(self, url=DEFAULT_API_URL, token=None):
  """
- 'url' is Testing Farm API URL, a sensible default is used
- if unspecified.
+ - `url` is Testing Farm API URL, a sensible default is used
+ if unspecified.

- 'token' is a secret API token generated by Testing Farm admins,
- if empty, the TESTING_FARM_API_TOKEN env var is read instead.
+ - `token` is a secret API token generated by Testing Farm admins,
+ if empty, the `TESTING_FARM_API_TOKEN` env var is read instead.

  Note that token-less operation is supported, with limited functionality.
  """
@@ -132,7 +137,7 @@ class TestingFarmAPI:

  def composes(self, ranch=None):
  """
- 'ranch' is 'public' or 'redhat', autodetected if token was given.
+ - `ranch` is `public` or `redhat`, autodetected if token was given.
  """
  if not ranch:
  if not self.api_token:
@@ -146,19 +151,21 @@ class TestingFarmAPI:
  created_before=None, created_after=None,
  ):
  """
- 'state' is one of 'running', 'queued', etc., and is required by the API.
+ - `state` is one of `running`, `queued`, etc., and is required by the
+ API.

- 'ranch' is 'public' or 'redhat', or (probably?) all if left empty.
+ - `ranch` is `public` or `redhat`, or (probably?) all if left empty.

- If 'mine' is True and a token was given, return only requests for that
- token (user), otherwise return *all* requests (use extra filters pls).
+ - If `mine` is `True` and a token was given, return only requests for
+ that token (user), otherwise return *all* requests (use extra filters
+ pls).

- 'user_id' and 'token_id' are search API parameters - if not given and
- 'mine' is True, these are extracted from a user-provided token.
+ - `user_id` and `token_id` are search API parameters - if not given and
+ `mine` is `True`, these are extracted from a user-provided token.

- 'created_*' take ISO 8601 formatted strings, as returned by the API
- elsewhere, ie. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS' (or with '.MS'),
- without timezone.
+ - `created_*` take ISO 8601 formatted strings, as returned by the API
+ elsewhere, ie. `YYYY-MM-DD` or `YYYY-MM-DDTHH:MM:SS` (or with `.MS`),
+ without timezone (UTC is used always).
  """
  fields = {"state": state}
  if ranch:
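
Put together, a search based on the docstring above might look like the following sketch; the state/ranch parameters and the created_* keywords are visible in the hunk, the remaining keyword spellings follow the docstring and should be treated as assumptions:

    api = TestingFarmAPI(token="...")  # or rely on TESTING_FARM_API_TOKEN
    recent = api.search_requests(
        state="running",
        mine=True,
        created_after="2025-01-01",
        created_before="2025-02-01T12:00:00",
    )
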
@@ -186,11 +193,11 @@ class TestingFarmAPI:
  An unofficial wrapper for search_requests() that can search a large
  interval incrementally (in "pages") and yield batches of results.

- Needs 'created_after', with 'created_before' defaulting to now().
+ Needs `created_after`, with `created_before` defaulting to `now()`.

- 'page' specifies the time interval of one page, in seconds.
+ - `page` specifies the time interval of one page, in seconds.

- 'args' and 'kwargs' are passed to search_requests().
+ - `args` and `kwargs` are passed to `search_requests()`.
  """
  assert "created_after" in kwargs, "at least 'created_after' is needed for paging"

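
Iterating the paged wrapper would then look roughly like the sketch below; the wrapper's own name is not visible in this hunk, so search_requests_paged is a hypothetical stand-in, and the per-request fields depend on the API response:

    # 'search_requests_paged' is an assumed name for the paged wrapper above
    for batch in api.search_requests_paged(
        state="complete",
        mine=True,
        created_after="2025-01-01",
        page=86400,  # one-day pages
    ):
        for request in batch:
            print(request.get("id"))
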
@@ -229,13 +236,13 @@ class TestingFarmAPI:

  def get_request(self, request_id):
  """
- 'request_id' is the UUID (string) of the request.
+ - `request_id` is the UUID (string) of the request.
  """
  return self._query("GET", f"/requests/{request_id}")

  def submit_request(self, spec):
  """
- 'spec' is a big dictionary with 'test', 'environment', 'settings', etc.
+ - `spec` is a big dictionary with 'test', 'environment', 'settings', etc.
  keys that specify what should be run and where.
  """
  if not self.api_token:
@@ -244,7 +251,7 @@ class TestingFarmAPI:

  def cancel_request(self, request_id):
  """
- 'request_id' is the UUID (string) of the request.
+ - `request_id` is the UUID (string) of the request.
  """
  return self._query("DELETE", f"/requests/{request_id}")

@@ -261,11 +268,13 @@ class Request:

  def __init__(self, id=None, api=None, initial_data=None):
  """
- 'id' is a Testing Farm request UUID
+ - `id` is a Testing Farm request UUID.

- 'api' is a TestingFarmAPI instance - if unspecified, a sensible default
+ - `api` is a TestingFarmAPI instance - if unspecified, a new one
+ is instantiated.

- 'initial_data' (dict) can be used to pre-fill an initial Request state.
+ - `initial_data` (dict) can be used to pre-fill an initial Request
+ state.
  """
  self.id = id
  self.api = api or TestingFarmAPI()
@@ -274,8 +283,8 @@ class Request:

  def submit(self, spec):
  """
- 'spec' is a big dictionary with 'test', 'environment', 'settings', etc.
- keys that specify what should be run and where.
+ - `spec` is a big dictionary with 'test', 'environment', 'settings',
+ etc. keys that specify what should be run and where.
  """
  if self.id:
  raise ValueError("this Request instance already has 'id', refusing submit")
@@ -310,7 +319,7 @@ class Request:

  def wait_for_state(self, state):
  """
- 'state' is a str or a tuple of states to wait for.
+ - `state` is a string or a tuple of states to wait for.
  """
  watched = (state,) if isinstance(state, str) else state
  while True:
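
The Request docstrings above describe a submit-then-wait flow; a minimal sketch, with spec standing for the request dictionary described for submit_request() and the state names taken from the search_requests() docstring:

    req = Request(api=TestingFarmAPI(token="..."))
    req.submit(spec)                           # refuses to run if req.id is already set
    req.wait_for_state(("queued", "running"))  # block until one of the watched states
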
@@ -371,12 +380,12 @@ class PipelineLogStreamer:
  # 403: happens on internal OSCI artifacts server, probably
  # due to similar reasons (folder exists without log)
  if reply.status in (404,403):
- util.debug(f"got {reply.status} for {log}, retrying")
+ logger.info(f"got {reply.status} for {log}, retrying")
  continue
  elif reply.status != 200:
  raise APIError(f"got HTTP {reply.status} on HEAD {log}", reply)

- util.info(f"artifacts: {artifacts}")
+ logger.info(f"artifacts: {artifacts}")

  return log

@@ -440,54 +449,59 @@ class Reserve:
  api=None,
  ):
  """
- 'compose' (str) is the OS to install, chosen from the composes supported
- by the Testing Farm ranch of the authenticated user.
-
- 'arch' (str) is one of 'x86_64', 's390x', etc.
-
- 'pool' (str) is a name of a Testing Farm infrastructure pool.
-
- 'hardware' (dict) is a complex specification of hardware properties
- the reserved system should have, see:
- https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#hardware
-
- 'kickstart' (dict) is a Beaker-style specification of Anaconda Kickstart
- hacks, passed directly to Testing Farm POST /requests API.
-
- 'timeout' (int) is the maximum time IN MINUTES a Testing Farm request
- is alive, which includes initial creation, waiting in queue, preparing
- an OS, and the entire reservation period.
- Make sure to set it high enough (not just the pure reservation time).
-
- 'ssh_key' (str) is a path to an OpenSSH private key file (with an
- associated public key file in .pub), to be added to the reserved OS.
- If unspecified, an attempt to read ~/.ssh/id_rsa will be made and if
- that is also unsuccessful, a temporary keypair will be generated.
-
- 'source_host' (str) is an IPv4 network specified as ie. '1.2.3.4/32'
- to be allowed incoming traffic to the reserved system (such as ssh).
- If unspecified, an Internet service will be queried to get an outside-
- facing address of the current system.
- Ignored on the 'redhat' ranch.
-
- 'reserve_test' is a dict with a fmf test specification to be run on the
- target system to reserve it, ie.:
- {
- "url": "https://some-host/path/to/repo",
- "ref": "main",
- "name": "/plans/reserve",
- }
+ - `compose` (str) is the OS to install, chosen from the composes
+ supported by the Testing Farm ranch of the authenticated user.
+
+ - `arch` (str) is one of 'x86_64', 's390x', etc.
+
+ - `pool` (str) is a name of a Testing Farm infrastructure pool.
+
+ - `hardware` (dict) is a complex specification of hardware properties
+ the reserved system should have, see:
+ https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#hardware
+
+ - `kickstart` (dict) is a Beaker-style specification of Anaconda
+ Kickstart hacks, passed directly to Testing Farm POST /requests API.
+
+ - `timeout` (int) is the maximum time **in minutes** a Testing Farm
+ request is alive, which includes initial creation, waiting in queue,
+ preparing an OS, and the entire reservation period.
+
+ Make sure to set it high enough (not just the pure reservation time).
+
+ - `ssh_key` (str) is a path to an OpenSSH private key file (with an
+ associated public key file in .pub), to be added to the reserved OS.
+
+ If unspecified, an attempt to read `~/.ssh/id_rsa` will be made, and
+ if that is also unsuccessful, a temporary keypair will be generated.
+
+ - `source_host` (str) is an IPv4 network specified as ie. `1.2.3.4/32`
+ to be allowed incoming traffic to the reserved system (such as ssh).
+
+ If unspecified, an Internet service will be queried to get an outside-
+ facing address of the current system.
+
+ Ignored on the `redhat` ranch.
+
+ - `reserve_test` is a dict with a fmf test specification to be run on
+ the target system to reserve it, ie.:
+ {
+ "url": "https://some-host/path/to/repo",
+ "ref": "main",
+ "name": "/plans/reserve",
+ }

- 'variables' and 'secrets' are dicts with environment variable key/values
- exported for the reserve test - variables are visible via TF API,
- secrets are not (but can still be extracted from pipeline log).
+ - `variables` and `secrets` are dicts with environment variable
+ key/values exported for the reserve test - variables are visible via
+ TF API, secrets are not (but can still be extracted from pipeline
+ log).

- 'tags' is a dict of custom key/values to be submitted in TF Request as
- environments->settings->provisioning->tags, useful for storing custom
- metadata to be queried later.
+ - `tags` is a dict of custom key/values to be submitted in TF Request as
+ `environments->settings->provisioning->tags`, useful for storing
+ custom metadata to be queried later.

- 'api' is a TestingFarmAPI instance - if unspecified, a sensible default
- will be used.
+ - `api` is a TestingFarmAPI instance - if unspecified, a new one
+ is instantiated.
  """
  spec = {
  "test": {
@@ -592,8 +606,8 @@ class Reserve:
  with self.lock:
  self.request = Request(api=self.api)
  self.request.submit(spec)
- util.debug(f"submitted request {self.request.id}")
- util.extradebug(
+ logger.info(f"submitted request {self.request.id}")
+ logger.debug(
  f"request {self.request.id}:\n{textwrap.indent(str(self.request), ' ')}",
  )

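
Given the parameters documented in the Reserve.__init__ docstring above, constructing a reservation might look like this sketch; the compose name and tag values are placeholders:

    reserve = Reserve(
        compose="Fedora-Rawhide",   # placeholder compose name
        arch="x86_64",
        timeout=720,                # minutes: queueing + provisioning + the reservation itself
        ssh_key="~/.ssh/id_rsa",
        tags={"purpose": "manual-debugging"},
    )
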
@@ -602,7 +616,7 @@ class Reserve:
  for line in PipelineLogStreamer(self.request):
  # the '\033[0m' is to reset colors sometimes left in a bad
  # state by pipeline.log
- util.extradebug(f"{line}\033[0m")
+ logger.debug(f"{line}\033[0m")
  # find hidden login details
  m = re.search(
  # host address can be an IP address or a hostname
@@ -1,4 +1,5 @@
  import time
+ import logging
  import tempfile
  import threading
  import concurrent.futures
@@ -8,6 +9,8 @@ from .. import Provisioner, Remote

  from . import api

+ logger = logging.getLogger("atex.provisioner.testingfarm")
+

  class TestingFarmRemote(Remote, connection.ssh.ManagedSSHConnection):
  """
@@ -17,12 +20,13 @@ class TestingFarmRemote(Remote, connection.ssh.ManagedSSHConnection):

  def __init__(self, request_id, ssh_options, *, release_hook):
  """
- 'request_id' is a string with Testing Farm request UUID (for printouts).
+ - `request_id` is a string with Testing Farm request UUID
+ (for printouts).

- 'ssh_options' are a dict, passed to ManagedSSHConnection __init__().
+ - `ssh_options` are a dict, passed to ManagedSSHConnection `__init__()`.

- 'release_hook' is a callable called on .release() in addition
- to disconnecting the connection.
+ - `release_hook` is a callable called on `.release()` in addition
+ to disconnecting the connection.
  """
  # NOTE: self.lock inherited from ManagedSSHConnection
  super().__init__(options=ssh_options)
@@ -59,12 +63,12 @@ class TestingFarmProvisioner(Provisioner):

  def __init__(self, compose, arch="x86_64", *, max_retries=10, **reserve_kwargs):
  """
- 'compose' is a Testing Farm compose to prepare.
+ - `compose` is a Testing Farm compose to prepare.

- 'arch' is an architecture associated with the compose.
+ - `arch` is an architecture associated with the compose.

- 'max_retries' is a maximum number of provisioning (Testing Farm) errors
- that will be reprovisioned before giving up.
+ - `max_retries` is a maximum number of provisioning (Testing Farm) errors
+ that will be reprovisioned before giving up.
  """
  self.lock = threading.RLock()
  self.compose = compose
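
The signature above takes a compose, an architecture and a retry budget, with the remaining keyword arguments collected as reserve_kwargs (judging by the name, presumably handed on to the api.Reserve() call shown further down); a minimal instantiation sketch with placeholder values:

    provisioner = TestingFarmProvisioner(
        "CentOS-Stream-9",   # placeholder compose name
        arch="x86_64",
        max_retries=5,
        timeout=720,         # collected into **reserve_kwargs
    )
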
@@ -90,7 +94,7 @@ class TestingFarmProvisioner(Provisioner):
  # distribute load on TF servers
  # (we can sleep here as this code is running in a separate thread)
  if initial_delay:
- util.debug(f"delaying for {initial_delay}s to distribute load")
+ logger.info(f"delaying for {initial_delay}s to distribute load")
  time.sleep(initial_delay)

  # 'machine' is api.Reserve.ReservedMachine namedtuple
@@ -135,7 +139,7 @@ class TestingFarmProvisioner(Provisioner):
  # instantiate a class Reserve from the Testing Farm api module
  # (which typically provides context manager, but we use its .reserve()
  # and .release() functions directly)
- util.info(f"{repr(self)}: reserving new remote")
+ logger.info(f"{repr(self)}: reserving new remote")
  tf_reserve = api.Reserve(
  compose=self.compose,
  arch=self.arch,
@@ -204,7 +208,7 @@ class TestingFarmProvisioner(Provisioner):
  exc_str = f"{type(e).__name__}({e})"
  with self.lock:
  if self.retries > 0:
- util.warning(
+ logger.warning(
  f"caught while reserving a TF system: {exc_str}, "
  f"retrying ({self.retries} left)",
  )
@@ -215,7 +219,7 @@ class TestingFarmProvisioner(Provisioner):
  else:
  return None
  else:
- util.warning(
+ logger.warning(
  f"caught while reserving a TF system: {exc_str}, "
  "exhausted all retries, giving up",
  )
atex/util/__init__.py CHANGED
@@ -1,3 +1,26 @@
+ """
+ The point of this directory is to have miscellaneous utilities (for text
+ formatting, subprocess wrappers, whatever) accessible from the `util.*`
+ namespace, while being able to break them down into multiple `*.py` files
+ for readability.
+
+ These multiple `*.py` files then get automatically imported into one `globals()`
+ of the entire `util` module (package), appearing as a singular `util`.
+
+ Since the individual submodules cannot easily `from .* import *` themselves,
+ and since the intention is to give the impression of a single big `util.py`,
+ any local/relative imports between files should extract the necessary
+ identifiers via ie.
+
+ # in wrappers.py
+ from .custom_dedent import dedent
+
+ dedent(...)
+
+ rather than trying to preserve `custom_dedent.dedent()` or reaching beyond
+ parent with `from .. import util` (creating an infinite recursion).
+ """
+
  import importlib as _importlib
  import pkgutil as _pkgutil
  import inspect as _inspect
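
The new module docstring describes submodules being pulled into the package's own globals(); the loop that does this is not part of the diff, but with the importlib/pkgutil imports shown above the pattern typically looks roughly like the following sketch (a hypothetical reconstruction, not the actual atex code):

    # inside the package's __init__.py, after the imports above
    for _mod_info in _pkgutil.iter_modules(__path__):
        _mod = _importlib.import_module(f".{_mod_info.name}", __name__)
        for _name, _obj in vars(_mod).items():
            if not _name.startswith("_"):
                globals()[_name] = _obj
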
atex/util/dedent.py CHANGED
@@ -8,10 +8,10 @@ This allows raw blocks like
  ''')

  without the leading or trailing newlines and any common leading whitespaces.
- You might think using '''\ would eliminate the first newline, but the string
+ You might think using `'''\` would eliminate the first newline, but the string
  is 'raw', it doesn't have escapes.

- textwrap.dedent() does only the common leading whitespaces.
+ `textwrap.dedent()` does only the common leading whitespaces.
  """

  import textwrap
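
The docstring explains why plain textwrap.dedent() is not enough: it removes the common indentation but keeps the newline right after the opening ''' and the one before the closing '''. A helper with the described behaviour can be as small as the following sketch (the actual atex implementation is not shown in this diff):

    import textwrap

    def dedent_sketch(text):
        # remove the common indentation first, then the surrounding newlines
        return textwrap.dedent(text).strip("\n")

    dedent_sketch('''
        some text
        more text
    ''')  # == "some text\nmore text"
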
@@ -27,7 +27,7 @@ as a dict, that are used if omitted from the constructor:

  m = MyMap() # will have m.key == 678

- A class instance can unpack via ** with the entirety of its mapping contents:
+ A class instance can unpack via `**` with the entirety of its mapping contents:

  m = MyMap(key2=456)
  both = {'key1': 123, **m} # contains both keys
@@ -59,7 +59,7 @@ another dict-like object (does not have to be a parent of the class):
  s = SmallMap._from(b, extra=555) # can pass extra **kwargs to __init__
  s = SmallMap(**b) # will copy all keys

- Note that this is a fairly basic implementation without __hash__, etc.
+ Note that this is a fairly basic implementation without `__hash__`, etc.
  """

  import abc
@@ -153,6 +153,6 @@ class NamedMapping(collections.abc.Mapping, metaclass=_NamedMappingMeta):
  def __repr__(self):
  return (
  f"{self.__class__.__name__}("
- + ", ".join((f"{k}={repr(v)}" for k,v in self._data.items()))
+ + ", ".join(f"{k}={repr(v)}" for k,v in self._data.items())
  + ")"
  )
atex/util/path.py CHANGED
@@ -3,7 +3,7 @@ import os

  def normalize_path(path):
  """
- Transform a potentially dangerous path (leading slash, relative ../../../
+ Transform a potentially dangerous path (leading slash, relative `../../../`
  leading beyond parent, etc.) to a safe one.

  Always returns a relative path.
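
The function body is outside this hunk, but one common way to get the documented behaviour (always relative, never escaping upwards) is sketched below; this is a hypothetical illustration, not the atex implementation:

    import os

    def normalize_path_sketch(path):
        # anchoring at "/" makes normpath() collapse any leading "../" against
        # the root, and lstrip() then turns the result back into a relative path
        return os.path.normpath("/" + path).lstrip("/") or "."

    normalize_path_sketch("../../etc/passwd")  # -> "etc/passwd"
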
atex/util/subprocess.py CHANGED
@@ -1,52 +1,58 @@
+ import logging
  import subprocess

- from .log import extradebug
+ logger = logging.getLogger("atex.util.subprocess")


  def subprocess_run(cmd, **kwargs):
  """
- A simple wrapper for the real subprocess.run() that logs the command used.
+ A simple wrapper for the real `subprocess.run()` that logs the command used.
  """
  # when logging, skip current stack frame - report the place we were called
  # from, not util.subprocess_run itself
- extradebug(f"running: '{cmd}' with {kwargs=}")
+ logger.info(f"running: '{cmd}' with {kwargs=}")
  return subprocess.run(cmd, **kwargs)


  def subprocess_output(cmd, *, check=True, text=True, **kwargs):
  """
- A wrapper simulating subprocess.check_output() via a modern .run() API.
+ A wrapper simulating `subprocess.check_output()` via a modern `.run()` API.
  """
- extradebug(f"running: '{cmd}' with {check=}, {text=} and {kwargs=}")
+ logger.info(f"running: '{cmd}' with {check=}, {text=} and {kwargs=}")
  proc = subprocess.run(cmd, check=check, text=text, stdout=subprocess.PIPE, **kwargs)
  return proc.stdout.rstrip("\n") if text else proc.stdout


  def subprocess_Popen(cmd, **kwargs): # noqa: N802
  """
- A simple wrapper for the real subprocess.Popen() that logs the command used.
+ A simple wrapper for the real `subprocess.Popen()` that logs the command used.
  """
- extradebug(f"running: '{cmd}' with {kwargs=}")
+ logger.info(f"running: '{cmd}' with {kwargs=}")
  return subprocess.Popen(cmd, **kwargs)


  def subprocess_stream(cmd, *, stream="stdout", check=False, input=None, **kwargs):
  """
- Run 'cmd' via subprocess.Popen() and return an iterator over any lines
+ Run `cmd` via `subprocess.Popen()` and return an iterator over any lines
  the command outputs on stdout, in text mode.

- The 'stream' is a subprocess.Popen attribute (either 'stdout' or 'stderr')
- to read from.
- To capture both stdout and stderr as yielded lines, use 'stream="stdout"'
- and pass an additional 'stderr=subprocess.STDOUT'.
+ - The `stream` is a subprocess.Popen attribute (either `stdout` or `stderr`)
+ to read from.

- With 'check' set to True, raise a CalledProcessError if the 'cmd' failed.
+ To capture both stdout and stderr as yielded lines, use `stream="stdout"`
+ and pass an additional `stderr=subprocess.STDOUT`.

- Similarly, 'input' simulates the 'input' arg of subprocess.run().
- Note that the input is written to stdin of the process *before* any outputs
- are streamed, so it should be sufficiently small and/or not cause a deadlock
- with the process waiting for outputs to be read before consuming more input.
- Use 'stdin=subprocess.PIPE' and write to it manually if you need more.
+ - With `check` set to `True`, raise a CalledProcessError if the `cmd`
+ failed.
+
+ - Similarly, `input` simulates the `input` arg of `subprocess.run()`.
+
+ Note that the input is written to stdin of the process *before* any
+ outputs are streamed, so it should be sufficiently small and/or not cause
+ a deadlock with the process waiting for outputs to be read before
+ consuming more input.
+
+ Use `stdin=subprocess.PIPE` and write to it manually if you need more.
  """
  all_kwargs = {
  "text": True,
@@ -56,7 +62,7 @@ def subprocess_stream(cmd, *, stream="stdout", check=False, input=None, **kwargs
  all_kwargs["stdin"] = subprocess.PIPE
  all_kwargs |= kwargs

- extradebug(f"running: '{cmd}' with {all_kwargs=}")
+ logger.info(f"running: '{cmd}' with {all_kwargs=}")
  proc = subprocess.Popen(cmd, **all_kwargs)

  def generate_lines():
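
Per the subprocess_stream() docstring above and the `_, lines = subprocess_stream(cmd, **kwargs)` call in subprocess_log() below, the helper returns a pair of the spawned process and a line iterator; a usage sketch:

    import subprocess

    proc, lines = subprocess_stream(
        ["dmesg"],
        stderr=subprocess.STDOUT,  # fold stderr into the yielded stdout lines
    )
    for line in lines:
        print(line)
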
@@ -78,9 +84,9 @@ def subprocess_log(cmd, **kwargs):
  A wrapper to stream every (text) line output from the process to the
  logging module.

- Uses subprocess_stream() to gather the lines.
+ Uses `subprocess_stream()` to gather the lines.
  """
- extradebug(f"running: '{cmd}' with {kwargs=}")
+ logger.info(f"running: '{cmd}' with {kwargs=}")
  _, lines = subprocess_stream(cmd, **kwargs)
  for line in lines:
- extradebug(line)
+ logger.info(line)
atex/util/threads.py CHANGED
@@ -39,12 +39,12 @@ class ThreadQueue:

  def start_thread(self, target, target_args=None, target_kwargs=None, **user_kwargs):
  """
- Start a new thread and call 'target' as a callable inside it, passing it
- 'target_args' as arguments and 'target_kwargs' as keyword arguments.
+ Start a new thread and call `target` as a callable inside it, passing it
+ `target_args` as arguments and `target_kwargs` as keyword arguments.

- Any additional 'user_kwargs' specified are NOT passed to the callable,
+ Any additional `user_kwargs` specified are NOT passed to the callable,
  but instead become part of the ThreadReturn namespace returned by the
- .get_raw() method.
+ `.get_raw()` method.
  """
  t = threading.Thread(
  target=self._wrapper,
@@ -59,8 +59,8 @@ class ThreadQueue:
  def get_raw(self, block=True, timeout=None):
  """
  Wait for and return the next available ThreadReturn instance on the
- queue, as enqueued by a finished callable started by the .start_thread()
- method.
+ queue, as enqueued by a finished callable started by the
+ `.start_thread()` method.
  """
  with self.lock:
  if block and timeout is None and not self.threads:
@@ -75,7 +75,7 @@ class ThreadQueue:
  def get(self, block=True, timeout=None):
  """
  Wait for and return the next available return value of a callable
- enqueued via the .start_thread() method.
+ enqueued via the `.start_thread()` method.

  If the callable raised an exception, the exception is re-raised here.
  """
@@ -100,7 +100,7 @@ class ThreadQueue:

  def qsize(self):
  """
- Return the amount of elements .get() can retrieve before it raises
- queue.Empty.
+ Return the amount of elements `.get()` can retrieve before it raises
+ `queue.Empty`.
  """
  return self.queue.qsize()
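
The docstrings above describe the intended flow: start callables in background threads, then drain their return values (or re-raised exceptions) from the queue; a usage sketch (any ThreadQueue constructor arguments are not visible in this diff):

    tq = ThreadQueue()

    def work(x):
        return x * 2

    for i in range(3):
        tq.start_thread(work, target_args=(i,), label=f"job-{i}")

    for _ in range(3):
        print(tq.get())   # re-raises here if the callable raised
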
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: atex
- Version: 0.13
+ Version: 0.15
  Summary: Ad-hoc Test EXecutor
  Project-URL: Homepage, https://github.com/RHSecurityCompliance/atex
  License-Expression: GPL-3.0-or-later