atex-0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
atex/__init__.py ADDED
@@ -0,0 +1,35 @@
1
+ """
2
+ Ad-hoc Test EXecutor
3
+
4
+ Some documentation here.
5
+ """
6
+
7
+ import importlib as _importlib
8
+ import pkgutil as _pkgutil
9
+
10
# Advertise every submodule sitting next to this __init__.py; this list
# drives both dir() (via __dir__) and lazy imports (via __getattr__).
__all__ = [
    module_info.name
    for module_info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
]
13
+
14
+
15
def __dir__():
    # PEP 562 module __dir__: limit dir(atex) to the discovered submodules.
    return __all__
17
+
18
+
19
def __getattr__(attr):
    """
    Lazily import a submodule on first attribute access (PEP 562).

    Any name listed in __all__ (i.e. any submodule discovered next to this
    __init__.py) is imported on demand and returned; the imported module is
    cached in the package namespace by the import machinery, so this hook
    runs at most once per submodule.

    Raises AttributeError for anything not in __all__.
    """
    if attr in __all__:
        return _importlib.import_module(f'.{attr}', __name__)
    raise AttributeError(f'module {__name__} has no attribute {attr}')
atex/cli/__init__.py ADDED
@@ -0,0 +1,83 @@
1
+ r"""
2
+ Command line interface to atex
3
+
4
+ Submodules (subpackages) of this one must define a module-level dict with
5
+ these keys:
6
+
7
+ - help
8
+ - short oneliner about what the submodule is about (for argparse --help)
9
+
10
+ - aliases (optional)
11
+ - tuple of aliases of the module name, for argument parsing
12
+
13
+ - args
14
+ - function (or other callable) for argument specification/parsing,
15
+ gets passed one non-kw argument: argparse-style parser
16
+
17
+ - main
18
+ - function (or other callable) that will be called when invoked by the user,
19
+ gets passed one non-kw argument: argparse-style Namespace
20
+
21
+ This module-level dict must be named 'CLI_SPEC'.
22
+ """
23
+
24
+ import sys
25
+ import importlib
26
+ import pkgutil
27
+ import argparse
28
+ import logging
29
+
30
+
31
def setup_logging(level):
    """Configure root logging to stderr with a timestamped one-line format."""
    logging.basicConfig(
        stream=sys.stderr,
        level=level,
        datefmt='%Y-%m-%d %H:%M:%S',
        format='%(asctime)s %(name)s: %(message)s',
    )
38
+
39
+
40
def collect_modules():
    """
    Yield (name, CLI_SPEC) for every submodule of this package.

    Raises ValueError if a submodule does not define the CLI_SPEC dict
    described in the module docstring.
    """
    for module_info in pkgutil.iter_modules(__spec__.submodule_search_locations):
        name = module_info.name
        submodule = importlib.import_module(f'.{name}', __name__)
        if not hasattr(submodule, 'CLI_SPEC'):
            raise ValueError(f"CLI submodule {name} does not define CLI_SPEC")
        yield (name, submodule.CLI_SPEC)
46
+
47
+
48
def main():
    """
    CLI entry point: build the argument parser from all CLI submodules,
    parse sys.argv, set up logging and dispatch to the chosen module's main.
    """
    parser = argparse.ArgumentParser()

    # --debug and --quiet both set 'loglevel', so they are mutually exclusive
    verbosity = parser.add_mutually_exclusive_group()
    verbosity.add_argument(
        '--debug', '-d', action='store_const', dest='loglevel', const=logging.DEBUG,
        help="enable extra debugging (logging.DEBUG)",
    )
    verbosity.add_argument(
        '--quiet', '-q', action='store_const', dest='loglevel', const=logging.WARNING,
        help="be quiet during normal operation (logging.WARNING)",
    )
    parser.set_defaults(loglevel=logging.INFO)

    subparsers = parser.add_subparsers(dest='_module', metavar='<module>', required=True)
    # map every subcommand name (and alias) to its module's main callable
    dispatch = {}
    for name, spec in collect_modules():
        aliases = spec.get('aliases', ())
        module_parser = subparsers.add_parser(
            name,
            aliases=aliases,
            help=spec['help'],
        )
        spec['args'](module_parser)
        for key in (name, *aliases):
            dispatch[key] = spec['main']

    args = parser.parse_args()

    setup_logging(args.loglevel)

    try:
        dispatch[args._module](args)
    except KeyboardInterrupt:
        # exit quietly on Ctrl-C, without a traceback
        raise SystemExit() from None
@@ -0,0 +1,171 @@
1
+ import sys
2
+ #from datetime import datetime
3
+
4
+ from .. import util
5
+ from .. import testingfarm as tf
6
+
7
+
8
def _get_api(args):
    """Build a TestingFarmAPI instance from the optional --url/--token args."""
    kwargs = {
        key: value
        for key, value in (('url', args.url), ('token', args.token))
        if value
    }
    return tf.TestingFarmAPI(**kwargs)
15
+
16
+
17
def composes(args):
    """Print the name of every compose available on the given ranch."""
    api = _get_api(args)
    reply = api.composes(ranch=args.ranch)
    for compose in reply['composes']:
        print(compose['name'])
23
+
24
+
25
def get_request(args):
    """Fetch a Testing Farm request by UUID and print its representation."""
    request = tf.Request(args.request_id, api=_get_api(args))
    request.update()
    print(str(request))
30
+
31
+
32
def search_requests(args):
    """
    List requests matching the given criteria, one per line, sorted by
    creation time, formatted as '<created> <uuid> : <compose>@<arch>, ...'.
    """
    api = _get_api(args)
    reply = api.search_requests(
        state=args.state,
        mine=not args.all,
        ranch=args.ranch,
        created_before=args.before,
        created_after=args.after,
    )
    if not reply:
        return

    for req in sorted(reply, key=lambda x: x['created']):
        req_id = req['id']
        # strip sub-second precision; the timestamp stays in the API's
        # timezone (presumably UTC -- TODO confirm before localizing)
        created = req['created'].partition('.')[0]

        # summarize requested environments as 'compose@arch' pairs,
        # silently skipping entries without a usable compose/arch
        envs = []
        for env in req['environments_requested']:
            os_data = env.get('os')
            if not os_data or 'compose' not in os_data:
                continue
            compose = os_data['compose']
            arch = env['arch']
            if compose and arch:
                envs.append(f'{compose}@{arch}')
        envs_str = ', '.join(envs)

        print(f'{created} {req_id} : {envs_str}')
65
+
66
+
67
def reserve(args):
    """Reserve a machine on Testing Farm and drop into an interactive ssh."""
    util.info(f"Reserving {args.compose} on {args.arch} for {args.timeout} minutes")

    # NOTE(review): --ssh-key is defined in parse_args but never used here;
    # confirm whether it should be passed to tf.Reserve or to ssh below
    reservation = tf.Reserve(
        compose=args.compose,
        arch=args.arch,
        timeout=args.timeout,
        api=_get_api(args),
    )
    with reservation as machine:
        util.info(f"Got machine: {machine}")
        ssh_cmdline = [
            'ssh', '-q', '-i', machine.ssh_key,
            '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
            f'{machine.user}@{machine.host}',
        ]
        util.subprocess_run(ssh_cmdline)
84
+
85
+
86
def watch_pipeline(args):
    """Continuously stream a request's pipeline.log, like 'tail -f'."""
    api = _get_api(args)
    request = tf.Request(id=args.request_id, api=api)

    util.info(f"Waiting for {args.request_id} to be 'running'")
    try:
        request.wait_for_state('running')
    except tf.GoneAwayError:
        # nothing to stream anymore
        util.info(f"Request {args.request_id} already finished")
        return

    util.info("Querying pipeline.log")
    try:
        for line in tf.PipelineLogStreamer(request):
            sys.stdout.write(line)
            sys.stdout.write('\n')
    except tf.GoneAwayError:
        util.info(f"Request {args.request_id} finished, exiting")
104
+
105
+
106
+ def parse_args(parser):
107
+ parser.add_argument('--url', help='Testing Farm API URL')
108
+ parser.add_argument('--token', help='Testing Farm API auth token')
109
+ cmds = parser.add_subparsers(
110
+ dest='_cmd', help="TF helper to run", metavar='<cmd>', required=True,
111
+ )
112
+
113
+ cmd = cmds.add_parser(
114
+ 'composes',
115
+ help="list all composes available on a given ranch",
116
+ )
117
+ cmd.add_argument('ranch', nargs='?', help="Testing Farm ranch (autodetected if token)")
118
+
119
+ cmd = cmds.add_parser(
120
+ 'get-request', aliases=('gr',),
121
+ help="retrieve and print JSON of a Testing Farm request",
122
+ )
123
+ cmd.add_argument('request_id', help="Testing Farm request UUID")
124
+
125
+ cmd = cmds.add_parser(
126
+ 'search-requests', aliases=('sr',),
127
+ help="return a list of requests matching the criteria",
128
+ )
129
+ cmd.add_argument('--state', help="request state (running, etc.)", required=True)
130
+ cmd.add_argument('--all', help="all requests, not just owned by token", action='store_true')
131
+ cmd.add_argument('--ranch', help="Testing Farm ranch")
132
+ cmd.add_argument('--before', help="only requests created before ISO8601")
133
+ cmd.add_argument('--after', help="only requests created after ISO8601")
134
+
135
+ cmd = cmds.add_parser(
136
+ 'reserve',
137
+ help="reserve a system and ssh into it",
138
+ )
139
+ cmd.add_argument('--compose', '-c', help="OS compose to install", required=True)
140
+ cmd.add_argument('--arch', '-a', help="system HW architecture", default='x86_64')
141
+ cmd.add_argument('--timeout', '-t', help="pipeline timeout (in minutes)", type=int, default=60)
142
+ cmd.add_argument('--ssh-key', help="path to a ssh private key file like 'id_rsa'")
143
+
144
+ cmd = cmds.add_parser(
145
+ 'watch-pipeline', aliases=('wp',),
146
+ help="continuously output pipeline.log like 'tail -f'",
147
+ )
148
+ cmd.add_argument('request_id', help="Testing Farm request UUID")
149
+
150
+
151
def main(args):
    """Dispatch the parsed arguments to the selected tf subcommand handler."""
    cmd = args._cmd
    if cmd == 'composes':
        composes(args)
    elif cmd in ('get-request', 'gr'):
        get_request(args)
    elif cmd in ('search-requests', 'sr'):
        search_requests(args)
    elif cmd == 'reserve':
        reserve(args)
    elif cmd in ('watch-pipeline', 'wp'):
        watch_pipeline(args)
    else:
        raise RuntimeError(f"unknown args: {args}")
164
+
165
+
166
# Registration dict consumed by the atex.cli package (see its docstring):
# 'tf' is accepted as a command-line alias for this submodule's name.
CLI_SPEC = {
    'aliases': ('tf',),
    'help': "various utils for Testing Farm",
    'args': parse_args,
    'main': main,
}
atex/fmf.py ADDED
@@ -0,0 +1,168 @@
1
+ import re
2
+ import collections
3
+ from pathlib import Path
4
+
5
+ # from system-wide sys.path
6
+ import fmf
7
+
8
# One discovered test:
#   name - fmf path of the test as a string, e.g. /some/test
#   data - dict with the parsed fmf metadata (e.g. {'tag': ..., 'environment': ...})
#   dir  - relative pathlib.Path of the test's .fmf dir to the repo root,
#          e.g. some/test (may differ from 'name' for "virtual" tests
#          that share the same directory)
FMFTest = collections.namedtuple('FMFTest', ('name', 'data', 'dir'))
13
+
14
+
15
class FMFData:
    """
    Helper class for reading and querying fmf metadata from the filesystem.

    Given a tmt plan, it collects the plan's 'prepare' step requirements
    (packages and shell scripts) and the tests selected by the plan's
    'discover' step, exposing them as:

    - prepare_pkgs: list of package names (how: install)
    - prepare_scripts: list of shell script strings (how: shell)
    - tests: list of FMFTest namedtuples
    - fmf_root: absolute path of the fmf metadata tree root
    """
    # TODO: usage example ^^^^

    @staticmethod
    def _listlike(data, key):
        """
        Get a piece of fmf metadata as an iterable regardless of whether it was
        defined as a dict or a list.

        This is needed because many fmf metadata keys can be used either as
            some_key: 123
        or as lists via YAML syntax
            some_key:
              - 123
              - 456
        and, for simplicity, we want to always deal with lists (iterables).

        Returns an empty tuple when the key is absent or falsy.
        """
        if value := data.get(key):
            return value if isinstance(value, list) else (value,)
        else:
            return ()

    def __init__(self, fmf_tree, plan_name, context=None):
        """
        'fmf_tree' is filesystem path somewhere inside fmf metadata tree,
        or a root fmf.Tree instance.

        'plan_name' is fmf identifier (like /some/thing) of a tmt plan
        to use for discovering tests.

        'context' is a dict like {'distro': 'rhel-9.6'} used for filtering
        discovered tests.

        Raises ValueError if the plan does not exist in the tree or is
        actually a test (defines a 'test' key).
        """
        # packages from the plan's 'prepare' step (how: install)
        self.prepare_pkgs = []
        # shell scripts from the plan's 'prepare' step (how: shell)
        self.prepare_scripts = []
        # FMFTest namedtuples for all tests discovered via the plan
        self.tests = []

        # copy a caller-provided Tree so adjusting below doesn't mutate it
        tree = fmf_tree.copy() if isinstance(fmf_tree, fmf.Tree) else fmf.Tree(fmf_tree)
        # resolve all 'adjust:' rules in the tree using the given context
        ctx = fmf.Context(**context) if context else fmf.Context()
        tree.adjust(context=ctx)

        self.fmf_root = tree.root

        # lookup the plan first
        plan = tree.find(plan_name)
        if not plan:
            raise ValueError(f"plan {plan_name} not found in {tree.root}")
        if 'test' in plan.data:
            raise ValueError(f"plan {plan_name} appears to be a test")

        # gather all prepare scripts / packages
        #
        # prepare:
        #   - how: install
        #     package:
        #       - some-rpm-name
        #   - how: shell
        #     script:
        #       - some-command
        for entry in self._listlike(plan.data, 'prepare'):
            # entries without 'how' cannot be classified; skip them
            if 'how' not in entry:
                continue
            if entry['how'] == 'install':
                self.prepare_pkgs += self._listlike(entry, 'package')
            elif entry['how'] == 'shell':
                self.prepare_scripts += self._listlike(entry, 'script')

        # gather all tests selected by the plan
        #
        # discover:
        #   - how: fmf
        #     filter:
        #       - tag:some_tag
        #     test:
        #       - some-test-regex
        #     exclude:
        #       - some-test-regex
        if 'discover' in plan.data:
            discover = plan.data['discover']
            if not isinstance(discover, list):
                discover = (discover,)

            for entry in discover:
                # only fmf-based discovery is supported; skip shell/etc.
                if entry.get('how') != 'fmf':
                    continue

                # normalize the entry's filtering keys to lists
                filtering = {}
                for meta_name in ('filter', 'test', 'exclude'):
                    if value := self._listlike(entry, meta_name):
                        filtering[meta_name] = value

                # prune handles name regexes and fmf filter expressions
                children = tree.prune(
                    names=filtering.get('test'),
                    filters=filtering.get('filter'),
                )
                for child in children:
                    # excludes not supported by .prune(), we have to do it here
                    excludes = filtering.get('exclude')
                    if excludes and any(re.match(x, child.name) for x in excludes):
                        continue
                    # only enabled tests
                    if 'enabled' in child.data and not child.data['enabled']:
                        continue
                    # no manual tests
                    if child.data.get('manual'):
                        continue
                    # after adjusting above, any adjusts are useless, free some space
                    if 'adjust' in child.data:
                        del child.data['adjust']
                    # child.sources is a list of .fmf files defining the node,
                    # ie. ['/abs/path/to/some.fmf', '/abs/path/to/some/node.fmf'];
                    # the last one is the most specific, its dir is the test dir
                    source_dir = Path(child.sources[-1]).parent.relative_to(self.fmf_root)
                    self.tests.append(
                        FMFTest(name=child.name, data=child.data, dir=source_dir),
                    )
132
+
133
+
134
+ # Some extra notes for fmf.prune() arguments:
135
+ #
136
+ # Set 'names' to filter by a list of fmf node names, ie.
137
+ # ['/some/test', '/another/test']
138
+ #
139
+ # Set 'filters' to filter by a list of fmf-style filter expressions, see
140
+ # https://fmf.readthedocs.io/en/stable/modules.html#fmf.filter
141
+ #
142
+ # Set 'conditions' to filter by a list of python expressions whose namespace
143
+ # locals() are set up to be a dictionary of the tree. When any of the
144
+ # expressions returns True, the tree is returned, ie.
145
+ # ['environment["FOO"] == "BAR"']
146
+ # ['"enabled" not in locals() or enabled']
147
+ # Note that KeyError is silently ignored and treated as False.
148
+ #
149
+ # Set 'context' to a dictionary to post-process the tree metadata with
150
+ # adjust expressions (that may be present in a tree) using the specified
151
+ # context. Any other filters are applied afterwards to allow modification
152
+ # of tree metadata by the adjust expressions. Ie.
153
+ # {'distro': 'rhel-9.6.0', 'arch': 'x86_64'}
154
+
155
# A (distro, arch) pair identifying one target platform, e.g.
# Platform('rhel-9.6.0', 'x86_64'), usable as a dict key.
Platform = collections.namedtuple('Platform', ('distro', 'arch'))
156
+
157
+
158
def combine_platforms(fmf_path, plan_name, platforms):
    """
    Parse the fmf metadata tree at 'fmf_path' once, then evaluate the plan
    named 'plan_name' for each Platform in 'platforms'.

    Returns a dict mapping each Platform to its FMFData instance.
    """
    tree = fmf.Tree(fmf_path)
    return {
        platform: FMFData(
            tree, plan_name,
            context={'distro': platform.distro, 'arch': platform.arch},
        )
        for platform in platforms
    }
166
+
167
+ # TODO: in Orchestrator, when a Provisioner becomes free, have it pick a test
168
+ # from the appropriate tests[platform] per the Provisioner's platform
@@ -0,0 +1,109 @@
1
import os
import random
from pathlib import Path
2
+
3
+ # TODO: TMT_PLAN_ENVIRONMENT_FILE
4
+
5
+ # TODO: install rsync on the guest as part of setup
6
+
7
+ # TODO: in Orchestrator, when a Provisioner becomes free, have it pick a test
8
+ # from the appropriate tests[platform] per the Provisioner's platform
9
+
10
+
11
+ def _random_string(length):
12
+ return ''.join(
13
+ random.choices('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=length),
14
+ )
15
+
16
+
17
class Preparator:
    """
    Set of utilities for preparing a newly acquired/reserved machine for
    running tests, by installing global package requirements, copying all
    tests over, executing tmt plan 'prepare' step, etc.

    Use as a context manager; the ssh connection is opened on entry and
    closed on exit.
    """
    def __init__(self, ssh_conn):
        # connection helper used for all remote preparation work
        self.conn = ssh_conn

    def copy_tests(self):
        # TODO: copy the test tree over to the machine
        pass

    def run_prepare_scripts(self):
        # TODO: execute the plan's 'prepare' step scripts remotely
        pass

    def __enter__(self):
        self.conn.connect()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.conn.disconnect()
38
+
39
+
40
+ # TODO: have Executor take a finished Preparator instance as input?
41
+ # - for extracting copied tests location
42
+ # - for extracting TMT_PLAN_ENVIRONMENT_FILE location
43
+ # - etc.
44
+
45
+
46
class Executor:
    """
    Helper for running one test on a remote system and processing results
    and uploaded files by that test.

    Use as a context manager: entry sets up an ssh connection with a unique
    control socket forwarded from the remote system back to a local unix
    socket, exit tears both ends down.
    """
    def __init__(self, fmf_test, ssh_conn):
        # FMFTest-style description of the test to run
        self.fmf_test = fmf_test
        # ssh connection helper used for all remote operations;
        # its .options dict is modified by __enter__ (RemoteForward)
        self.conn = ssh_conn
        # control socket paths, set by __enter__ and cleared by __exit__
        self.remote_socket = self.local_socket = None

    def __enter__(self):
        # generate a (hopefully) unique test control socket name
        # and modify the SSHConn instance to use it
        rand_name = f'atex-control-{_random_string(50)}.sock'
        self.local_socket = Path(os.environ.get('TMPDIR', '/tmp')) / rand_name
        self.remote_socket = f'/tmp/{rand_name}'
        self.conn.options['RemoteForward'] = f'{self.remote_socket} {self.local_socket}'
        self.conn.connect()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.conn.ssh(f'rm -f {self.remote_socket}')
        # the local socket may never have been created if nothing connected
        # to the forward; don't let cleanup fail on it
        self.local_socket.unlink(missing_ok=True)
        self.remote_socket = self.local_socket = None
        self.conn.disconnect()

    # execute all prepares (how:install and how:shell) via ssh
    def prepare(self):
        # TODO: check via __some_attr (named / prefixed after our class)
        #       whether this reserved system has been prepared already ... ?
        #       ^^^^ in Orchestrator
        #
        # TODO: copy root of fmf metadata to some /var/tmp/somedir to run tests from
        #
        # TODO: move prepare out, possibly to class-less function,
        #       we don't want it running over an SSHConn that would set up socket forwarding
        #       only to tear it back down, when executed from Orchestrator for setup only
        #
        # TODO: install rsync
        pass

    # run one test via ssh and parse its results on-the-fly,
    # write out logs
    def run_test(self, fmf_test, reporter):
        # TODO: pass environment from test fmf metadata
        # TODO: watch for test duration, etc. metadata
        # hidden, uniquely-named output file: visible test-named 'testout'
        # files can later be hardlinked ('ln') to it
        output_logfile = \
            reporter.files_dir(fmf_test.name) / f'.test_output_{_random_string(50)}.log'
        # pre-open the fd so the test's stdout/stderr can be handed straight
        # to the remote-command subprocess, not shuffled through python
        output_fd = os.open(output_logfile, os.O_WRONLY | os.O_CREAT)
        try:
            # TODO: actually run the test remotely, ie.:
            #self.conn.ssh(
            pass
        finally:
            os.close(output_fd)
        # TODO: create temp dir on remote via 'mktemp -d', then call
        #       self.conn.add_remote_forward(...) with socket path inside that tmpdir
104
+ # TODO: create temp dir on remote via 'mktemp -d', then call
105
+ # self.conn.add_remote_forward(...) with socket path inside that tmpdir
106
+
107
+ # TODO: run tests by passing stdout/stderr via pre-opened fd so we don't handle it in code
108
+
109
+ # TODO: read unix socket as nonblocking, check test subprocess.Popen proc status every 0.1sec