atex 0.4__tar.gz → 0.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. {atex-0.4 → atex-0.7}/PKG-INFO +1 -1
  2. atex-0.7/TODO +59 -0
  3. atex-0.7/atex/__init__.py +25 -0
  4. {atex-0.4 → atex-0.7}/atex/cli/__init__.py +13 -13
  5. atex-0.7/atex/cli/minitmt.py +175 -0
  6. {atex-0.4 → atex-0.7}/atex/cli/testingfarm.py +59 -59
  7. atex-0.7/atex/connection/__init__.py +125 -0
  8. atex-0.7/atex/connection/ssh.py +406 -0
  9. {atex-0.4 → atex-0.7}/atex/minitmt/README.md +43 -54
  10. {atex-0.4 → atex-0.7}/atex/minitmt/RESULTS.md +94 -41
  11. atex-0.7/atex/minitmt/TEST_CONTROL.md +148 -0
  12. atex-0.7/atex/minitmt/__init__.py +23 -0
  13. atex-0.7/atex/minitmt/executor.py +348 -0
  14. {atex-0.4 → atex-0.7}/atex/minitmt/fmf.py +87 -53
  15. atex-0.7/atex/minitmt/scripts.py +149 -0
  16. atex-0.7/atex/minitmt/testcontrol.py +354 -0
  17. atex-0.4/atex/orchestrator.py → atex-0.7/atex/orchestrator/__init__.py +22 -1
  18. atex-0.7/atex/orchestrator/aggregator.py +163 -0
  19. atex-0.7/atex/provision/__init__.py +155 -0
  20. {atex-0.4 → atex-0.7}/atex/provision/libvirt/VM_PROVISION +8 -0
  21. {atex-0.4 → atex-0.7}/atex/provision/libvirt/__init__.py +4 -4
  22. atex-0.7/atex/provision/nspawn/README +74 -0
  23. atex-0.7/atex/provision/podman/README +59 -0
  24. atex-0.7/atex/provision/podman/host_container.sh +74 -0
  25. atex-0.7/atex/provision/testingfarm/__init__.py +29 -0
  26. atex-0.4/atex/testingfarm.py → atex-0.7/atex/provision/testingfarm/api.py +116 -93
  27. atex-0.7/atex/provision/testingfarm/foo.py +1 -0
  28. {atex-0.4 → atex-0.7}/atex/util/__init__.py +4 -4
  29. {atex-0.4 → atex-0.7}/atex/util/dedent.py +1 -1
  30. {atex-0.4 → atex-0.7}/atex/util/log.py +12 -12
  31. {atex-0.4 → atex-0.7}/atex/util/subprocess.py +14 -13
  32. {atex-0.4 → atex-0.7}/pyproject.toml +15 -3
  33. atex-0.7/ssh.py +48 -0
  34. atex-0.7/tests/conftest.py +50 -0
  35. atex-0.4/tests/foobar.py → atex-0.7/tests/test_another.py +0 -1
  36. atex-0.7/tests/test_foobar.py +13 -0
  37. atex-0.7/tf.py +31 -0
  38. atex-0.7/tmt_tests/.fmf/version +1 -0
  39. atex-0.7/tmt_tests/plans/reserve.fmf +5 -0
  40. atex-0.7/tmt_tests/reserve/main.fmf +5 -0
  41. atex-0.7/tmt_tests/reserve/test.sh +72 -0
  42. atex-0.4/atex/__init__.py +0 -35
  43. atex-0.4/atex/cli/minitmt.py +0 -82
  44. atex-0.4/atex/minitmt/CONTROL_FILE.md +0 -99
  45. atex-0.4/atex/minitmt/__init__.py +0 -115
  46. atex-0.4/atex/minitmt/report.py +0 -174
  47. atex-0.4/atex/minitmt/scripts.py +0 -51
  48. atex-0.4/atex/minitmt/testme.py +0 -3
  49. atex-0.4/atex/provision/__init__.py +0 -113
  50. atex-0.4/atex/ssh.py +0 -320
  51. atex-0.4/atex/util/lockable_class.py +0 -38
  52. atex-0.4/fmf_tests/reserve/main.fmf +0 -2
  53. atex-0.4/fmf_tests/reserve/test.sh +0 -36
  54. atex-0.4/ssh.py +0 -41
  55. atex-0.4/tf.py +0 -18
  56. {atex-0.4 → atex-0.7}/.editorconfig +0 -0
  57. {atex-0.4 → atex-0.7}/.gitignore +0 -0
  58. {atex-0.4 → atex-0.7}/COPYING.txt +0 -0
  59. {atex-0.4 → atex-0.7}/README.md +0 -0
  60. {atex-0.4 → atex-0.7}/atex/provision/libvirt/setup-libvirt.sh +0 -0
  61. {atex-0.4 → atex-0.7}/atex/util/README.md +0 -0
  62. {atex-0.4 → atex-0.7}/logtest.py +0 -0
  63. {atex-0.4 → atex-0.7}/reporter.py +0 -0
  64. {atex-0.4 → atex-0.7}/tests/PYTEST.md +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: atex
3
- Version: 0.4
3
+ Version: 0.7
4
4
  Summary: Ad-hoc Test EXecutor
5
5
  Project-URL: Homepage, https://github.com/RHSecurityCompliance/atex
6
6
  License-Expression: GPL-3.0-or-later
atex-0.7/TODO ADDED
@@ -0,0 +1,59 @@
1
+ - concept of a RemoteSlot for Orchestrator; basically, Orchestrator can
2
+ instantiate Provisioner instances in two ways:
3
+ - directly from given pre-configured Provisioner classes (factories)
4
+ - indirectly from a list of RemoteSlot instances (classes?)
5
+ - each RemoteSlot instance has some make_provisioner() function,
6
+ which instantiates some internal idea of a Provisioner-like instance
7
+ and returns it
8
+ - the idea is that a RemoteSlot can be a universal resource (ie. a VM
9
+ or a Podman container) capable of installing several different OSes
10
+ - same idea might apply to TF, we might want "at most 10 TF workers",
11
+ regardless of which OS they were reserved with - that might change
12
+ as all tests for a given OS finish running
13
+
14
+ - OR a generic concept of a ProvisionerGenerator
15
+ - Orchestrator doesn't maintain its own list of preconfig'd Provisioner classes,
16
+ it just has a link (instance) of a ProvisionerGenerator
17
+ - the generator would "yield" (or just return func() ?) fully configured and
18
+ initialized Provisioner instances (not types/classes!)
19
+ - scenario 1: unlimited TF resources
20
+ - ProvisionerGenerator can simply yield all 30 instances at once, and be done
21
+ - scenario 2: podman containers limited to 8 slots
22
+ - ProvisionerGenerator would stop (return None?) upon creating 8 configured
23
+ and initialized instances of Provisioner
24
+ - it would continue yielding more when old instances are returned somehow,
25
+ effectively working like a semaphore of 8
26
+ - Orchestrator would always execute only tests that apply to active running
27
+ instances of Provisioner, so platforms waiting for their slot would naturally
28
+ just wait
29
+
30
+ - probably ditch the current idea of Provisioner pre-config'd classes,
31
+ have just generators ("provisioners") that generate Connections, possibly
32
+ in limited capacity (by slots), ie. what's delivered isn't always what's
33
+ initially requested
34
+
35
+ - the idea is to
36
+ - input wanted platforms (distro+arch) into a Provisioner
37
+ - output (gather) from it provisioned and connected Remotes
38
+ - it's the Provisioner that manages how many Remotes should be provisioned
39
+ at any given time, and waits for them to be fully provisioned and started
40
+ and their Connections connected
41
+ - Remote is then some wrapper around a Connection that allows de-provisioning,
42
+ or releasing the resource in a way that Provisioner can detect (because it
43
+ ie. maintains a list of yielded Remotes and checks if they're released)
44
+
45
+ - actually, Remote is just a superset of Connection API, adding release() and alive()
46
+ class Remote(Connection):
47
+ - Executor should then take a Remote instead of a Connection, and it can easily
48
+ do .release() on destructive testing
49
+ - and Orchestrator can easily check .alive() to see if it should throw away that
50
+ instance of Remote
51
+ - class Remote can store other data (arch, distro, etc.)
52
+ - that's how Orchestrator can match it to a test
53
+
54
+ - there should be some API for Orchestrator to tell a Provisioner which distro/arch
55
+ combos to request, *and* when a specific distro/arch combo is no longer wanted
56
+ (because all tests for it were already executed), so the Provisioner doesn't reserve
57
+ any more Remotes with that combination
58
+ - it should not release() any existing ones, the tests will do it when finishing
59
+
@@ -0,0 +1,25 @@
1
+ """
2
+ Ad-hoc Test EXecutor
3
+
4
+ Some documentation here.
5
+ """
6
+
7
+ import importlib as _importlib
8
+ import pkgutil as _pkgutil
9
+
10
+ __all__ = [
11
+ info.name for info in _pkgutil.iter_modules(__spec__.submodule_search_locations)
12
+ ]
13
+
14
+
15
+ def __dir__():
16
+ return __all__
17
+
18
+
19
+ # lazily import submodules
20
+ def __getattr__(attr):
21
+ # importing a module known to exist
22
+ if attr in __all__:
23
+ return _importlib.import_module(f".{attr}", __name__)
24
+ else:
25
+ raise AttributeError(f"module '{__name__}' has no attribute '{attr}'")
@@ -32,16 +32,16 @@ def setup_logging(level):
32
32
  logging.basicConfig(
33
33
  level=level,
34
34
  stream=sys.stderr,
35
- format='%(asctime)s %(name)s: %(message)s',
36
- datefmt='%Y-%m-%d %H:%M:%S',
35
+ format="%(asctime)s %(name)s: %(message)s",
36
+ datefmt="%Y-%m-%d %H:%M:%S",
37
37
  )
38
38
 
39
39
 
40
40
  def collect_modules():
41
41
  for info in pkgutil.iter_modules(__spec__.submodule_search_locations):
42
- mod = importlib.import_module(f'.{info.name}', __name__)
43
- if not hasattr(mod, 'CLI_SPEC'):
44
- raise ValueError(f"CLI submodule {info.name} does not define CLI_SPEC")
42
+ mod = importlib.import_module(f".{info.name}", __name__)
43
+ if not hasattr(mod, "CLI_SPEC"):
44
+ raise ValueError(f"CLI submodule '{info.name}' does not define CLI_SPEC")
45
45
  yield (info.name, mod.CLI_SPEC)
46
46
 
47
47
 
@@ -50,28 +50,28 @@ def main():
50
50
 
51
51
  log_grp = parser.add_mutually_exclusive_group()
52
52
  log_grp.add_argument(
53
- '--debug', '-d', action='store_const', dest='loglevel', const=logging.DEBUG,
53
+ "--debug", "-d", action="store_const", dest="loglevel", const=logging.DEBUG,
54
54
  help="enable extra debugging (logging.DEBUG)",
55
55
  )
56
56
  log_grp.add_argument(
57
- '--quiet', '-q', action='store_const', dest='loglevel', const=logging.WARNING,
57
+ "--quiet", "-q", action="store_const", dest="loglevel", const=logging.WARNING,
58
58
  help="be quiet during normal operation (logging.WARNING)",
59
59
  )
60
60
  parser.set_defaults(loglevel=logging.INFO)
61
61
 
62
62
  mains = {}
63
- subparsers = parser.add_subparsers(dest='_module', metavar='<module>', required=True)
63
+ subparsers = parser.add_subparsers(dest="_module", metavar="<module>", required=True)
64
64
  for name, spec in collect_modules():
65
- aliases = spec['aliases'] if 'aliases' in spec else ()
65
+ aliases = spec["aliases"] if "aliases" in spec else ()
66
66
  subp = subparsers.add_parser(
67
67
  name,
68
68
  aliases=aliases,
69
- help=spec['help'],
69
+ help=spec["help"],
70
70
  )
71
- spec['args'](subp)
72
- mains[name] = spec['main']
71
+ spec["args"](subp)
72
+ mains[name] = spec["main"]
73
73
  for alias in aliases:
74
- mains[alias] = spec['main']
74
+ mains[alias] = spec["main"]
75
75
 
76
76
  args = parser.parse_args()
77
77
 
@@ -0,0 +1,175 @@
1
+ import sys
2
+ #import re
3
+ import pprint
4
+ #import subprocess
5
+ from pathlib import Path
6
+
7
+ from .. import connection, provision, minitmt
8
+ from ..orchestrator import aggregator
9
+
10
+
11
+ def _fatal(msg):
12
+ print(msg, file=sys.stderr)
13
+ sys.exit(1)
14
+
15
+
16
+ def _get_context(args):
17
+ context = {}
18
+ if args.context:
19
+ for c in args.context:
20
+ key, value = c.split("=", 1)
21
+ context[key] = value
22
+ return context or None
23
+
24
+
25
+ def discover(args):
26
+ result = minitmt.fmf.FMFTests(args.root, args.plan, context=_get_context(args))
27
+ for name in result.tests:
28
+ print(name)
29
+
30
+
31
+ def show(args):
32
+ result = minitmt.fmf.FMFTests(args.root, args.plan, context=_get_context(args))
33
+ if tests := list(result.match(args.test)):
34
+ for test in tests:
35
+ print(f"\n--- {test.name} ---")
36
+ pprint.pprint(test.data)
37
+ else:
38
+ _fatal(f"Not reachable via {args.plan} discovery: {args.test}")
39
+
40
+
41
+ def execute(args):
42
+ # remote system connection
43
+ ssh_keypath = Path(args.ssh_identity)
44
+ if not ssh_keypath.exists():
45
+ _fatal(f"SSH Identity {args.ssh_identity} does not exist")
46
+ ssh_options = {
47
+ "User": args.user,
48
+ "Hostname": args.host,
49
+ "IdentityFile": ssh_keypath,
50
+ }
51
+ env = dict(x.split("=",1) for x in args.env)
52
+
53
+ # dummy Remote that just wraps the connection
54
+ class DummyRemote(provision.Remote, connection.ssh.ManagedSSHConn):
55
+ @staticmethod
56
+ def release():
57
+ return
58
+
59
+ @staticmethod
60
+ def alive():
61
+ return True
62
+
63
+ # result aggregation
64
+ with aggregator.CSVAggregator(args.results_csv, args.results_dir) as csv_aggregator:
65
+ platform_aggregator = csv_aggregator.for_platform(args.platform)
66
+
67
+ # tests discovery and selection
68
+ result = minitmt.fmf.FMFTests(args.root, args.plan, context=_get_context(args))
69
+ if args.test:
70
+ tests = list(result.match(args.test))
71
+ if not tests:
72
+ _fatal(f"Not reachable via plan {args.plan} discovery: {args.test}")
73
+ else:
74
+ tests = list(result.as_fmftests())
75
+ if not tests:
76
+ _fatal(f"No tests found for plan {args.plan}")
77
+
78
+ # test run
79
+ with DummyRemote(ssh_options) as remote:
80
+ executor = minitmt.executor.Executor(remote, platform_aggregator, env=env)
81
+ executor.upload_tests(args.root)
82
+ executor.setup_plan(result)
83
+ for test in tests:
84
+ executor.run_test(test)
85
+
86
+
87
+ def setup_script(args):
88
+ result = minitmt.fmf.FMFTests(args.root, args.plan, context=_get_context(args))
89
+ try:
90
+ test = result.as_fmftest(args.test)
91
+ except KeyError:
92
+ print(f"Not reachable via {args.plan} discovery: {args.test}")
93
+ raise SystemExit(1) from None
94
+ output = minitmt.scripts.test_setup(
95
+ test=test,
96
+ tests_dir=args.remote_root,
97
+ debug=args.script_debug,
98
+ )
99
+ print(output, end="")
100
+
101
+
102
+ def parse_args(parser):
103
+ parser.add_argument("--root", help="path to directory with fmf tests", default=".")
104
+ parser.add_argument("--context", "-c", help="tmt style key=value context", action="append")
105
+ cmds = parser.add_subparsers(
106
+ dest="_cmd", help="minitmt feature", metavar="<cmd>", required=True,
107
+ )
108
+
109
+ cmd = cmds.add_parser(
110
+ "discover", aliases=("di",),
111
+ help="list tests, post-processed by tmt plans",
112
+ )
113
+ cmd.add_argument("plan", help="tmt plan to use for discovery")
114
+
115
+ cmd = cmds.add_parser(
116
+ "show",
117
+ help="show fmf data of a test",
118
+ )
119
+ cmd.add_argument("plan", help="tmt plan to use for discovery")
120
+ cmd.add_argument("test", help="fmf style test regex")
121
+
122
+ cmd = cmds.add_parser(
123
+ "execute", aliases=("ex",),
124
+ help="run a plan (or test) on a remote system",
125
+ )
126
+ #grp = cmd.add_mutually_exclusive_group()
127
+ #grp.add_argument("--test", "-t", help="fmf style test regex")
128
+ #grp.add_argument("--plan", "-p", help="tmt plan name (path) inside metadata root")
129
+ cmd.add_argument("--env", "-e", help="environment to pass to prepare/test", action="append")
130
+ cmd.add_argument("--test", "-t", help="fmf style test regex")
131
+ cmd.add_argument(
132
+ "--plan", "-p", help="tmt plan name (path) inside metadata root", required=True,
133
+ )
134
+ cmd.add_argument("--platform", help="platform name, ie. rhel9@x86_64", required=True)
135
+ cmd.add_argument("--user", help="ssh user to connect via", required=True)
136
+ cmd.add_argument("--host", help="ssh host to connect to", required=True)
137
+ cmd.add_argument(
138
+ "--ssh-identity", help="path to a ssh keyfile for login", required=True,
139
+ )
140
+ cmd.add_argument(
141
+ "--results-csv", help="path to would-be-created .csv.gz results", required=True,
142
+ )
143
+ cmd.add_argument(
144
+ "--results-dir", help="path to would-be-created dir for uploaded files", required=True,
145
+ )
146
+
147
+ cmd = cmds.add_parser(
148
+ "setup-script",
149
+ help="generate a script prepping tests for run",
150
+ )
151
+ cmd.add_argument("--remote-root", help="path to tests repo on the remote", required=True)
152
+ cmd.add_argument("--script-debug", help="do 'set -x' in the script", action="store_true")
153
+ cmd.add_argument("plan", help="tmt plan to use for discovery")
154
+ cmd.add_argument("test", help="full fmf test name (not regex)")
155
+
156
+
157
+ def main(args):
158
+ if args._cmd in ("discover", "di"):
159
+ discover(args)
160
+ elif args._cmd == "show":
161
+ show(args)
162
+ elif args._cmd in ("execute", "ex"):
163
+ execute(args)
164
+ elif args._cmd == "setup-script":
165
+ setup_script(args)
166
+ else:
167
+ raise RuntimeError(f"unknown args: {args}")
168
+
169
+
170
+ CLI_SPEC = {
171
+ "aliases": ("tmt",),
172
+ "help": "simple test executor using atex.minitmt",
173
+ "args": parse_args,
174
+ "main": main,
175
+ }
@@ -2,15 +2,15 @@ import sys
2
2
  import pprint
3
3
 
4
4
  from .. import util
5
- from .. import testingfarm as tf
5
+ from ..provision.testingfarm import api as tf
6
6
 
7
7
 
8
8
  def _get_api(args):
9
9
  api_args = {}
10
10
  if args.url:
11
- api_args['url'] = args.url
11
+ api_args["url"] = args.url
12
12
  if args.token:
13
- api_args['token'] = args.token
13
+ api_args["token"] = args.token
14
14
  return tf.TestingFarmAPI(**api_args)
15
15
 
16
16
 
@@ -27,9 +27,9 @@ def whoami(args):
27
27
  def composes(args):
28
28
  api = _get_api(args)
29
29
  comps = api.composes(ranch=args.ranch)
30
- comps_list = comps['composes']
30
+ comps_list = comps["composes"]
31
31
  for comp in comps_list:
32
- print(comp['name'])
32
+ print(comp["name"])
33
33
 
34
34
 
35
35
  def get_request(args):
@@ -56,27 +56,27 @@ def search_requests(args):
56
56
  if not reply:
57
57
  return
58
58
 
59
- for req in sorted(reply, key=lambda x: x['created']):
60
- req_id = req['id']
61
- created = req['created'].partition('.')[0]
59
+ for req in sorted(reply, key=lambda x: x["created"]):
60
+ req_id = req["id"]
61
+ created = req["created"].partition(".")[0]
62
62
 
63
63
  envs = []
64
- for env in req['environments_requested']:
65
- if 'os' in env and env['os'] and 'compose' in env['os']:
66
- compose = env['os']['compose']
67
- arch = env['arch']
64
+ for env in req["environments_requested"]:
65
+ if "os" in env and env["os"] and "compose" in env["os"]:
66
+ compose = env["os"]["compose"]
67
+ arch = env["arch"]
68
68
  if compose and arch:
69
- envs.append(f'{compose}@{arch}')
70
- envs_str = ', '.join(envs)
69
+ envs.append(f"{compose}@{arch}")
70
+ envs_str = ", ".join(envs)
71
71
 
72
- print(f'{created} {req_id} : {envs_str}')
72
+ print(f"{created} {req_id} : {envs_str}")
73
73
 
74
74
 
75
75
  def reserve(args):
76
76
  util.info(f"Reserving {args.compose} on {args.arch} for {args.timeout} minutes")
77
77
 
78
78
  if args.hvm:
79
- hardware = {'virtualization': {'is-supported': True}}
79
+ hardware = {"virtualization": {"is-supported": True}}
80
80
  else:
81
81
  hardware = None
82
82
 
@@ -96,12 +96,12 @@ def reserve(args):
96
96
  res.request.assert_alive()
97
97
  except tf.GoneAwayError as e:
98
98
  print(e)
99
- raise SystemExit(1)
99
+ raise SystemExit(1) from None
100
100
 
101
101
  proc = util.subprocess_run([
102
- 'ssh', '-q', '-i', m.ssh_key,
103
- '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
104
- f'{m.user}@{m.host}',
102
+ "ssh", "-q", "-i", m.ssh_key,
103
+ "-oStrictHostKeyChecking=no", "-oUserKnownHostsFile=/dev/null",
104
+ f"{m.user}@{m.host}",
105
105
  ])
106
106
  if proc.returncode != 0:
107
107
  print(
@@ -123,7 +123,7 @@ def watch_pipeline(args):
123
123
 
124
124
  util.info(f"Waiting for {args.request_id} to be 'running'")
125
125
  try:
126
- request.wait_for_state('running')
126
+ request.wait_for_state("running")
127
127
  except tf.GoneAwayError:
128
128
  util.info(f"Request {args.request_id} already finished")
129
129
  return
@@ -132,96 +132,96 @@ def watch_pipeline(args):
132
132
  try:
133
133
  for line in tf.PipelineLogStreamer(request):
134
134
  sys.stdout.write(line)
135
- sys.stdout.write('\n')
135
+ sys.stdout.write("\n")
136
136
  except tf.GoneAwayError:
137
137
  util.info(f"Request {args.request_id} finished, exiting")
138
138
 
139
139
 
140
140
  def parse_args(parser):
141
- parser.add_argument('--url', help="Testing Farm API URL")
142
- parser.add_argument('--token', help="Testing Farm API auth token")
141
+ parser.add_argument("--url", help="Testing Farm API URL")
142
+ parser.add_argument("--token", help="Testing Farm API auth token")
143
143
  cmds = parser.add_subparsers(
144
- dest='_cmd', help="TF helper to run", metavar='<cmd>', required=True,
144
+ dest="_cmd", help="TF helper to run", metavar="<cmd>", required=True,
145
145
  )
146
146
 
147
147
  cmd = cmds.add_parser(
148
- 'whoami',
148
+ "whoami",
149
149
  help="print out details about active TF token",
150
150
  )
151
151
  cmd = cmds.add_parser(
152
- 'about',
152
+ "about",
153
153
  help="print out details about TF instance (url)",
154
154
  )
155
155
 
156
156
  cmd = cmds.add_parser(
157
- 'composes',
157
+ "composes",
158
158
  help="list all composes available on a given ranch",
159
159
  )
160
- cmd.add_argument('ranch', nargs='?', help="Testing Farm ranch (autodetected if token)")
160
+ cmd.add_argument("ranch", nargs="?", help="Testing Farm ranch (autodetected if token)")
161
161
 
162
162
  cmd = cmds.add_parser(
163
- 'get-request', aliases=('gr',),
163
+ "get-request", aliases=("gr",),
164
164
  help="retrieve and print JSON of a Testing Farm request",
165
165
  )
166
- cmd.add_argument('request_id', help="Testing Farm request UUID")
166
+ cmd.add_argument("request_id", help="Testing Farm request UUID")
167
167
 
168
168
  cmd = cmds.add_parser(
169
- 'cancel',
169
+ "cancel",
170
170
  help="cancel a Testing Farm request",
171
171
  )
172
- cmd.add_argument('request_id', help="Testing Farm request UUID")
172
+ cmd.add_argument("request_id", help="Testing Farm request UUID")
173
173
 
174
174
  cmd = cmds.add_parser(
175
- 'search-requests', aliases=('sr',),
175
+ "search-requests", aliases=("sr",),
176
176
  help="return a list of requests matching the criteria",
177
177
  )
178
- cmd.add_argument('--state', help="request state (running, etc.)", required=True)
179
- cmd.add_argument('--all', help="all requests, not just owned by token", action='store_true')
180
- cmd.add_argument('--ranch', help="Testing Farm ranch")
181
- cmd.add_argument('--before', help="only requests created before ISO8601")
182
- cmd.add_argument('--after', help="only requests created after ISO8601")
178
+ cmd.add_argument("--state", help="request state (running, etc.)", required=True)
179
+ cmd.add_argument("--all", help="all requests, not just owned by token", action="store_true")
180
+ cmd.add_argument("--ranch", help="Testing Farm ranch")
181
+ cmd.add_argument("--before", help="only requests created before ISO8601")
182
+ cmd.add_argument("--after", help="only requests created after ISO8601")
183
183
 
184
184
  cmd = cmds.add_parser(
185
- 'reserve',
185
+ "reserve",
186
186
  help="reserve a system and ssh into it",
187
187
  )
188
- cmd.add_argument('--compose', '-c', help="OS compose to install", required=True)
189
- cmd.add_argument('--arch', '-a', help="system HW architecture", default='x86_64')
190
- cmd.add_argument('--timeout', '-t', help="pipeline timeout (in minutes)", type=int, default=60)
191
- cmd.add_argument('--ssh-key', help="path to a ssh private key file like 'id_rsa'")
192
- cmd.add_argument('--hvm', help="request a HVM virtualization capable HW", action='store_true')
188
+ cmd.add_argument("--compose", "-c", help="OS compose to install", required=True)
189
+ cmd.add_argument("--arch", "-a", help="system HW architecture", default="x86_64")
190
+ cmd.add_argument("--timeout", "-t", help="pipeline timeout (in minutes)", type=int, default=60)
191
+ cmd.add_argument("--ssh-key", help="path to a ssh private key file like 'id_rsa'")
192
+ cmd.add_argument("--hvm", help="request a HVM virtualization capable HW", action="store_true")
193
193
 
194
194
  cmd = cmds.add_parser(
195
- 'watch-pipeline', aliases=('wp',),
195
+ "watch-pipeline", aliases=("wp",),
196
196
  help="continuously output pipeline.log like 'tail -f'",
197
197
  )
198
- cmd.add_argument('request_id', help="Testing Farm request UUID")
198
+ cmd.add_argument("request_id", help="Testing Farm request UUID")
199
199
 
200
200
 
201
201
  def main(args):
202
- if args._cmd == 'whoami':
202
+ if args._cmd == "whoami":
203
203
  whoami(args)
204
- elif args._cmd == 'about':
204
+ elif args._cmd == "about":
205
205
  about(args)
206
- elif args._cmd == 'composes':
206
+ elif args._cmd == "composes":
207
207
  composes(args)
208
- elif args._cmd in ('get-request', 'gr'):
208
+ elif args._cmd in ("get-request", "gr"):
209
209
  get_request(args)
210
- elif args._cmd == 'cancel':
210
+ elif args._cmd == "cancel":
211
211
  cancel(args)
212
- elif args._cmd in ('search-requests', 'sr'):
212
+ elif args._cmd in ("search-requests", "sr"):
213
213
  search_requests(args)
214
- elif args._cmd == 'reserve':
214
+ elif args._cmd == "reserve":
215
215
  reserve(args)
216
- elif args._cmd in ('watch-pipeline', 'wp'):
216
+ elif args._cmd in ("watch-pipeline", "wp"):
217
217
  watch_pipeline(args)
218
218
  else:
219
219
  raise RuntimeError(f"unknown args: {args}")
220
220
 
221
221
 
222
222
  CLI_SPEC = {
223
- 'aliases': ('tf',),
224
- 'help': "various utils for Testing Farm",
225
- 'args': parse_args,
226
- 'main': main,
223
+ "aliases": ("tf",),
224
+ "help": "various utils for Testing Farm",
225
+ "args": parse_args,
226
+ "main": main,
227
227
  }