atex 0.3.tar.gz → 0.5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {atex-0.3 → atex-0.5}/PKG-INFO +1 -1
  2. atex-0.5/atex/cli/minitmt.py +82 -0
  3. {atex-0.3 → atex-0.5}/atex/cli/testingfarm.py +72 -16
  4. {atex-0.3 → atex-0.5}/atex/minitmt/__init__.py +6 -0
  5. {atex-0.3 → atex-0.5}/atex/provision/libvirt/__init__.py +1 -0
  6. {atex-0.3 → atex-0.5}/atex/ssh.py +2 -2
  7. {atex-0.3 → atex-0.5}/atex/testingfarm.py +60 -51
  8. {atex-0.3 → atex-0.5}/pyproject.toml +1 -1
  9. atex-0.5/tmt_tests/.fmf/version +1 -0
  10. atex-0.5/tmt_tests/plans/reserve.fmf +5 -0
  11. atex-0.5/tmt_tests/reserve/main.fmf +2 -0
  12. atex-0.5/tmt_tests/reserve/test.sh +36 -0
  13. {atex-0.3 → atex-0.5}/.editorconfig +0 -0
  14. {atex-0.3 → atex-0.5}/.gitignore +0 -0
  15. {atex-0.3 → atex-0.5}/COPYING.txt +0 -0
  16. {atex-0.3 → atex-0.5}/README.md +0 -0
  17. {atex-0.3 → atex-0.5}/atex/__init__.py +0 -0
  18. {atex-0.3 → atex-0.5}/atex/cli/__init__.py +0 -0
  19. {atex-0.3 → atex-0.5}/atex/minitmt/CONTROL_FILE.md +0 -0
  20. {atex-0.3 → atex-0.5}/atex/minitmt/README.md +0 -0
  21. {atex-0.3 → atex-0.5}/atex/minitmt/RESULTS.md +0 -0
  22. {atex-0.3/atex → atex-0.5/atex/minitmt}/fmf.py +0 -0
  23. {atex-0.3 → atex-0.5}/atex/minitmt/report.py +0 -0
  24. {atex-0.3 → atex-0.5}/atex/minitmt/scripts.py +0 -0
  25. {atex-0.3 → atex-0.5}/atex/minitmt/testme.py +0 -0
  26. {atex-0.3 → atex-0.5}/atex/orchestrator.py +0 -0
  27. {atex-0.3 → atex-0.5}/atex/provision/__init__.py +0 -0
  28. {atex-0.3 → atex-0.5}/atex/provision/libvirt/VM_PROVISION +0 -0
  29. {atex-0.3 → atex-0.5}/atex/provision/libvirt/setup-libvirt.sh +0 -0
  30. {atex-0.3 → atex-0.5}/atex/util/README.md +0 -0
  31. {atex-0.3 → atex-0.5}/atex/util/__init__.py +0 -0
  32. {atex-0.3 → atex-0.5}/atex/util/dedent.py +0 -0
  33. {atex-0.3 → atex-0.5}/atex/util/lockable_class.py +0 -0
  34. {atex-0.3 → atex-0.5}/atex/util/log.py +0 -0
  35. {atex-0.3 → atex-0.5}/atex/util/subprocess.py +0 -0
  36. {atex-0.3 → atex-0.5}/logtest.py +0 -0
  37. {atex-0.3 → atex-0.5}/reporter.py +0 -0
  38. {atex-0.3 → atex-0.5}/ssh.py +0 -0
  39. {atex-0.3 → atex-0.5}/tests/PYTEST.md +0 -0
  40. {atex-0.3 → atex-0.5}/tests/foobar.py +0 -0
  41. {atex-0.3 → atex-0.5}/tf.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: atex
- Version: 0.3
+ Version: 0.5
  Summary: Ad-hoc Test EXecutor
  Project-URL: Homepage, https://github.com/RHSecurityCompliance/atex
  License-Expression: GPL-3.0-or-later
@@ -0,0 +1,82 @@
+ import re
+ import pprint
+
+ #from .. import util
+ from ..minitmt import fmf
+
+
+ def _get_context(args):
+     context = {}
+     if args.context:
+         for c in args.context:
+             key, value = c.split('=', 1)
+             context[key] = value
+     return context or None
+
+
+ def discover(args):
+     result = fmf.FMFData(args.root, args.plan, context=_get_context(args))
+     for test in result.tests:
+         print(test.name)
+
+
+ def show(args):
+     result = fmf.FMFData(args.root, args.plan, context=_get_context(args))
+     for test in result.tests:
+         if re.match(args.test, test.name):
+             pprint.pprint(test.data)
+             break
+     else:
+         print(f"Not reachable via {args.plan} discovery: {args.test}")
+         raise SystemExit(1)
+
+
+ def parse_args(parser):
+     parser.add_argument('--root', default='.', help="path to directory with fmf tests")
+     parser.add_argument('--context', '-c', help="tmt style key=value context", action='append')
+     cmds = parser.add_subparsers(
+         dest='_cmd', help="minitmt feature", metavar='<cmd>', required=True,
+     )
+
+     cmd = cmds.add_parser(
+         'discover', aliases=('di',),
+         help="list tests, post-processed by tmt plans",
+     )
+     cmd.add_argument('plan', help="tmt plan to use for discovery")
+
+     cmd = cmds.add_parser(
+         'show',
+         help="show fmf data of a test",
+     )
+     cmd.add_argument('plan', help="tmt plan to use for discovery")
+     cmd.add_argument('test', help="fmf style test regex")
+
+     cmd = cmds.add_parser(
+         'execute', aliases=('ex',),
+         help="run a plan (or test) on a remote system",
+     )
+     grp = cmd.add_mutually_exclusive_group()
+     grp.add_argument('--test', '-t', help="fmf style test regex")
+     grp.add_argument('--plan', '-p', help="tmt plan name (path) inside metadata root")
+     cmd.add_argument('--ssh-identity', '-i', help="path to a ssh keyfile for login")
+     cmd.add_argument('user_host', help="ssh style user@host of the remote")
+
+
+ def main(args):
+     if args._cmd in ('discover', 'di'):
+         discover(args)
+     elif args._cmd == 'show':
+         show(args)
+     elif args._cmd in ('execute', 'ex'):
+         #execute(args)
+         print("not implemented yet")
+     else:
+         raise RuntimeError(f"unknown args: {args}")
+
+
+ CLI_SPEC = {
+     'aliases': ('tmt',),
+     'help': "simple test executor using atex.minitmt",
+     'args': parse_args,
+     'main': main,
+ }
@@ -1,5 +1,5 @@
  import sys
- #from datetime import datetime
+ import pprint

  from .. import util
  from .. import testingfarm as tf
@@ -14,6 +14,16 @@ def _get_api(args):
      return tf.TestingFarmAPI(**api_args)


+ def about(args):
+     api = _get_api(args)
+     pprint.pprint(api.about())
+
+
+ def whoami(args):
+     api = _get_api(args)
+     pprint.pprint(api.whoami())
+
+
  def composes(args):
      api = _get_api(args)
      comps = api.composes(ranch=args.ranch)
@@ -29,6 +39,11 @@ def get_request(args):
      print(str(request))


+ def cancel(args):
+     api = _get_api(args)
+     api.cancel_request(args.request_id)
+
+
  def search_requests(args):
      api = _get_api(args)
      reply = api.search_requests(
@@ -43,9 +58,6 @@ def search_requests(args):

      for req in sorted(reply, key=lambda x: x['created']):
          req_id = req['id']
-         #created_utc = req['created'].partition('.')[0]
-         #created_dt = datetime.fromisoformat(f'{created_utc}+00:00')
-         #created = created_dt.astimezone().isoformat().partition('.')[0]
          created = req['created'].partition('.')[0]

          envs = []
@@ -58,29 +70,51 @@
          envs_str = ', '.join(envs)

          print(f'{created} {req_id} : {envs_str}')
-         #request = tf.Request(initial_data=req)
-         #print(str(request))
-         #request.update()
-         #print(str(request))


  def reserve(args):
      util.info(f"Reserving {args.compose} on {args.arch} for {args.timeout} minutes")

+     if args.hvm:
+         hardware = {'virtualization': {'is-supported': True}}
+     else:
+         hardware = None
+
      api = _get_api(args)
      res = tf.Reserve(
          compose=args.compose,
          arch=args.arch,
          timeout=args.timeout,
+         hardware=hardware,
          api=api,
      )
      with res as m:
          util.info(f"Got machine: {m}")
-         util.subprocess_run([
-             'ssh', '-q', '-i', m.ssh_key,
-             '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
-             f'{m.user}@{m.host}',
-         ])
+         while True:
+             try:
+                 res.request.update()
+                 res.request.assert_alive()
+             except tf.GoneAwayError as e:
+                 print(e)
+                 raise SystemExit(1)
+
+             proc = util.subprocess_run([
+                 'ssh', '-q', '-i', m.ssh_key,
+                 '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
+                 f'{m.user}@{m.host}',
+             ])
+             if proc.returncode != 0:
+                 print(
+                     f"\nssh -i {str(m.ssh_key)} {m.user}@{m.host}\n"
+                     f"terminated with exit code {proc.returncode}\n",
+                 )
+                 try:
+                     input("Press RETURN to try to reconnect, Ctrl-C to quit ...")
+                 except KeyboardInterrupt:
+                     print()
+                     raise
+             else:
+                 break


  def watch_pipeline(args):
@@ -104,12 +138,21 @@ def watch_pipeline(args):


  def parse_args(parser):
-     parser.add_argument('--url', help='Testing Farm API URL')
-     parser.add_argument('--token', help='Testing Farm API auth token')
+     parser.add_argument('--url', help="Testing Farm API URL")
+     parser.add_argument('--token', help="Testing Farm API auth token")
      cmds = parser.add_subparsers(
          dest='_cmd', help="TF helper to run", metavar='<cmd>', required=True,
      )

+     cmd = cmds.add_parser(
+         'whoami',
+         help="print out details about active TF token",
+     )
+     cmd = cmds.add_parser(
+         'about',
+         help="print out details about TF instance (url)",
+     )
+
      cmd = cmds.add_parser(
          'composes',
          help="list all composes available on a given ranch",
@@ -122,6 +165,12 @@ def parse_args(parser):
      )
      cmd.add_argument('request_id', help="Testing Farm request UUID")

+     cmd = cmds.add_parser(
+         'cancel',
+         help="cancel a Testing Farm request",
+     )
+     cmd.add_argument('request_id', help="Testing Farm request UUID")
+
      cmd = cmds.add_parser(
          'search-requests', aliases=('sr',),
          help="return a list of requests matching the criteria",
@@ -140,6 +189,7 @@ def parse_args(parser):
      cmd.add_argument('--arch', '-a', help="system HW architecture", default='x86_64')
      cmd.add_argument('--timeout', '-t', help="pipeline timeout (in minutes)", type=int, default=60)
      cmd.add_argument('--ssh-key', help="path to a ssh private key file like 'id_rsa'")
+     cmd.add_argument('--hvm', help="request a HVM virtualization capable HW", action='store_true')

      cmd = cmds.add_parser(
          'watch-pipeline', aliases=('wp',),
@@ -149,10 +199,16 @@ def parse_args(parser):


  def main(args):
-     if args._cmd == 'composes':
+     if args._cmd == 'whoami':
+         whoami(args)
+     elif args._cmd == 'about':
+         about(args)
+     elif args._cmd == 'composes':
          composes(args)
      elif args._cmd in ('get-request', 'gr'):
          get_request(args)
+     elif args._cmd == 'cancel':
+         cancel(args)
      elif args._cmd in ('search-requests', 'sr'):
          search_requests(args)
      elif args._cmd == 'reserve':
@@ -1,5 +1,8 @@
+ import os
  import random

+ from pathlib import Path
+
  # TODO: TMT_PLAN_ENVIRONMENT_FILE

  # TODO: install rsync on the guest as part of setup
@@ -84,6 +87,9 @@ class Executor:
          # TODO: install rsync
          pass

+     def run_script(self, script, duration=None, shell='/bin/bash', **kwargs):
+         self.conn.ssh(shell, input=script.encode())
+
      # run one test via ssh and parse its results on-the-fly,
      # write out logs
      def run_test(self, fmf_test, reporter):
@@ -1,6 +1,7 @@
  from .. import Provisioner as _Provisioner
  from ... import util, ssh

+
  class LibvirtProvisioner(_Provisioner):
      number = 123

@@ -256,7 +256,7 @@ class SSHConn:
          unified_options['RemoteCommand'] = _shell_cmd((cmd, *args), sudo=sudo)
          unified_options['ControlPath'] = self.tmpdir / 'control.sock'
          return func(
-             _options_to_cli(unified_options, password=self.password),
+             _options_to_ssh(unified_options, password=self.password),
              skip_frames=1,
              text=text,
              **run_kwargs,
@@ -300,7 +300,7 @@ def ssh(
      unified_options['RemoteCommand'] = _shell_cmd((cmd, *args), sudo=sudo)
      unified_options.update(options)
      return func(
-         _options_to_cli(unified_options, password=password),
+         _options_to_ssh(unified_options, password=password),
          skip_frames=1,
          text=text,
          **run_kwargs,
@@ -11,7 +11,6 @@ from pathlib import Path

  from . import util

- #from pprint import pprint as pp
  import json
  import urllib3

@@ -22,14 +21,10 @@ API_QUERY_DELAY = 10

  RESERVE_TASK = {
      'fmf': {
-         # 'url': 'https://github.com/RHSecurityCompliance/atex', # TODO
-         # 'ref': 'main',
-         # 'path': 'fmf_tests',
-         # 'name': "/reserve",
-         'url': 'https://github.com/comps/tmt-experiments-public',
-         'ref': 'master',
-         'path': '.',
-         'test_name': '/reserve',
+         'url': 'https://github.com/RHSecurityCompliance/atex',
+         'ref': 'main',
+         'path': 'tmt_tests',
+         'name': "/plans/reserve",
      },
  }

@@ -44,7 +39,9 @@ _http = urllib3.PoolManager(maxsize=3, block=True)


  class TestingFarmError(Exception):
-     pass
+     def __init__(self, message, reply=None):
+         super().__init__(message)
+         self.reply = reply


  class APIError(TestingFarmError):
@@ -55,7 +52,6 @@ class BadHTTPError(TestingFarmError):
      pass


- # TODO: __init__ and __str__ so we pass just request ID, not a full message
  class GoneAwayError(TestingFarmError):
      pass

@@ -90,18 +86,21 @@ class TestingFarmAPI:
          reply = _http.request(method, url, *args, headers=headers, preload_content=False, **kwargs)

          if reply.status != 200 and not reply.data:
-             raise APIError(f"got HTTP {reply.status} on {method} {url}")
+             raise APIError(f"got HTTP {reply.status} on {method} {url}", reply)

          if reply.headers.get('Content-Type') != 'application/json':
-             raise BadHTTPError(f"HTTP {reply.status} on {method} {url} is not application/json")
+             raise BadHTTPError(
+                 f"HTTP {reply.status} on {method} {url} is not application/json",
+                 reply,
+             )

          try:
              decoded = reply.json()
          except json.decoder.JSONDecodeError:
-             raise BadHTTPError(f"failed to decode JSON for {method} {url}: {reply.data}")
+             raise BadHTTPError(f"failed to decode JSON for {method} {url}: {reply.data}", reply)

          if reply.status != 200:
-             raise APIError(f"got HTTP {reply.status} on {method} {url}: {decoded}")
+             raise APIError(f"got HTTP {reply.status} on {method} {url}: {decoded}", reply)

          return decoded

@@ -230,7 +229,8 @@

      def assert_alive(self):
          if not self.alive():
-             raise GoneAwayError(f"request {self.data['id']} not alive anymore")
+             state = self.data['state']
+             raise GoneAwayError(f"request {self.data['id']} not alive anymore, entered: {state}")

      def wait_for_state(self, state):
          if 'state' not in self.data:
@@ -268,12 +268,28 @@ class PipelineLogStreamer:
          while True:
              self.request.wait_for_state('running')

-             if 'run' in self.request and 'artifacts' in self.request['run']:
-                 if artifacts := self.request['run']['artifacts']:
-                     return f'{artifacts}/pipeline.log'
+             try:
+                 if 'run' not in self.request or 'artifacts' not in self.request['run']:
+                     continue
+
+                 artifacts = self.request['run']['artifacts']
+                 if not artifacts:
+                     continue

-             time.sleep(API_QUERY_DELAY)
-             self.request.update()
+                 log = f'{artifacts}/pipeline.log'
+                 reply = _http.request('HEAD', log)
+                 # TF has a race condition of adding the .log entry without it being created
+                 if reply.status == 404:
+                     util.debug(f"got 404 for {log}, retrying")
+                     continue
+                 elif reply.status != 200:
+                     raise APIError(f"got HTTP {reply.status} on HEAD {log}", reply)
+
+                 return log
+
+             finally:
+                 time.sleep(API_QUERY_DELAY)
+                 self.request.update()

      def __iter__(self):
          url = self._wait_for_entry()
@@ -282,30 +298,30 @@
          while True:
              self.request.assert_alive()

-             headers = {'Range': f'bytes={bytes_read}-'}
-             # load all returned data via .decode() rather than streaming it
-             # in chunks, because we don't want to leave the connection open
-             # (blocking others) while the user code runs between __next__ calls
-             reply = _http.request('GET', url, headers=headers)
-
-             # 416=Range Not Satisfiable, typically meaning "no new data to send"
-             if reply.status == 416:
-                 time.sleep(API_QUERY_DELAY)
-                 self.request.update()
-                 continue
-             # 200=OK or 206=Partial Content
-             elif reply.status not in (200,206):
-                 raise BadHTTPError(f"got {reply.status} when trying to GET {url}")
+             try:
+                 headers = {'Range': f'bytes={bytes_read}-'}
+                 # load all returned data via .decode() rather than streaming it
+                 # in chunks, because we don't want to leave the connection open
+                 # (blocking others) while the user code runs between __next__ calls
+                 reply = _http.request('GET', url, headers=headers)
+
+                 # 416=Range Not Satisfiable, typically meaning "no new data to send"
+                 if reply.status == 416:
+                     continue
+                 # 200=OK or 206=Partial Content
+                 elif reply.status not in (200,206):
+                     raise BadHTTPError(f"got {reply.status} when trying to GET {url}", reply)

-             bytes_read += len(reply.data)
-             buffer += reply.data.decode(errors='ignore')
+                 bytes_read += len(reply.data)
+                 buffer += reply.data.decode(errors='ignore')

-             while (index := buffer.find('\n')) != -1:
-                 yield buffer[:index]
-                 buffer = buffer[index+1:]
+                 while (index := buffer.find('\n')) != -1:
+                     yield buffer[:index]
+                     buffer = buffer[index+1:]

-             time.sleep(API_QUERY_DELAY)
-             self.request.update()
+             finally:
+                 time.sleep(API_QUERY_DELAY)
+                 self.request.update()


  class Reserve:
@@ -456,13 +472,6 @@
          self.request.submit(spec)
          util.debug(f"submitted request:\n{textwrap.indent(str(self.request), ' ')}")

-         # wait for the request to become running
-         while self.request['state'] != 'running':
-             time.sleep(API_QUERY_DELAY)
-             self.request.update()
-             if self.request['state'] in END_STATES:
-                 raise GoneAwayError(f"request {self.request['id']} not alive anymore")
-
          # wait for user/host to ssh to
          ssh_user = ssh_host = None
          for line in PipelineLogStreamer(self.request):
@@ -487,10 +496,10 @@
              f'{ssh_user}@{ssh_host}', 'exit 123',
          ]
          while True:
+             # wait for API_QUERY_DELAY between ssh retries, seems like GEFN sleep time
              time.sleep(API_QUERY_DELAY)
              self.request.update()
-             if self.request['state'] in END_STATES:
-                 raise GoneAwayError(f"request {self.request['id']} not alive anymore")
+             self.request.assert_alive()

              proc = util.subprocess_run(
                  ssh_attempt_cmd,
@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
  name = "atex"
  description = "Ad-hoc Test EXecutor"
  readme = "README.md"
- version = "0.3"
+ version = "0.5"
  license = "GPL-3.0-or-later"
  requires-python = ">= 3.9"
  dependencies = [
@@ -0,0 +1 @@
+ 1
@@ -0,0 +1,5 @@
+ discover:
+     how: fmf
+     test: /reserve
+ execute:
+     how: tmt
@@ -0,0 +1,2 @@
+ test: ./test.sh
+ duration: 1000h
@@ -0,0 +1,36 @@
+ #!/bin/bash
+
+ set -e -x
+
+ # remove useless daemons to free up RAM a bit
+ dnf remove -y rng-tools irqbalance
+
+ # clean up packages from extra repos, restoring original vanilla OS (sorta)
+ rm -v -f \
+     /etc/yum.repos.d/{tag-repository,*beakerlib*,rcmtools}.repo \
+     /etc/yum.repos.d/beaker-{client,harness,tasks}.repo
+
+ function list_foreign_rpms {
+     dnf list --installed \
+         | grep -e @koji-override -e @testing-farm -e @epel -e @copr: -e @rcmtools \
+         | sed 's/ .*//'
+ }
+ rpms=$(list_foreign_rpms)
+ [[ $rpms ]] && dnf downgrade -y --skip-broken $rpms
+ rpms=$(list_foreign_rpms)
+ [[ $rpms ]] && dnf remove -y --noautoremove $rpms
+ dnf clean all
+
+ # install SSH key
+ if [[ $RESERVE_SSH_PUBKEY ]]; then
+     mkdir -p ~/.ssh
+     chmod 0700 ~/.ssh
+     echo "$RESERVE_SSH_PUBKEY" >> ~/.ssh/authorized_keys
+     chmod 0600 ~/.ssh/authorized_keys
+ else
+     echo "RESERVE_SSH_PUBKEY env var not defined" >&2
+     exit 1
+ fi
+
+ # wait forever
+ sleep inf