atex 0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atex/__init__.py +35 -0
- atex/cli/__init__.py +83 -0
- atex/cli/testingfarm.py +171 -0
- atex/fmf.py +168 -0
- atex/minitmt/__init__.py +109 -0
- atex/minitmt/report.py +174 -0
- atex/minitmt/scripts.py +51 -0
- atex/minitmt/testme.py +3 -0
- atex/orchestrator.py +38 -0
- atex/provision/__init__.py +113 -0
- atex/provision/libvirt/VM_PROVISION +51 -0
- atex/provision/libvirt/__init__.py +23 -0
- atex/provision/libvirt/setup-libvirt.sh +72 -0
- atex/ssh.py +320 -0
- atex/testingfarm.py +523 -0
- atex/util/__init__.py +49 -0
- atex/util/dedent.py +25 -0
- atex/util/lockable_class.py +38 -0
- atex/util/log.py +53 -0
- atex/util/subprocess.py +51 -0
- atex-0.1.dist-info/METADATA +11 -0
- atex-0.1.dist-info/RECORD +25 -0
- atex-0.1.dist-info/WHEEL +4 -0
- atex-0.1.dist-info/entry_points.txt +2 -0
- atex-0.1.dist-info/licenses/COPYING.txt +14 -0
atex/testingfarm.py
ADDED
|
@@ -0,0 +1,523 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
import re
|
|
4
|
+
import time
|
|
5
|
+
import tempfile
|
|
6
|
+
import textwrap
|
|
7
|
+
import subprocess
|
|
8
|
+
import collections
|
|
9
|
+
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from . import util
|
|
13
|
+
|
|
14
|
+
#from pprint import pprint as pp
|
|
15
|
+
import json
|
|
16
|
+
import urllib3
|
|
17
|
+
|
|
18
|
+
DEFAULT_API_URL = 'https://api.testing-farm.io/v0.1'
|
|
19
|
+
|
|
20
|
+
# how many seconds to sleep for during API polling
|
|
21
|
+
API_QUERY_DELAY = 10
|
|
22
|
+
|
|
23
|
+
RESERVE_TASK = {
|
|
24
|
+
'fmf': {
|
|
25
|
+
# 'url': 'https://github.com/RHSecurityCompliance/atex', # TODO
|
|
26
|
+
# 'ref': 'main',
|
|
27
|
+
# 'path': 'fmf_tests',
|
|
28
|
+
# 'name': "/reserve",
|
|
29
|
+
'url': 'https://github.com/comps/tmt-experiments-public',
|
|
30
|
+
'ref': 'master',
|
|
31
|
+
'path': '.',
|
|
32
|
+
'test_name': '/reserve',
|
|
33
|
+
},
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
# final states of a request,
|
|
37
|
+
# https://gitlab.com/testing-farm/nucleus/-/blob/main/api/src/tft/nucleus/api/core/schemes/test_request.py
|
|
38
|
+
END_STATES = ('error', 'complete', 'canceled')
|
|
39
|
+
|
|
40
|
+
# always have at most 3 outstanding HTTP requests to every given API host,
|
|
41
|
+
# shared by all instances of all classes here, to avoid flooding the host
|
|
42
|
+
# by multi-threaded users
|
|
43
|
+
_http = urllib3.PoolManager(maxsize=3, block=True)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class TestingFarmError(Exception):
    """Base class for every error raised by this Testing Farm module."""
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class APIError(TestingFarmError):
    """The Testing Farm API returned an error (non-200) reply."""
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class BadHTTPError(TestingFarmError):
    """An HTTP reply was malformed (wrong content type, undecodable JSON)."""
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# TODO: __init__ and __str__ so we pass just request ID, not a full message
class GoneAwayError(TestingFarmError):
    """A request reached a final state (error/complete/canceled) while in use."""
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class TestingFarmAPI:
    """
    A python interface for the Testing Farm HTTP API, closely matching
    functionality provided by it, returning python dictionaries that
    correspond to JSON replies of the HTTP API functions.
    """

    def __init__(self, url=DEFAULT_API_URL, token=None):
        """
        'url' is Testing Farm API URL, a sensible default is used
        if unspecified.

        'token' is a secret API token generated by Testing Farm admins,
        if empty, the TESTING_FARM_API_TOKEN env var is read instead.

        Note that token-less operation is supported, with limited functionality.
        """
        self.api_url = url
        self.api_token = token or os.environ.get('TESTING_FARM_API_TOKEN')

    def _query(self, method, path, *args, headers=None, **kwargs):
        """
        Send a 'method' HTTP request to the API 'path' and return the decoded
        JSON reply as a python object.

        Raises APIError on a non-200 reply, BadHTTPError when the reply is
        not (valid) JSON.
        """
        url = f'{self.api_url}{path}'
        # build a new dict instead of mutating the caller-provided 'headers'
        # in place (the original version leaked the Authorization header back
        # into the caller's dict)
        headers = {**(headers or {}), 'Authorization': f'Bearer {self.api_token}'}

        # preload_content=False so we can hand raw bytes to reply.json()
        reply = _http.request(method, url, *args, headers=headers, preload_content=False, **kwargs)

        if reply.status != 200 and not reply.data:
            raise APIError(f"got HTTP {reply.status} on {method} {url}")

        if reply.headers.get('Content-Type') != 'application/json':
            raise BadHTTPError(f"HTTP {reply.status} on {method} {url} is not application/json")

        try:
            decoded = reply.json()
        except json.decoder.JSONDecodeError:
            raise BadHTTPError(f"failed to decode JSON for {method} {url}: {reply.data}")

        if reply.status != 200:
            raise APIError(f"got HTTP {reply.status} on {method} {url}: {decoded}")

        return decoded

    def whoami(self):
        """
        Return (and cache for this instance) details about the authenticated
        user/token. Requires an auth token.
        """
        if not self.api_token:
            raise ValueError("whoami() requires an auth token")
        if not hasattr(self, '_whoami_cached'):
            self._whoami_cached = self._query('GET', '/whoami')
        return self._whoami_cached

    def about(self):
        """Return metadata about the API service itself."""
        return self._query('GET', '/about')

    def composes(self, ranch=None):
        """
        Return the composes available on the given ranch.

        'ranch' is 'public' or 'redhat', autodetected if token was given.
        """
        if not ranch:
            if not self.api_token:
                raise ValueError("composes() requires an auth token to identify ranch")
            ranch = self.whoami()['token']['ranch']
        return self._query('GET', f'/composes/{ranch}')

    def search_requests(
        self, state, mine=True, ranch=None, created_before=None, created_after=None,
    ):
        """
        Search for test requests matching the given filters.

        'state' is one of 'running', 'queued', etc., and is required by the API.

        If 'mine' is True and a token was given, return only requests for that
        token (user), otherwise return *all* requests (use extra filters pls).

        'ranch' is 'public' or 'redhat', or (probably?) all if left empty.

        'created_*' take ISO 8601 formatted strings, as returned by the API
        elsewhere, ie. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS' (or with '.MS'),
        without timezone.
        """
        fields = {'state': state}
        if ranch:
            fields['ranch'] = ranch
        if created_before:
            fields['created_before'] = created_before
        if created_after:
            fields['created_after'] = created_after

        if mine:
            if not self.api_token:
                raise ValueError("search_requests(mine=True) requires an auth token")
            # whoami() result is cached, but query it once anyway
            whoami = self.whoami()
            fields['token_id'] = whoami['token']['id']
            fields['user_id'] = whoami['user']['id']

        return self._query('GET', '/requests', fields=fields)

    def get_request(self, request_id):
        """
        Return the full metadata of one request.

        'request_id' is the UUID (string) of the request.
        """
        return self._query('GET', f'/requests/{request_id}')

    def submit_request(self, spec):
        """
        Submit a new test request, returning its initial metadata.

        'spec' is a big dictionary with 'test', 'environment', 'settings', etc.
        keys that specify what should be run and where.
        """
        if not self.api_token:
            raise ValueError("submit_request() requires an auth token")
        return self._query('POST', '/requests', json=spec)

    def cancel_request(self, request_id):
        """
        Cancel a previously submitted request.

        'request_id' is the UUID (string) of the request.
        """
        return self._query('DELETE', f'/requests/{request_id}')
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
class Request:
    """
    A higher-level API for submitting, querying, and cancelling a Testing Farm
    request.
    """

    def __init__(self, id=None, api=None, initial_data=None):
        """
        'id' is a Testing Farm request UUID
        'api' is a TestingFarmAPI instance - if unspecified, a sensible default
        will be used.
        'initial_data' (dict) can be used to pre-fill an initial Request state
        """
        self.id = id
        self.api = api or TestingFarmAPI()
        self.data = initial_data or {}

    def submit(self, spec):
        """
        Submit this request to Testing Farm, filling in 'id' and 'data'.

        'spec' is a big dictionary with 'test', 'environment', 'settings', etc.
        keys that specify what should be run and where.
        """
        if self.id:
            raise ValueError("this Request instance already has 'id', refusing submit")
        self.data = self.api.submit_request(spec)
        self.id = self.data['id']

    def update(self):
        """
        Query Testing Farm API to get a more up-to-date version of the request
        metadata accessible via __str__().
        """
        self.data = self.api.get_request(self.id)
        return self.data

    def cancel(self):
        """Cancel the request (if submitted) and reset this instance's state."""
        if not self.id:
            return
        data = self.api.cancel_request(self.id)
        self.id = None
        self.data = {}
        return data

    def alive(self):
        """Return True if the request has not reached a final state yet."""
        if 'state' not in self.data:
            self.update()
        return self.data['state'] not in END_STATES

    def assert_alive(self):
        """Raise GoneAwayError if the request reached a final state."""
        if not self.alive():
            # use self.id rather than self.data['id'] - 'data' may be
            # partially populated (ie. pre-filled via initial_data) and
            # a KeyError here would mask the real "gone away" condition
            raise GoneAwayError(f"request {self.id} not alive anymore")

    def wait_for_state(self, state):
        """Poll the API until the request enters 'state', or raise GoneAwayError."""
        if 'state' not in self.data:
            self.update()
        self.assert_alive()
        while self.data['state'] != state:
            time.sleep(API_QUERY_DELAY)
            self.update()
            self.assert_alive()

    def __repr__(self):
        return f'Request(id={self.id})'

    def __str__(self):
        # python has no better dict-pretty-printing logic
        return json.dumps(self.data, sort_keys=True, indent=4)

    def __contains__(self, item):
        return item in self.data

    def __getitem__(self, key):
        return self.data[key]
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
class PipelineLogStreamer:
    """
    Line buffer for querying Testing Farm pipeline.log using HTTP Range header
    to "stream" its contents over time (over many requests), never having to
    re-read old pipeline.log content.
    """

    def __init__(self, request):
        self.request = request

    def _wait_for_entry(self):
        """Poll until the request exposes an artifacts URL; return the log URL."""
        while True:
            self.request.wait_for_state('running')

            if 'run' in self.request and 'artifacts' in self.request['run']:
                artifacts = self.request['run']['artifacts']
                if artifacts:
                    return f'{artifacts}/pipeline.log'

            time.sleep(API_QUERY_DELAY)
            self.request.update()

    def __iter__(self):
        url = self._wait_for_entry()
        pending = ''
        offset = 0
        while True:
            self.request.assert_alive()

            # load all returned data via .decode() rather than streaming it
            # in chunks, because we don't want to leave the connection open
            # (blocking others) while the user code runs between __next__ calls
            reply = _http.request('GET', url, headers={'Range': f'bytes={offset}-'})

            # 416=Range Not Satisfiable, typically meaning "no new data to send"
            if reply.status == 416:
                time.sleep(API_QUERY_DELAY)
                self.request.update()
                continue
            # 200=OK or 206=Partial Content
            if reply.status not in (200, 206):
                raise BadHTTPError(f"got {reply.status} when trying to GET {url}")

            offset += len(reply.data)
            pending += reply.data.decode(errors='ignore')

            # everything up to the last '\n' is complete lines; the remainder
            # stays buffered until more data arrives
            *complete, pending = pending.split('\n')
            yield from complete

            time.sleep(API_QUERY_DELAY)
            self.request.update()
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
class Reserve:
    r"""
    An abstraction for (ab)using Testing Farm for OS reservations, by submitting
    a dummy test that sets up user-provided SSH key access and enters a no-op
    state, allowing ad-hoc access/use by the user.

    When used in a context manager, it produces a ReservedMachine tuple with
    connection details for an ssh client:

        with Reserve(compose='CentOS-Stream-9', timeout=720) as m:
            subprocess.run(['ssh', '-i', m.ssh_key, f'{m.user}@{m.host}', 'ls /'])
    """

    Reserved = collections.namedtuple('ReservedMachine', ['user', 'host', 'ssh_key', 'request'])

    def __init__(
        self, compose=None, arch='x86_64', pool=None, hardware=None, kickstart=None,
        timeout=60, ssh_key=None, source_host=None, api=None,
    ):
        """
        'compose' (str) is the OS to install, chosen from the composes supported
        by the Testing Farm ranch of the authenticated user.

        'arch' (str) is one of 'x86_64', 's390x', etc.

        'pool' (str) is a name of a Testing Farm infrastructure pool.

        'hardware' (dict) is a complex specification of hardware properties
        the reserved system should have, see:
        https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#hardware

        'kickstart' (dict) is a Beaker-style specification of Anaconda Kickstart
        hacks, passed directly to Testing Farm POST /requests API.

        'timeout' (int) is the maximum time IN MINUTES a Testing Farm request
        is alive, which includes initial creation, waiting in queue, preparing
        an OS, and the entire reservation period.
        Make sure to set it high enough (not just the pure reservation time).

        'ssh_key' (str) is a path to an OpenSSH private key file (with an
        associated public key file in .pub), to be added to the reserved OS.
        If unspecified, an attempt to read ~/.ssh/id_rsa will be made and if
        that is also unsuccessful, a temporary keypair will be generated.

        'source_host' (str) is an IPv4 network specified as ie. '1.2.3.4/32'
        to be allowed incoming traffic to the reserved system (such as ssh).
        If unspecified, an Internet service will be queried to get an outside-
        facing address of the current system.
        Ignored on the 'redhat' ranch.

        'api' is a TestingFarmAPI instance - if unspecified, a sensible default
        will be used.
        """
        spec = {
            'test': RESERVE_TASK,
            'environments': [{
                'arch': arch,
                'os': {
                    'compose': compose,
                },
                'pool': pool,
                'settings': {
                    'pipeline': {
                        'skip_guest_setup': True,
                    },
                    'provisioning': {
                        'tags': {
                            'ArtemisUseSpot': 'false',
                        },
                        'security_group_rules_ingress': [],
                    },
                },
                'secrets': {},
            }],
            'settings': {
                'pipeline': {
                    'timeout': timeout,
                },
            },
        }
        if hardware:
            spec['environments'][0]['hardware'] = hardware
        if kickstart:
            spec['environments'][0]['kickstart'] = kickstart

        self._spec = spec
        self._ssh_key = Path(ssh_key) if ssh_key else None
        self._source_host = source_host
        self.api = api or TestingFarmAPI()

        self.request = None
        self._tmpdir = None

    @staticmethod
    def _guess_host_ipv4():
        """Query an external service for this host's outside-facing IPv4."""
        # some services reply differently (HTML) to non-curl user agents
        curl_agent = {'User-Agent': 'curl/1.2.3'}
        try:
            r = _http.request('GET', 'https://ifconfig.me', headers=curl_agent)
            if r.status != 200:
                raise ConnectionError()
        except (ConnectionError, urllib3.exceptions.RequestError):
            # fall back to an alternative service
            r = _http.request('GET', 'https://ifconfig.co', headers=curl_agent)
        return r.data.decode().strip()

    @staticmethod
    def _gen_ssh_keypair(tmpdir):
        """Generate a passphrase-less RSA keypair inside 'tmpdir'."""
        tmpdir = Path(tmpdir)
        subprocess.run(
            ['ssh-keygen', '-t', 'rsa', '-N', '', '-f', tmpdir / 'key_rsa'],
            stdout=subprocess.DEVNULL,
            check=True,
        )
        return (tmpdir / 'key_rsa', tmpdir / 'key_rsa.pub')

    def __enter__(self):
        import copy
        # deep-copy: the code below mutates nested dicts/lists of the spec
        # (ingress rules, secrets); a shallow .copy() would leak those
        # mutations into self._spec and duplicate them on any later
        # reservation made from the same Reserve instance
        spec = copy.deepcopy(self._spec)

        try:
            # add source_host firewall filter
            source_host = self._source_host or f'{self._guess_host_ipv4()}/32'
            ingress = \
                spec['environments'][0]['settings']['provisioning']['security_group_rules_ingress']
            ingress.append({
                'type': 'ingress',
                'protocol': '-1',
                'cidr': source_host,
                'port_min': 0,
                'port_max': 65535,
            })

            # read user-provided ssh key, or generate one
            ssh_key = self._ssh_key
            if ssh_key:
                if not ssh_key.exists():
                    raise FileNotFoundError(f"{ssh_key} specified, but does not exist")
                ssh_pubkey = Path(f'{ssh_key}.pub')
            else:
                self._tmpdir = tempfile.TemporaryDirectory()
                ssh_key, ssh_pubkey = self._gen_ssh_keypair(self._tmpdir.name)

            pubkey_contents = ssh_pubkey.read_text().strip()
            secrets = spec['environments'][0]['secrets']
            secrets['RESERVE_SSH_PUBKEY'] = pubkey_contents

            self.request = Request(api=self.api)
            self.request.submit(spec)
            util.debug(f"submitted request:\n{textwrap.indent(str(self.request), '    ')}")

            # wait for the request to become running
            while self.request['state'] != 'running':
                time.sleep(API_QUERY_DELAY)
                self.request.update()
                if self.request['state'] in END_STATES:
                    raise GoneAwayError(f"request {self.request['id']} not alive anymore")

            # wait for user/host to ssh to
            ssh_user = ssh_host = None
            for line in PipelineLogStreamer(self.request):
                util.debug(f"pipeline: {line}")
                # find hidden login details
                m = re.search(r'\] Guest is ready: ArtemisGuest\([^,]+, (\w+)@([0-9\.]+), ', line)
                if m:
                    ssh_user, ssh_host = m.groups()
                    continue
                # but wait until much later despite having login, at least until
                # the test starts running (and we get closer to it inserting our
                # ~/.ssh/authorized_keys entry)
                if ssh_user and re.search(r'\] starting tests execution', line):
                    break

            # wait for a successful connection over ssh
            # (it will be failing to login for a while, until the reserve test
            # installs our ssh pubkey into authorized_keys)
            ssh_attempt_cmd = [
                'ssh', '-q', '-i', ssh_key, f'-oConnectionAttempts={API_QUERY_DELAY}',
                '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null',
                f'{ssh_user}@{ssh_host}', 'exit 123',
            ]
            while True:
                time.sleep(API_QUERY_DELAY)
                self.request.update()
                if self.request['state'] in END_STATES:
                    raise GoneAwayError(f"request {self.request['id']} not alive anymore")

                proc = util.subprocess_run(
                    ssh_attempt_cmd,
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                )
                # exit 123 proves we ran a command on the remote system
                if proc.returncode == 123:
                    break

            return self.Reserved(ssh_user, ssh_host, ssh_key, self.request)

        except BaseException:
            # ensure the request is cancelled / tmpdir removed on any failure,
            # including KeyboardInterrupt, then re-raise
            self.__exit__(*sys.exc_info())
            raise

    def __exit__(self, exc_type, exc_value, traceback):
        if self.request:
            try:
                self.request.cancel()
            except APIError:
                # best-effort cleanup; the request may already be gone
                pass
            finally:
                self.request = None

        if self._tmpdir:
            self._tmpdir.cleanup()
            self._tmpdir = None
|
atex/util/__init__.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TODO some description about utilities
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import importlib as _importlib
|
|
6
|
+
import pkgutil as _pkgutil
|
|
7
|
+
import inspect as _inspect
|
|
8
|
+
|
|
9
|
+
__all__ = []
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def __dir__():
    # expose only the re-exported submodule names collected into __all__
    # by _import_submodules() below
    return __all__
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# this is the equivalent of 'from .submod import *' for all submodules
|
|
17
|
+
# (function to avoid polluting global namespace with extra variables)
|
|
18
|
+
def _import_submodules():
    """
    Import every direct submodule of this package and re-export its public
    names from the package namespace (the equivalent of
    'from .submod import *' for all submodules).

    Raises AssertionError if two submodules export the same name.
    """
    for info in _pkgutil.iter_modules(__spec__.submodule_search_locations):
        mod = _importlib.import_module(f'.{info.name}', __name__)

        # if the module defines __all__, just use it
        if hasattr(mod, '__all__'):
            keys = mod.__all__
        else:
            # https://docs.python.org/3/reference/executionmodel.html#binding-of-names
            keys = (x for x in dir(mod) if not x.startswith('_'))

        for key in keys:
            attr = getattr(mod, key)

            # avoid objects that belong to other known modules
            # (ie. imported function from another util module)
            if hasattr(attr, '__module__'):
                if attr.__module__ != mod.__name__:
                    continue
            # avoid some common pollution / imports
            # (we don't want subpackages in here anyway)
            if _inspect.ismodule(attr):
                continue
            # do not override already processed objects (avoid duplicates)
            if key in __all__:
                raise AssertionError(f"tried to override already-imported '{key}'")

            # re-export: bind the attribute on this package and advertise it
            globals()[key] = attr
            __all__.append(key)


_import_submodules()
|
atex/util/dedent.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
r"""
|
|
2
|
+
This allows raw blocks like
|
|
3
|
+
|
|
4
|
+
def func():
|
|
5
|
+
variable = dedent(fr'''
|
|
6
|
+
some
|
|
7
|
+
content
|
|
8
|
+
''')
|
|
9
|
+
|
|
10
|
+
without the leading or trailing newlines and any common leading whitespaces.
|
|
11
|
+
You might think using '''\ would eliminate the first newline, but the string
|
|
12
|
+
is 'raw', it doesn't have escapes.
|
|
13
|
+
|
|
14
|
+
textwrap.dedent() does only the common leading whitespaces.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import textwrap
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def dedent(text):
    """
    Like textwrap.dedent(), but also strip leading and trailing spaces/newlines
    up to the content.
    """
    # drop leading newlines and the trailing (indented) closing-quote line
    # before removing the common leading whitespace
    trimmed = text.lstrip('\n').rstrip(' \n')
    return textwrap.dedent(trimmed)
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class LockableClass:
    """
    A class with (nearly) all attribute accesses protected by threading.RLock,
    making them thread-safe at the cost of some speed.

        class MyClass(LockableClass):
            def writer(self):
                self.attr = 222   # thread-safe instance access
            def reader(self):
                print(self.attr)  # thread-safe instance access
            def complex(self):
                with self.lock:   # thread-safe context
                    self.attr += 1

    Here, 'lock' is a reserved attribute name and must not be overriden
    by a derived class.

    If overriding '__init__', make sure to call 'super().__init__()' *before*
    any attribute accesses in your '__init__'.
    """

    def __init__(self):
        # bypass our own __setattr__; the lock must exist before it can
        # guard anything
        object.__setattr__(self, 'lock', threading.RLock())

    def __getattribute__(self, name):
        # fast path: dunders and the lock itself are read without locking
        if name == 'lock' or name.startswith('__'):
            return object.__getattribute__(self, name)
        with object.__getattribute__(self, 'lock'):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        with object.__getattribute__(self, 'lock'):
            object.__setattr__(self, name, value)
|
atex/util/log.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import logging
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
_logger = logging.getLogger('atex')
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _format_msg(msg, *, skip_frames=0):
    """
    Prefix 'msg' with caller location info ('module.Class.function:lineno').

    'skip_frames' is how many call-stack frames (beyond this function itself)
    to skip when determining the caller.

    Raises SyntaxError when 'skip_frames' exceeds the available stack depth.
    """
    stack = inspect.stack()
    if len(stack)-1 <= skip_frames:
        raise SyntaxError("skip_frames exceeds call stack (frame count)")
    # drop our own frame plus the requested number of wrapper frames
    stack = stack[skip_frames+1:]

    # bottom of the stack, or runpy executed module
    for frame_info in stack:
        if frame_info.function == '<module>':
            break
    module = frame_info

    # last (topmost) function that isn't us
    parent = stack[0]
    function = parent.function

    # if the function has 'self' and it looks like a class instance,
    # prepend it to the function name
    p_locals = parent.frame.f_locals
    if 'self' in p_locals:
        self = p_locals['self']
        if hasattr(self, '__class__') and inspect.isclass(self.__class__):
            function = f'{self.__class__.__name__}.{function}'

    # don't report module name of a function if it's the same as running module
    if parent.filename != module.filename:
        parent_modname = parent.frame.f_globals['__name__']
        # avoid everything having the package name prefixed
        parent_modname = parent_modname.partition('.')[2] or parent_modname
        return f'{parent_modname}.{function}:{parent.lineno}: {msg}'
    elif parent.function != '<module>':
        # same file as the running module, but inside a function
        return f'{function}:{parent.lineno}: {msg}'
    else:
        # module-level code: report just the file name
        return f'{Path(parent.filename).name}:{parent.lineno}: {msg}'
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def debug(msg, *, skip_frames=0):
    """Log 'msg' at DEBUG level, prefixed with caller location info."""
    # +1 to skip this wrapper's own frame
    formatted = _format_msg(msg, skip_frames=skip_frames+1)
    _logger.debug(formatted)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def info(msg, *, skip_frames=0):
    """Log 'msg' at INFO level, prefixed with caller location info."""
    # +1 to skip this wrapper's own frame
    formatted = _format_msg(msg, skip_frames=skip_frames+1)
    _logger.info(formatted)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def warning(msg, *, skip_frames=0):
    """Log 'msg' at WARNING level, prefixed with caller location info."""
    # +1 to skip this wrapper's own frame
    formatted = _format_msg(msg, skip_frames=skip_frames+1)
    _logger.warning(formatted)
|