tft-cli 0.0.23__py3-none-any.whl → 0.0.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2211 @@
1
+ # Copyright Contributors to the Testing Farm project.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+
4
+ import base64
5
+ import ipaddress
6
+ import json
7
+ import os
8
+ import re
9
+ import shutil
10
+ import stat
11
+ import subprocess
12
+ import textwrap
13
+ import time
14
+ import urllib.parse
15
+ import xml.etree.ElementTree as ET
16
+ from enum import Enum
17
+ from typing import Any, Dict, List, Optional, Tuple
18
+
19
+ import pendulum
20
+ import pkg_resources
21
+ import requests
22
+ import typer
23
+ from click.core import ParameterSource # pyre-ignore[21]
24
+ from rich import print, print_json
25
+ from rich.progress import Progress, SpinnerColumn, TextColumn
26
+ from rich.table import Table
27
+
28
+ from tft.cli.config import settings
29
+ from tft.cli.utils import (
30
+ Age,
31
+ OutputFormat,
32
+ artifacts,
33
+ cmd_output_or_exit,
34
+ console,
35
+ console_stderr,
36
+ exit_error,
37
+ hw_constraints,
38
+ install_http_retries,
39
+ normalize_multistring_option,
40
+ options_to_dict,
41
+ read_glob_paths,
42
+ uuid_valid,
43
+ )
44
+
45
+ cli_version: str = pkg_resources.get_distribution("tft-cli").version
46
+
47
+ TestingFarmRequestV1: Dict[str, Any] = {'test': {}, 'environments': None}
48
+ Environment: Dict[str, Any] = {'arch': None, 'os': None, 'pool': None, 'artifacts': None, 'variables': {}}
49
+ TestTMT: Dict[str, Any] = {'url': None, 'ref': None, 'name': None}
50
+ TestSTI: Dict[str, Any] = {'url': None, 'ref': None}
51
+
52
+ REQUEST_PANEL_TMT = "TMT Options"
53
+ REQUEST_PANEL_STI = "STI Options"
54
+ REQUEST_PANEL_RESERVE = "Reserve Options"
55
+
56
+ RESERVE_PANEL_GENERAL = "General Options"
57
+ RESERVE_PANEL_ENVIRONMENT = "Environment Options"
58
+ RESERVE_PANEL_OUTPUT = "Output Options"
59
+
60
+ RUN_REPO = "https://gitlab.com/testing-farm/tests"
61
+ RUN_PLAN = "/testing-farm/sanity"
62
+
63
+ RESERVE_PLAN = os.getenv("TESTING_FARM_RESERVE_PLAN", "/testing-farm/reserve")
64
+ RESERVE_TEST = os.getenv("TESTING_FARM_RESERVE_TEST", "/testing-farm/reserve-system")
65
+ RESERVE_URL = os.getenv("TESTING_FARM_RESERVE_URL", "https://gitlab.com/testing-farm/tests")
66
+ RESERVE_REF = os.getenv("TESTING_FARM_RESERVE_REF", "main")
67
+ RESERVE_TMT_DISCOVER_EXTRA_ARGS = f"--insert --how fmf --url {RESERVE_URL} --ref {RESERVE_REF} --test {RESERVE_TEST}"
68
+
69
+ DEFAULT_PIPELINE_TIMEOUT = 60 * 12
70
+ DEFAULT_AGE = "7d"
71
+
72
+ # Won't be validating CIDR and 65535 max port range with regex here, not worth it
73
+ SECURITY_GROUP_RULE_FORMAT = re.compile(r"(tcp|ip|icmp|udp|-1|[0-255]):(.*):(\d{1,5}-\d{1,5}|\d{1,5}|-1)")
74
+
75
+
76
+ class WatchFormat(str, Enum):
77
+ text = 'text'
78
+ json = 'json'
79
+
80
+
81
+ class PipelineType(str, Enum):
82
+ tmt_multihost = "tmt-multihost"
83
+
84
+
85
+ class PipelineState(str, Enum):
86
+ new = "new"
87
+ queued = "queued"
88
+ running = "running"
89
+ complete = "complete"
90
+ error = "error"
91
+ canceled = "canceled"
92
+
93
+
94
+ # Arguments and options that are shared among multiple commands
95
+ ARGUMENT_API_URL: str = typer.Argument(
96
+ settings.API_URL, envvar="TESTING_FARM_API_URL", metavar='', rich_help_panel='Environment variables'
97
+ )
98
+ ARGUMENT_API_TOKEN: str = typer.Argument(
99
+ settings.API_TOKEN,
100
+ envvar="TESTING_FARM_API_TOKEN",
101
+ show_default=False,
102
+ metavar='',
103
+ rich_help_panel='Environment variables',
104
+ )
105
+ OPTION_TMT_PLAN_NAME: Optional[str] = typer.Option(
106
+ None,
107
+ "--plan",
108
+ help=(
109
+ 'Select plans to be executed. '
110
+ 'Passed as `--name` option to the `tmt plan` command. '
111
+ 'Can be a regular expression.'
112
+ ),
113
+ rich_help_panel=REQUEST_PANEL_TMT,
114
+ )
115
+ OPTION_TMT_PLAN_FILTER: Optional[str] = typer.Option(
116
+ None,
117
+ "--plan-filter",
118
+ help=(
119
+ 'Filter tmt plans. '
120
+ 'Passed as `--filter` option to the `tmt plan` command. '
121
+ 'By default, `enabled:true` filter is applied. '
122
+ 'Plan filtering is similar to test filtering, '
123
+ 'see https://tmt.readthedocs.io/en/stable/examples.html#filter-tests for more information.'
124
+ ),
125
+ rich_help_panel=REQUEST_PANEL_TMT,
126
+ )
127
+ OPTION_TMT_TEST_NAME: Optional[str] = typer.Option(
128
+ None,
129
+ "--test",
130
+ help=(
131
+ 'Select tests to be executed. '
132
+ 'Passed as `--name` option to the `tmt test` command. '
133
+ 'Can be a regular expression.'
134
+ ),
135
+ rich_help_panel=REQUEST_PANEL_TMT,
136
+ )
137
+ OPTION_TMT_TEST_FILTER: Optional[str] = typer.Option(
138
+ None,
139
+ "--test-filter",
140
+ help=(
141
+ 'Filter tmt tests. '
142
+ 'Passed as `--filter` option to the `tmt test` command. '
143
+ 'It overrides any test filter defined in the plan. '
144
+ 'See https://tmt.readthedocs.io/en/stable/examples.html#filter-tests for more information.'
145
+ ),
146
+ rich_help_panel=REQUEST_PANEL_TMT,
147
+ )
148
+ OPTION_TMT_PATH: str = typer.Option(
149
+ '.',
150
+ '--path',
151
+ help='Path to the metadata tree root. Relative to the git repository root specified by --git-url.',
152
+ rich_help_panel=REQUEST_PANEL_TMT,
153
+ )
154
+ OPTION_PIPELINE_TYPE: Optional[PipelineType] = typer.Option(None, help="Force a specific Testing Farm pipeline type.")
155
+ OPTION_POST_INSTALL_SCRIPT: Optional[str] = typer.Option(
156
+ None, help="Post-install script to run right after the guest boots for the first time."
157
+ )
158
+ OPTION_SECURITY_GROUP_RULE_INGRESS: Optional[List[str]] = typer.Option(
159
+ None,
160
+ help=(
161
+ "Additional ingress security group rules to be passed to guest in "
162
+ "PROTOCOL:CIDR:PORT format. Multiple rules can be specified as comma separated, "
163
+ "eg. `tcp:109.81.42.42/32:22,142.0.42.0/24:22`. "
164
+ "Supported by AWS only atm."
165
+ ),
166
+ )
167
+ OPTION_SECURITY_GROUP_RULE_EGRESS: Optional[List[str]] = typer.Option(
168
+ None,
169
+ help=(
170
+ "Additional egress security group rules to be passed to guest in "
171
+ "PROTOCOL:CIDR:PORT format. Multiple rules can be specified as comma separated, "
172
+ "eg. `tcp:109.81.42.42/32:22,142.0.42.0/24:22`. "
173
+ "Supported by AWS only atm."
174
+ ),
175
+ )
176
+ OPTION_KICKSTART: Optional[List[str]] = typer.Option(
177
+ None,
178
+ metavar="key=value|@file",
179
+ help=(
180
+ "Kickstart specification to customize the guest installation. Expressed as a key=value pair. "
181
+ "For more information about the supported keys see "
182
+ "https://tmt.readthedocs.io/en/stable/spec/plans.html#kickstart. The @ prefix marks a yaml file to load."
183
+ ),
184
+ )
185
+ OPTION_POOL: Optional[str] = typer.Option(
186
+ None,
187
+ help=(
188
+ "Force pool to provision. By default the most suited pool is used according to the hardware "
189
+ "requirements specified in tmt plans."
190
+ ),
191
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
192
+ )
193
+ OPTION_REDHAT_BREW_BUILD: List[str] = typer.Option(
194
+ None,
195
+ help="Brew build task IDs or build NVRs to install on the test environment.",
196
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
197
+ )
198
+ OPTION_FEDORA_KOJI_BUILD: List[str] = typer.Option(
199
+ None,
200
+ help="Koji build task IDs or build NVRs to install on the test environment.",
201
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
202
+ )
203
+ OPTION_FEDORA_COPR_BUILD: List[str] = typer.Option(
204
+ None,
205
+ help=(
206
+ "Fedora Copr build to install on the test environment, specified using `build-id:chroot-name`"
207
+ ", e.g. 1784470:fedora-32-x86_64."
208
+ ),
209
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
210
+ )
211
+ OPTION_REPOSITORY: List[str] = typer.Option(
212
+ None,
213
+ help="Repository base url to add to the test environment and install all packages from it.",
214
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
215
+ )
216
+ OPTION_REPOSITORY_FILE: List[str] = typer.Option(
217
+ None,
218
+ help="URL to a repository file which should be added to /etc/yum.repos.d, e.g. https://example.com/repository.repo", # noqa
219
+ )
220
+ OPTION_DRY_RUN: bool = typer.Option(
221
+ False, help="Do not submit a request to Testing Farm, just print it.", rich_help_panel=RESERVE_PANEL_GENERAL
222
+ )
223
+ OPTION_VARIABLES: Optional[List[str]] = typer.Option(
224
+ None,
225
+ "-e",
226
+ "--environment",
227
+ metavar="key=value|@file",
228
+ help="Variables to pass to the test environment. The @ prefix marks a yaml file to load.",
229
+ )
230
+ OPTION_SECRETS: Optional[List[str]] = typer.Option(
231
+ None,
232
+ "-s",
233
+ "--secret",
234
+ metavar="key=value|@file",
235
+ help="Secret variables to pass to the test environment. The @ prefix marks a yaml file to load.",
236
+ )
237
+ OPTION_HARDWARE: List[str] = typer.Option(
238
+ None,
239
+ help=(
240
+ "HW requirements, expressed as key/value pairs. Keys can consist of several properties, "
241
+ "e.g. ``disk.size='>= 40 GiB'``, such keys will be merged in the resulting environment "
242
+ "with other keys sharing the path: ``cpu.family=79`` and ``cpu.model=6`` would be merged, not overwriting "
243
+ "each other. See https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#hardware "
244
+ "for the supported hardware selection possibilities."
245
+ ),
246
+ )
247
+ OPTION_WORKER_IMAGE: Optional[str] = typer.Option(
248
+ None, "--worker-image", help="Force worker container image. Requires Testing Farm developer permissions."
249
+ )
250
+ OPTION_PARALLEL_LIMIT: Optional[int] = typer.Option(
251
+ None,
252
+ '--parallel-limit',
253
+ help=(
254
+ "Maximum amount of plans to be executed in parallel. Default values are 12 for Public Ranch and 5 for "
255
+ "Red Hat Ranch."
256
+ ),
257
+ )
258
+ OPTION_TAGS = typer.Option(
259
+ None,
260
+ "-t",
261
+ "--tag",
262
+ metavar="key=value|@file",
263
+ help="Tag cloud resources with given value. The @ prefix marks a yaml file to load.",
264
+ )
265
+ OPTION_RESERVE: bool = typer.Option(
266
+ False,
267
+ help="Reserve machine after testing, similarly to the `reserve` command.",
268
+ rich_help_panel=REQUEST_PANEL_RESERVE,
269
+ )
270
+
271
+
272
+ def _option_autoconnect(panel: str) -> bool:
273
+ return typer.Option(True, help="Automatically connect to the guest via SSH.", rich_help_panel=panel)
274
+
275
+
276
+ def _option_ssh_public_keys(panel: str) -> List[str]:
277
+ return typer.Option(
278
+ ["~/.ssh/*.pub"],
279
+ "--ssh-public-key",
280
+ help="Path to SSH public key(s) used to connect. Supports globbing.",
281
+ rich_help_panel=panel,
282
+ )
283
+
284
+
285
+ def _option_reservation_duration(panel: str) -> int:
286
+ return typer.Option(
287
+ settings.DEFAULT_RESERVATION_DURATION,
288
+ "--duration",
289
+ help="Set the reservation duration in minutes. By default the reservation is for 30 minutes.",
290
+ rich_help_panel=panel,
291
+ )
292
+
293
+
294
+ def _option_debug_reservation(panel: Optional[str] = None) -> bool:
295
+ return typer.Option(
296
+ False,
297
+ help="Enable debug messages in the reservation code. Useful for testing changes to reservation code.",
298
+ rich_help_panel=panel,
299
+ )
300
+
301
+
302
+ def _generate_tmt_extra_args(step: str) -> Optional[List[str]]:
303
+ return typer.Option(
304
+ None,
305
+ help=(
306
+ f"Additional options passed to the \"{step}\" step. "
307
+ "Can be specified multiple times for multiple additions."
308
+ ),
309
+ rich_help_panel=REQUEST_PANEL_TMT,
310
+ )
311
+
312
+
313
+ def _sanity_reserve() -> None:
314
+ """
315
+ Sanity checks for reservation support.
316
+ """
317
+
318
+ # Check if SSH_AUTH_SOCK is defined
319
+ ssh_auth_sock = os.getenv("SSH_AUTH_SOCK")
320
+ if not ssh_auth_sock:
321
+ exit_error(
322
+ "No 'ssh-agent' seems to be running, it is required for reservations to work, cannot continue.\n"
323
+ "SSH_AUTH_SOCK is not defined, make sure the ssh-agent is running by executing 'eval `ssh-agent`'."
324
+ )
325
+
326
+ # Check if SSH_AUTH_SOCK exists
327
+ if not os.path.exists(ssh_auth_sock):
328
+ exit_error(
329
+ "SSH_AUTH_SOCK socket does not exist, make sure the ssh-agent is running by executing 'eval `ssh-agent`'."
330
+ )
331
+
332
+ # Check if value of SSH_AUTH_SOCK is socket
333
+ if not stat.S_ISSOCK(os.stat(ssh_auth_sock).st_mode):
334
+ exit_error("SSH_AUTH_SOCK is not a socket, make sure the ssh-agent is running by executing 'eval `ssh-agent`'.")
335
+
336
+ # Check if ssh-add -L is not empty
337
+ ssh_add_output = subprocess.run(["ssh-add", "-L"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
338
+ if ssh_add_output.returncode != 0:
339
+ exit_error("No SSH identities found in the SSH agent. Please run `ssh-add`.")
340
+
341
+
342
+ def _handle_reservation(session, request_id: str, autoconnect: bool = False) -> None:
343
+ """
344
+ Handle the reservation for :py:func:`request` and :py:func:`restart` commands.
345
+ """
346
+ # Get artifacts url
347
+ request_url = urllib.parse.urljoin(settings.API_URL, f"/v0.1/requests/{request_id}")
348
+ response = session.get(request_url)
349
+ artifacts_url = response.json()['run']['artifacts']
350
+
351
+ try:
352
+ pipeline_log = session.get(f"{artifacts_url}/pipeline.log").text
353
+
354
+ if not pipeline_log:
355
+ exit_error(f"Pipeline log was empty. Please file an issue to {settings.ISSUE_TRACKER}.")
356
+
357
+ except requests.exceptions.SSLError:
358
+ exit_error(
359
+ textwrap.dedent(
360
+ f"""
361
+ Failed to access Testing Farm artifacts because of SSL validation error.
362
+ If you use Red Hat Ranch please make sure you have Red Hat CA certificates installed.
363
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
364
+ """
365
+ )
366
+ )
367
+ return
368
+
369
+ except requests.exceptions.ConnectionError:
370
+ exit_error(
371
+ textwrap.dedent(
372
+ f"""
373
+ Failed to access Testing Farm artifacts.
374
+ If you use Red Hat Ranch please make sure you are connected to the VPN.
375
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
376
+ """
377
+ )
378
+ )
379
+ return
380
+
381
+ # match any hostname or IP address from gluetool modules log
382
+ guests = re.findall(r'Guest is ready.*root@([\d\w\.-]+)', pipeline_log)
383
+
384
+ if not guests:
385
+ exit_error(
386
+ textwrap.dedent(
387
+ f"""
388
+ No guests found to connect to. This is unexpected, please file an issue
389
+ to {settings.ISSUE_TRACKER}.
390
+ """
391
+ )
392
+ )
393
+
394
+ if len(guests) > 1:
395
+ for guest in guests:
396
+ console.print(f"🌎 ssh root@{guest}")
397
+ return
398
+ else:
399
+ console.print(f"🌎 ssh root@{guests[0]}")
400
+
401
+ if autoconnect:
402
+ os.system(f"ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null root@{guests[0]}") # noqa: E501
403
+
404
+
405
+ def _localhost_ingress_rule(session: requests.Session) -> str:
406
+ try:
407
+ get_ip = session.get(settings.PUBLIC_IP_CHECKER_URL)
408
+ except requests.exceptions.RequestException as err:
409
+ exit_error(f"Could not get workstation ip to form a security group rule: {err}")
410
+
411
+ if get_ip.ok:
412
+ ip = get_ip.text.strip()
413
+ return f"-1:{ip}:-1"
414
+
415
+ else:
416
+ exit_error(f"Got {get_ip.status_code} while checking {settings.PUBLIC_IP_CHECKER_URL}")
417
+
418
+
419
+ def _add_reservation(
420
+ ssh_public_keys: List[str],
421
+ rules: Dict[str, Any],
422
+ duration: int,
423
+ environment: Dict[str, Any],
424
+ debug_reservation: bool,
425
+ ):
426
+ """
427
+ Add discovery of the reservation test to the given environment.
428
+ """
429
+ authorized_keys = read_glob_paths(ssh_public_keys).encode("utf-8")
430
+ if not authorized_keys:
431
+ exit_error(f"No public SSH keys found under {', '.join(ssh_public_keys)}, cannot continue.")
432
+
433
+ authorized_keys_bytes = base64.b64encode(authorized_keys)
434
+
435
+ if "secrets" not in environment or environment["secrets"] is None:
436
+ environment["secrets"] = {}
437
+
438
+ environment["secrets"].update({"TF_RESERVATION_AUTHORIZED_KEYS_BASE64": authorized_keys_bytes.decode("utf-8")})
439
+
440
+ if "settings" not in environment or environment["settings"] is None:
441
+ environment["settings"] = {}
442
+
443
+ if "provisioning" not in environment["settings"] or environment["settings"]["provisioning"] is None:
444
+ environment["settings"]["provisioning"] = {}
445
+
446
+ environment["settings"]["provisioning"].update(rules)
447
+
448
+ if "variables" not in environment or environment["variables"] is None:
449
+ environment["variables"] = {}
450
+
451
+ environment["variables"].update({"TF_RESERVATION_DURATION": str(duration)})
452
+
453
+ if debug_reservation:
454
+ environment["variables"].update({"TF_RESERVATION_DEBUG": "1"})
455
+
456
+ if "tmt" not in environment or environment["tmt"] is None:
457
+ environment["tmt"] = {"extra_args": {}}
458
+
459
+ if "extra_args" not in environment["tmt"] or environment["tmt"]["extra_args"] is None:
460
+ environment["tmt"]["extra_args"] = {}
461
+
462
+ if "discover" not in environment["tmt"]["extra_args"] or environment["tmt"]["extra_args"]["discover"] is None:
463
+ environment["tmt"]["extra_args"]["discover"] = []
464
+
465
+ # add reservation if not already present
466
+ if RESERVE_TMT_DISCOVER_EXTRA_ARGS not in environment["tmt"]["extra_args"]["discover"]:
467
+ environment["tmt"]["extra_args"]["discover"].append(RESERVE_TMT_DISCOVER_EXTRA_ARGS)
468
+
469
+
470
+ def _contains_compose(environments: List[Dict[str, Any]]):
471
+ """
472
+ Returns true if any of the environments has ``os.compose`` defined.
473
+ """
474
+ for environment in environments:
475
+ if "os" in environment and environment["os"]:
476
+ if "compose" in environment["os"] and environment["os"]["compose"]:
477
+ return True
478
+ return False
479
+
480
+
481
+ # NOTE(ivasilev) Largely borrowed from artemis-cli
482
+ def _parse_security_group_rules(ingress_rules: List[str], egress_rules: List[str]) -> Dict[str, Any]:
483
+ """
484
+ Returns a dictionary with ingress/egress rules in TFT request friendly format
485
+ """
486
+ security_group_rules = {}
487
+
488
+ def _add_secgroup_rules(sg_type: str, sg_data: List[str]) -> None:
489
+ security_group_rules[sg_type] = []
490
+
491
+ for sg_rule in normalize_multistring_option(sg_data):
492
+ matches = re.match(SECURITY_GROUP_RULE_FORMAT, sg_rule)
493
+ if not matches:
494
+ exit_error(f"Bad format of security group rule '{sg_rule}', should be PROTOCOL:CIDR:PORT") # noqa: E231
495
+
496
+ protocol, cidr, port = matches[1], matches[2], matches[3]
497
+
498
+ # Let's validate cidr
499
+ try:
500
+ # This way a single ip address will be converted to a valid ip/32 cidr.
501
+ cidr = str(ipaddress.ip_network(cidr))
502
+ except ValueError as err:
503
+ exit_error(f'CIDR {cidr} has incorrect format: {err}')
504
+
505
+ # Artemis expects port_min/port_max, -1 has to be converted to a proper range 0-65535
506
+ port_min = 0 if port == '-1' else int(port.split('-')[0])
507
+ port_max = 65535 if port == '-1' else int(port.split('-')[-1])
508
+
509
+ # Add rule for Artemis API
510
+ security_group_rules[sg_type].append(
511
+ {
512
+ 'type': sg_type.split('_')[-1],
513
+ 'protocol': protocol,
514
+ 'cidr': cidr,
515
+ 'port_min': port_min,
516
+ 'port_max': port_max,
517
+ }
518
+ )
519
+
520
+ _add_secgroup_rules('security_group_rules_ingress', ingress_rules)
521
+ _add_secgroup_rules('security_group_rules_egress', egress_rules)
522
+
523
+ return security_group_rules
524
+
525
+
526
+ def _get_headers(api_key: str) -> Dict[str, str]:
527
+ """
528
+ Return a dict with headers for a request to Testing Farm API.
529
+ Used for authentication.
530
+ """
531
+ return {'Authorization': f'Bearer {api_key}'}
532
+
533
+
534
+ def _parse_xunit(xunit: str):
535
+ """
536
+ A helper that parses xunit file into sets of passed_plans/failed_plans/errored_plans per arch.
537
+
538
+ The plans are returned as a {'arch': ['plan1', 'plan2', ..]} map. If it was impossible to deduce architecture
539
+ from a certain plan result (happens in case of early fails / infra issues), the plan will be listed under the 'N/A'
540
+ key.
541
+ """
542
+
543
+ def _add_plan(collection: dict, arch: str, plan: ET.Element):
544
+ # NOTE(ivasilev) name property will always be defined at this point, defaulting to '' to make type check happy
545
+ plan_name = plan.get('name', '')
546
+ if arch in collection:
547
+ collection[arch].append(plan_name)
548
+ else:
549
+ collection[arch] = [plan_name]
550
+
551
+ failed_plans = {}
552
+ passed_plans = {}
553
+ skipped_plans = {}
554
+ errored_plans = {}
555
+
556
+ results_root = ET.fromstring(xunit)
557
+ for plan in results_root.findall('./testsuite'):
558
+ # Try to get information about the environment (stored under ./testing-environment), may be
559
+ # absent if state is undefined
560
+ testing_environment: Optional[ET.Element] = plan.find('./testing-environment[@name="requested"]')
561
+ if not testing_environment:
562
+ console_stderr.print(
563
+ f'Could not find env specifications for {plan.get("name")}, assuming fail for all arches'
564
+ )
565
+ arch = 'N/A'
566
+ else:
567
+ arch_property = testing_environment.find('./property[@name="arch"]')
568
+ if arch_property is None:
569
+ console_stderr.print(f'Could not find arch property for plan {plan.get("name")} results, skipping')
570
+ continue
571
+ # NOTE(ivasilev) arch property will always be defined at this point, defaulting to '' to make type check
572
+ # happy
573
+ arch = arch_property.get('value', '')
574
+ if plan.get('result') == 'passed':
575
+ _add_plan(passed_plans, arch, plan)
576
+ elif plan.get('result') == 'failed':
577
+ _add_plan(failed_plans, arch, plan)
578
+ elif plan.get('result') == 'skipped':
579
+ _add_plan(skipped_plans, arch, plan)
580
+ else:
581
+ _add_plan(errored_plans, arch, plan)
582
+
583
+ # Let's remove possible duplicates among N/A errored out tests
584
+ if 'N/A' in errored_plans:
585
+ errored_plans['N/A'] = list(set(errored_plans['N/A']))
586
+ return passed_plans, failed_plans, skipped_plans, errored_plans
587
+
588
+
589
+ def _get_request_summary(request: dict, session: requests.Session):
590
+ """A helper that prepares json summary of the test run"""
591
+ state = request.get('state')
592
+ artifacts_url = (request.get('run') or {}).get('artifacts')
593
+ xpath_url = f'{artifacts_url}/results.xml' if artifacts_url else ''
594
+ xunit = (request.get('result') or {}).get('xunit') or '<testsuites></testsuites>'
595
+ if state not in ['queued', 'running'] and artifacts_url:
596
+ # NOTE(ivasilev) xunit can be None (ex. in case of timed out requests) so let's fetch results.xml and use it
597
+ # as source of truth
598
+ try:
599
+ response = session.get(xpath_url)
600
+ if response.status_code == 200:
601
+ xunit = response.text
602
+ except requests.exceptions.ConnectionError:
603
+ console_stderr.print("Could not get xunit results")
604
+ passed_plans, failed_plans, skipped_plans, errored_plans = _parse_xunit(xunit)
605
+ overall = (request.get("result") or {}).get("overall")
606
+ arches_requested = [env['arch'] for env in request['environments_requested']]
607
+
608
+ return {
609
+ 'id': request['id'],
610
+ 'state': request['state'],
611
+ 'artifacts': artifacts_url,
612
+ 'overall': overall,
613
+ 'arches_requested': arches_requested,
614
+ 'errored_plans': errored_plans,
615
+ 'failed_plans': failed_plans,
616
+ 'skipped_plans': skipped_plans,
617
+ 'passed_plans': passed_plans,
618
+ }
619
+
620
+
621
+ def _print_summary_table(summary: dict, format: Optional[WatchFormat], show_details=True):
622
+ if not format == WatchFormat.text:
623
+ # Nothing to do, table is printed only when text output is requested
624
+ return
625
+
626
+ def _get_plans_list(collection):
627
+ return list(collection.values())[0] if collection.values() else []
628
+
629
+ def _has_plan(collection, arch, plan):
630
+ return plan in collection.get(arch, [])
631
+
632
+ # Let's transform plans maps into collection of plans to display plan result per arch statistics
633
+ errored = _get_plans_list(summary['errored_plans'])
634
+ failed = _get_plans_list(summary['failed_plans'])
635
+ skipped = _get_plans_list(summary['skipped_plans'])
636
+ passed = _get_plans_list(summary['passed_plans'])
637
+ generic_info_table = Table(show_header=True, header_style="bold magenta")
638
+ arches_requested = summary['arches_requested']
639
+ artifacts_url = summary['artifacts'] or ''
640
+ for column in summary.keys():
641
+ generic_info_table.add_column(column)
642
+ generic_info_table.add_row(
643
+ summary['id'],
644
+ summary['state'],
645
+ f'[link]{artifacts_url}[/link]',
646
+ summary['overall'],
647
+ ','.join(arches_requested),
648
+ str(len(errored)),
649
+ str(len(failed)),
650
+ str(len(skipped)),
651
+ str(len(passed)),
652
+ )
653
+ console.print(generic_info_table)
654
+
655
+ all_plans = sorted(set(errored + failed + skipped + passed))
656
+ details_table = Table(show_header=True, header_style="bold magenta")
657
+ for column in ["plan"] + arches_requested:
658
+ details_table.add_column(column)
659
+
660
+ for plan in all_plans:
661
+ row = [plan]
662
+ for arch in arches_requested:
663
+ if _has_plan(summary['passed_plans'], arch, plan):
664
+ res = '[green]pass[/green]'
665
+ elif _has_plan(summary['skipped_plans'], arch, plan):
666
+ res = '[white]skip[/white]'
667
+ elif _has_plan(summary['failed_plans'], arch, plan):
668
+ res = '[red]fail[/red]'
669
+ elif _has_plan(summary['errored_plans'], 'N/A', plan):
670
+ res = '[yellow]error[/yellow]'
671
+ else:
672
+ # If for some reason the plan has not been executed for this arch (this can happen after
673
+ # applying adjust rules) -> don't show anything
674
+ res = None
675
+ row.append(res)
676
+ details_table.add_row(*row)
677
+ if show_details:
678
+ console.print(details_table)
679
+
680
+
681
+ def watch(
682
+ api_url: str = typer.Option(settings.API_URL, help="Testing Farm API URL."),
683
+ id: str = typer.Option(..., help="Request ID to watch"),
684
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
685
+ format: Optional[WatchFormat] = typer.Option(WatchFormat.text, help="Output format"),
686
+ autoconnect: bool = typer.Option(True, hidden=True),
687
+ reserve: bool = typer.Option(False, hidden=True),
688
+ ):
689
+ def _console_print(*args, **kwargs):
690
+ """A helper function that will skip printing to console if output format is json"""
691
+ if format == WatchFormat.json:
692
+ return
693
+ console.print(*args, **kwargs)
694
+
695
+ """Watch request for completion."""
696
+
697
+ if not uuid_valid(id):
698
+ exit_error("invalid request id")
699
+
700
+ get_url = urllib.parse.urljoin(api_url, f"/v0.1/requests/{id}")
701
+ current_state: str = ""
702
+
703
+ _console_print(f"🔎 api [blue]{get_url}[/blue]")
704
+
705
+ if not no_wait:
706
+ _console_print("💡 waiting for request to finish, use ctrl+c to skip", style="bright_yellow")
707
+
708
+ artifacts_shown = False
709
+
710
+ # Setting up retries
711
+ session = requests.Session()
712
+ install_http_retries(session)
713
+
714
+ def _is_reserved(session, request):
715
+ artifacts_url = (request.get('run') or {}).get('artifacts')
716
+
717
+ if not artifacts_url:
718
+ return False
719
+
720
+ try:
721
+ workdir = re.search(r'href="(.*)" name="workdir"', session.get(f"{artifacts_url}/results.xml").text)
722
+ except requests.exceptions.SSLError:
723
+ exit_error("Artifacts unreachable via SSL, do you have RH CA certificates installed?[/yellow]")
724
+
725
+ if workdir:
726
+ # finish early if reservation is running
727
+ if re.search(r"\[\+\] Reservation tick:", session.get(f"{workdir.group(1)}/log.txt").text):
728
+ return True
729
+
730
+ return False
731
+
732
+ while True:
733
+ try:
734
+ response = session.get(get_url)
735
+
736
+ except requests.exceptions.ConnectionError as exc:
737
+ console.print("📛 connection to API failed", style="red")
738
+ raise typer.Exit(code=2) from exc
739
+
740
+ if response.status_code == 404:
741
+ exit_error("request with given ID not found")
742
+
743
+ if response.status_code != 200:
744
+ exit_error(f"failed to get request: {response.text}")
745
+
746
+ request = response.json()
747
+
748
+ state = request["state"]
749
+
750
+ if state == current_state:
751
+ # check for reservation status and finish early if reserved
752
+ if reserve and _is_reserved(session, request):
753
+ _handle_reservation(session, request["id"], autoconnect)
754
+ return
755
+
756
+ time.sleep(1)
757
+ continue
758
+
759
+ current_state = state
760
+
761
+ request_summary = _get_request_summary(request, session)
762
+ if format == WatchFormat.json:
763
+ console.print(json.dumps(request_summary, indent=2))
764
+
765
+ if state == "new":
766
+ _console_print("👶 request is [blue]waiting to be queued[/blue]")
767
+
768
+ elif state == "queued":
769
+ _console_print("👷 request is [blue]queued[/blue]")
770
+
771
+ elif state == "running":
772
+ _console_print("🚀 request is [blue]running[/blue]")
773
+ _console_print(f"🚢 artifacts [blue]{request['run']['artifacts']}[/blue]")
774
+ artifacts_shown = True
775
+
776
+ elif state == "complete":
777
+ if not artifacts_shown:
778
+ _console_print(f"🚢 artifacts [blue]{request['run']['artifacts']}[/blue]")
779
+
780
+ overall = request["result"]["overall"]
781
+ if overall in ["passed", "skipped"]:
782
+ _console_print("✅ tests passed", style="green")
783
+ _print_summary_table(request_summary, format)
784
+ raise typer.Exit()
785
+
786
+ if overall in ["failed", "error", "unknown"]:
787
+ _console_print(f"❌ tests {overall}", style="red")
788
+ if overall == "error":
789
+ _console_print(f"{request['result']['summary']}", style="red")
790
+ _print_summary_table(request_summary, format)
791
+ raise typer.Exit(code=1)
792
+
793
+ elif state == "error":
794
+ msg = (
795
+ request['result'].get('summary')
796
+ if request['result']
797
+ else '\n'.join(note['message'] for note in request['notes'])
798
+ )
799
+ _console_print(f"📛 pipeline error\n{msg}", style="red")
800
+ _print_summary_table(request_summary, format)
801
+ raise typer.Exit(code=2)
802
+
803
+ elif state in ["canceled", "cancel-requested"]:
804
+ _console_print("⚠️ pipeline cancelled", style="yellow")
805
+ raise typer.Exit(code=3)
806
+
807
+ if no_wait:
808
+ _print_summary_table(request_summary, format, show_details=False)
809
+ raise typer.Exit()
810
+
811
+ time.sleep(settings.WATCH_TICK)
812
+
813
+
814
+ def version():
815
+ """Print CLI version"""
816
+ console.print(f"{cli_version}")
817
+
818
+
819
+ def request(
820
+ api_url: str = ARGUMENT_API_URL,
821
+ api_token: str = ARGUMENT_API_TOKEN,
822
+ timeout: int = typer.Option(
823
+ DEFAULT_PIPELINE_TIMEOUT,
824
+ help="Set the timeout for the request in minutes. If the test takes longer than this, it will be terminated.",
825
+ ),
826
+ test_type: str = typer.Option("fmf", help="Test type to use, if not set autodetected."),
827
+ tmt_plan_name: Optional[str] = OPTION_TMT_PLAN_NAME,
828
+ tmt_plan_filter: Optional[str] = OPTION_TMT_PLAN_FILTER,
829
+ tmt_test_name: Optional[str] = OPTION_TMT_TEST_NAME,
830
+ tmt_test_filter: Optional[str] = OPTION_TMT_TEST_FILTER,
831
+ tmt_path: str = OPTION_TMT_PATH,
832
+ sti_playbooks: Optional[List[str]] = typer.Option(
833
+ None,
834
+ "--playbook",
835
+ help="Playbook to run, by default 'tests/tests*.yml', multiple playbooks can be specified.",
836
+ rich_help_panel=REQUEST_PANEL_STI,
837
+ ),
838
+ git_url: Optional[str] = typer.Option(
839
+ None, help="URL of the GIT repository to test. If not set, autodetected from current git repository."
840
+ ),
841
+ git_ref: str = typer.Option(
842
+ "main", help="GIT ref or branch to test. If not set, autodetected from current git repository."
843
+ ),
844
+ git_merge_sha: Optional[str] = typer.Option(
845
+ None, help="GIT ref or branch into which --ref will be merged, if specified."
846
+ ),
847
+ arches: List[str] = typer.Option(["x86_64"], "--arch", help="Hardware platforms of the system to be provisioned."),
848
+ compose: Optional[str] = typer.Option(
849
+ None,
850
+ help="Compose used to provision system-under-test. If not set, tests will expect 'container' provision method specified in tmt plans.", # noqa
851
+ ),
852
+ hardware: List[str] = OPTION_HARDWARE,
853
+ kickstart: Optional[List[str]] = OPTION_KICKSTART,
854
+ pool: Optional[str] = OPTION_POOL,
855
+ cli_tmt_context: Optional[List[str]] = typer.Option(
856
+ None,
857
+ "-c",
858
+ "--context",
859
+ metavar="key=value|@file",
860
+ help="Context variables to pass to `tmt`. The @ prefix marks a yaml file to load.",
861
+ ),
862
+ variables: Optional[List[str]] = OPTION_VARIABLES,
863
+ secrets: Optional[List[str]] = OPTION_SECRETS,
864
+ tmt_environment: Optional[List[str]] = typer.Option(
865
+ None,
866
+ "-T",
867
+ "--tmt-environment",
868
+ metavar="key=value|@file",
869
+ help=(
870
+ "Environment variables to pass to the tmt process. "
871
+ "Used to configure tmt report plugins like reportportal or polarion. "
872
+ "The @ prefix marks a yaml file to load."
873
+ ),
874
+ ),
875
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
876
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
877
+ redhat_brew_build: List[str] = OPTION_REDHAT_BREW_BUILD,
878
+ fedora_koji_build: List[str] = OPTION_FEDORA_KOJI_BUILD,
879
+ fedora_copr_build: List[str] = OPTION_FEDORA_COPR_BUILD,
880
+ repository: List[str] = OPTION_REPOSITORY,
881
+ repository_file: List[str] = OPTION_REPOSITORY_FILE,
882
+ sanity: bool = typer.Option(False, help="Run Testing Farm sanity test.", rich_help_panel=RESERVE_PANEL_GENERAL),
883
+ tags: Optional[List[str]] = OPTION_TAGS,
884
+ watchdog_dispatch_delay: Optional[int] = typer.Option(
885
+ None,
886
+ help="How long (seconds) before the guest \"is-alive\" watchdog is dispatched. Note that this is implemented only in Artemis service.", # noqa
887
+ ),
888
+ watchdog_period_delay: Optional[int] = typer.Option(
889
+ None,
890
+ help="How often (seconds) check that the guest \"is-alive\". Note that this is implemented only in Artemis service.", # noqa
891
+ ),
892
+ dry_run: bool = OPTION_DRY_RUN,
893
+ pipeline_type: Optional[PipelineType] = OPTION_PIPELINE_TYPE,
894
+ post_install_script: Optional[str] = OPTION_POST_INSTALL_SCRIPT,
895
+ security_group_rule_ingress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_INGRESS,
896
+ security_group_rule_egress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_EGRESS,
897
+ user_webpage: Optional[str] = typer.Option(
898
+ None, help="URL to the user's webpage. The link will be shown in the results viewer."
899
+ ),
900
+ user_webpage_name: Optional[str] = typer.Option(
901
+ None, help="Name of the user's webpage. It will be shown in the results viewer."
902
+ ),
903
+ user_webpage_icon: Optional[str] = typer.Option(
904
+ None, help="URL of the icon of the user's webpage. It will be shown in the results viewer."
905
+ ),
906
+ parallel_limit: Optional[int] = OPTION_PARALLEL_LIMIT,
907
+ tmt_discover: Optional[List[str]] = _generate_tmt_extra_args("discover"),
908
+ tmt_prepare: Optional[List[str]] = _generate_tmt_extra_args("prepare"),
909
+ tmt_finish: Optional[List[str]] = _generate_tmt_extra_args("finish"),
910
+ reserve: bool = OPTION_RESERVE,
911
+ ssh_public_keys: List[str] = _option_ssh_public_keys(REQUEST_PANEL_RESERVE),
912
+ autoconnect: bool = _option_autoconnect(REQUEST_PANEL_RESERVE),
913
+ reservation_duration: int = _option_reservation_duration(REQUEST_PANEL_RESERVE),
914
+ debug_reservation: bool = _option_debug_reservation(REQUEST_PANEL_RESERVE),
915
+ ):
916
+ """
917
+ Request testing from Testing Farm.
918
+ """
919
+ # Split comma separated arches
920
+ arches = normalize_multistring_option(arches)
921
+
922
+ git_available = bool(shutil.which("git"))
923
+
924
+ # check for token
925
+ if not api_token:
926
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable")
927
+
928
+ if not compose and arches != ['x86_64']:
929
+ exit_error(
930
+ "Without compose the tests run against a container image specified in the plan. "
931
+ "Only 'x86_64' architecture supported in this case."
932
+ )
933
+
934
+ if sanity:
935
+ if git_url or tmt_plan_name:
936
+ exit_error(
937
+ "The option [underline]--sanity[/underline] is mutually exclusive with "
938
+ "[underline]--git-url[/underline] and [underline]--plan[/underline]."
939
+ )
940
+
941
+ git_url = str(settings.TESTING_FARM_TESTS_GIT_URL)
942
+ tmt_plan_name = str(settings.TESTING_FARM_SANITY_PLAN)
943
+
944
+ if reserve:
945
+ _sanity_reserve()
946
+
947
+ # resolve git repository details from the current repository
948
+ if not git_url:
949
+ if not git_available:
950
+ exit_error("no git url defined")
951
+
952
+ # check for uncommitted changes
953
+ if git_available and not git_url:
954
+ try:
955
+ subprocess.check_output("git update-index --refresh".split(), stderr=subprocess.STDOUT)
956
+ subprocess.check_output("git diff-index --quiet HEAD --".split(), stderr=subprocess.STDOUT)
957
+ except subprocess.CalledProcessError as process:
958
+ if 'fatal:' not in str(process.stdout):
959
+ exit_error(
960
+ "Uncommited changes found in current git repository, refusing to continue.\n"
961
+ " HINT: When running tests for the current repository, the changes "
962
+ "must be commited and pushed."
963
+ )
964
+
965
+ git_url = cmd_output_or_exit("git remote get-url origin", "could not auto-detect git url")
966
+ # use https instead of git when auto-detected
967
+ # GitHub: git@github.com:containers/podman.git
968
+ # GitLab: git@gitlab.com:testing-farm/cli.git, git+ssh://git@gitlab.com/spoore/centos_rpms_jq.git
969
+ # Pagure: ssh://git@pagure.io/fedora-ci/messages.git
970
+ assert git_url
971
+ git_url = re.sub(r"^(?:(?:git\+)?ssh://)?git@([^:/]*)[:/](.*)", r"https://\1/\2", git_url)
972
+
973
+ # detect git ref
974
+ git_ref = cmd_output_or_exit("git rev-parse --abbrev-ref HEAD", "could not autodetect git ref")
975
+
976
+ # in case we have a commit checked out, not a named branch
977
+ if git_ref == "HEAD":
978
+ git_ref = cmd_output_or_exit("git rev-parse HEAD", "could not autodetect git ref")
979
+
980
+ # detect test type from local files
981
+ if os.path.exists(os.path.join((tmt_path or ""), ".fmf/version")):
982
+ test_type = "fmf"
983
+ elif os.path.exists("tests/tests.yml"):
984
+ test_type = "sti"
985
+ else:
986
+ exit_error("no test type defined")
987
+
988
+ # make typing happy
989
+ assert git_url is not None
990
+
991
+ # STI is not supported against a container
992
+ if test_type == "sti" and compose == "container":
993
+ exit_error("container based testing is not available for 'sti' test type")
994
+
995
+ console.print(f"📦 repository [blue]{git_url}[/blue] ref [blue]{git_ref}[/blue] test-type [blue]{test_type}[/blue]")
996
+
997
+ pool_info = f"via pool [blue]{pool}[/blue]" if pool else ""
998
+ for arch in arches:
999
+ console.print(f"💻 [blue]{compose or 'container image in plan'}[/blue] on [blue]{arch}[/blue] {pool_info}")
1000
+
1001
+ # test details
1002
+ test = TestTMT if test_type == "fmf" else TestSTI
1003
+ test["url"] = git_url
1004
+ test["ref"] = git_ref
1005
+
1006
+ if git_merge_sha:
1007
+ test["merge_sha"] = git_merge_sha
1008
+
1009
+ if tmt_plan_name:
1010
+ test["name"] = tmt_plan_name
1011
+
1012
+ if tmt_plan_filter:
1013
+ test["plan_filter"] = tmt_plan_filter
1014
+
1015
+ if tmt_test_name:
1016
+ test["test_name"] = tmt_test_name
1017
+
1018
+ if tmt_test_filter:
1019
+ test["test_filter"] = tmt_test_filter
1020
+
1021
+ if sti_playbooks:
1022
+ test["playbooks"] = sti_playbooks
1023
+
1024
+ # environment details
1025
+ environments = []
1026
+ for arch in arches:
1027
+ environment = Environment.copy()
1028
+ environment["arch"] = arch
1029
+ environment["pool"] = pool
1030
+ environment["artifacts"] = []
1031
+ environment["tmt"] = {}
1032
+
1033
+ # NOTE(ivasilev) From now on tmt.context will always be set. Even if the user didn't request anything,
1034
+ # the requested arch will be passed into the context
1035
+ tmt_context = options_to_dict("tmt context", cli_tmt_context or [])
1036
+ if "arch" not in tmt_context:
1037
+ # If the context arch is not set by the user directly via -c, let's set it according to the arch requested
1038
+ tmt_context["arch"] = arch
1039
+ environment["tmt"].update({"context": tmt_context})
1040
+
1041
+ if compose:
1042
+ environment["os"] = {"compose": compose}
1043
+
1044
+ if secrets:
1045
+ environment["secrets"] = options_to_dict("environment secrets", secrets)
1046
+
1047
+ if variables:
1048
+ environment["variables"] = options_to_dict("environment variables", variables)
1049
+
1050
+ if hardware:
1051
+ environment["hardware"] = hw_constraints(hardware)
1052
+
1053
+ if kickstart:
1054
+ environment["kickstart"] = options_to_dict("environment kickstart", kickstart)
1055
+
1056
+ if redhat_brew_build:
1057
+ environment["artifacts"].extend(artifacts("redhat-brew-build", redhat_brew_build))
1058
+
1059
+ if fedora_koji_build:
1060
+ environment["artifacts"].extend(artifacts("fedora-koji-build", fedora_koji_build))
1061
+
1062
+ if fedora_copr_build:
1063
+ environment["artifacts"].extend(artifacts("fedora-copr-build", fedora_copr_build))
1064
+
1065
+ if repository:
1066
+ environment["artifacts"].extend(artifacts("repository", repository))
1067
+
1068
+ if repository_file:
1069
+ environment["artifacts"].extend(artifacts("repository-file", repository_file))
1070
+
1071
+ if tmt_environment:
1072
+ environment["tmt"].update({"environment": options_to_dict("tmt environment variables", tmt_environment)})
1073
+
1074
+ if tmt_discover or tmt_prepare or tmt_finish:
1075
+ if "extra_args" not in environment["tmt"]:
1076
+ environment["tmt"]["extra_args"] = {}
1077
+
1078
+ if tmt_discover:
1079
+ environment["tmt"]["extra_args"]["discover"] = tmt_discover
1080
+
1081
+ if tmt_prepare:
1082
+ environment["tmt"]["extra_args"]["prepare"] = tmt_prepare
1083
+
1084
+ if tmt_finish:
1085
+ environment["tmt"]["extra_args"]["finish"] = tmt_finish
1086
+
1087
+ environments.append(environment)
1088
+
1089
+ # Setting up retries
1090
+ session = requests.Session()
1091
+ install_http_retries(session)
1092
+
1093
+ if reserve:
1094
+ if not _contains_compose(environments):
1095
+ exit_error("Reservations are not supported with container executions, cannot continue")
1096
+
1097
+ if len(environments) > 1:
1098
+ exit_error("Reservations are currently supported for a single plan, cannot continue")
1099
+
1100
+ rules = _parse_security_group_rules([_localhost_ingress_rule(session)], [])
1101
+
1102
+ for environment in environments:
1103
+ _add_reservation(
1104
+ ssh_public_keys=ssh_public_keys,
1105
+ rules=rules,
1106
+ duration=reservation_duration,
1107
+ environment=environment,
1108
+ debug_reservation=debug_reservation,
1109
+ )
1110
+
1111
+ machine_pre = "Machine" if len(environments) == 1 else str(len(environments)) + " machines"
1112
+ console.print(f"🛟 {machine_pre} will be reserved after testing")
1113
+
1114
+ if any(
1115
+ provisioning_detail
1116
+ for provisioning_detail in [
1117
+ tags,
1118
+ watchdog_dispatch_delay,
1119
+ watchdog_period_delay,
1120
+ post_install_script,
1121
+ security_group_rule_ingress,
1122
+ security_group_rule_egress,
1123
+ ]
1124
+ ):
1125
+ if "settings" not in environments[0]:
1126
+ environments[0]["settings"] = {}
1127
+
1128
+ if 'provisioning' not in environments[0]["settings"]:
1129
+ environments[0]["settings"]["provisioning"] = {}
1130
+
1131
+ if tags:
1132
+ environments[0]["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1133
+
1134
+ if watchdog_dispatch_delay is not None:
1135
+ environments[0]["settings"]["provisioning"]["watchdog-dispatch-delay"] = watchdog_dispatch_delay
1136
+
1137
+ if watchdog_period_delay is not None:
1138
+ environments[0]["settings"]["provisioning"]["watchdog-period-delay"] = watchdog_period_delay
1139
+
1140
+ if post_install_script:
1141
+ environments[0]["settings"]["provisioning"]["post_install_script"] = post_install_script
1142
+
1143
+ if security_group_rule_ingress or security_group_rule_egress:
1144
+ rules = _parse_security_group_rules(security_group_rule_ingress or [], security_group_rule_egress or [])
1145
+ environments[0]["settings"]["provisioning"].update(rules)
1146
+
1147
+ # create final request
1148
+ request = TestingFarmRequestV1
1149
+ if test_type == "fmf":
1150
+ test["path"] = tmt_path
1151
+ request["test"]["fmf"] = test
1152
+ else:
1153
+ request["test"]["sti"] = test
1154
+
1155
+ request["environments"] = environments
1156
+ request["settings"] = {}
1157
+
1158
+ if reserve or pipeline_type or parallel_limit:
1159
+ request["settings"]["pipeline"] = {}
1160
+
1161
+ # in case the reservation duration is more than the pipeline timeout, adjust also the pipeline timeout
1162
+ if reserve:
1163
+ if reservation_duration > timeout:
1164
+ request["settings"]["pipeline"] = {"timeout": reservation_duration}
1165
+ console.print(f"⏳ Maximum reservation time is {reservation_duration} minutes")
1166
+ else:
1167
+ request["settings"]["pipeline"] = {"timeout": timeout}
1168
+ console.print(f"⏳ Maximum reservation time is {timeout} minutes")
1169
+
1170
+ if pipeline_type:
1171
+ request["settings"]["pipeline"]["type"] = pipeline_type.value
1172
+
1173
+ if parallel_limit:
1174
+ request["settings"]["pipeline"]["parallel-limit"] = parallel_limit
1175
+
1176
+ # worker image
1177
+ if worker_image:
1178
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1179
+ request["settings"]["worker"] = {"image": worker_image}
1180
+
1181
+ if not user_webpage and (user_webpage_name or user_webpage_icon):
1182
+ exit_error("The user-webpage-name and user-webpage-icon can be used only with user-webpage option")
1183
+
1184
+ request["user"] = {}
1185
+ if user_webpage:
1186
+ request["user"]["webpage"] = {"url": user_webpage, "icon": user_webpage_icon, "name": user_webpage_name}
1187
+
1188
+ # submit request to Testing Farm
1189
+ post_url = urllib.parse.urljoin(api_url, "v0.1/requests")
1190
+
1191
+ # dry run
1192
+ if dry_run:
1193
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1194
+ print_json(json.dumps(request, indent=4, separators=(',', ': ')))
1195
+ raise typer.Exit()
1196
+
1197
+ # handle errors
1198
+ response = session.post(post_url, json=request, headers=_get_headers(api_token))
1199
+ if response.status_code == 401:
1200
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1201
+
1202
+ if response.status_code == 400:
1203
+ exit_error(
1204
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1205
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1206
+ )
1207
+
1208
+ if response.status_code != 200:
1209
+ print(response.text)
1210
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1211
+
1212
+ request_id = response.json()['id']
1213
+
1214
+ # Watch the request and handle reservation
1215
+ watch(api_url, request_id, no_wait, reserve=reserve, autoconnect=autoconnect, format=WatchFormat.text)
1216
+
1217
+
1218
+ def restart(
1219
+ context: typer.Context,
1220
+ request_id: str = typer.Argument(..., help="Testing Farm request ID or a string containing it."),
1221
+ api_url: str = ARGUMENT_API_URL,
1222
+ internal_api_url: str = typer.Argument(
1223
+ settings.INTERNAL_API_URL,
1224
+ envvar="TESTING_FARM_INTERNAL_API_URL",
1225
+ metavar='',
1226
+ rich_help_panel='Environment variables',
1227
+ ),
1228
+ api_token: str = ARGUMENT_API_TOKEN,
1229
+ compose: Optional[str] = typer.Option(
1230
+ None,
1231
+ help="Force compose used to provision test environment.", # noqa
1232
+ ),
1233
+ pool: Optional[str] = typer.Option(
1234
+ None,
1235
+ help="Force pool to provision.",
1236
+ ),
1237
+ git_url: Optional[str] = typer.Option(None, help="Force URL of the GIT repository to test."),
1238
+ git_ref: Optional[str] = typer.Option(None, help="Force GIT ref or branch to test."),
1239
+ git_merge_sha: Optional[str] = typer.Option(None, help="Force GIT ref or branch into which --ref will be merged."),
1240
+ hardware: List[str] = OPTION_HARDWARE,
1241
+ tags: Optional[List[str]] = OPTION_TAGS,
1242
+ tmt_plan_name: Optional[str] = OPTION_TMT_PLAN_NAME,
1243
+ tmt_plan_filter: Optional[str] = OPTION_TMT_PLAN_FILTER,
1244
+ tmt_test_name: Optional[str] = OPTION_TMT_TEST_NAME,
1245
+ tmt_test_filter: Optional[str] = OPTION_TMT_TEST_FILTER,
1246
+ tmt_path: Optional[str] = OPTION_TMT_PATH,
1247
+ tmt_discover: Optional[List[str]] = _generate_tmt_extra_args("discover"),
1248
+ tmt_prepare: Optional[List[str]] = _generate_tmt_extra_args("prepare"),
1249
+ tmt_finish: Optional[List[str]] = _generate_tmt_extra_args("finish"),
1250
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
1251
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
1252
+ dry_run: bool = OPTION_DRY_RUN,
1253
+ pipeline_type: Optional[PipelineType] = OPTION_PIPELINE_TYPE,
1254
+ parallel_limit: Optional[int] = OPTION_PARALLEL_LIMIT,
1255
+ reserve: bool = OPTION_RESERVE,
1256
+ ssh_public_keys: List[str] = _option_ssh_public_keys(REQUEST_PANEL_RESERVE),
1257
+ autoconnect: bool = _option_autoconnect(REQUEST_PANEL_RESERVE),
1258
+ reservation_duration: int = _option_reservation_duration(REQUEST_PANEL_RESERVE),
1259
+ debug_reservation: bool = _option_debug_reservation(REQUEST_PANEL_RESERVE),
1260
+ ):
1261
+ """
1262
+ Restart a Testing Farm request.
1263
+
1264
+ Just pass a request ID or a URL containing a request ID to restart it.
1265
+ """
1266
+
1267
+ # UUID pattern
1268
+ uuid_pattern = re.compile('[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}')
1269
+
1270
+ # Find the UUID in the string
1271
+ uuid_match = uuid_pattern.search(request_id)
1272
+
1273
+ if not uuid_match:
1274
+ exit_error(f"Could not find a valid Testing Farm request id in '{request_id}'.")
1275
+ return
1276
+
1277
+ # Extract the UUID from the match object
1278
+ _request_id = uuid_match.group()
1279
+
1280
+ # Construct URL to the internal API
1281
+ get_url = urllib.parse.urljoin(str(internal_api_url), f"v0.1/requests/{_request_id}")
1282
+
1283
+ # Setting up retries
1284
+ session = requests.Session()
1285
+ install_http_retries(session)
1286
+
1287
+ # Get the request details
1288
+ response = session.get(get_url, headers=_get_headers(api_token))
1289
+
1290
+ if response.status_code == 401:
1291
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1292
+
1293
+ # The API token is valid, but it doesn't own the request
1294
+ if response.status_code == 403:
1295
+ console.print(
1296
+ "⚠️ [yellow] You are not the owner of this request. Any secrets associated with the request will not be included on the restart.[/yellow]" # noqa: E501
1297
+ )
1298
+ # Construct URL to the public API
1299
+ get_url = urllib.parse.urljoin(str(api_url), f"v0.1/requests/{_request_id}")
1300
+
1301
+ # Get the request details
1302
+ response = session.get(get_url)
1303
+
1304
+ if response.status_code != 200:
1305
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1306
+
1307
+ request = response.json()
1308
+
1309
+ # Transform to a request
1310
+ request['environments'] = request['environments_requested']
1311
+
1312
+ # Remove all keys except test and environments
1313
+ for key in list(request):
1314
+ if key not in ['test', 'environments']:
1315
+ del request[key]
1316
+
1317
+ test = request['test']
1318
+
1319
+ # Remove all empty keys in test
1320
+ for key in list(test):
1321
+ for subkey in list(test[key] or []):
1322
+ if not test[key][subkey]:
1323
+ del test[key][subkey]
1324
+ if not test[key]:
1325
+ del test[key]
1326
+
1327
+ # get the test definition for the used test type
1328
+ test = request['test'][list(request['test'].keys())[0]]
1329
+
1330
+ if git_url:
1331
+ test["url"] = git_url
1332
+
1333
+ if git_ref:
1334
+ test["ref"] = git_ref
1335
+
1336
+ if tmt_test_name:
1337
+ test["test_name"] = tmt_test_name
1338
+
1339
+ if tmt_test_filter:
1340
+ test["test_filter"] = tmt_test_filter
1341
+
1342
+ merge_sha_info = ""
1343
+ if git_merge_sha:
1344
+ test["merge_sha"] = git_merge_sha
1345
+ merge_sha_info = f"merge_sha [blue]{git_merge_sha}[/blue]"
1346
+
1347
+ console.print(f"📦 repository [blue]{test['url']}[/blue] ref [blue]{test['ref']}[/blue] {merge_sha_info}")
1348
+
1349
+ # Set compose
1350
+ if compose:
1351
+ console.print(f"💻 forcing compose [blue]{compose}[/blue]")
1352
+ for environment in request['environments']:
1353
+ if environment.get("os") is None:
1354
+ environment["os"] = {}
1355
+ environment["os"]["compose"] = compose
1356
+
1357
+ if hardware:
1358
+ console.print(f"💻 forcing hardware [blue]{' '.join(hardware)}[/blue]")
1359
+ for environment in request['environments']:
1360
+ environment["hardware"] = hw_constraints(hardware)
1361
+
1362
+ if pool:
1363
+ console.print(f"💻 forcing pool [blue]{pool}[/blue]")
1364
+ for environment in request['environments']:
1365
+ environment["pool"] = pool
1366
+
1367
+ if tmt_discover or tmt_prepare or tmt_finish:
1368
+ for environment in request["environments"]:
1369
+ if "tmt" not in environment:
1370
+ environment["tmt"] = {"extra_args": {}}
1371
+ if "extra_args" not in environment["tmt"]:
1372
+ environment["tmt"]["extra_args"] = {}
1373
+
1374
+ if tmt_discover:
1375
+ for environment in request["environments"]:
1376
+ environment["tmt"]["extra_args"]["discover"] = tmt_discover
1377
+
1378
+ if tmt_prepare:
1379
+ for environment in request["environments"]:
1380
+ environment["tmt"]["extra_args"]["prepare"] = tmt_prepare
1381
+
1382
+ if tmt_finish:
1383
+ for environment in request["environments"]:
1384
+ environment["tmt"]["extra_args"]["finish"] = tmt_finish
1385
+
1386
+ test_type = "fmf" if "fmf" in request["test"] else "sti"
1387
+
1388
+ if tmt_plan_name:
1389
+ if test_type == "sti":
1390
+ exit_error("The '--plan' option is compabitble only with 'tmt` tests.")
1391
+ request["test"][test_type]["name"] = tmt_plan_name
1392
+
1393
+ if tmt_plan_filter:
1394
+ if test_type == "sti":
1395
+ exit_error("The '--plan-filter' option is compabitble only with 'tmt` tests.")
1396
+ request["test"][test_type]["plan_filter"] = tmt_plan_filter
1397
+
1398
+ if test_type == "fmf":
1399
+ # The method explained in https://github.com/fastapi/typer/discussions/668
1400
+ if context.get_parameter_source("tmt_path") == ParameterSource.COMMANDLINE: # pyre-ignore[16]
1401
+ request["test"][test_type]["path"] = tmt_path
1402
+
1403
+ # worker image
1404
+ if worker_image:
1405
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1406
+ request["settings"] = request["settings"] if request.get("settings") else {}
1407
+ request["settings"]["worker"] = {"image": worker_image}
1408
+ # the pipeline key also needs to be set, otherwise the API will fail
1409
+ request["settings"]["pipeline"] = request["settings"].get("pipeline", {})
1410
+
1411
+ if pipeline_type or parallel_limit:
1412
+ if "settings" not in request:
1413
+ request["settings"] = {}
1414
+ if "pipeline" not in request["settings"]:
1415
+ request["settings"]["pipeline"] = {}
1416
+
1417
+ if pipeline_type:
1418
+ request["settings"]["pipeline"]["type"] = pipeline_type.value
1419
+
1420
+ if parallel_limit:
1421
+ request["settings"]["pipeline"]["parallel-limit"] = parallel_limit
1422
+
1423
+ if tags:
1424
+ for environment in request["environments"]:
1425
+ if "settings" not in environment or not environment["settings"]:
1426
+ environment["settings"] = {}
1427
+
1428
+ if 'provisioning' not in environment["settings"]:
1429
+ environment["settings"]["provisioning"] = {}
1430
+
1431
+ environment["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1432
+
1433
+ if reserve:
1434
+ if not _contains_compose(request["environments"]):
1435
+ exit_error("Reservations are not supported with container executions, cannot continue")
1436
+
1437
+ if len(request["environments"]) > 1:
1438
+ exit_error("Reservations are currently supported for a single plan, cannot continue")
1439
+
1440
+ rules = _parse_security_group_rules([_localhost_ingress_rule(session)], [])
1441
+
1442
+ for environment in request["environments"]:
1443
+ _add_reservation(
1444
+ ssh_public_keys=ssh_public_keys,
1445
+ rules=rules,
1446
+ duration=reservation_duration,
1447
+ environment=environment,
1448
+ debug_reservation=debug_reservation,
1449
+ )
1450
+
1451
+ machine_pre = (
1452
+ "Machine" if len(request["environments"]) == 1 else str(len(request["environments"])) + " machines"
1453
+ )
1454
+ console.print(
1455
+ f"🕗 {machine_pre} will be reserved after testing for [blue]{str(reservation_duration)}[/blue] minutes"
1456
+ )
1457
+
1458
+ # dry run
1459
+ if dry_run:
1460
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1461
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1462
+ raise typer.Exit()
1463
+
1464
+ # submit request to Testing Farm
1465
+ post_url = urllib.parse.urljoin(str(api_url), "v0.1/requests")
1466
+
1467
+ # handle errors
1468
+ response = session.post(post_url, json=request, headers=_get_headers(api_token))
1469
+ if response.status_code == 401:
1470
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1471
+
1472
+ if response.status_code == 400:
1473
+ exit_error(
1474
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1475
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1476
+ )
1477
+
1478
+ if response.status_code != 200:
1479
+ print(response.text)
1480
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1481
+
1482
+ # watch
1483
+ watch(
1484
+ str(api_url), response.json()['id'], no_wait, reserve=reserve, autoconnect=autoconnect, format=WatchFormat.text
1485
+ )
1486
+
1487
+
1488
+ def run(
1489
+ arch: str = typer.Option("x86_64", "--arch", help="Hardware platform of the target machine."),
1490
+ compose: Optional[str] = typer.Option(
1491
+ None,
1492
+ help="Compose used to provision the target machine. If not set, script will be executed aginst `fedora:latest` container.", # noqa
1493
+ ),
1494
+ pool: Optional[str] = OPTION_POOL,
1495
+ hardware: List[str] = OPTION_HARDWARE,
1496
+ variables: Optional[List[str]] = OPTION_VARIABLES,
1497
+ secrets: Optional[List[str]] = OPTION_SECRETS,
1498
+ dry_run: bool = OPTION_DRY_RUN,
1499
+ verbose: bool = typer.Option(False, help="Be verbose."),
1500
+ command: List[str] = typer.Argument(..., help="Command to run. Use `--` to separate COMMAND from CLI options."),
1501
+ ):
1502
+ """
1503
+ Run an arbitrary script via Testing Farm.
1504
+ """
1505
+
1506
+ # check for token
1507
+ if not settings.API_TOKEN:
1508
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable.")
1509
+
1510
+ # create request
1511
+ request = TestingFarmRequestV1
1512
+
1513
+ test = TestTMT
1514
+ test["url"] = RUN_REPO
1515
+ test["ref"] = "main"
1516
+ test["name"] = "/testing-farm/sanity"
1517
+ request["test"]["fmf"] = test
1518
+
1519
+ environment = Environment.copy()
1520
+
1521
+ environment["arch"] = arch
1522
+ environment["pool"] = pool
1523
+
1524
+ if compose:
1525
+ environment["os"] = {"compose": compose}
1526
+
1527
+ if secrets:
1528
+ environment["secrets"] = options_to_dict("environment secrets", secrets)
1529
+
1530
+ if variables:
1531
+ environment["variables"] = options_to_dict("environment variables", variables)
1532
+
1533
+ if hardware:
1534
+ environment["hardware"] = hw_constraints(hardware)
1535
+
1536
+ environment["variables"]["SCRIPT"] = " ".join(command)
1537
+
1538
+ request["environments"] = [environment]
1539
+
1540
+ # submit request to Testing Farm
1541
+ post_url = urllib.parse.urljoin(str(settings.API_URL), "v0.1/requests")
1542
+
1543
+ # Setting up retries
1544
+ session = requests.Session()
1545
+ install_http_retries(session)
1546
+
1547
+ # dry run
1548
+ if dry_run or verbose:
1549
+ console.print("[blue]🔍 showing POST json[/blue]")
1550
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1551
+ if dry_run:
1552
+ raise typer.Exit()
1553
+
1554
+ # handle errors
1555
+ response = session.post(post_url, json=request, headers=_get_headers(settings.API_TOKEN))
1556
+ if response.status_code == 401:
1557
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1558
+
1559
+ if response.status_code == 400:
1560
+ exit_error(f"Request is invalid. Please file an issue to {settings.ISSUE_TRACKER}")
1561
+
1562
+ if response.status_code != 200:
1563
+ print(response.text)
1564
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1565
+
1566
+ id = response.json()['id']
1567
+ get_url = urllib.parse.urljoin(str(settings.API_URL), f"/v0.1/requests/{id}")
1568
+
1569
+ if verbose:
1570
+ console.print(f"🔎 api [blue]{get_url}[/blue]")
1571
+
1572
+ search: Optional[re.Match[str]] = None
1573
+
1574
+ # wait for the sanity test to finish
1575
+ with Progress(
1576
+ SpinnerColumn(),
1577
+ TextColumn("[progress.description]{task.description}"),
1578
+ transient=True,
1579
+ ) as progress:
1580
+ progress.add_task(description="Preparing execution environment", total=None)
1581
+
1582
+ current_state: str = ""
1583
+
1584
+ while True:
1585
+ try:
1586
+ response = session.get(get_url)
1587
+
1588
+ except requests.exceptions.ConnectionError as exc:
1589
+ exit_error(f"connection to API failed: {str(exc)}")
1590
+
1591
+ if response.status_code != 200:
1592
+ exit_error(f"Failed to get request: {response.text}")
1593
+
1594
+ request = response.json()
1595
+
1596
+ state = request["state"]
1597
+
1598
+ if state == current_state:
1599
+ time.sleep(1)
1600
+ continue
1601
+
1602
+ current_state = state
1603
+
1604
+ if state in ["complete", "error"]:
1605
+ break
1606
+
1607
+ if state in ["canceled", "cancel-requested"]:
1608
+ progress.stop()
1609
+ exit_error("Request canceled.")
1610
+
1611
+ time.sleep(1)
1612
+
1613
+ # workaround TFT-1690
1614
+ install_http_retries(session, status_forcelist_extend=[404], timeout=60, retry_backoff_factor=0.1)
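+ # also retry HTTP 404 responses with a short backoff: right after the request finishes
+ # the artifacts may not be published yet (assumed to be the reason for the TFT-1690
+ # workaround above)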
1615
+
1616
+ # get the command output
1617
+ artifacts_url = response.json()['run']['artifacts']
1618
+
1619
+ if verbose:
1620
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1621
+
1622
+ try:
1623
+ search = re.search(r'href="(.*)" name="workdir"', session.get(f"{artifacts_url}/results.xml").text)
1624
+
1625
+ except requests.exceptions.SSLError:
1626
+ console.print(
1627
+ "\r🚫 [yellow]artifacts unreachable via SSL, do you have RH CA certificates installed?[/yellow]"
1628
+ )
1629
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1630
+
1631
+ except requests.exceptions.ConnectionError:
1632
+ console.print("\r🚫 [yellow]artifacts unreachable, are you on VPN?[/yellow]")
1633
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1634
+ return
1635
+
1636
+ if not search:
1637
+ exit_error("Could not find working directory, cannot continue")
1638
+
1639
+ workdir = str(search.groups(1)[0])
1640
+ output = f"{workdir}/testing-farm/sanity/execute/data/guest/default-0/testing-farm/script-1/output.txt"
1641
+
1642
+ if verbose:
1643
+ console.print(f"\r👷 workdir [blue]{workdir}[/blue]")
1644
+ console.print(f"\r📤 output [blue]{output}[/blue]")
1645
+
1646
+ response = session.get(output)
1647
+ console.print(response.text, end="")
1648
+
1649
+
1650
+ def reserve(
1651
+ ssh_public_keys: List[str] = _option_ssh_public_keys(RESERVE_PANEL_GENERAL),
1652
+ reservation_duration: int = _option_reservation_duration(RESERVE_PANEL_GENERAL),
1653
+ arch: str = typer.Option(
1654
+ "x86_64", help="Hardware platform of the system to be provisioned.", rich_help_panel=RESERVE_PANEL_ENVIRONMENT
1655
+ ),
1656
+ compose: str = typer.Option(
1657
+ "Fedora-Rawhide",
1658
+ help="Compose used to provision system-under-test. By default Fedora-Rawhide.", # noqa
1659
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
1660
+ ),
1661
+ hardware: List[str] = OPTION_HARDWARE,
1662
+ tags: Optional[List[str]] = OPTION_TAGS,
1663
+ kickstart: Optional[List[str]] = OPTION_KICKSTART,
1664
+ pool: Optional[str] = OPTION_POOL,
1665
+ fedora_koji_build: List[str] = OPTION_FEDORA_KOJI_BUILD,
1666
+ fedora_copr_build: List[str] = OPTION_FEDORA_COPR_BUILD,
1667
+ repository: List[str] = OPTION_REPOSITORY,
1668
+ repository_file: List[str] = OPTION_REPOSITORY_FILE,
1669
+ redhat_brew_build: List[str] = OPTION_REDHAT_BREW_BUILD,
1670
+ dry_run: bool = OPTION_DRY_RUN,
1671
+ post_install_script: Optional[str] = OPTION_POST_INSTALL_SCRIPT,
1672
+ print_only_request_id: bool = typer.Option(
1673
+ False,
1674
+ help="Output only the request ID.",
1675
+ rich_help_panel=RESERVE_PANEL_OUTPUT,
1676
+ ),
1677
+ autoconnect: bool = _option_autoconnect(RESERVE_PANEL_GENERAL),
1678
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
1679
+ security_group_rule_ingress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_INGRESS,
1680
+ security_group_rule_egress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_EGRESS,
1681
+ skip_workstation_access: bool = typer.Option(
1682
+ False, help="Do not allow ingress traffic from this workstation's ip to the reserved machine"
1683
+ ),
1684
+ git_ref: Optional[str] = typer.Option(
1685
+ None, help="Force GIT ref or branch. Useful for testing changes to reservation plan."
1686
+ ),
1687
+ debug_reservation: bool = _option_debug_reservation(),
1688
+ ):
1689
+ """
1690
+ Reserve a system in Testing Farm.
1691
+ """
1692
+
1693
+ def _echo(message: str) -> None:
1694
+ if not print_only_request_id:
1695
+ console.print(message)
1696
+
1697
+ _sanity_reserve()
1698
+
1699
+ # check for token
1700
+ if not settings.API_TOKEN:
1701
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable.")
1702
+
1703
+ pool_info = f"via pool [blue]{pool}[/blue]" if pool else ""
1704
+ console.print(f"💻 [blue]{compose}[/blue] on [blue]{arch}[/blue] {pool_info}")
1705
+
1706
+ # test details
1707
+ test = TestTMT
1708
+ test["url"] = RESERVE_URL
1709
+ test["ref"] = git_ref or RESERVE_REF
1710
+ test["name"] = RESERVE_PLAN
1711
+
1712
+ # environment details
1713
+ environment = Environment.copy()
1714
+ environment["arch"] = arch
1715
+ environment["pool"] = pool
1716
+ environment["artifacts"] = []
1717
+
1718
+ if "settings" not in environment:
1719
+ environment["settings"] = {}
1720
+
1721
+ if post_install_script or security_group_rule_ingress or security_group_rule_egress or tags:
1722
+ if "settings" not in environment:
1723
+ environment["settings"] = {}
1724
+
1725
+ if "provisioning" not in environment["settings"]:
1726
+ environment["settings"]["provisioning"] = {}
1727
+
1728
+ if "tags" not in environment["settings"]["provisioning"]:
1729
+ environment["settings"]["provisioning"]["tags"] = {}
1730
+
1731
+ # the reserve command is meant for interactive use of the guest, so prefer non-spot
1732
+ # instances; users should not lose their work to a reclaimed spot instance.
1733
+ environment["settings"]["provisioning"]["tags"]["ArtemisUseSpot"] = "false"
1734
+
1735
+ if compose:
1736
+ environment["os"] = {"compose": compose}
1737
+
1738
+ if hardware:
1739
+ environment["hardware"] = hw_constraints(hardware)
1740
+
1741
+ if tags:
1742
+ environment["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1743
+
1744
+ if kickstart:
1745
+ environment["kickstart"] = options_to_dict("environment kickstart", kickstart)
1746
+
1747
+ if redhat_brew_build:
1748
+ environment["artifacts"].extend(artifacts("redhat-brew-build", redhat_brew_build))
1749
+
1750
+ if fedora_koji_build:
1751
+ environment["artifacts"].extend(artifacts("fedora-koji-build", fedora_koji_build))
1752
+
1753
+ if fedora_copr_build:
1754
+ environment["artifacts"].extend(artifacts("fedora-copr-build", fedora_copr_build))
1755
+
1756
+ if repository:
1757
+ environment["artifacts"].extend(artifacts("repository", repository))
1758
+
1759
+ if repository_file:
1760
+ environment["artifacts"].extend(artifacts("repository-file", repository_file))
1761
+
1762
+ if post_install_script:
1763
+ environment["settings"]["provisioning"]["post_install_script"] = post_install_script
1764
+
1765
+ # Setting up retries
1766
+ session = requests.Session()
1767
+ install_http_retries(session)
1768
+
1769
+ if not skip_workstation_access or security_group_rule_ingress or security_group_rule_egress:
1770
+ ingress_rules = security_group_rule_ingress or []
1771
+ if not skip_workstation_access:
1772
+ ingress_rules.append(_localhost_ingress_rule(session))
1773
+
1774
+ rules = _parse_security_group_rules(ingress_rules, security_group_rule_egress or [])
1775
+ environment["settings"]["provisioning"].update(rules)
1776
+
1777
+ console.print(f"🕗 Reserved for [blue]{str(reservation_duration)}[/blue] minutes")
1778
+
1779
+ if "variables" not in environment or environment["variables"] is None:
1780
+ environment["variables"] = {}
1781
+
1782
+ environment["variables"]["TF_RESERVATION_DURATION"] = str(reservation_duration)
1783
+
1784
+ if debug_reservation:
1785
+ environment["variables"]["TF_RESERVATION_DEBUG"] = "1"
1786
+
1787
+ authorized_keys = read_glob_paths(ssh_public_keys).encode("utf-8")
1788
+ if not authorized_keys:
1789
+ exit_error(f"No public SSH keys found under {', '.join(ssh_public_keys)}, cannot continue.")
1790
+
1791
+ authorized_keys_bytes = base64.b64encode(authorized_keys)
1792
+ environment["secrets"] = {"TF_RESERVATION_AUTHORIZED_KEYS_BASE64": authorized_keys_bytes.decode("utf-8")}
1793
+
1794
+ # create final request
1795
+ request = TestingFarmRequestV1
1796
+ request["test"]["fmf"] = test
1797
+
1798
+ # worker image
1799
+ if worker_image:
1800
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1801
+ request["settings"] = request["settings"] if request.get("settings") else {}
1802
+ request["settings"]["worker"] = {"image": worker_image}
1803
+
1804
+ request["environments"] = [environment]
1805
+
1806
+ # if the reservation duration exceeds the default pipeline timeout, raise the pipeline timeout as well
1807
+ if reservation_duration > DEFAULT_PIPELINE_TIMEOUT:
1808
+ request["settings"] = {"pipeline": {"timeout": reservation_duration}}
1809
+ console.print(f"⏳ Maximum reservation time is {reservation_duration} minutes")
1810
+ else:
1811
+ console.print(f"⏳ Maximum reservation time is {DEFAULT_PIPELINE_TIMEOUT} minutes")
1812
+
1813
+ # submit request to Testing Farm
1814
+ post_url = urllib.parse.urljoin(str(settings.API_URL), "v0.1/requests")
1815
+
1816
+ # dry run
1817
+ if dry_run:
1818
+ if print_only_request_id:
1819
+ console.print("🔍 Dry run, print-only-request-id is set. Nothing will be shown", style="bright_yellow")
1820
+ else:
1821
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1822
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1823
+ raise typer.Exit()
1824
+
1825
+ # handle errors
1826
+ response = session.post(post_url, json=request, headers=_get_headers(settings.API_TOKEN))
1827
+ if response.status_code == 401:
1828
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1829
+
1830
+ if response.status_code == 400:
1831
+ exit_error(
1832
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1833
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1834
+ )
1835
+
1836
+ if response.status_code != 200:
1837
+ print(response.text)
1838
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1839
+
1840
+ id = response.json()['id']
1841
+ get_url = urllib.parse.urljoin(str(settings.API_URL), f"/v0.1/requests/{id}")
1842
+
1843
+ if not print_only_request_id:
1844
+ console.print(f"🔎 [blue]{get_url}[/blue]")
1845
+ else:
1846
+ console.print(id)
1847
+
1848
+ # IP address or hostname of the guest, extracted from pipeline.log
1849
+ guest: str = ""
1850
+
1851
+ # wait for the reserve task to reserve the machine
1852
+ with Progress(
1853
+ SpinnerColumn(),
1854
+ TextColumn("[progress.description]{task.description}"),
1855
+ transient=True,
1856
+ ) as progress:
1857
+ task_id = None
1858
+
1859
+ if not print_only_request_id:
1860
+ task_id = progress.add_task(description="Creating reservation", total=None)
1861
+
1862
+ current_state: str = ""
1863
+
1864
+ while current_state != "running":
1865
+ try:
1866
+ response = session.get(get_url)
1867
+
1868
+ except requests.exceptions.ConnectionError as exc:
1869
+ exit_error(f"connection to API failed: {str(exc)}")
1870
+
1871
+ if response.status_code != 200:
1872
+ exit_error(f"Failed to get request: {response.text}")
1873
+
1874
+ request = response.json()
1875
+
1876
+ state = request["state"]
1877
+
1878
+ if state == current_state:
1879
+ time.sleep(1)
1880
+ continue
1881
+
1882
+ current_state = state
1883
+
1884
+ if state in ["complete", "error"]:
1885
+ exit_error("Reservation failed, check the API request or contact Testing Farm.")
1886
+
1887
+ if state in ["canceled", "cancel-requested"]:
1888
+ progress.stop()
1889
+ exit_error("Reservation canceled.")
1890
+
1891
+ if not print_only_request_id and task_id is not None:
1892
+ progress.update(task_id, description=f"Reservation job is [yellow]{current_state}[/yellow]")
1893
+
1894
+ time.sleep(1)
1895
+
1896
+ while current_state != "ready":
1897
+ if not print_only_request_id and task_id is not None:
1898
+ progress.update(task_id, description=f"Reservation job is [yellow]{current_state}[/yellow]")
1899
+
1900
+ # get the command output
1901
+ artifacts_url = response.json()['run']['artifacts']
1902
+
1903
+ try:
1904
+ pipeline_log = session.get(f"{artifacts_url}/pipeline.log").text
1905
+
1906
+ if not pipeline_log:
1907
+ exit_error(f"Pipeline log was empty. Please file an issue to {settings.ISSUE_TRACKER}.")
1908
+
1909
+ except requests.exceptions.SSLError:
1910
+ exit_error(
1911
+ textwrap.dedent(
1912
+ f"""
1913
+ Failed to access Testing Farm artifacts because of SSL validation error.
1914
+ If you use Red Hat Ranch please make sure you have Red Hat CA certificates installed.
1915
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
1916
+ """
1917
+ )
1918
+ )
1919
+ return
1920
+
1921
+ except requests.exceptions.ConnectionError:
1922
+ exit_error(
1923
+ textwrap.dedent(
1924
+ f"""
1925
+ Failed to access Testing Farm artifacts.
1926
+ If you use Red Hat Ranch please make sure you are connected to the VPN.
1927
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
1928
+ """
1929
+ )
1930
+ )
1931
+ return
1932
+
1933
+ if 'Result of testing: ERROR' in pipeline_log:
1934
+ exit_error(
1935
+ textwrap.dedent(
1936
+ f"""
1937
+ Failed to run reservation task.
1938
+ Check status page {settings.STATUS_PAGE} for outages.
1939
+ File an issue to {settings.ISSUE_TRACKER} if needed.
1940
+ """
1941
+ )
1942
+ )
1943
+
1944
+ if '[testing-farm-request] Cancelling pipeline' in pipeline_log:
1945
+ progress.stop()
1946
+ exit_error('Pipeline was canceled.')
1947
+
1948
+ if '[pre-artifact-installation]' in pipeline_log:
1949
+ current_state = "preparing environment"
1950
+
1951
+ elif 'Guest is being provisioned' in pipeline_log:
1952
+ current_state = "provisioning resources"
1953
+
1954
+ # match any hostname or IP address from gluetool modules log
1955
+ search = re.search(r'Guest is ready.*root@([\d\w\.-]+)', pipeline_log)
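+ # e.g. a hypothetical pipeline.log line "Guest is ready ... root@10.0.123.45"
+ # would yield guest == "10.0.123.45"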
1956
+
1957
+ if search and 'execute task #1' in pipeline_log:
1958
+ current_state = "ready"
1959
+ guest = search.group(1)
1960
+
1961
+ time.sleep(1)
1962
+
1963
+ console.print(f"🌎 ssh root@{guest}")
1964
+
1965
+ if autoconnect:
1966
+ os.system(f"ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null root@{guest}") # noqa: E501
1967
+
1968
+
1969
+ def update():
1970
+ """
1971
+ Update the CLI tool container image.
1972
+ """
1973
+ # NOTE: This command is handled by the shell wrapper, see `container/testing-farm` file
1974
+ pass
1975
+
1976
+
1977
+ def cancel(
1978
+ request_id: str = typer.Argument(
1979
+ ..., help="Testing Farm request to cancel. Specified by a request ID or a string containing it."
1980
+ ),
1981
+ api_url: str = ARGUMENT_API_URL,
1982
+ api_token: str = ARGUMENT_API_TOKEN,
1983
+ ):
1984
+ """
1985
+ Cancel a Testing Farm request.
1986
+ """
1987
+
1988
+ # UUID pattern
1989
+ uuid_pattern = re.compile('[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}')
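+ # matches lowercase, hyphenated version 1-5 UUIDs, e.g. the hypothetical id
+ # "1f2e3d4c-5b6a-4789-8abc-0123456789ab"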
1990
+
1991
+ # Find the UUID in the string
1992
+ uuid_match = uuid_pattern.search(request_id)
1993
+
1994
+ if not uuid_match:
1995
+ exit_error(f"Could not find a valid Testing Farm request id in '{request_id}'.")
1996
+ return
1997
+
1998
+ if not api_token:
1999
+ exit_error("No API token found in the environment, please export 'TESTING_FARM_API_TOKEN' variable.")
2000
+ return
2001
+
2002
+ # Extract the UUID from the match object
2003
+ _request_id = uuid_match.group()
2004
+
2005
+ # Construct URL to the internal API
2006
+ request_url = urllib.parse.urljoin(str(api_url), f"v0.1/requests/{_request_id}")
2007
+
2008
+ # Setting up retries
2009
+ session = requests.Session()
2010
+ install_http_retries(session)
2011
+
2012
+ # Get the request details
2013
+ response = session.delete(request_url, headers=_get_headers(api_token))
2014
+
2015
+ if response.status_code == 401:
2016
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
2017
+
2018
+ if response.status_code == 404:
2019
+ exit_error("Request was not found. Verify the request ID is correct.")
2020
+
2021
+ if response.status_code == 204:
2022
+ exit_error("Request was already canceled.")
2023
+
2024
+ if response.status_code == 409:
2025
+ exit_error("Requeted cannot be canceled, it is already finished.")
2026
+
2027
+ if response.status_code != 200:
2028
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
2029
+
2030
+ console.print("✅ Request [yellow]cancellation requested[/yellow]. It will be canceled soon.")
2031
+
2032
+
2033
+ def encrypt(
2034
+ message: str = typer.Argument(..., help="Message to be encrypted."),
2035
+ api_url: str = ARGUMENT_API_URL,
2036
+ api_token: str = ARGUMENT_API_TOKEN,
2037
+ git_url: Optional[str] = typer.Option(
2038
+ None,
2039
+ help="URL of a GIT repository to which the secret will be tied. If not set, it is detected from the current "
2040
+ "git repository.",
2041
+ ),
2042
+ token_id: Optional[str] = typer.Option(
2043
+ None,
2044
+ help="Token ID to which the secret will be tied. If not set, Token ID will be detected from provided Token.",
2045
+ ),
2046
+ ):
2047
+ """
2048
+ Create secrets for use in in-repository configuration.
2049
+ """
2050
+
2051
+ # check for token
2052
+ if not api_token:
2053
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable")
2054
+
2055
+ git_available = bool(shutil.which("git"))
2056
+
2057
+ # resolve git repository details from the current repository
2058
+ if not git_url:
2059
+ if not git_available:
2060
+ exit_error("no git url defined")
2061
+ git_url = cmd_output_or_exit("git remote get-url origin", "could not auto-detect git url")
2062
+ # use https instead of git/ssh URLs when auto-detected
2063
+ # GitHub: git@github.com:containers/podman.git
2064
+ # GitLab: git@gitlab.com:testing-farm/cli.git, git+ssh://git@gitlab.com/spoore/centos_rpms_jq.git
2065
+ # Pagure: ssh://git@pagure.io/fedora-ci/messages.git
2066
+ assert git_url
2067
+ git_url = re.sub(r"^(?:(?:git\+)?ssh://)?git@([^:/]*)[:/](.*)", r"https://\1/\2", git_url)
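+ # for the example URLs above, the substitution yields, respectively:
+ #   https://github.com/containers/podman.git
+ #   https://gitlab.com/testing-farm/cli.git, https://gitlab.com/spoore/centos_rpms_jq.git
+ #   https://pagure.io/fedora-ci/messages.git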
2068
+
2069
+ payload = {'url': git_url, 'message': message}
2070
+
2071
+ if token_id:
2072
+ payload['token_id'] = token_id
2073
+ console_stderr.print(f'🔒 Encrypting secret for token id {token_id} for repository {git_url}')
2074
+ else:
2075
+ console_stderr.print(f'🔒 Encrypting secret for your token in repo {git_url}')
2076
+
2077
+ # submit request to Testing Farm
2078
+ post_url = urllib.parse.urljoin(api_url, "/v0.1/secrets/encrypt")
2079
+
2080
+ session = requests.Session()
2081
+ response = session.post(post_url, json=payload, headers={'Authorization': f'Bearer {api_token}'})
2082
+
2083
+ # handle errors
2084
+ if response.status_code == 401:
2085
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
2086
+
2087
+ if response.status_code == 400:
2088
+ exit_error(
2089
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
2090
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
2091
+ )
2092
+
2093
+ if response.status_code != 200:
2094
+ console_stderr.print(response.text)
2095
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
2096
+
2097
+ console_stderr.print(
2098
+ "💡 See https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#secrets-in-repo-config for more "
2099
+ "information on how to store the secret in repository."
2100
+ )
2101
+ console.print(response.text)
2102
+
2103
+
2104
+ def list(
2105
+ state: PipelineState = typer.Option(PipelineState.running, help="State of requests to show."),
2106
+ age: Age = typer.Option(
2107
+ "1d",
2108
+ parser=lambda value: Age.from_string(value),
2109
+ metavar="AGE",
2110
+ help=(
2111
+ "Maximum age of the request represented in [VALUE][UNIT] format. "
2112
+ f"Accepted units are: {Age.available_units()}"
2113
+ ),
2114
+ ),
2115
+ format: OutputFormat = typer.Option(
2116
+ "text", help=f"Output format to use. Possible formats: {OutputFormat.available_formats()}"
2117
+ ),
2118
+ api_url: str = ARGUMENT_API_URL,
2119
+ show_time: bool = typer.Option(
2120
+ False, help="Show date instead of human readable diff in text output, i.e. 1 hour ago"
2121
+ ),
2122
+ ):
2123
+ """
2124
+ List Testing Farm requests.
2125
+ By default running requests are shown.
2126
+ """
2127
+
2128
+ # Construct URL to the internal API
2129
+ request_url = urllib.parse.urljoin(api_url, f"v0.1/requests?state={state.value}&created_after={age.to_string()}")
2130
+
2131
+ # Setting up retries
2132
+ session = requests.Session()
2133
+ install_http_retries(session)
2134
+
2135
+ with Progress(
2136
+ SpinnerColumn(),
2137
+ TextColumn("{task.description}"),
2138
+ transient=True,
2139
+ ) as progress:
2140
+ progress.add_task(description="[blue]Loading ...[/blue]")
2141
+ response = session.get(request_url)
2142
+
2143
+ if response.status_code == 401:
2144
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
2145
+
2146
+ if response.status_code != 200:
2147
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
2148
+
2149
+ requests_json = response.json()
2150
+
2151
+ if format == OutputFormat.json:
2152
+ print(requests_json) if requests_json else print('[]')
2153
+ return
2154
+
2155
+ if not requests_json:
2156
+ print("No requests found")
2157
+ return
2158
+
2159
+ table = Table(show_header=True, header_style="bold magenta")
2160
+
2161
+ for column in ["state", "ranch", "type", "git", "artifacts", "started", "user"]:
2162
+ table.add_column(column)
2163
+
2164
+ def shorten_git_url(url: str) -> Tuple[str, ...]:
2165
+ orig_url = url
2166
+
2167
+ url = url.replace("https://github.com/", "[green] github[/green] ")
2168
+ url = url.replace("https://gitlab.com/", "[orange_red1] gitlab[/orange_red1] ")
2169
+ url = url.replace("https://*****@gitlab.com/redhat/", "[dark_orange3] gitlab-rh[/dark_orange3] ")
2170
+ url = url.replace("https://gitlab.cee.redhat.com/", "[dark_orange] gitlab-cee[/dark_orange] ")
2171
+ url = url.replace("https://pkgs.devel.redhat.com/", "[red3] rhel[/red3] ")
2172
+ url = url.replace("https://src.fedoraproject.org/", "[bright_blue] fedora[/bright_blue] ")
2173
+
2174
+ if url == orig_url:
2175
+ return "", orig_url
2176
+
2177
+ return tuple(url.rsplit(maxsplit=1))
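+ # e.g. "https://github.com/containers/podman" -> ("[green] github[/green]", "containers/podman");
+ # URLs from hosts not listed above come back unchanged as ("", original_url)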
2178
+
2179
+ def get_ranch(url: str) -> str:
2180
+ if "artifacts.osci.redhat.com" in url:
2181
+ return "[red3]redhat[/red3]"
2182
+
2183
+ if "artifacts.dev.testing-farm.io" in url or "artifacts.testing-farm.io" in url:
2184
+ return "[bright_blue]public[/bright_blue]"
2185
+
2186
+ return "unknown"
2187
+
2188
+ for request in sorted(requests_json, key=lambda request: request['created']):
2189
+ request_type = "fmf" if request["test"].get("fmf") else "sti"
2190
+ request_type_human = "[blue]tmt[/blue]" if request_type == "fmf" else "[yellow]sti[/yellow]"
2191
+ url = request['test'][request_type].get('url')
2192
+ ref = request['test'][request_type].get('ref')
2193
+ artifacts_url = (request.get('run', {}) or {}).get('artifacts', '<unavailable>')
2194
+ ranch = get_ranch(artifacts_url)
2195
+ short_ref = ref[:8] if len(ref) == 40 else ref
2196
+
2197
+ git_type, git_url = shorten_git_url(url)
2198
+
2199
+ parsed_time = pendulum.parse(request['created'], tz="UTC")
2200
+
2201
+ table.add_row(
2202
+ request['state'],
2203
+ ranch,
2204
+ f"[yellow]{request_type_human}[/yellow]",
2205
+ f"{git_type} [link={url}]{git_url}[/link] [green]({short_ref})[/green]",
2206
+ f"[link={artifacts_url}]{request['id']}[/link]",
2207
+ parsed_time.to_datetime_string() if show_time else parsed_time.diff_for_humans(),
2208
+ request['user_id'],
2209
+ )
2210
+
2211
+ console.print(table)