tft-cli 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tft/cli/commands.py ADDED
@@ -0,0 +1,2234 @@
1
+ # Copyright Contributors to the Testing Farm project.
2
+ # SPDX-License-Identifier: Apache-2.0
3
+
4
+ import base64
5
+ import codecs
6
+ import importlib.metadata
7
+ import ipaddress
8
+ import json
9
+ import os
10
+ import re
11
+ import shutil
12
+ import stat
13
+ import subprocess
14
+ import textwrap
15
+ import time
16
+ import urllib.parse
17
+ import xml.etree.ElementTree as ET
18
+ from enum import Enum
19
+ from typing import Any, Dict, List, Optional
20
+
21
+ import requests
22
+ import typer
23
+ from click.core import ParameterSource
24
+ from rich import print, print_json
25
+ from rich.progress import Progress, SpinnerColumn, TextColumn
26
+ from rich.table import Table # type: ignore
27
+
28
+ from tft.cli.config import settings
29
+ from tft.cli.utils import (
30
+ artifacts,
31
+ authorization_headers,
32
+ check_unexpected_arguments,
33
+ cmd_output_or_exit,
34
+ console,
35
+ console_stderr,
36
+ exit_error,
37
+ extract_uuid,
38
+ hw_constraints,
39
+ install_http_retries,
40
+ normalize_multistring_option,
41
+ options_to_dict,
42
+ read_glob_paths,
43
+ uuid_valid,
44
+ )
45
+
46
+ cli_version: str = importlib.metadata.version("tft-cli")
47
+
48
+ TestingFarmRequestV1: Dict[str, Any] = {'test': {}, 'environments': None}
49
+ Environment: Dict[str, Any] = {'arch': None, 'os': None, 'pool': None, 'artifacts': None, 'variables': {}}
50
+ TestTMT: Dict[str, Any] = {'url': None, 'ref': None, 'name': None}
51
+ TestSTI: Dict[str, Any] = {'url': None, 'ref': None}
52
+
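+ # Illustrative example (assumed values): a filled-in fmf request for x86_64 ends up roughly as
+ #   {'test': {'fmf': {'url': 'https://gitlab.com/testing-farm/tests', 'ref': 'main', 'name': None, 'path': '.'}},
+ #    'environments': [{'arch': 'x86_64', 'os': {'compose': 'Fedora-Rawhide'}, 'pool': None,
+ #                      'artifacts': [], 'variables': {}}]}
+ # The templates above are copied and filled in by the `request` command below.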
53
+ REQUEST_PANEL_TMT = "TMT Options"
54
+ REQUEST_PANEL_STI = "STI Options"
55
+ REQUEST_PANEL_RESERVE = "Reserve Options"
56
+
57
+ RESERVE_PANEL_GENERAL = "General Options"
58
+ RESERVE_PANEL_ENVIRONMENT = "Environment Options"
59
+ RESERVE_PANEL_OUTPUT = "Output Options"
60
+
61
+ RUN_REPO = "https://gitlab.com/testing-farm/tests"
62
+ RUN_PLAN = "/testing-farm/sanity"
63
+
64
+ RESERVE_PLAN = os.getenv("TESTING_FARM_RESERVE_PLAN", "/testing-farm/reserve")
65
+ RESERVE_TEST = os.getenv("TESTING_FARM_RESERVE_TEST", "/testing-farm/reserve-system")
66
+ RESERVE_URL = os.getenv("TESTING_FARM_RESERVE_URL", "https://gitlab.com/testing-farm/tests")
67
+ RESERVE_REF = os.getenv("TESTING_FARM_RESERVE_REF", "main")
68
+ RESERVE_TMT_DISCOVER_EXTRA_ARGS = f"--insert --how fmf --url {RESERVE_URL} --ref {RESERVE_REF} --test {RESERVE_TEST}"
69
+
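+ # With the defaults above (assuming none of the TESTING_FARM_RESERVE_* variables are set), the extra
+ # discover arguments expand to:
+ #   --insert --how fmf --url https://gitlab.com/testing-farm/tests --ref main --test /testing-farm/reserve-system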
70
+ DEFAULT_PIPELINE_TIMEOUT = 60 * 12
71
+ DEFAULT_AGE = "7d"
72
+
73
+ # SSH command options for reservation connections
74
+ SSH_RESERVATION_OPTIONS = (
75
+ "ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null -oServerAliveInterval=60 -oServerAliveCountMax=3"
76
+ )
77
+
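+ # Illustrative use (hypothetical guest address): _handle_reservation() below runs
+ #   f"{SSH_RESERVATION_OPTIONS} root@203.0.113.7"
+ # to connect to a reserved guest.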
78
+ # Won't be validating CIDR and 65535 max port range with regex here, not worth it
79
+ SECURITY_GROUP_RULE_FORMAT = re.compile(r"(tcp|ip|icmp|udp|-1|[0-255]):(.*):(\d{1,5}-\d{1,5}|\d{1,5}|-1)")
80
+
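+ # Example of the expected match groups (illustrative, assuming a well-formed rule string):
+ #
+ #     >>> m = SECURITY_GROUP_RULE_FORMAT.match("tcp:192.0.2.10/32:22")
+ #     >>> m[1], m[2], m[3]
+ #     ('tcp', '192.0.2.10/32', '22')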
81
+
82
+ class WatchFormat(str, Enum):
83
+ text = 'text'
84
+ json = 'json'
85
+
86
+
87
+ class PipelineType(str, Enum):
88
+ tmt_multihost = "tmt-multihost"
89
+
90
+
91
+ class PipelineState(str, Enum):
92
+ new = "new"
93
+ queued = "queued"
94
+ running = "running"
95
+ complete = "complete"
96
+ error = "error"
97
+ canceled = "canceled"
98
+
99
+
100
+ class Ranch(str, Enum):
101
+ public = "public"
102
+ redhat = "redhat"
103
+
104
+
105
+ # Arguments and options that are shared among multiple commands
106
+ ARGUMENT_API_URL: str = typer.Argument(
107
+ settings.API_URL, envvar="TESTING_FARM_API_URL", metavar='', rich_help_panel='Environment variables'
108
+ )
109
+ OPTION_API_URL: str = typer.Option(
110
+ settings.API_URL, envvar="TESTING_FARM_API_URL", metavar='', rich_help_panel='Environment variables'
111
+ )
112
+ ARGUMENT_API_TOKEN: str = typer.Argument(
113
+ settings.API_TOKEN,
114
+ envvar="TESTING_FARM_API_TOKEN",
115
+ show_default=False,
116
+ metavar='',
117
+ rich_help_panel='Environment variables',
118
+ )
119
+ ARGUMENT_INTERNAL_API_URL: str = typer.Argument(
120
+ settings.INTERNAL_API_URL,
121
+ envvar="TESTING_FARM_INTERNAL_API_URL",
122
+ metavar='',
123
+ rich_help_panel='Environment variables',
124
+ )
125
+ OPTION_API_TOKEN: str = typer.Option(
126
+ settings.API_TOKEN,
127
+ envvar="TESTING_FARM_API_TOKEN",
128
+ show_default=False,
129
+ metavar='',
130
+ rich_help_panel='Environment variables',
131
+ )
132
+
133
+ # Restart command specific arguments for source operations
134
+ ARGUMENT_SOURCE_API_URL: str = typer.Argument(
135
+ None, envvar="TESTING_FARM_SOURCE_API_URL", metavar='', rich_help_panel='Environment variables'
136
+ )
137
+ ARGUMENT_INTERNAL_SOURCE_API_URL: str = typer.Argument(
138
+ None, envvar="TESTING_FARM_INTERNAL_SOURCE_API_URL", metavar='', rich_help_panel='Environment variables'
139
+ )
140
+ ARGUMENT_SOURCE_API_TOKEN: str = typer.Argument(
141
+ None,
142
+ envvar="TESTING_FARM_SOURCE_API_TOKEN",
143
+ show_default=False,
144
+ metavar='',
145
+ rich_help_panel='Environment variables',
146
+ )
147
+
148
+ # Restart command specific arguments for target operations
149
+ ARGUMENT_TARGET_API_URL: str = typer.Argument(
150
+ None, envvar="TESTING_FARM_TARGET_API_URL", metavar='', rich_help_panel='Environment variables'
151
+ )
152
+ ARGUMENT_TARGET_API_TOKEN: str = typer.Argument(
153
+ None,
154
+ envvar="TESTING_FARM_TARGET_API_TOKEN",
155
+ show_default=False,
156
+ metavar='',
157
+ rich_help_panel='Environment variables',
158
+ )
159
+ OPTION_TMT_PLAN_NAME: Optional[str] = typer.Option(
160
+ None,
161
+ "--plan",
162
+ help=(
163
+ 'Select plans to be executed. '
164
+ 'Passed as `--name` option to the `tmt plan` command. '
165
+ 'Can be a regular expression.'
166
+ ),
167
+ rich_help_panel=REQUEST_PANEL_TMT,
168
+ )
169
+ OPTION_TMT_PLAN_FILTER: Optional[str] = typer.Option(
170
+ None,
171
+ "--plan-filter",
172
+ help=(
173
+ 'Filter tmt plans. '
174
+ 'Passed as `--filter` option to the `tmt plan` command. '
175
+ 'By default, `enabled:true` filter is applied. '
176
+ 'Plan filtering is similar to test filtering, '
177
+ 'see https://tmt.readthedocs.io/en/stable/examples.html#filter-tests for more information.'
178
+ ),
179
+ rich_help_panel=REQUEST_PANEL_TMT,
180
+ )
181
+ OPTION_TMT_TEST_NAME: Optional[str] = typer.Option(
182
+ None,
183
+ "--test",
184
+ help=(
185
+ 'Select tests to be executed. '
186
+ 'Passed as `--name` option to the `tmt test` command. '
187
+ 'Can be a regular expression.'
188
+ ),
189
+ rich_help_panel=REQUEST_PANEL_TMT,
190
+ )
191
+ OPTION_TMT_TEST_FILTER: Optional[str] = typer.Option(
192
+ None,
193
+ "--test-filter",
194
+ help=(
195
+ 'Filter tmt tests. '
196
+ 'Passed as `--filter` option to the `tmt test` command. '
197
+ 'It overrides any test filter defined in the plan. '
198
+ 'See https://tmt.readthedocs.io/en/stable/examples.html#filter-tests for more information.'
199
+ ),
200
+ rich_help_panel=REQUEST_PANEL_TMT,
201
+ )
202
+ OPTION_TMT_PATH: str = typer.Option(
203
+ '.',
204
+ '--path',
205
+ help='Path to the metadata tree root. Relative to the git repository root specified by --git-url.',
206
+ rich_help_panel=REQUEST_PANEL_TMT,
207
+ )
208
+ OPTION_PIPELINE_TYPE: Optional[PipelineType] = typer.Option(None, help="Force a specific Testing Farm pipeline type.")
209
+ OPTION_POST_INSTALL_SCRIPT: Optional[str] = typer.Option(
210
+ None, help="Post-install script to run right after the guest boots for the first time."
211
+ )
212
+ OPTION_SECURITY_GROUP_RULE_INGRESS: Optional[List[str]] = typer.Option(
213
+ None,
214
+ help=(
215
+ "Additional ingress security group rules to be passed to guest in "
216
+ "PROTOCOL:CIDR:PORT format. Multiple rules can be specified as comma separated, "
217
+ "eg. `tcp:109.81.42.42/32:22,142.0.42.0/24:22`. "
218
+ "Supported by AWS only atm."
219
+ ),
220
+ )
221
+ OPTION_SECURITY_GROUP_RULE_EGRESS: Optional[List[str]] = typer.Option(
222
+ None,
223
+ help=(
224
+ "Additional egress security group rules to be passed to guest in "
225
+ "PROTOCOL:CIDR:PORT format. Multiple rules can be specified as comma separated, "
226
+ "eg. `tcp:109.81.42.42/32:22,142.0.42.0/24:22`. "
227
+ "Supported by AWS only atm."
228
+ ),
229
+ )
230
+ OPTION_KICKSTART: Optional[List[str]] = typer.Option(
231
+ None,
232
+ metavar="key=value|@file",
233
+ help=(
234
+ "Kickstart specification to customize the guest installation. Expressed as a key=value pair. "
235
+ "For more information about the supported keys see "
236
+ "https://tmt.readthedocs.io/en/stable/spec/plans.html#kickstart. The @ prefix marks a yaml file to load."
237
+ ),
238
+ )
239
+ OPTION_POOL: Optional[str] = typer.Option(
240
+ None,
241
+ help=(
242
+ "Force pool to provision. By default the most suited pool is used according to the hardware "
243
+ "requirements specified in tmt plans."
244
+ ),
245
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
246
+ )
247
+ OPTION_REDHAT_BREW_BUILD: List[str] = typer.Option(
248
+ None,
249
+ help="Brew build task IDs or build NVRs to install on the test environment.",
250
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
251
+ )
252
+ OPTION_FEDORA_KOJI_BUILD: List[str] = typer.Option(
253
+ None,
254
+ help="Koji build task IDs or build NVRs to install on the test environment.",
255
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
256
+ )
257
+ OPTION_FEDORA_COPR_BUILD: List[str] = typer.Option(
258
+ None,
259
+ help=(
260
+ "Fedora Copr build to install on the test environment, specified using `build-id:chroot-name`"
261
+ ", e.g. 1784470:fedora-32-x86_64."
262
+ ),
263
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
264
+ )
265
+ OPTION_REPOSITORY: List[str] = typer.Option(
266
+ None,
267
+ help="Repository base url to add to the test environment and install all packages from it.",
268
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
269
+ )
270
+ OPTION_REPOSITORY_FILE: List[str] = typer.Option(
271
+ None,
272
+ help="URL to a repository file which should be added to /etc/yum.repos.d, e.g. https://example.com/repository.repo", # noqa
273
+ )
274
+ OPTION_DRY_RUN: bool = typer.Option(
275
+ False, help="Do not submit a request to Testing Farm, just print it.", rich_help_panel=RESERVE_PANEL_GENERAL
276
+ )
277
+ OPTION_VARIABLES: Optional[List[str]] = typer.Option(
278
+ None,
279
+ "-e",
280
+ "--environment",
281
+ metavar="key=value|@file",
282
+ help="Variables to pass to the test environment. The @ prefix marks a yaml file to load.",
283
+ )
284
+ OPTION_SECRETS: Optional[List[str]] = typer.Option(
285
+ None,
286
+ "-s",
287
+ "--secret",
288
+ metavar="key=value|@file",
289
+ help="Secret variables to pass to the test environment. The @ prefix marks a yaml file to load.",
290
+ )
291
+ OPTION_HARDWARE: List[str] = typer.Option(
292
+ None,
293
+ help=(
294
+ "HW requirements, expressed as key/value pairs. Keys can consist of several properties, "
295
+ "e.g. ``disk.size='>= 40 GiB'``, such keys will be merged in the resulting environment "
296
+ "with other keys sharing the path: ``cpu.family=79`` and ``cpu.model=6`` would be merged, not overwriting "
297
+ "each other. See https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#hardware "
298
+ "for the supported hardware selection possibilities."
299
+ ),
300
+ )
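+ # Illustrative sketch (the exact shape produced by hw_constraints() is an assumption): passing
+ #   --hardware cpu.family=79 --hardware cpu.model=6
+ # is meant to merge into a single nested constraint, roughly {'cpu': {'family': '79', 'model': '6'}},
+ # rather than the two options overwriting each other.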
301
+ OPTION_WORKER_IMAGE: Optional[str] = typer.Option(
302
+ None, "--worker-image", help="Force worker container image. Requires Testing Farm developer permissions."
303
+ )
304
+ OPTION_PARALLEL_LIMIT: Optional[int] = typer.Option(
305
+ None,
306
+ '--parallel-limit',
307
+ help=(
308
+ "Maximum amount of plans to be executed in parallel. Default values are 12 for Public Ranch and 5 for "
309
+ "Red Hat Ranch."
310
+ ),
311
+ )
312
+ OPTION_TAGS = typer.Option(
313
+ None,
314
+ "-t",
315
+ "--tag",
316
+ metavar="key=value|@file",
317
+ help="Tag cloud resources with given value. The @ prefix marks a yaml file to load.",
318
+ )
319
+ OPTION_RESERVE: bool = typer.Option(
320
+ False,
321
+ help="Reserve machine after testing, similarly to the `reserve` command.",
322
+ rich_help_panel=REQUEST_PANEL_RESERVE,
323
+ )
324
+ OPTION_TMT_CONTEXT: Optional[List[str]] = typer.Option(
325
+ None,
326
+ "-c",
327
+ "--context",
328
+ metavar="key=value|@file",
329
+ help="Context variables to pass to `tmt`. The @ prefix marks a yaml file to load.",
330
+ )
331
+
332
+
333
+ def _option_autoconnect(panel: str) -> bool:
334
+ return typer.Option(True, help="Automatically connect to the guest via SSH.", rich_help_panel=panel)
335
+
336
+
337
+ def _option_ssh_public_keys(panel: str) -> List[str]:
338
+ return typer.Option(
339
+ ["~/.ssh/*.pub"],
340
+ "--ssh-public-key",
341
+ help="Path to SSH public key(s) used to connect. Supports globbing.",
342
+ rich_help_panel=panel,
343
+ )
344
+
345
+
346
+ def _option_reservation_duration(panel: str) -> int:
347
+ return typer.Option(
348
+ settings.DEFAULT_RESERVATION_DURATION,
349
+ "--duration",
350
+ help="Set the reservation duration in minutes. By default the reservation is for 30 minutes.",
351
+ rich_help_panel=panel,
352
+ )
353
+
354
+
355
+ def _option_debug_reservation(panel: Optional[str] = None) -> bool:
356
+ return typer.Option(
357
+ False,
358
+ help="Enable debug messages in the reservation code. Useful for testing changes to reservation code.",
359
+ rich_help_panel=panel,
360
+ )
361
+
362
+
363
+ def _generate_tmt_extra_args(step: str) -> Optional[List[str]]:
364
+ return typer.Option(
365
+ None,
366
+ help=(
367
+ f"Additional options passed to the \"{step}\" step. "
368
+ "Can be specified multiple times for multiple additions."
369
+ ),
370
+ rich_help_panel=REQUEST_PANEL_TMT,
371
+ )
372
+
373
+
374
+ def _sanity_reserve() -> None:
375
+ """
376
+ Sanity checks for reservation support.
377
+ """
378
+
379
+ # Check if SSH_AUTH_SOCK is defined
380
+ ssh_auth_sock = os.getenv("SSH_AUTH_SOCK")
381
+ if not ssh_auth_sock:
382
+ exit_error(
383
+ "No 'ssh-agent' seems to be running, it is required for reservations to work, cannot continue.\n"
384
+ "SSH_AUTH_SOCK is not defined, make sure the ssh-agent is running by executing 'eval `ssh-agent`'."
385
+ )
386
+
387
+ # Check if SSH_AUTH_SOCK exists
388
+ if not os.path.exists(ssh_auth_sock):
389
+ exit_error(
390
+ "SSH_AUTH_SOCK socket does not exist, make sure the ssh-agent is running by executing 'eval `ssh-agent`'."
391
+ )
392
+
393
+ # Check if value of SSH_AUTH_SOCK is socket
394
+ if not stat.S_ISSOCK(os.stat(ssh_auth_sock).st_mode):
395
+ exit_error("SSH_AUTH_SOCK is not a socket, make sure the ssh-agent is running by executing 'eval `ssh-agent`'.")
396
+
397
+ # Check that the SSH agent has identities loaded (ssh-add -L)
398
+ ssh_add_output = subprocess.run(["ssh-add", "-L"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
399
+ if ssh_add_output.returncode != 0:
400
+ exit_error("No SSH identities found in the SSH agent. Please run `ssh-add`.")
401
+
402
+
403
+ def _handle_reservation(session, api_url: str, request_id: str, autoconnect: bool = False) -> None:
404
+ """
405
+ Handle the reservation for :py:func:``request`` and :py:func:``restart`` commands.
406
+ """
407
+ # Get artifacts url
408
+ request_url = urllib.parse.urljoin(api_url, f"/v0.1/requests/{request_id}")
409
+ response = session.get(request_url)
410
+ artifacts_url = response.json()['run']['artifacts']
411
+
412
+ try:
413
+ pipeline_log = session.get(f"{artifacts_url}/pipeline.log").text
414
+
415
+ if not pipeline_log:
416
+ exit_error(f"Pipeline log was empty. Please file an issue to {settings.ISSUE_TRACKER}.")
417
+
418
+ except requests.exceptions.SSLError:
419
+ exit_error(
420
+ textwrap.dedent(
421
+ f"""
422
+ Failed to access Testing Farm artifacts because of SSL validation error.
423
+ If you use Red Hat Ranch please make sure you have Red Hat CA certificates installed.
424
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
425
+ """
426
+ )
427
+ )
428
+ return
429
+
430
+ except requests.exceptions.ConnectionError:
431
+ exit_error(
432
+ textwrap.dedent(
433
+ f"""
434
+ Failed to access Testing Farm artifacts.
435
+ If you use Red Hat Ranch please make sure you are connected to the VPN.
436
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
437
+ """
438
+ )
439
+ )
440
+ return
441
+
442
+ # match any hostname or IP address from gluetool modules log
443
+ guests = re.findall(r'Guest is ready.*root@([\d\w\.-]+)', pipeline_log)
444
+
445
+ if not guests:
446
+ exit_error(
447
+ textwrap.dedent(
448
+ f"""
449
+ No guests found to connect to. This is unexpected, please file an issue
450
+ to {settings.ISSUE_TRACKER}.
451
+ """
452
+ )
453
+ )
454
+
455
+ if len(guests) > 1:
456
+ for guest in guests:
457
+ console.print(f"🌎 ssh root@{guest}")
458
+ return
459
+ else:
460
+ console.print(f"🌎 ssh root@{guests[0]}")
461
+
462
+ if autoconnect:
463
+ os.system(f"{SSH_RESERVATION_OPTIONS} root@{guests[0]}") # noqa: E501
464
+
465
+
466
+ def _localhost_ingress_rule(session: requests.Session) -> str:
467
+ try:
468
+ get_ip = session.get(settings.PUBLIC_IP_CHECKER_URL)
469
+ except requests.exceptions.RequestException as err:
470
+ exit_error(f"Could not get workstation ip to form a security group rule: {err}")
471
+
472
+ if get_ip.ok:
473
+ ip = get_ip.text.strip()
474
+ return f"-1:{ip}:-1" # noqa: E231
475
+
476
+ else:
477
+ exit_error(f"Got {get_ip.status_code} while checking {settings.PUBLIC_IP_CHECKER_URL}")
478
+
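+ # Illustrative example (hypothetical address): if the IP checker returns "203.0.113.7", the rule string
+ # becomes "-1:203.0.113.7:-1", i.e. all protocols and all ports from the workstation's public IP;
+ # _parse_security_group_rules() later normalizes the bare address to a /32 CIDR.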
479
+
480
+ def _add_reservation(
481
+ ssh_public_keys: List[str],
482
+ rules: Dict[str, Any],
483
+ duration: int,
484
+ environment: Dict[str, Any],
485
+ debug_reservation: bool,
486
+ ):
487
+ """
488
+ Add discovery of the reservation test to the given environment.
489
+ """
490
+ authorized_keys = read_glob_paths(ssh_public_keys).encode("utf-8")
491
+ if not authorized_keys:
492
+ exit_error(f"No public SSH keys found under {', '.join(ssh_public_keys)}, cannot continue.")
493
+
494
+ authorized_keys_bytes = base64.b64encode(authorized_keys)
495
+
496
+ if "secrets" not in environment or environment["secrets"] is None:
497
+ environment["secrets"] = {}
498
+
499
+ environment["secrets"].update({"TF_RESERVATION_AUTHORIZED_KEYS_BASE64": authorized_keys_bytes.decode("utf-8")})
500
+
501
+ if "settings" not in environment or environment["settings"] is None:
502
+ environment["settings"] = {}
503
+
504
+ if "provisioning" not in environment["settings"] or environment["settings"]["provisioning"] is None:
505
+ environment["settings"]["provisioning"] = {}
506
+
507
+ environment["settings"]["provisioning"].update(rules)
508
+
509
+ if "variables" not in environment or environment["variables"] is None:
510
+ environment["variables"] = {}
511
+
512
+ environment["variables"].update({"TF_RESERVATION_DURATION": str(duration)})
513
+
514
+ if debug_reservation:
515
+ environment["variables"].update({"TF_RESERVATION_DEBUG": "1"})
516
+
517
+ if "tmt" not in environment or environment["tmt"] is None:
518
+ environment["tmt"] = {"extra_args": {}}
519
+
520
+ if "extra_args" not in environment["tmt"] or environment["tmt"]["extra_args"] is None:
521
+ environment["tmt"]["extra_args"] = {}
522
+
523
+ if "discover" not in environment["tmt"]["extra_args"] or environment["tmt"]["extra_args"]["discover"] is None:
524
+ environment["tmt"]["extra_args"]["discover"] = []
525
+
526
+ # add reservation if not already present
527
+ if RESERVE_TMT_DISCOVER_EXTRA_ARGS not in environment["tmt"]["extra_args"]["discover"]:
528
+ environment["tmt"]["extra_args"]["discover"].append(RESERVE_TMT_DISCOVER_EXTRA_ARGS)
529
+
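+ # Sketch of the effect (assumed request shapes): after _add_reservation() the environment gains roughly
+ #   environment["secrets"]["TF_RESERVATION_AUTHORIZED_KEYS_BASE64"]  # base64-encoded public keys
+ #   environment["variables"]["TF_RESERVATION_DURATION"]              # duration in minutes, as a string
+ #   environment["tmt"]["extra_args"]["discover"]                     # [RESERVE_TMT_DISCOVER_EXTRA_ARGS]
+ # with the security group rules merged into environment["settings"]["provisioning"].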
530
+
531
+ def _contains_compose(environments: List[Dict[str, Any]]):
532
+ """
533
+ Returns True if any of the environments has ``os.compose`` defined.
534
+ """
535
+ for environment in environments:
536
+ if "os" in environment and environment["os"]:
537
+ if "compose" in environment["os"] and environment["os"]["compose"]:
538
+ return True
539
+ return False
540
+
541
+
542
+ # NOTE(ivasilev) Largely borrowed from artemis-cli
543
+ def _parse_security_group_rules(ingress_rules: List[str], egress_rules: List[str]) -> Dict[str, Any]:
544
+ """
545
+ Returns a dictionary with ingress/egress rules in a TFT-request-friendly format.
546
+ """
547
+ security_group_rules = {}
548
+
549
+ def _add_secgroup_rules(sg_type: str, sg_data: List[str]) -> None:
550
+ security_group_rules[sg_type] = []
551
+
552
+ for sg_rule in normalize_multistring_option(sg_data):
553
+ matches = re.match(SECURITY_GROUP_RULE_FORMAT, sg_rule)
554
+ if not matches:
555
+ exit_error(f"Bad format of security group rule '{sg_rule}', should be PROTOCOL:CIDR:PORT") # noqa: E231
556
+
557
+ protocol, cidr, port = matches[1], matches[2], matches[3]
558
+
559
+ # Let's validate cidr
560
+ try:
561
+ # This way a single ip address will be converted to a valid ip/32 cidr.
562
+ cidr = str(ipaddress.ip_network(cidr))
563
+ except ValueError as err:
564
+ exit_error(f'CIDR {cidr} has incorrect format: {err}')
565
+
566
+ # Artemis expects port_min/port_max; -1 has to be converted to the full range 0-65535
567
+ port_min = 0 if port == '-1' else int(port.split('-')[0])
568
+ port_max = 65535 if port == '-1' else int(port.split('-')[-1])
569
+
570
+ # Add rule for Artemis API
571
+ security_group_rules[sg_type].append(
572
+ {
573
+ 'type': sg_type.split('_')[-1],
574
+ 'protocol': protocol,
575
+ 'cidr': cidr,
576
+ 'port_min': port_min,
577
+ 'port_max': port_max,
578
+ }
579
+ )
580
+
581
+ _add_secgroup_rules('security_group_rules_ingress', ingress_rules)
582
+ _add_secgroup_rules('security_group_rules_egress', egress_rules)
583
+
584
+ return security_group_rules
585
+
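+ # Illustrative example (assumed input/output shapes):
+ #
+ #     >>> _parse_security_group_rules(["tcp:192.0.2.10:22"], [])
+ #     {'security_group_rules_ingress': [{'type': 'ingress', 'protocol': 'tcp', 'cidr': '192.0.2.10/32',
+ #                                        'port_min': 22, 'port_max': 22}],
+ #      'security_group_rules_egress': []}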
586
+
587
+ def _parse_xunit(xunit: str):
588
+ """
589
+ A helper that parses an xunit file into sets of passed/failed/skipped/errored plans per arch.
590
+
591
+ The plans are returned as a {'arch': ['plan1', 'plan2', ..]} map. If it was impossible to deduce architecture
592
+ from a certain plan result (happens in case of early fails / infra issues), the plan will be listed under the 'N/A'
593
+ key.
594
+ """
595
+
596
+ def _add_plan(collection: dict, arch: str, plan: ET.Element):
597
+ # NOTE(ivasilev) name property will always be defined at this point, defaulting to '' to make type check happy
598
+ plan_name = plan.get('name', '')
599
+ if arch in collection:
600
+ collection[arch].append(plan_name)
601
+ else:
602
+ collection[arch] = [plan_name]
603
+
604
+ failed_plans = {}
605
+ passed_plans = {}
606
+ skipped_plans = {}
607
+ errored_plans = {}
608
+
609
+ results_root = ET.fromstring(xunit)
610
+ for plan in results_root.findall('./testsuite'):
611
+ # Try to get information about the environment (stored under ./testing-environment), may be
612
+ # absent if state is undefined
613
+ testing_environment: Optional[ET.Element] = plan.find('./testing-environment[@name="requested"]')
614
+ if not testing_environment:
615
+ console_stderr.print(
616
+ f'Could not find env specifications for {plan.get("name")}, assuming fail for all arches'
617
+ )
618
+ arch = 'N/A'
619
+ else:
620
+ arch_property = testing_environment.find('./property[@name="arch"]')
621
+ if arch_property is None:
622
+ console_stderr.print(f'Could not find arch property for plan {plan.get("name")} results, skipping')
623
+ continue
624
+ # NOTE(ivasilev) arch property will always be defined at this point, defaulting to '' to make type check
625
+ # happy
626
+ arch = arch_property.get('value', '')
627
+ if plan.get('result') == 'passed':
628
+ _add_plan(passed_plans, arch, plan)
629
+ elif plan.get('result') == 'failed':
630
+ _add_plan(failed_plans, arch, plan)
631
+ elif plan.get('result') == 'skipped':
632
+ _add_plan(skipped_plans, arch, plan)
633
+ else:
634
+ _add_plan(errored_plans, arch, plan)
635
+
636
+ # Let's remove possible duplicates among N/A errored out tests
637
+ if 'N/A' in errored_plans:
638
+ errored_plans['N/A'] = list(set(errored_plans['N/A']))
639
+ return passed_plans, failed_plans, skipped_plans, errored_plans
640
+
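+ # Illustrative return value (plan names are hypothetical): a run with one passed and one errored plan
+ # might yield
+ #   passed  = {'x86_64': ['/plans/smoke']}
+ #   failed  = {}
+ #   skipped = {}
+ #   errored = {'N/A': ['/plans/broken']}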
641
+
642
+ def _get_request_summary(request: dict, session: requests.Session):
643
+ """A helper that prepares json summary of the test run"""
644
+ state = request.get('state')
645
+ artifacts_url = (request.get('run') or {}).get('artifacts')
646
+ xpath_url = f'{artifacts_url}/results.xml' if artifacts_url else ''
647
+ xunit = (request.get('result') or {}).get('xunit') or '<testsuites></testsuites>'
648
+ if state not in ['queued', 'running'] and artifacts_url:
649
+ # NOTE(ivasilev) xunit can be None (ex. in case of timed out requests) so let's fetch results.xml and use it
650
+ # as source of truth
651
+ try:
652
+ response = session.get(xpath_url)
653
+ if response.status_code == 200:
654
+ xunit = response.text
655
+ except requests.exceptions.ConnectionError:
656
+ console_stderr.print("Could not get xunit results")
657
+ passed_plans, failed_plans, skipped_plans, errored_plans = _parse_xunit(xunit)
658
+ overall = (request.get("result") or {}).get("overall")
659
+ arches_requested = [env['arch'] for env in request['environments_requested']]
660
+
661
+ return {
662
+ 'id': request['id'],
663
+ 'state': request['state'],
664
+ 'artifacts': artifacts_url,
665
+ 'overall': overall,
666
+ 'arches_requested': arches_requested,
667
+ 'errored_plans': errored_plans,
668
+ 'failed_plans': failed_plans,
669
+ 'skipped_plans': skipped_plans,
670
+ 'passed_plans': passed_plans,
671
+ }
672
+
673
+
674
+ def _print_summary_table(summary: dict, format: Optional[WatchFormat], show_details=True):
675
+ if not format == WatchFormat.text:
676
+ # Nothing to do, table is printed only when text output is requested
677
+ return
678
+
679
+ def _get_plans_list(collection):
680
+ return list(collection.values())[0] if collection.values() else []
681
+
682
+ def _has_plan(collection, arch, plan):
683
+ return plan in collection.get(arch, [])
684
+
685
+ # Transform the per-arch plan maps into flat plan lists to display per-arch result statistics
686
+ errored = _get_plans_list(summary['errored_plans'])
687
+ failed = _get_plans_list(summary['failed_plans'])
688
+ skipped = _get_plans_list(summary['skipped_plans'])
689
+ passed = _get_plans_list(summary['passed_plans'])
690
+ generic_info_table = Table(show_header=True, header_style="bold magenta")
691
+ arches_requested = summary['arches_requested']
692
+ artifacts_url = summary['artifacts'] or ''
693
+ for column in summary.keys():
694
+ generic_info_table.add_column(column)
695
+ generic_info_table.add_row(
696
+ summary['id'],
697
+ summary['state'],
698
+ f'[link]{artifacts_url}[/link]',
699
+ summary['overall'],
700
+ ','.join(arches_requested),
701
+ str(len(errored)),
702
+ str(len(failed)),
703
+ str(len(skipped)),
704
+ str(len(passed)),
705
+ )
706
+ console.print(generic_info_table)
707
+
708
+ all_plans = sorted(set(errored + failed + skipped + passed))
709
+ details_table = Table(show_header=True, header_style="bold magenta")
710
+ for column in ["plan"] + arches_requested:
711
+ details_table.add_column(column)
712
+
713
+ for plan in all_plans:
714
+ row = [plan]
715
+ for arch in arches_requested:
716
+ if _has_plan(summary['passed_plans'], arch, plan):
717
+ res = '[green]pass[/green]'
718
+ elif _has_plan(summary['skipped_plans'], arch, plan):
719
+ res = '[white]skip[/white]'
720
+ elif _has_plan(summary['failed_plans'], arch, plan):
721
+ res = '[red]fail[/red]'
722
+ elif _has_plan(summary['errored_plans'], 'N/A', plan):
723
+ res = '[yellow]error[/yellow]'
724
+ else:
725
+ # If for some reason the plan has not been executed for this arch (this can happen after
726
+ # applying adjust rules) -> don't show anything
727
+ res = None
728
+ row.append(res)
729
+ details_table.add_row(*row)
730
+ if show_details:
731
+ console.print(details_table)
732
+
733
+
734
+ def watch(
735
+ context: typer.Context,
736
+ api_url: str = ARGUMENT_API_URL,
737
+ id: str = typer.Option(..., help="Request ID to watch"),
738
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
739
+ format: Optional[WatchFormat] = typer.Option(WatchFormat.text, help="Output format"),
740
+ autoconnect: bool = typer.Option(True, hidden=True),
741
+ reserve: bool = typer.Option(False, hidden=True),
742
+ ):
743
+ """Watch request for completion."""
744
+
745
+ # Accept these arguments only via environment variables
746
+ check_unexpected_arguments(context, "api_url")
747
+
748
+ def _console_print(*args, **kwargs):
749
+ """A helper function that will skip printing to console if output format is json"""
750
+ if format == WatchFormat.json:
751
+ return
752
+ console.print(*args, **kwargs)
753
+
754
+ if not uuid_valid(id):
755
+ exit_error("invalid request id")
756
+
757
+ get_url = urllib.parse.urljoin(api_url, f"/v0.1/requests/{id}")
758
+ current_state: str = ""
759
+
760
+ _console_print(f"🔎 api [blue]{get_url}[/blue]")
761
+
762
+ if not no_wait:
763
+ _console_print("💡 waiting for request to finish, use ctrl+c to skip", style="bright_yellow")
764
+
765
+ artifacts_shown = False
766
+
767
+ # Setting up retries
768
+ session = requests.Session()
769
+ install_http_retries(session)
770
+
771
+ def _is_reserved(session, request):
772
+ artifacts_url = (request.get('run') or {}).get('artifacts')
773
+
774
+ if not artifacts_url:
775
+ return False
776
+
777
+ try:
778
+ workdir = re.search(r'href="(.*)" name="workdir"', session.get(f"{artifacts_url}/results.xml").text)
779
+ except requests.exceptions.SSLError:
780
+ exit_error("Artifacts unreachable via SSL, do you have RH CA certificates installed?[/yellow]")
781
+
782
+ if workdir:
783
+ # finish early if reservation is running
784
+ if re.search(r"\[\+\] Reservation tick:", session.get(f"{workdir.group(1)}/log.txt").text):
785
+ return True
786
+
787
+ return False
788
+
789
+ while True:
790
+ try:
791
+ response = session.get(get_url)
792
+
793
+ except requests.exceptions.ConnectionError as exc:
794
+ console.print("📛 connection to API failed", style="red")
795
+ raise typer.Exit(code=2) from exc
796
+
797
+ if response.status_code == 404:
798
+ exit_error("request with given ID not found")
799
+
800
+ if response.status_code != 200:
801
+ exit_error(f"failed to get request: {response.text}")
802
+
803
+ request = response.json()
804
+
805
+ state = request["state"]
806
+
807
+ if state == current_state:
808
+ # check for reservation status and finish early if reserved
809
+ if reserve and _is_reserved(session, request):
810
+ _handle_reservation(session, api_url, request["id"], autoconnect)
811
+ return
812
+
813
+ time.sleep(settings.WATCH_TICK)
814
+ continue
815
+
816
+ current_state = state
817
+
818
+ request_summary = _get_request_summary(request, session)
819
+ if format == WatchFormat.json:
820
+ console.print(json.dumps(request_summary, indent=2))
821
+
822
+ if state == "new":
823
+ _console_print("👶 request is [blue]waiting to be queued[/blue]")
824
+
825
+ elif state == "queued":
826
+ _console_print("👷 request is [blue]queued[/blue]")
827
+
828
+ elif state == "running":
829
+ _console_print("🚀 request is [blue]running[/blue]")
830
+ _console_print(f"🚢 artifacts [blue]{request['run']['artifacts']}[/blue]")
831
+ artifacts_shown = True
832
+
833
+ elif state == "complete":
834
+ if not artifacts_shown:
835
+ _console_print(f"🚢 artifacts [blue]{request['run']['artifacts']}[/blue]")
836
+
837
+ overall = request["result"]["overall"]
838
+ if overall in ["passed", "skipped"]:
839
+ _console_print("✅ tests passed", style="green")
840
+ _print_summary_table(request_summary, format)
841
+ raise typer.Exit()
842
+
843
+ if overall in ["failed", "error", "unknown"]:
844
+ _console_print(f"❌ tests {overall}", style="red")
845
+ if overall == "error":
846
+ _console_print(f"{request['result']['summary']}", style="red")
847
+ _print_summary_table(request_summary, format)
848
+ raise typer.Exit(code=1)
849
+
850
+ elif state == "error":
851
+ msg = (
852
+ request['result'].get('summary')
853
+ if request['result']
854
+ else '\n'.join(note['message'] for note in request['notes'])
855
+ )
856
+ _console_print(f"📛 pipeline error\n{msg}", style="red")
857
+ _print_summary_table(request_summary, format)
858
+ raise typer.Exit(code=2)
859
+
860
+ elif state in ["canceled", "cancel-requested"]:
861
+ _console_print("⚠️ pipeline cancelled", style="yellow")
862
+ raise typer.Exit(code=3)
863
+
864
+ if no_wait:
865
+ _print_summary_table(request_summary, format, show_details=False)
866
+ raise typer.Exit()
867
+
868
+ time.sleep(settings.WATCH_TICK)
869
+
870
+
871
+ def version():
872
+ """Print CLI version"""
873
+ console.print(f"{cli_version}")
874
+
875
+
876
+ def request(
877
+ context: typer.Context,
878
+ api_url: str = ARGUMENT_API_URL,
879
+ api_token: str = ARGUMENT_API_TOKEN,
880
+ timeout: int = typer.Option(
881
+ DEFAULT_PIPELINE_TIMEOUT,
882
+ help="Set the timeout for the request in minutes. If the test takes longer than this, it will be terminated.",
883
+ ),
884
+ test_type: str = typer.Option("fmf", help="Test type to use, if not set autodetected."),
885
+ tmt_plan_name: Optional[str] = OPTION_TMT_PLAN_NAME,
886
+ tmt_plan_filter: Optional[str] = OPTION_TMT_PLAN_FILTER,
887
+ tmt_test_name: Optional[str] = OPTION_TMT_TEST_NAME,
888
+ tmt_test_filter: Optional[str] = OPTION_TMT_TEST_FILTER,
889
+ tmt_path: str = OPTION_TMT_PATH,
890
+ sti_playbooks: Optional[List[str]] = typer.Option(
891
+ None,
892
+ "--playbook",
893
+ help="Playbook to run, by default 'tests/tests*.yml', multiple playbooks can be specified.",
894
+ rich_help_panel=REQUEST_PANEL_STI,
895
+ ),
896
+ git_url: Optional[str] = typer.Option(
897
+ None, help="URL of the GIT repository to test. If not set, autodetected from current git repository."
898
+ ),
899
+ git_ref: str = typer.Option(
900
+ "main", help="GIT ref or branch to test. If not set, autodetected from current git repository."
901
+ ),
902
+ git_merge_sha: Optional[str] = typer.Option(
903
+ None, help="GIT ref or branch into which --ref will be merged, if specified."
904
+ ),
905
+ arches: List[str] = typer.Option(["x86_64"], "--arch", help="Hardware platforms of the system to be provisioned."),
906
+ compose: Optional[str] = typer.Option(
907
+ None,
908
+ help="Compose used to provision system-under-test. If not set, tests will expect 'container' provision method specified in tmt plans.", # noqa
909
+ ),
910
+ hardware: List[str] = OPTION_HARDWARE,
911
+ kickstart: Optional[List[str]] = OPTION_KICKSTART,
912
+ pool: Optional[str] = OPTION_POOL,
913
+ cli_tmt_context: Optional[List[str]] = OPTION_TMT_CONTEXT,
914
+ variables: Optional[List[str]] = OPTION_VARIABLES,
915
+ secrets: Optional[List[str]] = OPTION_SECRETS,
916
+ tmt_environment: Optional[List[str]] = typer.Option(
917
+ None,
918
+ "-T",
919
+ "--tmt-environment",
920
+ metavar="key=value|@file",
921
+ help=(
922
+ "Environment variables to pass to the tmt process. "
923
+ "Used to configure tmt report plugins like reportportal or polarion. "
924
+ "The @ prefix marks a yaml file to load."
925
+ ),
926
+ ),
927
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
928
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
929
+ redhat_brew_build: List[str] = OPTION_REDHAT_BREW_BUILD,
930
+ fedora_koji_build: List[str] = OPTION_FEDORA_KOJI_BUILD,
931
+ fedora_copr_build: List[str] = OPTION_FEDORA_COPR_BUILD,
932
+ repository: List[str] = OPTION_REPOSITORY,
933
+ repository_file: List[str] = OPTION_REPOSITORY_FILE,
934
+ sanity: bool = typer.Option(False, help="Run Testing Farm sanity test.", rich_help_panel=RESERVE_PANEL_GENERAL),
935
+ tags: Optional[List[str]] = OPTION_TAGS,
936
+ watchdog_dispatch_delay: Optional[int] = typer.Option(
937
+ None,
938
+ help="How long (seconds) before the guest \"is-alive\" watchdog is dispatched. Note that this is implemented only in Artemis service.", # noqa
939
+ ),
940
+ watchdog_period_delay: Optional[int] = typer.Option(
941
+ None,
942
+ help="How often (seconds) check that the guest \"is-alive\". Note that this is implemented only in Artemis service.", # noqa
943
+ ),
944
+ dry_run: bool = OPTION_DRY_RUN,
945
+ pipeline_type: Optional[PipelineType] = OPTION_PIPELINE_TYPE,
946
+ post_install_script: Optional[str] = OPTION_POST_INSTALL_SCRIPT,
947
+ security_group_rule_ingress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_INGRESS,
948
+ security_group_rule_egress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_EGRESS,
949
+ user_webpage: Optional[str] = typer.Option(
950
+ None, help="URL to the user's webpage. The link will be shown in the results viewer."
951
+ ),
952
+ user_webpage_name: Optional[str] = typer.Option(
953
+ None, help="Name of the user's webpage. It will be shown in the results viewer."
954
+ ),
955
+ user_webpage_icon: Optional[str] = typer.Option(
956
+ None, help="URL of the icon of the user's webpage. It will be shown in the results viewer."
957
+ ),
958
+ parallel_limit: Optional[int] = OPTION_PARALLEL_LIMIT,
959
+ tmt_discover: Optional[List[str]] = _generate_tmt_extra_args("discover"),
960
+ tmt_prepare: Optional[List[str]] = _generate_tmt_extra_args("prepare"),
961
+ tmt_finish: Optional[List[str]] = _generate_tmt_extra_args("finish"),
962
+ reserve: bool = OPTION_RESERVE,
963
+ ssh_public_keys: List[str] = _option_ssh_public_keys(REQUEST_PANEL_RESERVE),
964
+ autoconnect: bool = _option_autoconnect(REQUEST_PANEL_RESERVE),
965
+ reservation_duration: int = _option_reservation_duration(REQUEST_PANEL_RESERVE),
966
+ debug_reservation: bool = _option_debug_reservation(REQUEST_PANEL_RESERVE),
967
+ ):
968
+ """
969
+ Request testing from Testing Farm.
970
+ """
971
+
972
+ # Accept these arguments only via environment variables
973
+ check_unexpected_arguments(context, "api_url", "api_token")
974
+
975
+ # Split comma separated arches
976
+ arches = normalize_multistring_option(arches)
977
+
978
+ git_available = bool(shutil.which("git"))
979
+
980
+ # check for token
981
+ if not api_token:
982
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable")
983
+
984
+ if not compose and arches != ['x86_64']:
985
+ exit_error(
986
+ "Without compose the tests run against a container image specified in the plan. "
987
+ "Only 'x86_64' architecture supported in this case."
988
+ )
989
+
990
+ if sanity:
991
+ if git_url or tmt_plan_name:
992
+ exit_error(
993
+ "The option [underline]--sanity[/underline] is mutually exclusive with "
994
+ "[underline]--git-url[/underline] and [underline]--plan[/underline]."
995
+ )
996
+
997
+ git_url = str(settings.TESTING_FARM_TESTS_GIT_URL)
998
+ tmt_plan_name = str(settings.TESTING_FARM_SANITY_PLAN)
999
+
1000
+ if reserve:
1001
+ _sanity_reserve()
1002
+
1003
+ # resolve git repository details from the current repository
1004
+ if not git_url:
1005
+ if not git_available:
1006
+ exit_error("no git url defined")
1007
+
1008
+ # check for uncommitted changes
1009
+ if git_available and not git_url:
1010
+ try:
1011
+ subprocess.check_output("git update-index --refresh".split(), stderr=subprocess.STDOUT)
1012
+ subprocess.check_output("git diff-index --quiet HEAD --".split(), stderr=subprocess.STDOUT)
1013
+ except subprocess.CalledProcessError as process:
1014
+ if 'fatal:' not in str(process.stdout):
1015
+ exit_error(
1016
+ "Uncommited changes found in current git repository, refusing to continue.\n"
1017
+ " HINT: When running tests for the current repository, the changes "
1018
+ "must be commited and pushed."
1019
+ )
1020
+
1021
+ git_url = cmd_output_or_exit("git remote get-url origin", "could not auto-detect git url")
1022
+ # use https instead of git when auto-detected
1023
+ # GitHub: git@github.com:containers/podman.git
1024
+ # GitLab: git@gitlab.com:testing-farm/cli.git, git+ssh://git@gitlab.com/spoore/centos_rpms_jq.git
1025
+ # Pagure: ssh://git@pagure.io/fedora-ci/messages.git
1026
+ assert git_url
1027
+ git_url = re.sub(r"^(?:(?:git\+)?ssh://)?git@([^:/]*)[:/](.*)", r"https://\1/\2", git_url)
1028
+
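+ # For example, the rewrite above turns git@github.com:containers/podman.git into
+ # https://github.com/containers/podman.git and ssh://git@pagure.io/fedora-ci/messages.git into
+ # https://pagure.io/fedora-ci/messages.git.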
1029
+ # detect git ref
1030
+ git_ref = cmd_output_or_exit("git rev-parse --abbrev-ref HEAD", "could not autodetect git ref")
1031
+
1032
+ # in case we have a commit checked out, not a named branch
1033
+ if git_ref == "HEAD":
1034
+ git_ref = cmd_output_or_exit("git rev-parse HEAD", "could not autodetect git ref")
1035
+
1036
+ # detect test type from local files
1037
+ if os.path.exists(os.path.join((tmt_path or ""), ".fmf/version")):
1038
+ test_type = "fmf"
1039
+ elif os.path.exists("tests/tests.yml"):
1040
+ test_type = "sti"
1041
+ else:
1042
+ exit_error("no test type defined")
1043
+
1044
+ # make typing happy
1045
+ assert git_url is not None
1046
+
1047
+ # STI is not supported against a container
1048
+ if test_type == "sti" and compose == "container":
1049
+ exit_error("container based testing is not available for 'sti' test type")
1050
+
1051
+ console.print(f"📦 repository [blue]{git_url}[/blue] ref [blue]{git_ref}[/blue] test-type [blue]{test_type}[/blue]")
1052
+
1053
+ pool_info = f"via pool [blue]{pool}[/blue]" if pool else ""
1054
+ for arch in arches:
1055
+ console.print(f"💻 [blue]{compose or 'container image in plan'}[/blue] on [blue]{arch}[/blue] {pool_info}")
1056
+
1057
+ # test details
1058
+ test = TestTMT if test_type == "fmf" else TestSTI
1059
+ test["url"] = git_url
1060
+ test["ref"] = git_ref
1061
+
1062
+ if git_merge_sha:
1063
+ test["merge_sha"] = git_merge_sha
1064
+
1065
+ if tmt_plan_name:
1066
+ test["name"] = tmt_plan_name
1067
+
1068
+ if tmt_plan_filter:
1069
+ test["plan_filter"] = tmt_plan_filter
1070
+
1071
+ if tmt_test_name:
1072
+ test["test_name"] = tmt_test_name
1073
+
1074
+ if tmt_test_filter:
1075
+ test["test_filter"] = tmt_test_filter
1076
+
1077
+ if sti_playbooks:
1078
+ test["playbooks"] = sti_playbooks
1079
+
1080
+ # environment details
1081
+ environments = []
1082
+ for arch in arches:
1083
+ environment = Environment.copy()
1084
+ environment["arch"] = arch
1085
+ environment["pool"] = pool
1086
+ environment["artifacts"] = []
1087
+ environment["tmt"] = {}
1088
+
1089
+ # NOTE(ivasilev) From now on tmt.context will always be set. Even if the user didn't request anything,
1090
+ # the requested arch will be passed into the context
1091
+ tmt_context = options_to_dict("tmt context", cli_tmt_context or [])
1092
+ if "arch" not in tmt_context:
1093
+ # If the arch context is not set by the user directly via -c, set it according to the requested arch
1094
+ tmt_context["arch"] = arch
1095
+ environment["tmt"].update({"context": tmt_context})
1096
+
1097
+ if compose:
1098
+ environment["os"] = {"compose": compose}
1099
+
1100
+ if secrets:
1101
+ environment["secrets"] = options_to_dict("environment secrets", secrets)
1102
+
1103
+ if variables:
1104
+ environment["variables"] = options_to_dict("environment variables", variables)
1105
+
1106
+ if hardware:
1107
+ environment["hardware"] = hw_constraints(hardware)
1108
+
1109
+ if kickstart:
1110
+ # Typer escapes newlines in options, we need to unescape them
1111
+ kickstart = [codecs.decode(value, 'unicode_escape') for value in kickstart]
1112
+ environment["kickstart"] = options_to_dict("environment kickstart", kickstart)
1113
+
1114
+ if redhat_brew_build:
1115
+ environment["artifacts"].extend(artifacts("redhat-brew-build", redhat_brew_build))
1116
+
1117
+ if fedora_koji_build:
1118
+ environment["artifacts"].extend(artifacts("fedora-koji-build", fedora_koji_build))
1119
+
1120
+ if fedora_copr_build:
1121
+ environment["artifacts"].extend(artifacts("fedora-copr-build", fedora_copr_build))
1122
+
1123
+ if repository:
1124
+ environment["artifacts"].extend(artifacts("repository", repository))
1125
+
1126
+ if repository_file:
1127
+ environment["artifacts"].extend(artifacts("repository-file", repository_file))
1128
+
1129
+ if tmt_environment:
1130
+ environment["tmt"].update({"environment": options_to_dict("tmt environment variables", tmt_environment)})
1131
+
1132
+ if tmt_discover or tmt_prepare or tmt_finish:
1133
+ if "extra_args" not in environment["tmt"]:
1134
+ environment["tmt"]["extra_args"] = {}
1135
+
1136
+ if tmt_discover:
1137
+ environment["tmt"]["extra_args"]["discover"] = tmt_discover
1138
+
1139
+ if tmt_prepare:
1140
+ environment["tmt"]["extra_args"]["prepare"] = tmt_prepare
1141
+
1142
+ if tmt_finish:
1143
+ environment["tmt"]["extra_args"]["finish"] = tmt_finish
1144
+
1145
+ environments.append(environment)
1146
+
1147
+ # Setting up retries
1148
+ session = requests.Session()
1149
+ install_http_retries(session)
1150
+
1151
+ if reserve:
1152
+ if not _contains_compose(environments):
1153
+ exit_error("Reservations are not supported with container executions, cannot continue")
1154
+
1155
+ if len(environments) > 1:
1156
+ exit_error("Reservations are currently supported for a single plan, cannot continue")
1157
+
1158
+ # support cases where the user has multiple localhost addresses
1159
+ rules = _parse_security_group_rules(
1160
+ list({_localhost_ingress_rule(requests.Session()) for _ in range(0, settings.PUBLIC_IP_RESOLVE_TRIES)}), []
1161
+ )
1162
+
1163
+ for environment in environments:
1164
+ _add_reservation(
1165
+ ssh_public_keys=ssh_public_keys,
1166
+ rules=rules,
1167
+ duration=reservation_duration,
1168
+ environment=environment,
1169
+ debug_reservation=debug_reservation,
1170
+ )
1171
+
1172
+ machine_pre = "Machine" if len(environments) == 1 else str(len(environments)) + " machines"
1173
+ console.print(f"🛟 {machine_pre} will be reserved after testing")
1174
+
1175
+ if any(
1176
+ provisioning_detail
1177
+ for provisioning_detail in [
1178
+ tags,
1179
+ watchdog_dispatch_delay,
1180
+ watchdog_period_delay,
1181
+ post_install_script,
1182
+ security_group_rule_ingress,
1183
+ security_group_rule_egress,
1184
+ ]
1185
+ ):
1186
+ if "settings" not in environments[0]:
1187
+ environments[0]["settings"] = {}
1188
+
1189
+ if 'provisioning' not in environments[0]["settings"]:
1190
+ environments[0]["settings"]["provisioning"] = {}
1191
+
1192
+ if tags:
1193
+ environments[0]["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1194
+
1195
+ if watchdog_dispatch_delay is not None:
1196
+ environments[0]["settings"]["provisioning"]["watchdog-dispatch-delay"] = watchdog_dispatch_delay
1197
+
1198
+ if watchdog_period_delay is not None:
1199
+ environments[0]["settings"]["provisioning"]["watchdog-period-delay"] = watchdog_period_delay
1200
+
1201
+ if post_install_script:
1202
+ environments[0]["settings"]["provisioning"]["post_install_script"] = post_install_script
1203
+
1204
+ if security_group_rule_ingress or security_group_rule_egress:
1205
+ rules = _parse_security_group_rules(security_group_rule_ingress or [], security_group_rule_egress or [])
1206
+ environments[0]["settings"]["provisioning"].update(rules)
1207
+
1208
+ # create final request
1209
+ request = TestingFarmRequestV1
1210
+ if test_type == "fmf":
1211
+ test["path"] = tmt_path
1212
+ request["test"]["fmf"] = test
1213
+ else:
1214
+ request["test"]["sti"] = test
1215
+
1216
+ request["environments"] = environments
1217
+ request["settings"] = {}
1218
+
1219
+ if reserve or pipeline_type or parallel_limit or timeout != DEFAULT_PIPELINE_TIMEOUT:
1220
+ request["settings"]["pipeline"] = {}
1221
+
1222
+ # in case the reservation duration is more than the pipeline timeout, adjust also the pipeline timeout
1223
+ if reserve:
1224
+ if reservation_duration > timeout:
1225
+ request["settings"]["pipeline"] = {"timeout": reservation_duration}
1226
+ console.print(f"⏳ Maximum reservation time is {reservation_duration} minutes")
1227
+ else:
1228
+ request["settings"]["pipeline"] = {"timeout": timeout}
1229
+ console.print(f"⏳ Maximum reservation time is {timeout} minutes")
1230
+
1231
+ # forced pipeline timeout
1232
+ elif timeout != DEFAULT_PIPELINE_TIMEOUT:
1233
+ console.print(f"⏳ Pipeline timeout forced to {timeout} minutes")
1234
+ request["settings"]["pipeline"] = {"timeout": timeout}
1235
+
1236
+ if pipeline_type:
1237
+ request["settings"]["pipeline"]["type"] = pipeline_type.value
1238
+
1239
+ if parallel_limit:
1240
+ request["settings"]["pipeline"]["parallel-limit"] = parallel_limit
1241
+
1242
+ # worker image
1243
+ if worker_image:
1244
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1245
+ request["settings"]["worker"] = {"image": worker_image}
1246
+
1247
+ if not user_webpage and (user_webpage_name or user_webpage_icon):
1248
+ exit_error("The user-webpage-name and user-webpage-icon can be used only with user-webpage option")
1249
+
1250
+ request["user"] = {}
1251
+ if user_webpage:
1252
+ request["user"]["webpage"] = {"url": user_webpage, "icon": user_webpage_icon, "name": user_webpage_name}
1253
+
1254
+ # submit request to Testing Farm
1255
+ post_url = urllib.parse.urljoin(api_url, "v0.1/requests")
1256
+
1257
+ # dry run
1258
+ if dry_run:
1259
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1260
+ print_json(json.dumps(request, indent=4, separators=(',', ': ')))
1261
+ raise typer.Exit()
1262
+
1263
+ # handle errors
1264
+ response = session.post(post_url, json=request, headers=authorization_headers(api_token))
1265
+ if response.status_code == 401:
1266
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1267
+
1268
+ if response.status_code == 400:
1269
+ exit_error(
1270
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1271
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1272
+ )
1273
+
1274
+ if response.status_code != 200:
1275
+ print(response.text)
1276
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1277
+
1278
+ request_id = response.json()['id']
1279
+
1280
+ # Watch the request and handle reservation
1281
+ watch(context, api_url, request_id, no_wait, reserve=reserve, autoconnect=autoconnect, format=WatchFormat.text)
1282
+
1283
+
1284
+ def restart(
1285
+ context: typer.Context,
1286
+ request_id: str = typer.Argument(..., help="Testing Farm request ID or a string containing it."),
1287
+ api_url: str = ARGUMENT_API_URL,
1288
+ internal_api_url: str = ARGUMENT_INTERNAL_API_URL,
1289
+ api_token: str = ARGUMENT_API_TOKEN,
1290
+ source_api_url: Optional[str] = ARGUMENT_SOURCE_API_URL,
1291
+ internal_source_api_url: Optional[str] = ARGUMENT_INTERNAL_SOURCE_API_URL,
1292
+ source_api_token: Optional[str] = ARGUMENT_SOURCE_API_TOKEN,
1293
+ target_api_url: Optional[str] = ARGUMENT_TARGET_API_URL,
1294
+ target_api_token: Optional[str] = ARGUMENT_TARGET_API_TOKEN,
1295
+ compose: Optional[str] = typer.Option(
1296
+ None,
1297
+ help="Force compose used to provision test environment.", # noqa
1298
+ ),
1299
+ pool: Optional[str] = typer.Option(
1300
+ None,
1301
+ help="Force pool to provision.",
1302
+ ),
1303
+ cli_tmt_context: Optional[List[str]] = OPTION_TMT_CONTEXT,
1304
+ variables: Optional[List[str]] = OPTION_VARIABLES,
1305
+ git_url: Optional[str] = typer.Option(None, help="Force URL of the GIT repository to test."),
1306
+ git_ref: Optional[str] = typer.Option(None, help="Force GIT ref or branch to test."),
1307
+ git_merge_sha: Optional[str] = typer.Option(None, help="Force GIT ref or branch into which --ref will be merged."),
1308
+ hardware: List[str] = OPTION_HARDWARE,
1309
+ tags: Optional[List[str]] = OPTION_TAGS,
1310
+ tmt_plan_name: Optional[str] = OPTION_TMT_PLAN_NAME,
1311
+ tmt_plan_filter: Optional[str] = OPTION_TMT_PLAN_FILTER,
1312
+ tmt_test_name: Optional[str] = OPTION_TMT_TEST_NAME,
1313
+ tmt_test_filter: Optional[str] = OPTION_TMT_TEST_FILTER,
1314
+ tmt_path: Optional[str] = OPTION_TMT_PATH,
1315
+ tmt_discover: Optional[List[str]] = _generate_tmt_extra_args("discover"),
1316
+ tmt_prepare: Optional[List[str]] = _generate_tmt_extra_args("prepare"),
1317
+ tmt_finish: Optional[List[str]] = _generate_tmt_extra_args("finish"),
1318
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
1319
+ no_wait: bool = typer.Option(False, help="Skip waiting for request completion."),
1320
+ dry_run: bool = OPTION_DRY_RUN,
1321
+ pipeline_type: Optional[PipelineType] = OPTION_PIPELINE_TYPE,
1322
+ parallel_limit: Optional[int] = OPTION_PARALLEL_LIMIT,
1323
+ reserve: bool = OPTION_RESERVE,
1324
+ ssh_public_keys: List[str] = _option_ssh_public_keys(REQUEST_PANEL_RESERVE),
1325
+ autoconnect: bool = _option_autoconnect(REQUEST_PANEL_RESERVE),
1326
+ reservation_duration: int = _option_reservation_duration(REQUEST_PANEL_RESERVE),
1327
+ debug_reservation: bool = _option_debug_reservation(REQUEST_PANEL_RESERVE),
1328
+ ):
1329
+ """
1330
+ Restart a Testing Farm request.
1331
+
1332
+ Just pass a request ID or a URL containing a request ID to restart it.
1333
+ """
1334
+
1335
+ # Accept these arguments only via environment variables
1336
+ check_unexpected_arguments(
1337
+ context,
1338
+ "api_url",
1339
+ "api_token",
1340
+ "internal_api_url",
1341
+ "source_api_url",
1342
+ "internal_source_api_url",
1343
+ "source_api_token",
1344
+ "target_api_url",
1345
+ "target_api_token",
1346
+ )
1347
+
1348
+ # Determine source configuration (fallback to general settings)
1349
+ effective_source_api_url = source_api_url or api_url
1350
+ effective_internal_source_api_url = internal_source_api_url or internal_api_url
1351
+ effective_source_api_token = source_api_token or api_token
1352
+
1353
+ # Determine target configuration (fallback to general settings)
1354
+ effective_target_api_url = target_api_url or api_url
1355
+ effective_target_api_token = target_api_token or api_token
1356
+
1357
+ # Extract the UUID from the request_id string
1358
+ _request_id = extract_uuid(request_id)
1359
+
1360
+ # Construct URL to the internal API
1361
+ get_url = urllib.parse.urljoin(str(effective_internal_source_api_url), f"v0.1/requests/{_request_id}")
1362
+
1363
+ # Setting up retries
1364
+ session = requests.Session()
1365
+ install_http_retries(session)
1366
+
1367
+ # Get the request details
1368
+ response = session.get(get_url, headers=authorization_headers(effective_source_api_token))
1369
+
1370
+ if response.status_code == 401:
1371
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1372
+
1373
+ # The API token is valid, but it doesn't own the request
1374
+ if response.status_code == 403:
1375
+ console.print(
1376
+ "⚠️ [yellow] You are not the owner of this request. Any secrets associated with the "
1377
+ "request will not be included on the restart.[/yellow]"
1378
+ )
1379
+ # Construct URL to the API
1380
+ get_url = urllib.parse.urljoin(str(effective_source_api_url), f"v0.1/requests/{_request_id}")
1381
+
1382
+ # Get the request details
1383
+ response = session.get(get_url)
1384
+
1385
+ if response.status_code != 200:
1386
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1387
+
1388
+ request = response.json()
1389
+
1390
+ # Transform to a request
1391
+ request['environments'] = request['environments_requested']
1392
+
1393
+ # Remove all keys except test, environments and settings
1394
+ for key in list(request):
1395
+ if key not in ['test', 'environments', 'settings']:
1396
+ del request[key]
1397
+
1398
+ test = request['test']
1399
+
1400
+ # Remove all empty keys in test
1401
+ for key in list(test):
1402
+ for subkey in list(test[key] or []):
1403
+ if not test[key][subkey]:
1404
+ del test[key][subkey]
1405
+ if not test[key]:
1406
+ del test[key]
1407
+
1408
+ # pick the test definition (fmf or sti)
1409
+ test = request['test'][list(request['test'].keys())[0]]
1410
+
1411
+ if git_url:
1412
+ test["url"] = git_url
1413
+
1414
+ if git_ref:
1415
+ test["ref"] = git_ref
1416
+
1417
+ if tmt_test_name:
1418
+ test["test_name"] = tmt_test_name
1419
+
1420
+ if tmt_test_filter:
1421
+ test["test_filter"] = tmt_test_filter
1422
+
1423
+ merge_sha_info = ""
1424
+ if git_merge_sha:
1425
+ test["merge_sha"] = git_merge_sha
1426
+ merge_sha_info = f"merge_sha [blue]{git_merge_sha}[/blue]"
1427
+
1428
+ console.print(f"📦 repository [blue]{test['url']}[/blue] ref [blue]{test['ref']}[/blue] {merge_sha_info}")
1429
+
1430
+ # Set compose
1431
+ if compose:
1432
+ console.print(f"💻 forcing compose [blue]{compose}[/blue]")
1433
+ for environment in request['environments']:
1434
+ if environment.get("os") is None:
1435
+ environment["os"] = {}
1436
+ environment["os"]["compose"] = compose
1437
+
1438
+ if hardware:
1439
+ console.print(f"💻 forcing hardware [blue]{' '.join(hardware)}[/blue]")
1440
+ for environment in request['environments']:
1441
+ environment["hardware"] = hw_constraints(hardware)
1442
+
1443
+ if pool:
1444
+ console.print(f"💻 forcing pool [blue]{pool}[/blue]")
1445
+ for environment in request['environments']:
1446
+ environment["pool"] = pool
1447
+
1448
+ if tmt_discover or tmt_prepare or tmt_finish:
1449
+ for environment in request["environments"]:
1450
+ if "tmt" not in environment:
1451
+ environment["tmt"] = {"extra_args": {}}
1452
+ if "extra_args" not in environment["tmt"]:
1453
+ environment["tmt"]["extra_args"] = {}
1454
+
1455
+ if tmt_discover:
1456
+ for environment in request["environments"]:
1457
+ environment["tmt"]["extra_args"]["discover"] = tmt_discover
1458
+
1459
+ if tmt_prepare:
1460
+ for environment in request["environments"]:
1461
+ environment["tmt"]["extra_args"]["prepare"] = tmt_prepare
1462
+
1463
+ if tmt_finish:
1464
+ for environment in request["environments"]:
1465
+ environment["tmt"]["extra_args"]["finish"] = tmt_finish
1466
+
1467
+ if cli_tmt_context:
1468
+ for environment in request["environments"]:
1469
+ environment["tmt"]["context"] = options_to_dict("tmt context", cli_tmt_context)
1470
+
1471
+ if variables:
1472
+ for environment in request["environments"]:
1473
+ environment["variables"] = options_to_dict("environment variables", variables)
1474
+
1475
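+ # determine the test type of the original request: "fmf" for tmt tests, otherwise "sti"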
+ test_type = "fmf" if "fmf" in request["test"] else "sti"
1476
+
1477
+ if tmt_plan_name:
1478
+ if test_type == "sti":
1479
+ exit_error("The '--plan' option is compabitble only with 'tmt` tests.")
1480
+ request["test"][test_type]["name"] = tmt_plan_name
1481
+
1482
+ if tmt_plan_filter:
1483
+ if test_type == "sti":
1484
+ exit_error("The '--plan-filter' option is compabitble only with 'tmt` tests.")
1485
+ request["test"][test_type]["plan_filter"] = tmt_plan_filter
1486
+
1487
+ if test_type == "fmf":
1488
+ # The method is explained in https://github.com/fastapi/typer/discussions/668
1489
+ if context.get_parameter_source("tmt_path") == ParameterSource.COMMANDLINE:
1490
+ request["test"][test_type]["path"] = tmt_path
1491
+
1492
+ # worker image
1493
+ if worker_image:
1494
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1495
+ request["settings"] = request["settings"] if request.get("settings") else {}
1496
+ request["settings"]["worker"] = {"image": worker_image}
1497
+ # the pipeline key must also be set, otherwise the API will fail
1498
+ request["settings"]["pipeline"] = request["settings"].get("pipeline", {})
1499
+
1500
+ if pipeline_type or parallel_limit:
1501
+ if "settings" not in request:
1502
+ request["settings"] = {}
1503
+ if "pipeline" not in request["settings"]:
1504
+ request["settings"]["pipeline"] = {}
1505
+
1506
+ if pipeline_type:
1507
+ request["settings"]["pipeline"]["type"] = pipeline_type.value
1508
+
1509
+ if parallel_limit:
1510
+ request["settings"]["pipeline"]["parallel-limit"] = parallel_limit
1511
+
1512
+ if tags:
1513
+ for environment in request["environments"]:
1514
+ if "settings" not in environment or not environment["settings"]:
1515
+ environment["settings"] = {}
1516
+
1517
+ if 'provisioning' not in environment["settings"]:
1518
+ environment["settings"]["provisioning"] = {}
1519
+
1520
+ environment["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1521
+
1522
+ if reserve:
1523
+ if not _contains_compose(request["environments"]):
1524
+ exit_error("Reservations are not supported with container executions, cannot continue")
1525
+
1526
+ if len(request["environments"]) > 1:
1527
+ exit_error("Reservations are currently supported for a single plan, cannot continue")
1528
+
1529
+ # support cases where the user has multiple localhost addresses
1530
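+ # resolve the public IP several times and deduplicate the resulting ingress rules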
+ rules = _parse_security_group_rules(
1531
+ list({_localhost_ingress_rule(requests.Session()) for _ in range(0, settings.PUBLIC_IP_RESOLVE_TRIES)}), []
1532
+ )
1533
+
1534
+ for environment in request["environments"]:
1535
+ _add_reservation(
1536
+ ssh_public_keys=ssh_public_keys,
1537
+ rules=rules,
1538
+ duration=reservation_duration,
1539
+ environment=environment,
1540
+ debug_reservation=debug_reservation,
1541
+ )
1542
+
1543
+ machine_pre = (
1544
+ "Machine" if len(request["environments"]) == 1 else str(len(request["environments"])) + " machines"
1545
+ )
1546
+ console.print(
1547
+ f"🕗 {machine_pre} will be reserved after testing for [blue]{str(reservation_duration)}[/blue] minutes"
1548
+ )
1549
+
1550
+ # dry run
1551
+ if dry_run:
1552
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1553
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1554
+ raise typer.Exit()
1555
+
1556
+ # submit request to Testing Farm
1557
+ post_url = urllib.parse.urljoin(str(effective_target_api_url), "v0.1/requests")
1558
+
1559
+ # handle errors
1560
+ response = session.post(post_url, json=request, headers=authorization_headers(effective_target_api_token))
1561
+ if response.status_code == 401:
1562
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1563
+
1564
+ if response.status_code == 400:
1565
+ exit_error(
1566
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1567
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1568
+ )
1569
+
1570
+ if response.status_code != 200:
1571
+ print(response.text)
1572
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1573
+
1574
+ # watch
1575
+ watch(
1576
+ context,
1577
+ str(effective_target_api_url),
1578
+ response.json()['id'],
1579
+ no_wait,
1580
+ reserve=reserve,
1581
+ autoconnect=autoconnect,
1582
+ format=WatchFormat.text,
1583
+ )
1584
+
1585
+
1586
+ def run(
1587
+ context: typer.Context,
1588
+ arch: str = typer.Option("x86_64", "--arch", help="Hardware platform of the target machine."),
1589
+ compose: Optional[str] = typer.Option(
1590
+ None,
1591
+ help="Compose used to provision the target machine. If not set, script will be executed aginst `fedora:latest` container.", # noqa
1592
+ ),
1593
+ pool: Optional[str] = OPTION_POOL,
1594
+ hardware: List[str] = OPTION_HARDWARE,
1595
+ variables: Optional[List[str]] = OPTION_VARIABLES,
1596
+ secrets: Optional[List[str]] = OPTION_SECRETS,
1597
+ dry_run: bool = OPTION_DRY_RUN,
1598
+ verbose: bool = typer.Option(False, help="Be verbose."),
1599
+ # NOTE: we cannot use ARGUMENT_API_* because it would collide with command,
1600
+ # so we use the OPTION variants for this command instead
1601
+ api_url: str = OPTION_API_URL,
1602
+ api_token: str = OPTION_API_TOKEN,
1603
+ command: List[str] = typer.Argument(..., help="Command to run. Use `--` to separate COMMAND from CLI options."),
1604
+ ):
1605
+ """
1606
+ Run an arbitrary script via Testing Farm.
1607
+ """
1608
+
1609
+ # check for token
1610
+ if not api_token:
1611
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable.")
1612
+
1613
+ # create request
1614
+ request = TestingFarmRequestV1
1615
+
1616
+ test = TestTMT
1617
+ test["url"] = RUN_REPO
1618
+ test["ref"] = "main"
1619
+ test["name"] = "/testing-farm/sanity"
1620
+ request["test"]["fmf"] = test
1621
+
1622
+ environment = Environment.copy()
1623
+
1624
+ environment["arch"] = arch
1625
+ environment["pool"] = pool
1626
+
1627
+ if compose:
1628
+ environment["os"] = {"compose": compose}
1629
+
1630
+ if secrets:
1631
+ environment["secrets"] = options_to_dict("environment secrets", secrets)
1632
+
1633
+ if variables:
1634
+ environment["variables"] = options_to_dict("environment variables", variables)
1635
+
1636
+ if hardware:
1637
+ environment["hardware"] = hw_constraints(hardware)
1638
+
1639
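+ # pass the command to the request through the SCRIPT environment variable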
+ environment["variables"]["SCRIPT"] = " ".join(command)
1640
+
1641
+ request["environments"] = [environment]
1642
+
1643
+ # submit request to Testing Farm
1644
+ post_url = urllib.parse.urljoin(api_url, "v0.1/requests")
1645
+
1646
+ # Setting up retries
1647
+ session = requests.Session()
1648
+ install_http_retries(session)
1649
+
1650
+ # dry run
1651
+ if dry_run or verbose:
1652
+ console.print("[blue]🔍 showing POST json[/blue]")
1653
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1654
+ if dry_run:
1655
+ raise typer.Exit()
1656
+
1657
+ # handle errors
1658
+ response = session.post(post_url, json=request, headers=authorization_headers(api_token))
1659
+ if response.status_code == 401:
1660
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1661
+
1662
+ if response.status_code == 400:
1663
+ exit_error(f"Request is invalid. Please file an issue to {settings.ISSUE_TRACKER}")
1664
+
1665
+ if response.status_code != 200:
1666
+ print(response.text)
1667
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1668
+
1669
+ id = response.json()['id']
1670
+ get_url = urllib.parse.urljoin(api_url, f"/v0.1/requests/{id}")
1671
+
1672
+ if verbose:
1673
+ console.print(f"🔎 api [blue]{get_url}[/blue]")
1674
+
1675
+ search: Optional[re.Match[str]] = None
1676
+
1677
+ # wait for the sanity test to finish
1678
+ with Progress(
1679
+ SpinnerColumn(),
1680
+ TextColumn("[progress.description]{task.description}"),
1681
+ transient=True,
1682
+ ) as progress:
1683
+ progress.add_task(description="Preparing execution environment", total=None)
1684
+
1685
+ current_state: str = ""
1686
+
1687
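+ # poll the request state until it completes or errors out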
+ while True:
1688
+ try:
1689
+ response = session.get(get_url)
1690
+
1691
+ except requests.exceptions.ConnectionError as exc:
1692
+ exit_error(f"connection to API failed: {str(exc)}")
1693
+
1694
+ if response.status_code != 200:
1695
+ exit_error(f"Failed to get request: {response.text}")
1696
+
1697
+ request = response.json()
1698
+
1699
+ state = request["state"]
1700
+
1701
+ if state == current_state:
1702
+ time.sleep(settings.WATCH_TICK)
1703
+ continue
1704
+
1705
+ current_state = state
1706
+
1707
+ if state in ["complete", "error"]:
1708
+ break
1709
+
1710
+ if state in ["canceled", "cancel-requested"]:
1711
+ progress.stop()
1712
+ exit_error("Request canceled.")
1713
+
1714
+ time.sleep(settings.WATCH_TICK)
1715
+
1716
+ # workaround TFT-1690
1717
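+ # also retry 404 responses while fetching artifacts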
+ install_http_retries(session, status_forcelist_extend=[404], timeout=60, retry_backoff_factor=0.1)
1718
+
1719
+ # get the command output
1720
+ artifacts_url = response.json()['run']['artifacts']
1721
+
1722
+ if verbose:
1723
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1724
+
1725
+ try:
1726
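+ # extract the workdir URL from the results.xml of the run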
+ search = re.search(r'href="(.*)" name="workdir"', session.get(f"{artifacts_url}/results.xml").text)
1727
+
1728
+ except requests.exceptions.SSLError:
1729
+ console.print(
1730
+ "\r🚫 [yellow]artifacts unreachable via SSL, do you have RH CA certificates installed?[/yellow]"
1731
+ )
1732
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1733
+
1734
+ except requests.exceptions.ConnectionError:
1735
+ console.print("\r🚫 [yellow]artifacts unreachable, are you on VPN?[/yellow]")
1736
+ console.print(f"\r🚢 artifacts [blue]{artifacts_url}[/blue]")
1737
+ return
1738
+
1739
+ if not search:
1740
+ exit_error("Could not find working directory, cannot continue")
1741
+
1742
+ workdir = str(search.groups(1)[0])
1743
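+ # path of the script output inside the sanity plan workdir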
+ output = f"{workdir}/testing-farm/sanity/execute/data/guest/default-0/testing-farm/script-1/output.txt"
1744
+
1745
+ if verbose:
1746
+ console.print(f"\r👷 workdir [blue]{workdir}[/blue]")
1747
+ console.print(f"\r📤 output [blue]{output}[/blue]")
1748
+
1749
+ response = session.get(output)
1750
+ console.print(response.text, end="")
1751
+
1752
+
1753
+ def reserve(
1754
+ context: typer.Context,
1755
+ api_url: str = ARGUMENT_API_URL,
1756
+ api_token: str = ARGUMENT_API_TOKEN,
1757
+ ssh_public_keys: List[str] = _option_ssh_public_keys(RESERVE_PANEL_GENERAL),
1758
+ reservation_duration: int = _option_reservation_duration(RESERVE_PANEL_GENERAL),
1759
+ arch: str = typer.Option(
1760
+ "x86_64", help="Hardware platform of the system to be provisioned.", rich_help_panel=RESERVE_PANEL_ENVIRONMENT
1761
+ ),
1762
+ compose: str = typer.Option(
1763
+ "Fedora-Rawhide",
1764
+ help="Compose used to provision system-under-test. By default Fedora-Rawhide.", # noqa
1765
+ rich_help_panel=RESERVE_PANEL_ENVIRONMENT,
1766
+ ),
1767
+ hardware: List[str] = OPTION_HARDWARE,
1768
+ tags: Optional[List[str]] = OPTION_TAGS,
1769
+ kickstart: Optional[List[str]] = OPTION_KICKSTART,
1770
+ pool: Optional[str] = OPTION_POOL,
1771
+ fedora_koji_build: List[str] = OPTION_FEDORA_KOJI_BUILD,
1772
+ fedora_copr_build: List[str] = OPTION_FEDORA_COPR_BUILD,
1773
+ repository: List[str] = OPTION_REPOSITORY,
1774
+ repository_file: List[str] = OPTION_REPOSITORY_FILE,
1775
+ redhat_brew_build: List[str] = OPTION_REDHAT_BREW_BUILD,
1776
+ tmt_discover: Optional[List[str]] = _generate_tmt_extra_args("discover"),
1777
+ tmt_prepare: Optional[List[str]] = _generate_tmt_extra_args("prepare"),
1778
+ tmt_finish: Optional[List[str]] = _generate_tmt_extra_args("finish"),
1779
+ dry_run: bool = OPTION_DRY_RUN,
1780
+ post_install_script: Optional[str] = OPTION_POST_INSTALL_SCRIPT,
1781
+ print_only_request_id: bool = typer.Option(
1782
+ False,
1783
+ help="Output only the request ID.",
1784
+ rich_help_panel=RESERVE_PANEL_OUTPUT,
1785
+ ),
1786
+ autoconnect: bool = _option_autoconnect(RESERVE_PANEL_GENERAL),
1787
+ worker_image: Optional[str] = OPTION_WORKER_IMAGE,
1788
+ security_group_rule_ingress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_INGRESS,
1789
+ security_group_rule_egress: Optional[List[str]] = OPTION_SECURITY_GROUP_RULE_EGRESS,
1790
+ skip_workstation_access: bool = typer.Option(
1791
+ False, help="Do not allow ingress traffic from this workstation's ip to the reserved machine"
1792
+ ),
1793
+ git_ref: Optional[str] = typer.Option(
1794
+ None, help="Force GIT ref or branch. Useful for testing changes to reservation plan."
1795
+ ),
1796
+ debug_reservation: bool = _option_debug_reservation(),
1797
+ ):
1798
+ """
1799
+ Reserve a system in Testing Farm.
1800
+ """
1801
+
1802
+ def _echo(message: str) -> None:
1803
+ if not print_only_request_id:
1804
+ console.print(message)
1805
+
1806
+ _sanity_reserve()
1807
+
1808
+ # Accept these arguments only via environment variables
1809
+ check_unexpected_arguments(context, "api_url", "api_token")
1810
+
1811
+ # check for token
1812
+ if not settings.API_TOKEN:
1813
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable.")
1814
+
1815
+ pool_info = f"via pool [blue]{pool}[/blue]" if pool else ""
1816
+ console.print(f"💻 [blue]{compose}[/blue] on [blue]{arch}[/blue] {pool_info}")
1817
+
1818
+ # test details
1819
+ test = TestTMT
1820
+ test["url"] = RESERVE_URL
1821
+ test["ref"] = git_ref or RESERVE_REF
1822
+ test["name"] = RESERVE_PLAN
1823
+
1824
+ # environment details
1825
+ environment = Environment.copy()
1826
+ environment["arch"] = arch
1827
+ environment["pool"] = pool
1828
+ environment["artifacts"] = []
1829
+
1830
+ if "settings" not in environment:
1831
+ environment["settings"] = {}
1832
+
1833
+ if post_install_script or security_group_rule_ingress or security_group_rule_egress or tags:
1834
+ if "settings" not in environment:
1835
+ environment["settings"] = {}
1836
+
1837
+ if "provisioning" not in environment["settings"]:
1838
+ environment["settings"]["provisioning"] = {}
1839
+
1840
+ if "tags" not in environment["settings"]["provisioning"]:
1841
+ environment["settings"]["provisioning"]["tags"] = {}
1842
+
1843
+ # the reserve command is for interacting with the guest, so non-spot instances
1844
+ # are nicer for the user, who should not be shocked by losing their work.
1845
+ environment["settings"]["provisioning"]["tags"]["ArtemisUseSpot"] = "false"
1846
+
1847
+ if compose:
1848
+ environment["os"] = {"compose": compose}
1849
+
1850
+ if hardware:
1851
+ environment["hardware"] = hw_constraints(hardware)
1852
+
1853
+ if tags:
1854
+ environment["settings"]["provisioning"]["tags"] = options_to_dict("tags", tags)
1855
+
1856
+ if kickstart:
1857
+ # Typer escapes newlines in options, we need to unescape them
1858
+ kickstart = [codecs.decode(value, 'unicode_escape') for value in kickstart]
1859
+ environment["kickstart"] = options_to_dict("environment kickstart", kickstart)
1860
+
1861
+ if redhat_brew_build:
1862
+ environment["artifacts"].extend(artifacts("redhat-brew-build", redhat_brew_build))
1863
+
1864
+ if fedora_koji_build:
1865
+ environment["artifacts"].extend(artifacts("fedora-koji-build", fedora_koji_build))
1866
+
1867
+ if fedora_copr_build:
1868
+ environment["artifacts"].extend(artifacts("fedora-copr-build", fedora_copr_build))
1869
+
1870
+ if repository:
1871
+ environment["artifacts"].extend(artifacts("repository", repository))
1872
+
1873
+ if repository_file:
1874
+ environment["artifacts"].extend(artifacts("repository-file", repository_file))
1875
+
1876
+ if post_install_script:
1877
+ environment["settings"]["provisioning"]["post_install_script"] = post_install_script
1878
+
1879
+ if tmt_discover or tmt_prepare or tmt_finish:
1880
+ environment["tmt"] = {"extra_args": {}}
1881
+
1882
+ if tmt_discover:
1883
+ environment["tmt"]["extra_args"]["discover"] = tmt_discover
1884
+
1885
+ if tmt_prepare:
1886
+ environment["tmt"]["extra_args"]["prepare"] = tmt_prepare
1887
+
1888
+ if tmt_finish:
1889
+ environment["tmt"]["extra_args"]["finish"] = tmt_finish
1890
+
1891
+ # Setting up retries
1892
+ session = requests.Session()
1893
+ install_http_retries(session)
1894
+
1895
+ if not skip_workstation_access or security_group_rule_ingress or security_group_rule_egress:
1896
+ ingress_rules = security_group_rule_ingress or []
1897
+ if not skip_workstation_access:
1898
+ # support cases where the user has multiple localhost addresses
1899
+ ingress_rules.extend(
1900
+ {_localhost_ingress_rule(requests.Session()) for _ in range(0, settings.PUBLIC_IP_RESOLVE_TRIES)}
1901
+ )
1902
+
1903
+ rules = _parse_security_group_rules(ingress_rules, security_group_rule_egress or [])
1904
+ environment["settings"]["provisioning"].update(rules)
1905
+
1906
+ console.print(f"🕗 Reserved for [blue]{str(reservation_duration)}[/blue] minutes")
1907
+
1908
+ if "variables" not in environment or environment["variables"] is None:
1909
+ environment["variables"] = {}
1910
+
1911
+ environment["variables"]["TF_RESERVATION_DURATION"] = str(reservation_duration)
1912
+
1913
+ if debug_reservation:
1914
+ environment["variables"]["TF_RESERVATION_DEBUG"] = "1"
1915
+
1916
+ authorized_keys = read_glob_paths(ssh_public_keys).encode("utf-8")
1917
+ if not authorized_keys:
1918
+ exit_error(f"No public SSH keys found under {', '.join(ssh_public_keys)}, cannot continue.")
1919
+
1920
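+ # pass the collected public keys to the reservation as a base64-encoded secret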
+ authorized_keys_bytes = base64.b64encode(authorized_keys)
1921
+ environment["secrets"] = {"TF_RESERVATION_AUTHORIZED_KEYS_BASE64": authorized_keys_bytes.decode("utf-8")}
1922
+
1923
+ # create final request
1924
+ request = TestingFarmRequestV1
1925
+ request["test"]["fmf"] = test
1926
+
1927
+ # worker image
1928
+ if worker_image:
1929
+ console.print(f"👷 Forcing worker image [blue]{worker_image}[/blue]")
1930
+ request["settings"] = request["settings"] if request.get("settings") else {}
1931
+ request["settings"]["worker"] = {"image": worker_image}
1932
+
1933
+ request["environments"] = [environment]
1934
+
1935
+ # if the reservation duration exceeds the pipeline timeout, adjust the pipeline timeout as well
1936
+ if reservation_duration > DEFAULT_PIPELINE_TIMEOUT:
1937
+ request["settings"] = {"pipeline": {"timeout": reservation_duration}}
1938
+ console.print(f"⏳ Maximum reservation time is {reservation_duration} minutes")
1939
+ else:
1940
+ console.print(f"⏳ Maximum reservation time is {DEFAULT_PIPELINE_TIMEOUT} minutes")
1941
+
1942
+ # submit request to Testing Farm
1943
+ post_url = urllib.parse.urljoin(api_url, "v0.1/requests")
1944
+
1945
+ # dry run
1946
+ if dry_run:
1947
+ if print_only_request_id:
1948
+ console.print("🔍 Dry run, print-only-request-id is set. Nothing will be shown", style="bright_yellow")
1949
+ else:
1950
+ console.print("🔍 Dry run, showing POST json only", style="bright_yellow")
1951
+ print(json.dumps(request, indent=4, separators=(',', ': ')))
1952
+ raise typer.Exit()
1953
+
1954
+ # handle errors
1955
+ response = session.post(post_url, json=request, headers=authorization_headers(api_token))
1956
+ if response.status_code == 401:
1957
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
1958
+
1959
+ if response.status_code == 400:
1960
+ exit_error(
1961
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
1962
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
1963
+ )
1964
+
1965
+ if response.status_code != 200:
1966
+ print(response.text)
1967
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
1968
+
1969
+ id = response.json()['id']
1970
+ get_url = urllib.parse.urljoin(api_url, f"/v0.1/requests/{id}")
1971
+
1972
+ if not print_only_request_id:
1973
+ console.print(f"🔎 [blue]{get_url}[/blue]")
1974
+ else:
1975
+ console.print(id)
1976
+
1977
+ # IP address or hostname of the guest, extracted from pipeline.log
1978
+ guest: str = ""
1979
+
1980
+ # wait for the reserve task to reserve the machine
1981
+ with Progress(
1982
+ SpinnerColumn(),
1983
+ TextColumn("[progress.description]{task.description}"),
1984
+ transient=True,
1985
+ ) as progress:
1986
+ task_id = None
1987
+
1988
+ if not print_only_request_id:
1989
+ task_id = progress.add_task(description="Creating reservation", total=None)
1990
+
1991
+ current_state: str = ""
1992
+
1993
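+ # first wait until the request pipeline starts running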
+ while current_state != "running":
1994
+ try:
1995
+ response = session.get(get_url)
1996
+
1997
+ except requests.exceptions.ConnectionError as exc:
1998
+ exit_error(f"connection to API failed: {str(exc)}")
1999
+
2000
+ if response.status_code != 200:
2001
+ exit_error(f"Failed to get request: {response.text}")
2002
+
2003
+ request = response.json()
2004
+
2005
+ state = request["state"]
2006
+
2007
+ if state == current_state:
2008
+ time.sleep(settings.WATCH_TICK)
2009
+ continue
2010
+
2011
+ current_state = state
2012
+
2013
+ if state in ["complete", "error"]:
2014
+ exit_error("Reservation failed, check the API request or contact Testing Farm.")
2015
+
2016
+ if state in ["canceled", "cancel-requested"]:
2017
+ progress.stop()
2018
+ exit_error("Reservation canceled.")
2019
+
2020
+ if not print_only_request_id and task_id is not None:
2021
+ progress.update(task_id, description=f"Reservation job is [yellow]{current_state}[/yellow]")
2022
+
2023
+ time.sleep(settings.WATCH_TICK)
2024
+
2025
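+ # then follow pipeline.log until the reserved guest is ready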
+ while current_state != "ready":
2026
+ if not print_only_request_id and task_id:
2027
+ progress.update(task_id, description=f"Reservation job is [yellow]{current_state}[/yellow]")
2028
+
2029
+ # get the command output
2030
+ artifacts_url = response.json()['run']['artifacts']
2031
+
2032
+ try:
2033
+ pipeline_log = session.get(f"{artifacts_url}/pipeline.log").text
2034
+
2035
+ if not pipeline_log:
2036
+ exit_error(f"Pipeline log was empty. Please file an issue to {settings.ISSUE_TRACKER}.")
2037
+
2038
+ except requests.exceptions.SSLError:
2039
+ exit_error(
2040
+ textwrap.dedent(
2041
+ f"""
2042
+ Failed to access Testing Farm artifacts because of SSL validation error.
2043
+ If you use Red Hat Ranch please make sure you have Red Hat CA certificates installed.
2044
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
2045
+ """
2046
+ )
2047
+ )
2048
+ return
2049
+
2050
+ except requests.exceptions.ConnectionError:
2051
+ exit_error(
2052
+ textwrap.dedent(
2053
+ f"""
2054
+ Failed to access Testing Farm artifacts.
2055
+ If you use Red Hat Ranch please make sure you are connected to the VPN.
2056
+ Otherwise file an issue to {settings.ISSUE_TRACKER}.
2057
+ """
2058
+ )
2059
+ )
2060
+ return
2061
+
2062
+ if 'Result of testing: ERROR' in pipeline_log:
2063
+ exit_error(
2064
+ textwrap.dedent(
2065
+ f"""
2066
+ Failed to run reservation task.
2067
+ Check status page {settings.STATUS_PAGE} for outages.
2068
+ File an issue to {settings.ISSUE_TRACKER} if needed.
2069
+ """
2070
+ )
2071
+ )
2072
+
2073
+ if '[testing-farm-request] Cancelling pipeline' in pipeline_log:
2074
+ progress.stop()
2075
+ exit_error('Pipeline was canceled.')
2076
+
2077
+ if '[pre-artifact-installation]' in pipeline_log:
2078
+ current_state = "preparing environment"
2079
+
2080
+ elif 'Guest is being provisioned' in pipeline_log:
2081
+ current_state = "provisioning resources"
2082
+
2083
+ # match any hostname or IP address from gluetool modules log
2084
+ search = re.search(r'Guest is ready.*root@([\d\w\.-]+)', pipeline_log)
2085
+
2086
+ if search and 'execute task #1' in pipeline_log:
2087
+ current_state = "ready"
2088
+ guest = search.group(1)
2089
+
2090
+ time.sleep(settings.WATCH_TICK)
2091
+
2092
+ console.print(f"🌎 ssh root@{guest}")
2093
+
2094
+ if autoconnect:
2095
+ os.system(f"{SSH_RESERVATION_OPTIONS} root@{guest}") # noqa: E501
2096
+
2097
+
2098
+ def update():
2099
+ """
2100
+ Update the CLI tool container image.
2101
+ """
2102
+ # NOTE: This command is handled by the shell wrapper, see the `container/testing-farm` file
2103
+ pass
2104
+
2105
+
2106
+ def cancel(
2107
+ context: typer.Context,
2108
+ request_id: str = typer.Argument(
2109
+ ..., help="Testing Farm request to cancel. Specified by a request ID or a string containing it."
2110
+ ),
2111
+ api_url: str = ARGUMENT_API_URL,
2112
+ api_token: str = ARGUMENT_API_TOKEN,
2113
+ ):
2114
+ """
2115
+ Cancel a Testing Farm request.
2116
+ """
2117
+
2118
+ # Accept these arguments only via environment variables
2119
+ check_unexpected_arguments(context, "api_url", "api_token")
2120
+
2121
+ # Extract the UUID from the request_id string
2122
+ _request_id = extract_uuid(request_id)
2123
+
2124
+ if not api_token:
2125
+ exit_error("No API token found in the environment, please export 'TESTING_FARM_API_TOKEN' variable.")
2126
+ return
2127
+
2128
+ # Construct URL to the API
2129
+ request_url = urllib.parse.urljoin(str(api_url), f"v0.1/requests/{_request_id}")
2130
+
2131
+ # Setting up retries
2132
+ session = requests.Session()
2133
+ install_http_retries(session)
2134
+
2135
+ # Get the request details
2136
+ response = session.delete(request_url, headers=authorization_headers(api_token))
2137
+
2138
+ if response.status_code == 401:
2139
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
2140
+
2141
+ if response.status_code == 403:
2142
+ exit_error(
2143
+ "You cannot cancel foreign requests. You can only cancel your own requests "
2144
+ "or must have 'admin' permissions."
2145
+ )
2146
+
2147
+ if response.status_code == 404:
2148
+ exit_error("Request was not found. Verify the request ID is correct.")
2149
+
2150
+ if response.status_code == 204:
2151
+ exit_error("Request was already canceled.")
2152
+
2153
+ if response.status_code == 409:
2154
+ exit_error("Requeted cannot be canceled, it is already finished.")
2155
+
2156
+ if response.status_code != 200:
2157
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
2158
+
2159
+ console.print("✅ Request [yellow]cancellation requested[/yellow]. It will be canceled soon.")
2160
+
2161
+
2162
+ def encrypt(
2163
+ context: typer.Context,
2164
+ message: str = typer.Argument(..., help="Message to be encrypted."),
2165
+ api_url: str = ARGUMENT_API_URL,
2166
+ api_token: str = ARGUMENT_API_TOKEN,
2167
+ git_url: Optional[str] = typer.Option(
2168
+ None,
2169
+ help="URL of a GIT repository to which the secret will be tied. If not set, it is detected from the current "
2170
+ "git repository.",
2171
+ ),
2172
+ token_id: Optional[str] = typer.Option(
2173
+ None,
2174
+ help="Token ID to which the secret will be tied. If not set, Token ID will be detected from provided Token.",
2175
+ ),
2176
+ ):
2177
+ """
2178
+ Create secrets for use in in-repository configuration.
2179
+ """
2180
+
2181
+ # Accept these arguments only via environment variables
2182
+ check_unexpected_arguments(context, "api_url", "api_token")
2183
+
2184
+ # check for token
2185
+ if not api_token:
2186
+ exit_error("No API token found, export `TESTING_FARM_API_TOKEN` environment variable")
2187
+
2188
+ git_available = bool(shutil.which("git"))
2189
+
2190
+ # resolve git repository details from the current repository
2191
+ if not git_url:
2192
+ if not git_available:
2193
+ exit_error("no git url defined")
2194
+ git_url = cmd_output_or_exit("git remote get-url origin", "could not auto-detect git url")
2195
+ # use https instead of git when auto-detected
2196
+ # GitHub: git@github.com:containers/podman.git
2197
+ # GitLab: git@gitlab.com:testing-farm/cli.git, git+ssh://git@gitlab.com/spoore/centos_rpms_jq.git
2198
+ # Pagure: ssh://git@pagure.io/fedora-ci/messages.git
2199
+ assert git_url
2200
+ git_url = re.sub(r"^(?:(?:git\+)?ssh://)?git@([^:/]*)[:/](.*)", r"https://\1/\2", git_url)
2201
+
2202
+ payload = {'url': git_url, 'message': message}
2203
+
2204
+ if token_id:
2205
+ payload['token_id'] = token_id
2206
+ console_stderr.print(f'🔒 Encrypting secret for token id {token_id} for repository {git_url}')
2207
+ else:
2208
+ console_stderr.print(f'🔒 Encrypting secret for your token in repo {git_url}')
2209
+
2210
+ # submit request to Testing Farm
2211
+ post_url = urllib.parse.urljoin(api_url, "/v0.1/secrets/encrypt")
2212
+
2213
+ session = requests.Session()
2214
+ response = session.post(post_url, json=payload, headers={'Authorization': f'Bearer {api_token}'})
2215
+
2216
+ # handle errors
2217
+ if response.status_code == 401:
2218
+ exit_error(f"API token is invalid. See {settings.ONBOARDING_DOCS} for more information.")
2219
+
2220
+ if response.status_code == 400:
2221
+ exit_error(
2222
+ f"Request is invalid. {response.json().get('message') or 'Reason unknown.'}."
2223
+ f"\nPlease file an issue to {settings.ISSUE_TRACKER} if unsure."
2224
+ )
2225
+
2226
+ if response.status_code != 200:
2227
+ console_stderr.print(response.text)
2228
+ exit_error(f"Unexpected error. Please file an issue to {settings.ISSUE_TRACKER}.")
2229
+
2230
+ console_stderr.print(
2231
+ "💡 See https://docs.testing-farm.io/Testing%20Farm/0.1/test-request.html#secrets-in-repo-config for more "
2232
+ "information on how to store the secret in repository."
2233
+ )
2234
+ console.print(response.text)