qontract-reconcile 0.10.2.dev14__py3-none-any.whl → 0.10.2.dev16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/METADATA +1 -1
- {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/RECORD +135 -135
- reconcile/acs_rbac.py +2 -4
- reconcile/aus/base.py +13 -13
- reconcile/aws_ami_share.py +1 -2
- reconcile/aws_cloudwatch_log_retention/integration.py +1 -1
- reconcile/aws_saml_idp/integration.py +1 -1
- reconcile/aws_saml_roles/integration.py +1 -1
- reconcile/aws_version_sync/integration.py +3 -3
- reconcile/change_owners/change_owners.py +8 -5
- reconcile/change_owners/change_types.py +18 -18
- reconcile/change_owners/changes.py +8 -9
- reconcile/change_owners/decision.py +12 -15
- reconcile/change_owners/self_service_roles.py +6 -4
- reconcile/change_owners/tester.py +8 -10
- reconcile/cli.py +12 -14
- reconcile/closedbox_endpoint_monitoring_base.py +1 -1
- reconcile/cna/integration.py +2 -2
- reconcile/dashdotdb_base.py +2 -2
- reconcile/dashdotdb_cso.py +1 -1
- reconcile/dashdotdb_dora.py +6 -4
- reconcile/dashdotdb_slo.py +1 -1
- reconcile/database_access_manager.py +15 -19
- reconcile/email_sender.py +4 -8
- reconcile/endpoints_discovery/integration.py +137 -98
- reconcile/external_resources/secrets_sync.py +2 -2
- reconcile/external_resources/state.py +17 -17
- reconcile/gabi_authorized_users.py +3 -3
- reconcile/gcr_mirror.py +2 -2
- reconcile/github_org.py +9 -13
- reconcile/gitlab_housekeeping.py +1 -1
- reconcile/gitlab_owners.py +10 -12
- reconcile/gitlab_permissions.py +5 -4
- reconcile/glitchtip/integration.py +14 -14
- reconcile/glitchtip_project_alerts/integration.py +3 -4
- reconcile/gql_definitions/endpoints_discovery/{namespaces.py → apps.py} +22 -22
- reconcile/integrations_manager.py +1 -2
- reconcile/jenkins_job_builds_cleaner.py +7 -5
- reconcile/jenkins_roles.py +10 -6
- reconcile/jenkins_worker_fleets.py +5 -4
- reconcile/jira_permissions_validator.py +2 -6
- reconcile/ldap_groups/integration.py +3 -2
- reconcile/ocm_groups.py +5 -5
- reconcile/ocm_update_recommended_version.py +2 -2
- reconcile/openshift_base.py +15 -20
- reconcile/openshift_groups.py +9 -8
- reconcile/openshift_namespace_labels.py +3 -4
- reconcile/openshift_namespaces.py +1 -1
- reconcile/openshift_network_policies.py +1 -1
- reconcile/openshift_resources_base.py +4 -4
- reconcile/openshift_serviceaccount_tokens.py +1 -1
- reconcile/openshift_tekton_resources.py +1 -2
- reconcile/openshift_users.py +5 -4
- reconcile/prometheus_rules_tester/integration.py +8 -8
- reconcile/quay_mirror.py +3 -4
- reconcile/quay_mirror_org.py +1 -1
- reconcile/rhidp/ocm_oidc_idp/base.py +10 -15
- reconcile/run_integration.py +7 -7
- reconcile/saas_auto_promotions_manager/publisher.py +1 -1
- reconcile/saas_auto_promotions_manager/utils/saas_files_inventory.py +3 -9
- reconcile/service_dependencies.py +2 -7
- reconcile/skupper_network/reconciler.py +5 -5
- reconcile/skupper_network/site_controller.py +3 -3
- reconcile/sql_query.py +5 -5
- reconcile/status_board.py +24 -24
- reconcile/terraform_cloudflare_users.py +2 -2
- reconcile/terraform_repo.py +6 -6
- reconcile/terraform_users.py +8 -5
- reconcile/terraform_vpc_peerings.py +1 -1
- reconcile/terraform_vpc_resources/integration.py +1 -1
- reconcile/typed_queries/app_interface_deadmanssnitch_settings.py +1 -1
- reconcile/typed_queries/app_quay_repos_escalation_policies.py +1 -1
- reconcile/typed_queries/aws_vpc_requests.py +1 -1
- reconcile/typed_queries/aws_vpcs.py +1 -1
- reconcile/typed_queries/clusters.py +1 -1
- reconcile/typed_queries/clusters_minimal.py +1 -1
- reconcile/typed_queries/clusters_with_dms.py +1 -1
- reconcile/typed_queries/dynatrace_environments.py +1 -1
- reconcile/typed_queries/dynatrace_token_provider_token_specs.py +1 -1
- reconcile/typed_queries/reserved_networks.py +1 -1
- reconcile/typed_queries/saas_files.py +1 -1
- reconcile/typed_queries/slo_documents.py +1 -1
- reconcile/typed_queries/status_board.py +1 -2
- reconcile/utils/amtool.py +2 -2
- reconcile/utils/aws_api.py +10 -10
- reconcile/utils/aws_helper.py +1 -1
- reconcile/utils/binary.py +1 -2
- reconcile/utils/differ.py +4 -7
- reconcile/utils/dnsutils.py +4 -12
- reconcile/utils/external_resources.py +1 -2
- reconcile/utils/gitlab_api.py +2 -4
- reconcile/utils/glitchtip/models.py +1 -1
- reconcile/utils/helm.py +1 -1
- reconcile/utils/instrumented_wrappers.py +2 -2
- reconcile/utils/jjb_client.py +1 -1
- reconcile/utils/jump_host.py +1 -1
- reconcile/utils/metrics.py +6 -11
- reconcile/utils/mr/aws_access.py +1 -1
- reconcile/utils/mr/base.py +2 -4
- reconcile/utils/mr/notificator.py +1 -1
- reconcile/utils/mr/ocm_upgrade_scheduler_org_updates.py +1 -1
- reconcile/utils/oc.py +17 -31
- reconcile/utils/oc_map.py +1 -1
- reconcile/utils/ocm/base.py +4 -2
- reconcile/utils/ocm/search_filters.py +4 -3
- reconcile/utils/ocm/status_board.py +2 -2
- reconcile/utils/ocm/upgrades.py +4 -7
- reconcile/utils/ocm_base_client.py +1 -1
- reconcile/utils/openshift_resource.py +1 -1
- reconcile/utils/promtool.py +1 -1
- reconcile/utils/quay_api.py +1 -3
- reconcile/utils/raw_github_api.py +3 -10
- reconcile/utils/repo_owners.py +5 -5
- reconcile/utils/rest_api_base.py +1 -2
- reconcile/utils/rosa/rosa_cli.py +3 -3
- reconcile/utils/saasherder/saasherder.py +9 -15
- reconcile/utils/secret_reader.py +2 -2
- reconcile/utils/sharding.py +2 -2
- reconcile/utils/state.py +5 -5
- reconcile/utils/terraform_client.py +2 -2
- reconcile/utils/terrascript/cloudflare_resources.py +4 -6
- reconcile/utils/terrascript_aws_client.py +16 -28
- reconcile/utils/vault.py +2 -2
- reconcile/utils/vcs.py +8 -16
- reconcile/vault_replication.py +1 -8
- tools/app_interface_reporter.py +1 -1
- tools/cli_commands/container_images_report.py +1 -1
- tools/cli_commands/cost_report/view.py +4 -2
- tools/cli_commands/gpg_encrypt.py +1 -5
- tools/qontract_cli.py +14 -13
- tools/saas_metrics_exporter/commit_distance/channel.py +1 -1
- tools/saas_promotion_state/saas_promotion_state.py +1 -1
- tools/sd_app_sre_alert_report.py +3 -3
- {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/entry_points.txt +0 -0
reconcile/utils/dnsutils.py
CHANGED
@@ -1,17 +1,9 @@
 from dns import resolver
 
 
-def get_nameservers(domain):
-    records = []
-    answers = resolver.query(domain, "NS")
-    for rdata in answers:
-        records.append(rdata.to_text())
-    return records
+def get_nameservers(domain: str) -> list[str]:
+    return [rdata.to_text() for rdata in resolver.query(domain, "NS")]
 
 
-def get_a_records(host):
-    records = []
-    answers = resolver.query(host, "A")
-    for rdata in answers:
-        records.append(rdata.to_text())
-    return records
+def get_a_records(host: str) -> list[str]:
+    return [rdata.to_text() for rdata in resolver.query(host, "A")]
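Both helpers now return a plain list of record strings built with a comprehension instead of a manually accumulated list. A minimal usage sketch, assuming dnspython is installed and a resolver is reachable (the domain is illustrative):

    from reconcile.utils.dnsutils import get_a_records, get_nameservers

    # Each call issues a DNS query and returns the answer records as text.
    print(get_nameservers("example.com"))  # e.g. ["a.iana-servers.net.", ...]
    print(get_a_records("example.com"))    # e.g. ["93.184.215.14"]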
reconcile/utils/external_resources.py
CHANGED
@@ -52,8 +52,7 @@ def get_provision_providers(namespace_info: Mapping[str, Any]) -> set[str]:
         return providers
 
     external_resources = namespace_info.get("externalResources") or []
-    for e in external_resources:
-        providers.add(e["provider"])
+    providers.update(e["provider"] for e in external_resources)
 
     return providers
 
reconcile/utils/gitlab_api.py
CHANGED
@@ -413,7 +413,7 @@ class GitLabApi: # pylint: disable=too-many-public-methods
         items = self.get_items(mr.pipelines.list)
         return sorted(
             [i.asdict() for i in items],
-            key=
+            key=itemgetter("created_at"),
             reverse=True,
         )
 
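The replacement sort key uses operator.itemgetter, which builds a callable that looks up the given key on each dict. A small standalone sketch of the same sorting pattern (sample data invented for illustration):

    from operator import itemgetter

    pipelines = [
        {"id": 1, "created_at": "2024-01-01T00:00:00Z"},
        {"id": 2, "created_at": "2024-03-01T00:00:00Z"},
    ]
    # Newest first, exactly like the rewritten call above.
    newest_first = sorted(pipelines, key=itemgetter("created_at"), reverse=True)
    print([p["id"] for p in newest_first])  # [2, 1]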
@@ -599,9 +599,7 @@ class GitLabApi: # pylint: disable=too-many-public-methods
         if label in labels:
             return
         labels.append(label)
-        note_body = (
-            f"item has been marked as {label}. " f"to remove say `/{label} cancel`"
-        )
+        note_body = f"item has been marked as {label}. to remove say `/{label} cancel`"
         gitlab_request.labels(integration=INTEGRATION_NAME).inc()
         item.notes.create({"body": note_body})
         gitlab_request.labels(integration=INTEGRATION_NAME).inc()
reconcile/utils/glitchtip/models.py
CHANGED
@@ -130,7 +130,7 @@ class ProjectAlert(BaseModel):
     ) -> MutableMapping[str, Any]:
         # name is an empty string if the alert was created manually because it can't be set via UI
         # use the pk instead.
-        values["name"] = values.get("name") or f
+        values["name"] = values.get("name") or f"alert-{values.get('pk')}"
         return values
 
     def __eq__(self, other: object) -> bool:
reconcile/utils/helm.py
CHANGED
@@ -87,7 +87,7 @@ def do_template(
         ]
         result = run(cmd, capture_output=True, check=True)
     except CalledProcessError as e:
-        msg = f
+        msg = f"Error running helm template [{' '.join(cmd)}]"
         if e.stdout:
             msg += f" {e.stdout.decode()}"
         if e.stderr:
reconcile/utils/instrumented_wrappers.py
CHANGED
@@ -10,8 +10,8 @@ from reconcile.utils import metrics
 # TODO: move these to a shared, constants module
 
 INTEGRATION_NAME = os.environ.get("INTEGRATION_NAME", "")
-SHARDS = os.environ.get("SHARDS", 1)
-SHARD_ID = int(os.environ.get("SHARD_ID", 0))
+SHARDS = int(os.environ.get("SHARDS", "1"))
+SHARD_ID = int(os.environ.get("SHARD_ID", "0"))
 
 
 class InstrumentedImage(Image):
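The two constants previously mixed types: os.environ values are always strings, so a bare integer default only applied when the variable was unset. Quoting the default and converting once keeps the type consistent, as in this small sketch (variable name reused from the hunk above):

    import os

    # Old pattern: "SHARDS" is an int when unset but a str when set.
    shards_maybe_str = os.environ.get("SHARDS", 1)

    # New pattern: always an int, regardless of whether the variable is set.
    shards = int(os.environ.get("SHARDS", "1"))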
reconcile/utils/jjb_client.py
CHANGED
@@ -236,7 +236,7 @@ class JJB: # pylint: disable=too-many-public-methods
             result = subprocess.run(
                 cmd, check=True, stdout=PIPE, stderr=STDOUT, encoding="utf-8"
             )
-            if re.search("updated: [1-9]", result.stdout):
+            if re.search(r"updated: [1-9]", result.stdout):
                 logging.info(result.stdout)
         except CalledProcessError as ex:
             logging.error(ex.stdout)
reconcile/utils/jump_host.py
CHANGED
@@ -36,7 +36,7 @@ class JumpHostBase:
     def __init__(self, parameters: JumphostParameters):
         self._hostname = parameters.hostname
         self._user = parameters.user
-        self._port = parameters.port
+        self._port = parameters.port or 22
         self._identity = parameters.key
         self._init_identity_file()
 
reconcile/utils/metrics.py
CHANGED
@@ -65,8 +65,8 @@ execution_counter = Counter(
 )
 
 reconcile_time = Histogram(
-    name="
-    documentation="Run time seconds for tracked
+    name="qontract_reconcile_function_elapsed_seconds_since_bundle_commit",
+    documentation="Run time seconds for tracked functions",
     labelnames=["name", "integration"],
     buckets=(60.0, 150.0, 300.0, 600.0, 1200.0, 1800.0, 2400.0, 3000.0, float("inf")),
 )
@@ -133,8 +133,7 @@ class BaseMetric(ABC, BaseModel):
         class name. Removes the suffix `_metric` is present. Subclasses can override this.
         """
         metric_name = re.sub(r"(?<!^)(?=[A-Z])", "_", cls.__name__).lower()
-
-        metric_name = metric_name[:-7]
+        metric_name = metric_name.removesuffix("_metric")
         return metric_name
 
 
@@ -151,8 +150,7 @@ class GaugeMetric(BaseMetric):
     @classmethod
     def name(cls) -> str:
         metric_name = super().name()
-
-        metric_name = metric_name[:-6]
+        metric_name = metric_name.removesuffix("_gauge")
         return metric_name
 
 
@@ -175,8 +173,7 @@ class CounterMetric(BaseMetric):
     @classmethod
     def name(cls) -> str:
         metric_name = super().name()
-
-        metric_name = metric_name[:-8]
+        metric_name = metric_name.removesuffix("_counter")
         return metric_name
 
 
@@ -221,9 +218,7 @@ class MetricsContainer:
         self._counters[counter.__class__][label_values] = current_value + by
 
     def _aggregate_scopes(self) -> "MetricsContainer":
-        containers = [self]
-        for sub in self._scopes.values():
-            containers.append(sub._aggregate_scopes())
+        containers = [self] + [sub._aggregate_scopes() for sub in self._scopes.values()]
         return join_metric_containers(containers)
 
     def collect(self) -> Generator[Metric, None, None]:
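The metric-name helpers previously chopped a fixed number of characters off the end, which silently mangles names that do not carry the expected suffix; str.removesuffix (Python 3.9+) only strips when the suffix is actually present. A short sketch of the difference:

    name = "reconcile_time_metric"
    print(name.removesuffix("_metric"))              # "reconcile_time"
    print("reconcile_time"[:-7])                     # "reconci" - fixed slicing strips blindly
    print("reconcile_time".removesuffix("_metric"))  # "reconcile_time" - left intact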
reconcile/utils/mr/aws_access.py
CHANGED
@@ -64,7 +64,7 @@ class CreateDeleteAwsAccessKey(MergeRequestBase):
 
         body = body_template.render(ACCOUNT=self.account, ACCESS_KEY=self.key)
         email_name = f"{self.account}-{self.key}"
-        ref = self.path
+        ref = self.path.removeprefix("data")
         content = app_interface_email(
             name=email_name, subject=self.title, aws_accounts=[ref], body=pss(body)
         )
reconcile/utils/mr/base.py
CHANGED
@@ -65,9 +65,7 @@ class MergeRequestBase(ABC):
     def cancel(self, message: str) -> None:
         self.cancelled = True
         raise CancelMergeRequest(
-            f"{self.name} MR canceled for "
-            f"branch {self.branch}. "
-            f"Reason: {message}"
+            f"{self.name} MR canceled for branch {self.branch}. Reason: {message}"
         )
 
     @property
@@ -222,7 +220,7 @@ class MergeRequestBase(ABC):
             # we are not going to let an otherwise fine MR
             # processing fail just because of this
             LOG.error(
-                f"Failed to delete branch {self.branch}.
+                f"Failed to delete branch {self.branch}. Reason: {gitlab_error}"
             )
 
     def diffs(self, gitlab_cli: GitLabApi) -> Any:
reconcile/utils/mr/notificator.py
CHANGED
@@ -40,7 +40,7 @@ class CreateAppInterfaceNotificator(MergeRequestBase):
         self._notification = notification
         self._email_base_path = email_base_path
         self._dry_run = dry_run
-        self.labels = labels
+        self.labels = labels or [DO_NOT_MERGE_HOLD]
 
     @property
     def title(self) -> str:
reconcile/utils/mr/ocm_upgrade_scheduler_org_updates.py
CHANGED
@@ -21,7 +21,7 @@ class CreateOCMUpgradeSchedulerOrgUpdates(MergeRequestBase):
 
     @property
     def description(self) -> str:
-        return f
+        return f"ocm upgrade scheduler org updates for {self.updates_info['name']}"
 
     def process(self, gitlab_cli: GitLabApi) -> None:
         changes = False
reconcile/utils/oc.py
CHANGED
@@ -213,20 +213,13 @@ class OCDecorators:
     return wrapper
 
 
+@dataclass
 class OCProcessReconcileTimeDecoratorMsg:
-
-
-
-
-
-        slow_oc_reconcile_threshold: float,
-        is_log_slow_oc_reconcile: bool,
-    ):
-        self.namespace = namespace
-        self.resource = resource
-        self.server = server
-        self.slow_oc_reconcile_threshold = slow_oc_reconcile_threshold
-        self.is_log_slow_oc_reconcile = is_log_slow_oc_reconcile
+    namespace: str
+    resource: OR
+    server: str | None
+    slow_oc_reconcile_threshold: float
+    is_log_slow_oc_reconcile: bool
 
 
 def oc_process(template, parameters=None):
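The hunk above swaps a hand-written __init__ for a @dataclass with typed fields. A minimal standalone sketch of that conversion (class and field names invented for illustration):

    from dataclasses import dataclass

    # Before: every attribute assigned manually.
    class TimingMessageManual:
        def __init__(self, namespace: str, threshold: float) -> None:
            self.namespace = namespace
            self.threshold = threshold

    # After: __init__, __repr__ and __eq__ are generated from the field list.
    @dataclass
    class TimingMessage:
        namespace: str
        threshold: float

    msg = TimingMessage(namespace="openshift-monitoring", threshold=600.0)
    print(msg)  # TimingMessage(namespace='openshift-monitoring', threshold=600.0)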
@@ -383,7 +376,7 @@ class OCCli: # pylint: disable=too-many-public-methods
         self.projects = {p["metadata"]["name"] for p in self.get_all(kind)["items"]}
 
         self.slow_oc_reconcile_threshold = float(
-            os.environ.get("SLOW_OC_RECONCILE_THRESHOLD", 600)
+            os.environ.get("SLOW_OC_RECONCILE_THRESHOLD", "600")
         )
 
         self.is_log_slow_oc_reconcile = os.environ.get(
@@ -456,7 +449,7 @@ class OCCli: # pylint: disable=too-many-public-methods
         self.projects = {p["metadata"]["name"] for p in self.get_all(kind)["items"]}
 
         self.slow_oc_reconcile_threshold = float(
-            os.environ.get("SLOW_OC_RECONCILE_THRESHOLD", 600)
+            os.environ.get("SLOW_OC_RECONCILE_THRESHOLD", "600")
         )
 
         self.is_log_slow_oc_reconcile = os.environ.get(
@@ -484,9 +477,7 @@ class OCCli: # pylint: disable=too-many-public-methods
 
         if "labels" in kwargs:
             labels_list = [f"{k}={v}" for k, v in kwargs.get("labels").items()]
-
-            cmd.append("-l")
-            cmd.append(",".join(labels_list))
+            cmd += ["-l", ",".join(labels_list)]
 
         resource_names = kwargs.get("resource_names")
         if resource_names:
@@ -766,9 +757,9 @@ class OCCli: # pylint: disable=too-many-public-methods
         if not finished_pods:
             raise JobNotRunningError(name)
 
-        latest_pod =
+        latest_pod = max(
             finished_pods, key=lambda pod: pod["metadata"]["creationTimestamp"]
-        )
+        )
         cmd = [
             "logs",
             "--all-containers=true",
@@ -1180,7 +1171,7 @@ class OCCli: # pylint: disable=too-many-public-methods
 
         if not find:
             raise StatusCodeError(
-                f"{self.server}: {apigroup_override}
+                f"{self.server}: {apigroup_override} does not have kind {kind}"
             )
         return (kind, group_version)
 
@@ -1420,7 +1411,7 @@ class OCLocal(OCCli):
 class OC:
     client_status = Counter(
         name="qontract_reconcile_native_client",
-        documentation="Cluster is using openshift
+        documentation="Cluster is using openshift native client",
         labelnames=["cluster_name", "native_client"],
     )
 
@@ -1667,7 +1658,7 @@ class OC_Map:
                 cluster,
                 OCLogMsg(
                     log_level=logging.ERROR,
-                    message=f"[{cluster}]
+                    message=f"[{cluster}] is unreachable: {e}",
                 ),
                 privileged,
             )
@@ -1770,13 +1761,9 @@ def validate_labels(labels: dict[str, str]) -> Iterable[str]:
 
     for k, v in labels.items():
         if len(v) > LABEL_MAX_VALUE_LENGTH:
-            err.append(
-                f"Label value longer than " f"{LABEL_MAX_VALUE_LENGTH} chars: {v}"
-            )
+            err.append(f"Label value longer than {LABEL_MAX_VALUE_LENGTH} chars: {v}")
         if not v_pattern.match(v):
-            err.append(
-                f"Label value is invalid, it needs to match " f"'{v_pattern}': {v}"
-            )
+            err.append(f"Label value is invalid, it needs to match '{v_pattern}': {v}")
 
         prefix, name = "", k
         if "/" in k:
@@ -1792,8 +1779,7 @@ def validate_labels(labels: dict[str, str]) -> Iterable[str]:
         )
         if not k_name_pattern.match(name):
             err.append(
-                f"Label key name is invalid, it needs to mach "
-                f"'{v_pattern}'': {name}"
+                f"Label key name is invalid, it needs to mach '{v_pattern}'': {name}"
             )
 
         if prefix:
reconcile/utils/oc_map.py
CHANGED
reconcile/utils/ocm/base.py
CHANGED
@@ -166,8 +166,10 @@ class OCMClusterAWSSettings(BaseModel):
             roles.append(self.sts.role_arn)
         if self.sts.support_role_arn:
             roles.append(self.sts.support_role_arn)
-
-
+        roles.extend(
+            instance_iam_role
+            for instance_iam_role in (self.sts.instance_iam_roles or {}).values()
+        )
         return roles
 
     @property
reconcile/utils/ocm/search_filters.py
CHANGED
@@ -355,9 +355,10 @@ class Filter:
         """
         if not self.conditions:
             raise InvalidFilterError("no conditions within filter object")
-        rendered_conditions = [
-
-
+        rendered_conditions = [
+            condition.render()
+            for condition in sorted(self.conditions, key=lambda c: c.key)
+        ]
         if self.mode == FilterMode.OR:
             concat = " or ".join(rendered_conditions)
             if len(rendered_conditions) > 1:
reconcile/utils/ocm/status_board.py
CHANGED
@@ -22,7 +22,7 @@ def get_product_applications(
             application.get("metadata", {}).get(METADATA_MANAGED_BY_KEY, "")
             == METADATA_MANAGED_BY_VALUE
         ):
-            results.append({
+            results.append({  # noqa: PERF401
                 k: v for k, v in application.items() if k in APPLICATION_DESIRED_KEYS
             })
 
@@ -36,7 +36,7 @@ def get_managed_products(ocm_api: OCMBaseClient) -> list[dict[str, Any]]:
             product.get("metadata", {}).get(METADATA_MANAGED_BY_KEY, "")
             == METADATA_MANAGED_BY_VALUE
         ):
-            results.append({
+            results.append({  # noqa: PERF401
                 k: v for k, v in product.items() if k in PRODUCTS_DESIRED_KEYS
             })
     return results
reconcile/utils/ocm/upgrades.py
CHANGED
@@ -132,7 +132,7 @@ def get_node_pool_upgrade_policies(
     for policy in ocm_api.get_paginated(
         f"{build_cluster_url(cluster_id)}/node_pools/{node_pool}/upgrade_policies"
     ):
-        results.append({
+        results.append({  # noqa: PERF401
             k: v for k, v in policy.items() if k in UPGRADE_POLICY_DESIRED_KEYS
         })
     return results
@@ -166,12 +166,9 @@ def create_version_agreement(
 def get_version_agreement(
     ocm_api: OCMBaseClient, cluster_id: str
 ) -> list[dict[str, Any]]:
-
-
-
-    ):
-        agreements.append(item)
-    return agreements
+    return list(
+        ocm_api.get_paginated(f"{build_cluster_url(cluster_id)}/gate_agreements")
+    )
 
 
 def get_version_gates(ocm_api: OCMBaseClient) -> list[OCMVersionGate]:
reconcile/utils/ocm_base_client.py
CHANGED
@@ -44,7 +44,7 @@ class OCMBaseClient:
         self._access_token_client_id = access_token_client_id
         self._access_token_url = access_token_url
         self._url = url
-        self._session = session
+        self._session = session or Session()
         self._init_access_token()
         self._init_request_headers()
 
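The constructor change above lets callers inject a pre-configured requests Session (for retries or tests) while still defaulting to a fresh one. A small sketch of the fallback pattern, assuming the requests package; the Client class is invented for illustration:

    from requests import Session

    class Client:
        def __init__(self, session: Session | None = None) -> None:
            # Keep the injected session if given, otherwise create our own.
            self._session = session or Session()

    print(isinstance(Client()._session, Session))  # True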
reconcile/utils/promtool.py
CHANGED
@@ -68,7 +68,7 @@ def _run_yaml_spec_cmd(cmd: list[str], yaml_spec: Mapping) -> CommandExecutionRe
     try:
         result = subprocess.run(cmd, capture_output=True, check=True)
     except subprocess.CalledProcessError as e:
-        msg = f
+        msg = f"Error running promtool command [{' '.join(cmd)}]"
         if e.stdout:
             msg += f" {e.stdout.decode()}"
         if e.stderr:
reconcile/utils/quay_api.py
CHANGED
@@ -45,9 +45,7 @@ class QuayApi:
         body = r.json()
 
         # Using a set because members may be repeated
-        members = set()
-        for member in body["members"]:
-            members.add(member["name"])
+        members = {member["name"] for member in body["members"]}
 
         members_list = list(members)
         self.team_members[team] = members_list
reconcile/utils/raw_github_api.py
CHANGED
@@ -44,28 +44,21 @@ class RawGithubApi:
         result = res.json()
 
         if isinstance(result, list):
-            elements = []
-
-            for element in result:
-                elements.append(element)
-
+            elements = list(result)
             while "last" in res.links and "next" in res.links:
                 if res.links["last"]["url"] == res.links["next"]["url"]:
                     req_url = res.links["next"]["url"]
                     res = requests.get(req_url, headers=h, timeout=60)
                     res.raise_for_status()
 
-                    for element in res.json():
-                        elements.append(element)
-
+                    elements.extend(element for element in res.json())
                     return elements
 
                 req_url = res.links["next"]["url"]
                 res = requests.get(req_url, headers=h, timeout=60)
                 res.raise_for_status()
 
-                for element in res.json():
-                    elements.append(element)
+                elements.extend(element for element in res.json())
 
             return elements
 
reconcile/utils/repo_owners.py
CHANGED
@@ -92,15 +92,15 @@ class RepoOwners:
         :return: the path closest owners
         :rtype: dict
         """
-        candidates = []
+        candidates = [
+            owned_path
+            for owned_path in self.owners_map
+            if os.path.commonpath([path, owned_path]) == owned_path
+        ]
 
         if "." in self.owners_map:
             candidates.append(".")
 
-        for owned_path in self.owners_map:
-            if os.path.commonpath([path, owned_path]) == owned_path:
-                candidates.append(owned_path)
-
         if candidates:
             # The longest owned_path is the chosen
             elected = max(candidates, key=len)
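The rewritten candidate search relies on os.path.commonpath: a directory owns a path when their common path equals the directory itself. A short sketch of that check (paths invented for illustration):

    import os

    def owns(owned_path: str, path: str) -> bool:
        # True when owned_path is an ancestor of (or equal to) path.
        return os.path.commonpath([path, owned_path]) == owned_path

    print(owns("services/app", "services/app/deploy.yaml"))    # True
    print(owns("services/other", "services/app/deploy.yaml"))  # False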
reconcile/utils/rest_api_base.py
CHANGED
@@ -81,8 +81,7 @@ class ApiBase:
             return response.json()
         except requests.exceptions.JSONDecodeError:
             logging.error(
-                f"Failed to decode JSON response from {url}"
-                f"Response: {response.text}"
+                f"Failed to decode JSON response from {url}Response: {response.text}"
             )
             raise
 
reconcile/utils/rosa/rosa_cli.py
CHANGED
@@ -141,13 +141,13 @@ class RosaJob(K8sJob, BaseModel, frozen=True, arbitrary_types_allowed=True):
     }
 
     def annotations(self) -> dict[str, str]:
-
+        annotations = {
             "qontract.rosa.aws_account_id": self.aws_account_id,
             "qontract.rosa.aws_region": self.aws_region,
             "qontract.rosa.ocm_org_id": self.ocm_org_id,
         }
-
-        return
+        annotations.update(self.extra_annotations)
+        return annotations
 
     def secret_data(self) -> dict[str, str]:
         return {"OCM_TOKEN": self.ocm_token}
reconcile/utils/saasherder/saasherder.py
CHANGED
@@ -226,7 +226,7 @@ class SaasHerder: # pylint: disable=too-many-public-methods
             if not allowed_secret_parameter_paths:
                 self.valid = False
                 logging.error(
-                    f"[{saas_file_name}]
+                    f"[{saas_file_name}] missing allowedSecretParameterPaths section"
                 )
                 return
             for sp in secret_parameters:
@@ -558,8 +558,7 @@ class SaasHerder: # pylint: disable=too-many-public-methods
     ) -> None:
         if target.image and target.upstream:
             logging.error(
-                f"[{saas_file_name}/{resource_template_name}] "
-                f"image used with upstream"
+                f"[{saas_file_name}/{resource_template_name}] image used with upstream"
             )
             self.valid = False
 
@@ -1056,35 +1055,30 @@ class SaasHerder: # pylint: disable=too-many-public-methods
 
     @staticmethod
     def _collect_images(resource: Resource) -> set[str]:
-        images = set()
+        images: set[str] = set()
         # resources with pod templates
         with suppress(KeyError):
             template = resource["spec"]["template"]
-            for c in template["spec"]["containers"]:
-                images.add(c["image"])
+            images.update(c["image"] for c in template["spec"]["containers"])
         # init containers
         with suppress(KeyError):
             template = resource["spec"]["template"]
-            for c in template["spec"]["initContainers"]:
-                images.add(c["image"])
+            images.update(c["image"] for c in template["spec"]["initContainers"])
         # CronJob
         with suppress(KeyError):
             template = resource["spec"]["jobTemplate"]["spec"]["template"]
-            for c in template["spec"]["containers"]:
-                images.add(c["image"])
+            images.update(c["image"] for c in template["spec"]["containers"])
         # CatalogSource templates
         with suppress(KeyError):
             images.add(resource["spec"]["image"])
         # ClowdApp deployments
         with suppress(KeyError):
             deployments = resource["spec"]["deployments"]
-            for d in deployments:
-                images.add(d["podSpec"]["image"])
+            images.update(d["podSpec"]["image"] for d in deployments)
         # ClowdApp jobs
         with suppress(KeyError, TypeError):
             jobs = resource["spec"]["jobs"]
-            for j in jobs:
-                images.add(j["podSpec"]["image"])
+            images.update(j["podSpec"]["image"] for j in jobs)
 
         return images
 
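_collect_images leans on contextlib.suppress to skip resource shapes that lack a given section, and on set.update with a generator instead of an explicit append loop. A minimal standalone sketch of that pattern (the resource dict is invented for illustration):

    from contextlib import suppress

    resource = {"spec": {"template": {"spec": {"containers": [{"image": "quay.io/app:v1"}]}}}}
    images: set[str] = set()

    with suppress(KeyError):  # silently skip resources without a pod template
        template = resource["spec"]["template"]
        images.update(c["image"] for c in template["spec"]["containers"])

    print(images)  # {'quay.io/app:v1'}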
@@ -1872,7 +1866,7 @@ class SaasHerder: # pylint: disable=too-many-public-methods
             # not have promotion_data yet
             if not config_hashes or not promotion.promotion_data:
                 logging.info(
-                    "Promotion data is missing; rely on the success
+                    "Promotion data is missing; rely on the success state only"
                 )
                 continue
 
reconcile/utils/secret_reader.py
CHANGED
@@ -163,7 +163,7 @@ class VaultSecretReader(SecretReaderBase):
             )
         except Forbidden:
             raise VaultForbidden(
-                f"permission denied reading vault secret
+                f"permission denied reading vault secret at {path}"
             ) from None
         except vault.SecretNotFound as e:
             raise SecretNotFound(*e.args) from e
@@ -315,7 +315,7 @@ class SecretReader(SecretReaderBase):
             data = self.vault_client.read_all(params)  # type: ignore[attr-defined] # mypy doesn't recognize the VaultClient.__new__ method
         except Forbidden:
             raise VaultForbidden(
-                f"permission denied reading vault secret
+                f"permission denied reading vault secret at {path}"
             ) from None
         except vault.SecretNotFound as e:
             raise SecretNotFound(*e.args) from e
reconcile/utils/sharding.py
CHANGED
@@ -4,8 +4,8 @@ import os
 
 LOG = logging.getLogger(__name__)
 
-SHARDS = int(os.environ.get("SHARDS", 1))
-SHARD_ID = int(os.environ.get("SHARD_ID", 0))
+SHARDS = int(os.environ.get("SHARDS", "1"))
+SHARD_ID = int(os.environ.get("SHARD_ID", "0"))
 
 
 def is_in_shard(value):
|