qontract-reconcile 0.10.2.dev14__py3-none-any.whl → 0.10.2.dev16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/METADATA +1 -1
  2. {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/RECORD +135 -135
  3. reconcile/acs_rbac.py +2 -4
  4. reconcile/aus/base.py +13 -13
  5. reconcile/aws_ami_share.py +1 -2
  6. reconcile/aws_cloudwatch_log_retention/integration.py +1 -1
  7. reconcile/aws_saml_idp/integration.py +1 -1
  8. reconcile/aws_saml_roles/integration.py +1 -1
  9. reconcile/aws_version_sync/integration.py +3 -3
  10. reconcile/change_owners/change_owners.py +8 -5
  11. reconcile/change_owners/change_types.py +18 -18
  12. reconcile/change_owners/changes.py +8 -9
  13. reconcile/change_owners/decision.py +12 -15
  14. reconcile/change_owners/self_service_roles.py +6 -4
  15. reconcile/change_owners/tester.py +8 -10
  16. reconcile/cli.py +12 -14
  17. reconcile/closedbox_endpoint_monitoring_base.py +1 -1
  18. reconcile/cna/integration.py +2 -2
  19. reconcile/dashdotdb_base.py +2 -2
  20. reconcile/dashdotdb_cso.py +1 -1
  21. reconcile/dashdotdb_dora.py +6 -4
  22. reconcile/dashdotdb_slo.py +1 -1
  23. reconcile/database_access_manager.py +15 -19
  24. reconcile/email_sender.py +4 -8
  25. reconcile/endpoints_discovery/integration.py +137 -98
  26. reconcile/external_resources/secrets_sync.py +2 -2
  27. reconcile/external_resources/state.py +17 -17
  28. reconcile/gabi_authorized_users.py +3 -3
  29. reconcile/gcr_mirror.py +2 -2
  30. reconcile/github_org.py +9 -13
  31. reconcile/gitlab_housekeeping.py +1 -1
  32. reconcile/gitlab_owners.py +10 -12
  33. reconcile/gitlab_permissions.py +5 -4
  34. reconcile/glitchtip/integration.py +14 -14
  35. reconcile/glitchtip_project_alerts/integration.py +3 -4
  36. reconcile/gql_definitions/endpoints_discovery/{namespaces.py → apps.py} +22 -22
  37. reconcile/integrations_manager.py +1 -2
  38. reconcile/jenkins_job_builds_cleaner.py +7 -5
  39. reconcile/jenkins_roles.py +10 -6
  40. reconcile/jenkins_worker_fleets.py +5 -4
  41. reconcile/jira_permissions_validator.py +2 -6
  42. reconcile/ldap_groups/integration.py +3 -2
  43. reconcile/ocm_groups.py +5 -5
  44. reconcile/ocm_update_recommended_version.py +2 -2
  45. reconcile/openshift_base.py +15 -20
  46. reconcile/openshift_groups.py +9 -8
  47. reconcile/openshift_namespace_labels.py +3 -4
  48. reconcile/openshift_namespaces.py +1 -1
  49. reconcile/openshift_network_policies.py +1 -1
  50. reconcile/openshift_resources_base.py +4 -4
  51. reconcile/openshift_serviceaccount_tokens.py +1 -1
  52. reconcile/openshift_tekton_resources.py +1 -2
  53. reconcile/openshift_users.py +5 -4
  54. reconcile/prometheus_rules_tester/integration.py +8 -8
  55. reconcile/quay_mirror.py +3 -4
  56. reconcile/quay_mirror_org.py +1 -1
  57. reconcile/rhidp/ocm_oidc_idp/base.py +10 -15
  58. reconcile/run_integration.py +7 -7
  59. reconcile/saas_auto_promotions_manager/publisher.py +1 -1
  60. reconcile/saas_auto_promotions_manager/utils/saas_files_inventory.py +3 -9
  61. reconcile/service_dependencies.py +2 -7
  62. reconcile/skupper_network/reconciler.py +5 -5
  63. reconcile/skupper_network/site_controller.py +3 -3
  64. reconcile/sql_query.py +5 -5
  65. reconcile/status_board.py +24 -24
  66. reconcile/terraform_cloudflare_users.py +2 -2
  67. reconcile/terraform_repo.py +6 -6
  68. reconcile/terraform_users.py +8 -5
  69. reconcile/terraform_vpc_peerings.py +1 -1
  70. reconcile/terraform_vpc_resources/integration.py +1 -1
  71. reconcile/typed_queries/app_interface_deadmanssnitch_settings.py +1 -1
  72. reconcile/typed_queries/app_quay_repos_escalation_policies.py +1 -1
  73. reconcile/typed_queries/aws_vpc_requests.py +1 -1
  74. reconcile/typed_queries/aws_vpcs.py +1 -1
  75. reconcile/typed_queries/clusters.py +1 -1
  76. reconcile/typed_queries/clusters_minimal.py +1 -1
  77. reconcile/typed_queries/clusters_with_dms.py +1 -1
  78. reconcile/typed_queries/dynatrace_environments.py +1 -1
  79. reconcile/typed_queries/dynatrace_token_provider_token_specs.py +1 -1
  80. reconcile/typed_queries/reserved_networks.py +1 -1
  81. reconcile/typed_queries/saas_files.py +1 -1
  82. reconcile/typed_queries/slo_documents.py +1 -1
  83. reconcile/typed_queries/status_board.py +1 -2
  84. reconcile/utils/amtool.py +2 -2
  85. reconcile/utils/aws_api.py +10 -10
  86. reconcile/utils/aws_helper.py +1 -1
  87. reconcile/utils/binary.py +1 -2
  88. reconcile/utils/differ.py +4 -7
  89. reconcile/utils/dnsutils.py +4 -12
  90. reconcile/utils/external_resources.py +1 -2
  91. reconcile/utils/gitlab_api.py +2 -4
  92. reconcile/utils/glitchtip/models.py +1 -1
  93. reconcile/utils/helm.py +1 -1
  94. reconcile/utils/instrumented_wrappers.py +2 -2
  95. reconcile/utils/jjb_client.py +1 -1
  96. reconcile/utils/jump_host.py +1 -1
  97. reconcile/utils/metrics.py +6 -11
  98. reconcile/utils/mr/aws_access.py +1 -1
  99. reconcile/utils/mr/base.py +2 -4
  100. reconcile/utils/mr/notificator.py +1 -1
  101. reconcile/utils/mr/ocm_upgrade_scheduler_org_updates.py +1 -1
  102. reconcile/utils/oc.py +17 -31
  103. reconcile/utils/oc_map.py +1 -1
  104. reconcile/utils/ocm/base.py +4 -2
  105. reconcile/utils/ocm/search_filters.py +4 -3
  106. reconcile/utils/ocm/status_board.py +2 -2
  107. reconcile/utils/ocm/upgrades.py +4 -7
  108. reconcile/utils/ocm_base_client.py +1 -1
  109. reconcile/utils/openshift_resource.py +1 -1
  110. reconcile/utils/promtool.py +1 -1
  111. reconcile/utils/quay_api.py +1 -3
  112. reconcile/utils/raw_github_api.py +3 -10
  113. reconcile/utils/repo_owners.py +5 -5
  114. reconcile/utils/rest_api_base.py +1 -2
  115. reconcile/utils/rosa/rosa_cli.py +3 -3
  116. reconcile/utils/saasherder/saasherder.py +9 -15
  117. reconcile/utils/secret_reader.py +2 -2
  118. reconcile/utils/sharding.py +2 -2
  119. reconcile/utils/state.py +5 -5
  120. reconcile/utils/terraform_client.py +2 -2
  121. reconcile/utils/terrascript/cloudflare_resources.py +4 -6
  122. reconcile/utils/terrascript_aws_client.py +16 -28
  123. reconcile/utils/vault.py +2 -2
  124. reconcile/utils/vcs.py +8 -16
  125. reconcile/vault_replication.py +1 -8
  126. tools/app_interface_reporter.py +1 -1
  127. tools/cli_commands/container_images_report.py +1 -1
  128. tools/cli_commands/cost_report/view.py +4 -2
  129. tools/cli_commands/gpg_encrypt.py +1 -5
  130. tools/qontract_cli.py +14 -13
  131. tools/saas_metrics_exporter/commit_distance/channel.py +1 -1
  132. tools/saas_promotion_state/saas_promotion_state.py +1 -1
  133. tools/sd_app_sre_alert_report.py +3 -3
  134. {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/WHEEL +0 -0
  135. {qontract_reconcile-0.10.2.dev14.dist-info → qontract_reconcile-0.10.2.dev16.dist-info}/entry_points.txt +0 -0
reconcile/rhidp/ocm_oidc_idp/base.py CHANGED
@@ -1,4 +1,5 @@
  import logging
+ import operator
  from collections.abc import (
  Iterable,
  Sequence,
@@ -38,10 +39,10 @@ class IDPState(BaseModel):
  cluster: Cluster
  idp: OCMOIdentityProvider | OCMOIdentityProviderOidc | OCMOIdentityProviderGithub

- def __eq__(self, __value: object) -> bool:
- if not isinstance(__value, IDPState):
+ def __eq__(self, value: object) -> bool:
+ if not isinstance(value, IDPState):
  raise NotImplementedError("Cannot compare to non IDPState objects.")
- return self.idp == __value.idp
+ return self.idp == value.idp


  def run(
@@ -91,19 +92,13 @@ def fetch_current_state(
  ocm_api: OCMBaseClient, clusters: Iterable[Cluster]
  ) -> list[IDPState]:
  """Fetch all current configured OIDC identity providers."""
- current_state: list[IDPState] = []
- for cluster in clusters:
+ return [
+ IDPState(cluster=cluster, idp=idp)
+ for cluster in clusters
  for idp in get_identity_providers(
  ocm_api=ocm_api, ocm_cluster=cluster.ocm_cluster
- ):
- current_state.append(
- IDPState(
- cluster=cluster,
- idp=idp,
- ),
- )
-
- return current_state
+ )
+ ]


  def fetch_desired_state(
@@ -175,7 +170,7 @@ def act(
  idp_state.idp.type,
  idp_state.idp.name,
  )),
- equal=lambda idp1, idp2: idp1 == idp2,
+ equal=operator.eq,
  )

  for idp_state in diff_result.delete.values():
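
The `act` hunk above swaps a hand-written equality lambda for `operator.eq`, a standard-library drop-in for a two-argument `==` comparison. A minimal standalone sketch of the idiom (hypothetical values, not code from this package):

    import operator

    pairs = [(1, 1), (2, 3)]
    # operator.eq(a, b) behaves exactly like `a == b`, so it can replace
    # `lambda x, y: x == y` wherever a two-argument callable is expected.
    assert [operator.eq(a, b) for a, b in pairs] == [True, False]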
reconcile/run_integration.py CHANGED
@@ -28,8 +28,8 @@ from reconcile.utils.runtime.environment import (
  log_fmt,
  )

- SHARDS = int(os.environ.get("SHARDS", 1))
- SHARD_ID = int(os.environ.get("SHARD_ID", 0))
+ SHARDS = int(os.environ.get("SHARDS", "1"))
+ SHARD_ID = int(os.environ.get("SHARD_ID", "0"))
  SHARD_ID_LABEL = os.environ.get("SHARD_KEY", f"{SHARD_ID}-{SHARDS}")
  PREFIX_LOG_LEVEL = os.environ.get("PREFIX_LOG_LEVEL", "false")

@@ -44,14 +44,14 @@ DRY_RUN = (
  )
  INTEGRATION_EXTRA_ARGS = os.environ.get("INTEGRATION_EXTRA_ARGS")
  CONFIG = os.environ.get("CONFIG", "/config/config.toml")
- PROMETHEUS_PORT = os.environ.get("PROMETHEUS_PORT", 9090)
+ PROMETHEUS_PORT = int(os.environ.get("PROMETHEUS_PORT", "9090"))

  LOG_FILE = os.environ.get("LOG_FILE")
  LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
- SLEEP_DURATION_SECS = os.environ.get("SLEEP_DURATION_SECS", 600)
- SLEEP_ON_ERROR = os.environ.get("SLEEP_ON_ERROR", 10)
+ SLEEP_DURATION_SECS = int(os.environ.get("SLEEP_DURATION_SECS", "600"))
+ SLEEP_ON_ERROR = int(os.environ.get("SLEEP_ON_ERROR", "10"))

- PUSHGATEWAY_ENABLED = os.environ.get("PUSHGATEWAY_ENABLED", False)
+ PUSHGATEWAY_ENABLED = bool(os.environ.get("PUSHGATEWAY_ENABLED"))

  LOG = logging.getLogger(__name__)
@@ -88,7 +88,7 @@ def _parse_dry_run_flag(dry_run: str | None) -> str | None:
  )
  logging.error(msg)
  raise ValueError(msg)
- return dry_run if dry_run else None
+ return dry_run or None


  def build_entry_point_args(
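
The run_integration.py hunks above tighten environment-variable parsing: numeric settings now pass string defaults into `int(...)`, and `PUSHGATEWAY_ENABLED` is coerced with `bool(...)` instead of carrying a `False` default. Note that both the old and the new form treat any non-empty value, including "false", as enabled; a minimal sketch of the behaviour (hypothetical variable names):

    import os

    os.environ["PUSHGATEWAY_ENABLED"] = "false"
    old_style = os.environ.get("PUSHGATEWAY_ENABLED", False)  # the string "false" (truthy)
    new_style = bool(os.environ.get("PUSHGATEWAY_ENABLED"))   # True for any non-empty value
    unset = bool(os.environ.get("SOME_MISSING_VAR"))          # False when the variable is absent
    port = int(os.environ.get("PROMETHEUS_PORT", "9090"))     # string default, then explicit int()
    print(old_style, new_style, unset, port)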
reconcile/saas_auto_promotions_manager/publisher.py CHANGED
@@ -54,7 +54,7 @@ class Publisher:
  self.app_name = app_name
  self.namespace_name = namespace_name
  self.resource_template_name = resource_template_name
- self.target_name = target_name if target_name else "None"
+ self.target_name = target_name or "None"
  self.cluster_name = cluster_name
  self.redeploy_on_config_change = bool(redeploy_on_config_change)
  self.has_subscriber = has_subscriber
reconcile/saas_auto_promotions_manager/utils/saas_files_inventory.py CHANGED
@@ -92,21 +92,15 @@ class SaasFilesInventory:
  blocked_versions.setdefault(code_component.url, set()).add(version)
  for resource_template in saas_file.resource_templates:
  for target in resource_template.targets:
- file_path = target.path if target.path else saas_file.path
+ file_path = target.path or saas_file.path
  if target.disable or target.delete:
  continue
  if not target.promotion:
  continue
  if not target.promotion.auto:
  continue
- soak_days = (
- target.promotion.soak_days if target.promotion.soak_days else 0
- )
- schedule = (
- target.promotion.schedule
- if target.promotion.schedule
- else "* * * * *"
- )
+ soak_days = target.promotion.soak_days or 0
+ schedule = target.promotion.schedule or "* * * * *"
  subscriber = Subscriber(
  uid=target.uid(
  parent_saas_file_name=saas_file.name,
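
Several hunks in this diff replace `x if x else default` with the shorter `x or default`. The two are equivalent, but both fall back on any falsy value (None, empty string, 0), not only on None; a minimal illustration (hypothetical values):

    target_path = ""  # hypothetical falsy value
    default_path = "/saas/file.yaml"

    via_ternary = target_path if target_path else default_path
    via_or = target_path or default_path
    assert via_ternary == via_or == default_path  # both forms fall back on falsy values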
reconcile/service_dependencies.py CHANGED
@@ -21,13 +21,8 @@ QONTRACT_INTEGRATION = "service-dependencies"


  def get_dependency_names(dependency_map: Mapping[Any, Any], dep_type: str) -> list[str]:
- dep_names = []
- for dm in dependency_map:
- if dm["type"] != dep_type:
- continue
- for service in dm["services"]:
- dep_names.append(service["name"])
- return dep_names
+ dependency_maps = (dm for dm in dependency_map if dm["type"] == dep_type)
+ return [service["name"] for service in dependency_maps]


  def get_desired_dependency_names(
reconcile/skupper_network/reconciler.py CHANGED
@@ -31,7 +31,7 @@ def delete_skupper_site(
  for kind in integration_managed_kinds:
  # delete everything labeled by us
  to_delete.update({
- f'{item["kind"]}-{item["metadata"]["name"]}': item
+ f"{item['kind']}-{item['metadata']['name']}": item
  for item in oc.get_items(
  kind=kind,
  namespace=site.namespace.name,
@@ -40,7 +40,7 @@ def delete_skupper_site(
  })
  # delete everything else that starts with 'skupper-'
  to_delete.update({
- f'{item["kind"]}-{item["metadata"]["name"]}': item
+ f"{item['kind']}-{item['metadata']['name']}": item
  for item in oc.get_items(kind=kind, namespace=site.namespace.name)
  if item["metadata"]["name"].startswith("skupper-")
  })
@@ -84,12 +84,12 @@ def _create_token(
  logging.info(f"{connected_site}: Creating new connection token for {site}")
  sc = get_site_controller(connected_site)
  if not dry_run:
- _labels = copy.deepcopy(site.token_labels)
- _labels.update(labels)
+ labels_ = copy.deepcopy(site.token_labels)
+ labels_.update(labels)
  oc.apply(
  connected_site.namespace.name,
  resource=OR(
- body=sc.site_token(connected_site.unique_token_name(site), _labels),
+ body=sc.site_token(connected_site.unique_token_name(site), labels_),
  integration=integration,
  integration_version=integration_version,
  ),
reconcile/skupper_network/site_controller.py CHANGED
@@ -28,14 +28,14 @@ class SiteController:

  def site_token(self, name: str, labels: MutableMapping[str, str]) -> dict[str, Any]:
  """Skupper site token secret."""
- _labels = copy.deepcopy(labels)
- _labels["skupper.io/type"] = "connection-token-request"
+ labels_ = copy.deepcopy(labels)
+ labels_["skupper.io/type"] = "connection-token-request"
  return {
  "apiVersion": "v1",
  "kind": "Secret",
  "metadata": {
  "name": name,
- "labels": _labels,
+ "labels": labels_,
  },
  }
reconcile/sql_query.py CHANGED
@@ -277,17 +277,17 @@ def collect_queries(
  tf_resource_info = get_tf_resource_info(terrascript, namespace, identifier)
  if tf_resource_info is None:
  logging.error(
- f'[sql-query:{name} (path: {sql_query["path"]})] Could not find rds identifier {identifier} in namespace {namespace["name"]}. '
+ f"[sql-query:{name} (path: {sql_query['path']})] Could not find rds identifier {identifier} in namespace {namespace['name']}. "
  "If this is a removed read only instance, consider updating the identifier to the source replica or remove this file."
  )
  sys.exit(ExitCodes.ERROR)

- _queries = []
+ queries_ = []
  if sql_query["query"] is not None:
- _queries.append(sql_query["query"])
+ queries_.append(sql_query["query"])

  if sql_query["queries"] is not None:
- _queries.extend(sql_query["queries"])
+ queries_.extend(sql_query["queries"])

  # building up the final query dictionary
  item = {
@@ -296,7 +296,7 @@ def collect_queries(
  "identifier": sql_query["identifier"],
  "db_conn": db_conn,
  "output": output,
- "queries": _queries,
+ "queries": queries_,
  **tf_resource_info,
  }
reconcile/status_board.py CHANGED
@@ -194,40 +194,40 @@ class StatusBoardExporterIntegration(QontractReconcileIntegration):
  )
  )
  # new product, so it misses also the applications
- for app_name in desired_product_apps[product_name]:
- return_list.append(
- StatusBoardHandler(
- action="create",
- status_board_object=create_app(app_name, product),
- )
+ return_list.extend(
+ StatusBoardHandler(
+ action="create",
+ status_board_object=create_app(app_name, product),
  )
+ for app_name in desired_product_apps[product_name]
+ )

  # existing product, only add/remove applications
  for product_name, apps in diff_result.change.items():
  product = current_products[product_name]
- for app_name in apps.desired - apps.current:
- return_list.append(
- StatusBoardHandler(
- action="create",
- status_board_object=create_app(app_name, product),
- )
+ return_list.extend(
+ StatusBoardHandler(
+ action="create",
+ status_board_object=create_app(app_name, product),
  )
+ for app_name in apps.desired - apps.current
+ )
  to_delete = apps.current - apps.desired
- for application in product.applications or []:
- if application.name in to_delete:
- return_list.append(
- StatusBoardHandler(
- action="delete",
- status_board_object=application,
- )
- )
+ return_list.extend(
+ StatusBoardHandler(
+ action="delete",
+ status_board_object=application,
+ )
+ for application in product.applications or []
+ if application.name in to_delete
+ )

  # product is deleted entirely
  for product_name in diff_result.delete:
- for application in current_products[product_name].applications or []:
- return_list.append(
- StatusBoardHandler(action="delete", status_board_object=application)
- )
+ return_list.extend(
+ StatusBoardHandler(action="delete", status_board_object=application)
+ for application in current_products[product_name].applications or []
+ )
  return_list.append(
  StatusBoardHandler(
  action="delete", status_board_object=current_products[product_name]
reconcile/terraform_cloudflare_users.py CHANGED
@@ -331,8 +331,8 @@ def build_external_resource_spec_from_cloudflare_users(
  """
  specs: list[ExternalResourceSpec] = []

- for _, v in cloudflare_users.items():
- for _, cf_user in v.items():
+ for v in cloudflare_users.values():
+ for cf_user in v.values():
  data_source_cloudflare_account_roles = {
  "identifier": safe_resource_id(cf_user.account_name),
  "account_id": "${var.account_id}",
reconcile/terraform_repo.py CHANGED
@@ -382,12 +382,12 @@ class TerraformRepoIntegration(

  # construct diff urls
  diff_urls: list[str] = []
- for pair in diff_result.change.values():
- if pair.current.ref != pair.desired.ref:
- # gitlab specific syntax
- diff_urls.append(
- f"{pair.current.repository}/compare/{pair.current.ref}...{pair.desired.ref}"
- )
+ # gitlab specific syntax
+ diff_urls.extend(
+ f"{pair.current.repository}/compare/{pair.current.ref}...{pair.desired.ref}"
+ for pair in diff_result.change.values()
+ if pair.current.ref != pair.desired.ref
+ )

  if len(diff_urls) > 0:
  comment_body = "tf-repo diffs:\n" + "\n".join([
reconcile/terraform_users.py CHANGED
@@ -95,12 +95,15 @@ def _filter_participating_aws_accounts(
  accounts: list,
  roles: list[dict[str, Any]],
  ) -> list:
- participating_aws_account_names = set()
+ participating_aws_account_names: set[str] = set()
  for role in roles:
- for aws_group in role["aws_groups"] or []:
- participating_aws_account_names.add(aws_group["account"]["name"])
- for user_policy in role["user_policies"] or []:
- participating_aws_account_names.add(user_policy["account"]["name"])
+ participating_aws_account_names.update(
+ aws_group["account"]["name"] for aws_group in role["aws_groups"] or []
+ )
+ participating_aws_account_names.update(
+ user_policy["account"]["name"]
+ for user_policy in role["user_policies"] or []
+ )
  return [a for a in accounts if a["name"] in participating_aws_account_names]

reconcile/terraform_vpc_peerings.py CHANGED
@@ -702,7 +702,7 @@ def run(
  integration_version=QONTRACT_INTEGRATION_VERSION,
  dry_run=dry_run,
  cache_source=cache_source,
- shard=account_name if account_name else "",
+ shard=account_name or "",
  ttl_seconds=extended_early_exit_cache_ttl_seconds,
  logger=logging.getLogger(),
  runner=runner,
reconcile/terraform_vpc_resources/integration.py CHANGED
@@ -196,7 +196,7 @@ class TerraformVpcResources(QontractReconcileIntegration[TerraformVpcResourcesPa
  mr_manager._fetch_managed_open_merge_requests()

  # Create a MR for each vpc request if the MR don't exist yet
- for _, outputs in handled_output.items():
+ for outputs in handled_output.values():
  template = gql_api.get_template(
  path="/templating/templates/terraform-vpc-resources/vpc.yml"
  )["template"]
reconcile/typed_queries/app_interface_deadmanssnitch_settings.py CHANGED
@@ -10,7 +10,7 @@ from reconcile.utils.gql import GqlApi
  def get_deadmanssnitch_settings(
  gql_api: GqlApi | None = None,
  ) -> DeadMansSnitchSettingsV1:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  if data.settings and data.settings[0].dead_mans_snitch_settings is not None:
  return data.settings[0].dead_mans_snitch_settings
reconcile/typed_queries/app_quay_repos_escalation_policies.py CHANGED
@@ -9,6 +9,6 @@ from reconcile.utils.gql import GqlApi
  def get_apps_quay_repos_escalation_policies(
  gql_api: GqlApi | None = None,
  ) -> list[AppV1]:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  return list(data.apps or [])
reconcile/typed_queries/aws_vpc_requests.py CHANGED
@@ -4,6 +4,6 @@ from reconcile.utils.gql import GqlApi


  def get_aws_vpc_requests(gql_api: GqlApi | None = None) -> list[VPCRequest]:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  return list(data.vpc_requests or [])
reconcile/typed_queries/aws_vpcs.py CHANGED
@@ -7,6 +7,6 @@ from reconcile.utils.gql import GqlApi


  def get_aws_vpcs(gql_api: GqlApi | None = None) -> list[AWSVPC]:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  return list(data.vpcs or [])
reconcile/typed_queries/clusters.py CHANGED
@@ -13,6 +13,6 @@ def get_clusters(
  variables = {}
  if name:
  variables["name"] = name
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query, variables=variables)
  return list(data.clusters or [])
reconcile/typed_queries/clusters_minimal.py CHANGED
@@ -12,6 +12,6 @@ def get_clusters_minimal(
  variables = {}
  if name:
  variables["name"] = name
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query, variables=variables)
  return list(data.clusters or [])
reconcile/typed_queries/clusters_with_dms.py CHANGED
@@ -11,6 +11,6 @@ def get_clusters_with_dms(
  ) -> list[ClusterV1]:
  # get the clusters containing the filed enableDeadMansSnitch
  variable = {"filter": {"enableDeadMansSnitch": {"ne": None}}}
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query, variables=variable)
  return data.clusters or []
reconcile/typed_queries/dynatrace_environments.py CHANGED
@@ -9,6 +9,6 @@ from reconcile.utils.gql import GqlApi
  def get_dynatrace_environments(
  api: GqlApi | None = None,
  ) -> list[DynatraceEnvironmentV1]:
- api = api if api else gql.get_api()
+ api = api or gql.get_api()
  data = query(api.query)
  return list(data.environments or [])
reconcile/typed_queries/dynatrace_token_provider_token_specs.py CHANGED
@@ -9,6 +9,6 @@ from reconcile.utils.gql import GqlApi
  def get_dynatrace_token_provider_token_specs(
  api: GqlApi | None = None,
  ) -> list[DynatraceTokenProviderTokenSpecV1]:
- api = api if api else gql.get_api()
+ api = api or gql.get_api()
  data = query(api.query)
  return list(data.token_specs or [])
reconcile/typed_queries/reserved_networks.py CHANGED
@@ -7,6 +7,6 @@ from reconcile.utils.gql import GqlApi


  def get_networks(gql_api: GqlApi | None = None) -> list[NetworkV1]:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  return list(data.networks or [])
reconcile/typed_queries/saas_files.py CHANGED
@@ -73,7 +73,7 @@ class SaasResourceTemplateTarget(ConfiguredBaseModel):
  ) -> str:
  """Returns a unique identifier for a target."""
  return hashlib.blake2s(
- f"{parent_saas_file_name}:{parent_resource_template_name}:{self.name if self.name else 'default'}:{self.namespace.cluster.name}:{self.namespace.name}".encode(),
+ f"{parent_saas_file_name}:{parent_resource_template_name}:{self.name or 'default'}:{self.namespace.cluster.name}:{self.namespace.name}".encode(),
  digest_size=20,
  ).hexdigest()

reconcile/typed_queries/slo_documents.py CHANGED
@@ -7,6 +7,6 @@ from reconcile.utils.gql import GqlApi


  def get_slo_documents(gql_api: GqlApi | None = None) -> list[SLODocumentV1]:
- api = gql_api if gql_api else gql.get_api()
+ api = gql_api or gql.get_api()
  data = query(query_func=api.query)
  return data.slo_document_v1 or []
reconcile/typed_queries/status_board.py CHANGED
@@ -51,8 +51,7 @@ def get_selected_app_names(
  for selector in selectors:
  apps_to_remove: set[str] = set()
  results = parser.parse(selector).find(apps)
- for match in results:
- apps_to_remove.add(match.value["name"])
+ apps_to_remove.update(match.value["name"] for match in results)
  selected_app_names -= apps_to_remove

  return selected_app_names
reconcile/utils/amtool.py CHANGED
@@ -52,7 +52,7 @@ def version() -> AmtoolResult:
  """Returns the version parsed from amtool --version"""
  result = _run_cmd(["amtool", "--version"])

- pattern = re.compile("^amtool, version (?P<version>[^ ]+) .+")
+ pattern = re.compile(r"^amtool, version (?P<version>[^ ]+) .+")
  if m := pattern.match(result.message):
  return AmtoolResult(True, m.group("version"))

@@ -63,7 +63,7 @@ def _run_cmd(cmd: list[str]) -> AmtoolResult:
  try:
  result = run(cmd, capture_output=True, check=True)
  except CalledProcessError as e:
- msg = f'Error running amtool command [{" ".join(cmd)}]'
+ msg = f"Error running amtool command [{' '.join(cmd)}]"
  if e.stdout:
  msg += f" {e.stdout.decode()}"
  if e.stderr:
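
Two small idioms appear in the amtool.py hunks above: the regex pattern becomes a raw string (so backslashes are never reinterpreted as string escapes) and the f-string switches to double quotes outside with single quotes inside the braces. A brief sketch (hypothetical pattern and command):

    import re

    # Raw strings keep regex escapes literal, e.g. r"\S" stays a regex class.
    pattern = re.compile(r"^amtool, version (?P<version>\S+)")
    cmd = ["amtool", "--version"]
    # Using the opposite quote style inside the braces works on all Python
    # versions; reusing the same quote requires Python 3.12+ (PEP 701).
    msg = f"Error running amtool command [{' '.join(cmd)}]"
    print(bool(pattern.match("amtool, version 0.27.0")), msg)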
reconcile/utils/aws_api.py CHANGED
@@ -1,4 +1,5 @@
  import logging
+ import operator
  import os
  import re
  import time
@@ -191,7 +192,7 @@ class AWSApi: # pylint: disable=too-many-public-methods
  service_name,
  region_name: str | None = None,
  ):
- region = region_name if region_name else session.region_name
+ region = region_name or session.region_name
  client = session.client(
  service_name,
  region_name=region,
@@ -205,7 +206,7 @@ class AWSApi: # pylint: disable=too-many-public-methods
  def _get_session_resource(
  session: Session, service_name, region_name: str | None = None
  ):
- region = region_name if region_name else session.region_name
+ region = region_name or session.region_name
  return session.resource(service_name, region_name=region)

  def _account_ec2_client(
@@ -1000,12 +1001,13 @@ class AWSApi: # pylint: disable=too-many-public-methods
  assumed_role_data = self._get_account_assume_data(account)
  assumed_ec2 = self._get_assumed_role_client(*assumed_role_data)
  nat_gateways = assumed_ec2.describe_nat_gateways()
- egress_ips = set()
+ egress_ips: set[str] = set()
  for nat in nat_gateways.get("NatGateways") or []:
  if nat["VpcId"] != vpc_id:
  continue
- for address in nat["NatGatewayAddresses"]:
- egress_ips.add(address["PublicIp"])
+ egress_ips.update(
+ address["PublicIp"] for address in nat["NatGatewayAddresses"]
+ )

  return egress_ips

@@ -1481,13 +1483,11 @@ class AWSApi: # pylint: disable=too-many-public-methods
  client.delete_hosted_zone(Id=zone_id)
  except client.exceptions.NoSuchHostedZone:
  logging.error(
- f"[{account_name}] Error trying to delete "
- f"unknown DNS zone {zone_id}"
+ f"[{account_name}] Error trying to delete unknown DNS zone {zone_id}"
  )
  except client.exceptions.HostedZoneNotEmpty:
  logging.error(
- f"[{account_name}] Cannot delete DNS zone that "
- f"is not empty {zone_id}"
+ f"[{account_name}] Cannot delete DNS zone that is not empty {zone_id}"
  )
  except Exception as e:
  logging.error(f"[{account_name}] unhandled exception: {e}")
@@ -1697,7 +1697,7 @@ class AWSApi: # pylint: disable=too-many-public-methods
  return [
  obj["Key"]
  for obj in sorted(
- objects, key=lambda obj: obj["LastModified"], reverse=True
+ objects, key=operator.itemgetter("LastModified"), reverse=True
  )
  ]
reconcile/utils/aws_helper.py CHANGED
@@ -74,7 +74,7 @@ def get_account(accounts: Iterable[Account], account_name: str) -> Account:
  raise AccountNotFoundError(account_name)


- def get_region_from_availability_zone(availability_zone: str) -> str:
+ def get_region_from_availability_zone(availability_zone: str) -> str: # noqa: FURB118
  return availability_zone[:-1]

reconcile/utils/binary.py CHANGED
@@ -39,8 +39,7 @@ def binary_version(binary, version_args, search_regex, expected_versions):
  result = subprocess.run(cmd, capture_output=True, check=True)
  except subprocess.CalledProcessError as e:
  msg = (
- f"Could not execute binary '{binary}' "
- f"for binary version check: {e}"
+ f"Could not execute binary '{binary}' for binary version check: {e}"
  )
  raise Exception(msg) from e
reconcile/utils/differ.py CHANGED
@@ -1,3 +1,4 @@
+ import operator
  from collections.abc import (
  Callable,
  Iterable,
@@ -30,10 +31,6 @@ class DiffResult(Generic[Current, Desired, Key]):
  identical: dict[Key, DiffPair[Current, Desired]]


- def _default_equal(current: Current, desired: Desired) -> bool:
- return current == desired
-
-
  def _default_key(item: Any) -> Any:
  return item

@@ -41,7 +38,7 @@ def _default_key(item: Any) -> Any:
  def diff_mappings(
  current: Mapping[Key, Current],
  desired: Mapping[Key, Desired],
- equal: Callable[[Current, Desired], bool] = _default_equal,
+ equal: Callable[[Current, Desired], bool] = operator.eq,
  ) -> DiffResult[Current, Desired, Key]:
  """
  Compare two mappings and return a `DiffResult` instance containing the differences between them.
@@ -91,7 +88,7 @@ def diff_any_iterables(
  desired: Iterable[Desired],
  current_key: Callable[[Current], Key] = _default_key,
  desired_key: Callable[[Desired], Key] = _default_key,
- equal: Callable[[Current, Desired], bool] = _default_equal,
+ equal: Callable[[Current, Desired], bool] = operator.eq,
  ) -> DiffResult[Current, Desired, Key]:
  """
  Compare two iterables and return a `DiffResult` instance containing the differences between them.
@@ -152,7 +149,7 @@ def diff_iterables(
  current: Iterable[T],
  desired: Iterable[T],
  key: Callable[[T], Key] = _default_key,
- equal: Callable[[T, T], bool] = _default_equal,
+ equal: Callable[[T, T], bool] = operator.eq,
  ) -> DiffResult[T, T, Key]:
  """
  Compare two iterables with same type and return a `DiffResult` instance containing the differences between them.
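
The differ.py hunks drop the module-level `_default_equal` helper and use `operator.eq` as the default `equal` callable; callers can still pass any `Callable[[X, Y], bool]` to customise the comparison. A small sketch of the default-argument pattern (hypothetical helper, not the package's own API):

    import operator
    from collections.abc import Callable

    def values_equal(
        current: dict[str, int],
        desired: dict[str, int],
        equal: Callable[[int, int], bool] = operator.eq,
    ) -> dict[str, bool]:
        # Compare values for keys present on both sides, using the supplied callable.
        return {k: equal(current[k], desired[k]) for k in current.keys() & desired.keys()}

    assert values_equal({"a": 1, "b": 2}, {"a": 1, "b": 3}) == {"a": True, "b": False}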