qontract-reconcile 0.10.1rc461__py3-none-any.whl → 0.10.1rc462__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. {qontract_reconcile-0.10.1rc461.dist-info → qontract_reconcile-0.10.1rc462.dist-info}/METADATA +1 -1
  2. {qontract_reconcile-0.10.1rc461.dist-info → qontract_reconcile-0.10.1rc462.dist-info}/RECORD +85 -85
  3. reconcile/aus/base.py +3 -3
  4. reconcile/aws_iam_keys.py +1 -1
  5. reconcile/aws_support_cases_sos.py +1 -1
  6. reconcile/change_owners/change_owners.py +2 -3
  7. reconcile/change_owners/diff.py +1 -1
  8. reconcile/change_owners/tester.py +3 -3
  9. reconcile/checkpoint.py +1 -1
  10. reconcile/cli.py +2 -1
  11. reconcile/closedbox_endpoint_monitoring_base.py +1 -1
  12. reconcile/cna/state.py +2 -2
  13. reconcile/dashdotdb_base.py +3 -3
  14. reconcile/dynatrace_token_provider.py +7 -8
  15. reconcile/gcr_mirror.py +2 -2
  16. reconcile/github_org.py +2 -2
  17. reconcile/github_owners.py +1 -1
  18. reconcile/gitlab_housekeeping.py +3 -3
  19. reconcile/gitlab_labeler.py +4 -5
  20. reconcile/glitchtip/reconciler.py +3 -3
  21. reconcile/glitchtip_project_alerts/integration.py +3 -3
  22. reconcile/ocm_clusters.py +9 -9
  23. reconcile/ocm_github_idp.py +1 -1
  24. reconcile/ocm_groups.py +1 -1
  25. reconcile/openshift_base.py +6 -6
  26. reconcile/openshift_clusterrolebindings.py +1 -1
  27. reconcile/openshift_groups.py +1 -1
  28. reconcile/openshift_namespace_labels.py +12 -12
  29. reconcile/openshift_resources_base.py +3 -3
  30. reconcile/openshift_rolebindings.py +1 -1
  31. reconcile/openshift_saas_deploy.py +1 -1
  32. reconcile/quay_mirror.py +2 -2
  33. reconcile/rhidp/common.py +2 -2
  34. reconcile/saas_auto_promotions_manager/merge_request_manager/merge_request_manager.py +9 -9
  35. reconcile/slack_usergroups.py +9 -9
  36. reconcile/sql_query.py +3 -4
  37. reconcile/terraform_aws_route53.py +1 -1
  38. reconcile/terraform_cloudflare_users.py +7 -7
  39. reconcile/terraform_repo.py +3 -1
  40. reconcile/terraform_vpc_peerings.py +10 -10
  41. reconcile/test/fixtures.py +1 -1
  42. reconcile/test/saas_auto_promotions_manager/merge_request_manager/renderer/conftest.py +2 -2
  43. reconcile/test/test_jump_host.py +2 -2
  44. reconcile/test/test_quay_mirror.py +3 -1
  45. reconcile/test/test_quay_mirror_org.py +3 -1
  46. reconcile/test/test_terraform_repo.py +2 -2
  47. reconcile/typed_queries/saas_files.py +5 -5
  48. reconcile/utils/amtool.py +2 -2
  49. reconcile/utils/aws_api.py +5 -29
  50. reconcile/utils/config.py +1 -2
  51. reconcile/utils/environ.py +1 -1
  52. reconcile/utils/git.py +7 -3
  53. reconcile/utils/git_secrets.py +2 -2
  54. reconcile/utils/helm.py +1 -1
  55. reconcile/utils/jjb_client.py +7 -7
  56. reconcile/utils/jump_host.py +2 -2
  57. reconcile/utils/metrics.py +3 -3
  58. reconcile/utils/models.py +47 -51
  59. reconcile/utils/mr/aws_access.py +1 -1
  60. reconcile/utils/mr/base.py +1 -1
  61. reconcile/utils/mr/user_maintenance.py +1 -1
  62. reconcile/utils/oc.py +8 -8
  63. reconcile/utils/oc_connection_parameters.py +12 -13
  64. reconcile/utils/ocm/base.py +1 -1
  65. reconcile/utils/ocm/ocm.py +9 -9
  66. reconcile/utils/openshift_resource.py +8 -9
  67. reconcile/utils/parse_dhms_duration.py +1 -1
  68. reconcile/utils/runtime/sharding.py +1 -1
  69. reconcile/utils/saasherder/saasherder.py +5 -5
  70. reconcile/utils/slack_api.py +2 -2
  71. reconcile/utils/terraform/config_client.py +1 -1
  72. reconcile/utils/terraform_client.py +5 -5
  73. reconcile/utils/terrascript/cloudflare_client.py +3 -1
  74. reconcile/utils/terrascript_aws_client.py +40 -40
  75. reconcile/utils/three_way_diff_strategy.py +2 -2
  76. reconcile/utils/unleash.py +1 -1
  77. reconcile/utils/vault.py +1 -1
  78. reconcile/vpc_peerings_validator.py +6 -6
  79. release/version.py +7 -2
  80. tools/app_interface_reporter.py +3 -3
  81. tools/cli_commands/gpg_encrypt.py +2 -2
  82. tools/qontract_cli.py +7 -6
  83. {qontract_reconcile-0.10.1rc461.dist-info → qontract_reconcile-0.10.1rc462.dist-info}/WHEEL +0 -0
  84. {qontract_reconcile-0.10.1rc461.dist-info → qontract_reconcile-0.10.1rc462.dist-info}/entry_points.txt +0 -0
  85. {qontract_reconcile-0.10.1rc461.dist-info → qontract_reconcile-0.10.1rc462.dist-info}/top_level.txt +0 -0
reconcile/utils/openshift_resource.py CHANGED
@@ -104,7 +104,7 @@ class OpenshiftResource:
  for obj1_k, obj1_v in obj1.items():
  obj2_v = obj2.get(obj1_k, None)
  if obj2_v is None:
- if obj1_v not in [None, ""]:
+ if obj1_v:
  return False
  if self.ignorable_field(obj1_k):
  pass
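
Note that `if obj1_v:` is a simplification rather than a strict equivalent of the removed check: any falsy value (0, False, empty containers) now takes the same branch as None and the empty string. A minimal, hypothetical sketch of the difference:

```python
# Minimal sketch (not from the package) showing where the two checks differ:
# the old check only skipped None and "", while the truthiness check also
# skips 0, False, and empty containers.
candidates = [None, "", 0, False, [], "value"]

old_style = [v for v in candidates if v not in [None, ""]]  # [0, False, [], 'value']
new_style = [v for v in candidates if v]                    # ['value']

print(old_style)
print(new_style)
```
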
@@ -123,7 +123,7 @@ class OpenshiftResource:
  ]
  if diff or not self.obj_intersect_equal(obj1_v, obj2_v, depth + 1):
  return False
- elif obj1_k in ["data", "matchLabels"]:
+ elif obj1_k in {"data", "matchLabels"}:
  diff = [
  k
  for k in obj2_v
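
The recurring `in [...]` → `in {...}` rewrites throughout this release replace list literals with set literals in membership tests. Behaviour is unchanged for these constant literals; the set form is the conventional spelling for fixed membership tests (a common pylint/ruff suggestion), and CPython can fold such a literal into a cached frozenset constant. A small illustrative sketch, not taken from the package:

```python
import dis


def with_list(k: str) -> bool:
    return k in ["data", "matchLabels"]


def with_set(k: str) -> bool:
    return k in {"data", "matchLabels"}


# Both forms return the same result for any key.
assert with_list("data") == with_set("data")
assert with_list("other") == with_set("other")

# On CPython, the bytecode of with_set shows the literal folded into a
# frozenset constant.
dis.dis(with_set)
```
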
@@ -244,12 +244,12 @@ class OpenshiftResource:
  )
  raise ConstructResourceError(msg)

- if self.kind not in [
+ if self.kind not in {
  "Role",
  "RoleBinding",
  "ClusterRole",
  "ClusterRoleBinding",
- ] and (
+ } and (
  not DNS_SUBDOMAIN_RE.match(self.name)
  or not len(self.name) <= DNS_SUBDOMAIN_MAX_LENGTH
  ):
@@ -414,7 +414,7 @@ class OpenshiftResource:

  # Default fields for specific resource types
  # ConfigMaps and Secrets are by default Opaque
- if body["kind"] in ("ConfigMap", "Secret") and body.get("type") == "Opaque":
+ if body["kind"] in {"ConfigMap", "Secret"} and body.get("type") == "Opaque":
  body.pop("type")

  if body["kind"] == "Secret":
@@ -444,7 +444,7 @@ class OpenshiftResource:
  tls.pop("key", None)
  tls.pop("certificate", None)
  subdomain = body["spec"].get("subdomain", None)
- if subdomain == "":
+ if not subdomain:
  body["spec"].pop("subdomain", None)

  if body["kind"] == "ServiceAccount":
@@ -497,8 +497,7 @@ class OpenshiftResource:
  if "namespace" in subject:
  subject.pop("namespace")
  if "apiGroup" in subject and (
- subject["apiGroup"] == ""
- or subject["apiGroup"] in body["apiVersion"]
+ not subject["apiGroup"] or subject["apiGroup"] in body["apiVersion"]
  ):
  subject.pop("apiGroup")
  # TODO: remove this once we have no 3.11 clusters
@@ -714,6 +713,6 @@ def build_secret(


  def base64_encode_secret_field_value(value: str) -> str:
- if value == "":
+ if not value:
  return ""
  return base64.b64encode(str(value).encode()).decode("utf-8")
reconcile/utils/parse_dhms_duration.py CHANGED
@@ -58,7 +58,7 @@ def dhms_to_seconds(time_str: str) -> int:
  if s.isnumeric():
  previous_number += s
  else:
- if previous_number == "":
+ if not previous_number:
  raise BadHDMSDurationError(f"Invalid time duration {time_str}")

  if s in HANDLE_UNIT_MAP:
reconcile/utils/runtime/sharding.py CHANGED
@@ -110,7 +110,7 @@ class StaticShardingStrategy:
  def create_sub_shards(
  base_shard: ShardSpec, sub_sharding: SubShardingV1
  ) -> list[ShardSpec]:
- if base_shard.shard_id != "" or base_shard.shards != "":
+ if base_shard.shard_id or base_shard.shards:
  raise ValueError(
  "Static sub_sharding can only be applied to Key based sharding"
  )
reconcile/utils/saasherder/saasherder.py CHANGED
@@ -1803,9 +1803,9 @@ class SaasHerder: # pylint: disable=too-many-public-methods
  )

  # add managed resource types to target config
- desired_target_config[
- "saas_file_managed_resource_types"
- ] = saas_file.managed_resource_types
+ desired_target_config["saas_file_managed_resource_types"] = (
+ saas_file.managed_resource_types
+ )
  desired_target_config["url"] = rt.url
  desired_target_config["path"] = rt.path
  # before the GQL classes are introduced, the parameters attribute
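
Several hunks in this release (here and in terraform_client.py, terrascript_aws_client.py, and qontract_cli.py) only re-wrap long assignments: instead of splitting the subscript across lines, the right-hand side is wrapped in parentheses. This appears to be a formatter-driven change (newer Black styles parenthesize long right-hand sides), so behaviour is identical. Schematically, on a made-up example:

```python
# Formatting-only change illustrated on a made-up dictionary assignment:
# the value moves into parentheses instead of the subscript being split.
config: dict[str, list[str]] = {}

# old wrapping
config[
    "managed_resource_types"
] = ["Deployment", "Service"]

# new wrapping
config["managed_resource_types"] = (
    ["Deployment", "Service"]
)

assert config["managed_resource_types"] == ["Deployment", "Service"]
```
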
@@ -2027,8 +2027,8 @@ class SaasHerder: # pylint: disable=too-many-public-methods
  @staticmethod
  def resolve_templated_parameters(saas_files: Iterable[SaasFile]) -> None:
  """Resolve templated target parameters in saas files."""
- from reconcile.openshift_resources_base import (
- compile_jinja2_template, # avoid circular import
+ from reconcile.openshift_resources_base import ( # noqa: PLC0415 - # avoid circular import
+ compile_jinja2_template,
  )

  for saas_file in saas_files:
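
The import stays inside the function body to break a circular import between this module and reconcile.openshift_resources_base; the added `# noqa: PLC0415` suppresses ruff's import-outside-top-level rule instead of moving the import. A runnable sketch of the pattern with invented module names:

```python
# Two throwaway modules are written to a temp dir to reproduce the cycle.
# mod_b imports a name from mod_a at load time, so mod_a defers its own
# import of mod_b to call time; a top-level import in mod_a would fail
# while mod_a is still initializing.
import sys
import tempfile
from pathlib import Path

tmp = Path(tempfile.mkdtemp())
(tmp / "mod_a.py").write_text(
    "def helper(x):\n"
    "    return x.upper()\n"
    "\n"
    "def render(x):\n"
    "    from mod_b import compile_template  # noqa: PLC0415 - avoid circular import\n"
    "    return compile_template(x)\n"
)
(tmp / "mod_b.py").write_text(
    "from mod_a import helper\n"
    "\n"
    "def compile_template(x):\n"
    "    return f'compiled {helper(x)}'\n"
)
sys.path.insert(0, str(tmp))

from mod_a import render  # noqa: E402

print(render("template"))  # compiled TEMPLATE
```
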
reconcile/utils/slack_api.py CHANGED
@@ -429,7 +429,7 @@ class SlackApi:

  cursor = result["response_metadata"]["next_cursor"]

- if cursor == "":
+ if not cursor:
  break

  additional_kwargs["cursor"] = cursor
@@ -478,7 +478,7 @@ class SlackApi:
  break

  cursor = response["response_metadata"]["next_cursor"]
- if cursor == "":
+ if not cursor:
  break

  return responses
reconcile/utils/terraform/config_client.py CHANGED
@@ -122,7 +122,7 @@ class TerraformConfigClientCollection:
  working_dirs[account_name] = client.dump()

  if print_to_file:
- with open(print_to_file, "a") as f:
+ with open(print_to_file, "a", encoding="locale") as f:
  f.write(f"##### {account_name} #####\n")
  f.write(client.dumps())
  f.write("\n")
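
The `encoding="locale"` argument added to these open() calls makes the previously implicit locale-dependent encoding explicit, which also avoids EncodingWarning when running with `python -X warn_default_encoding` (PEP 597). The `"locale"` pseudo-encoding only exists on Python 3.10+. A small sketch of the equivalent spellings, using a throwaway file:

```python
# Sketch: being explicit that "whatever the locale uses" is intended.
# encoding="locale" requires Python 3.10+; older interpreters have to spell
# it out via locale.getpreferredencoding(False).
import locale
import sys
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    path = Path(tmp) / "example.txt"
    if sys.version_info >= (3, 10):
        # PEP 597 pseudo-encoding, resolved to the current locale encoding
        path.write_text("hello\n", encoding="locale")
    else:
        # the explicit spelling that works on older interpreters
        path.write_text("hello\n", encoding=locale.getpreferredencoding(False))
    print(path.stat().st_size)
```
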
reconcile/utils/terraform_client.py CHANGED
@@ -439,9 +439,9 @@ class TerraformClient: # pylint: disable=too-many-public-methods
  replica_source_name = f'{replica_src}-{tf_resource.get("provider")}'
  # Creating a dict that is convenient to use inside the
  # loop processing the formatted_output
- replicas_info[spec.provisioner_name][
- spec.output_prefix
- ] = replica_source_name
+ replicas_info[spec.provisioner_name][spec.output_prefix] = (
+ replica_source_name
+ )

  return replicas_info

@@ -502,10 +502,10 @@ class TerraformClient: # pylint: disable=too-many-public-methods
  data[resource_name] = {}
  data[resource_name][field_key] = field_value

- if len(data) == 1 and type in (
+ if len(data) == 1 and type in {
  self.OUTPUT_TYPE_PASSWORDS,
  self.OUTPUT_TYPE_CONSOLEURLS,
- ):
+ }:
  return data[list(data.keys())[0]]
  return data

reconcile/utils/terrascript/cloudflare_client.py CHANGED
@@ -171,7 +171,9 @@ class TerrascriptCloudflareClient(TerraformConfigClient):
  working_dir = tempfile.mkdtemp(prefix=TMP_DIR_PREFIX)
  else:
  working_dir = existing_dir
- with open(working_dir + "/config.tf.json", "w") as terraform_config_file:
+ with open(
+ working_dir + "/config.tf.json", "w", encoding="locale"
+ ) as terraform_config_file:
  terraform_config_file.write(self.dumps())

  return working_dir
reconcile/utils/terrascript_aws_client.py CHANGED
@@ -603,10 +603,10 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  with self.jenkins_lock:
  # this may have already happened, so we check again
  if not self.jenkins_map.get(instance_name):
- self.jenkins_map[
- instance_name
- ] = JenkinsApi.init_jenkins_from_secret(
- SecretReader(self.settings), instance["token"]
+ self.jenkins_map[instance_name] = (
+ JenkinsApi.init_jenkins_from_secret(
+ SecretReader(self.settings), instance["token"]
+ )
  )
  return self.jenkins_map[instance_name]

@@ -1056,7 +1056,7 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  "Name": connection_name,
  },
  }
- if connection_provider in ["account-vpc", "account-vpc-mesh"]:
+ if connection_provider in {"account-vpc", "account-vpc-mesh"}:
  if self._multiregion_account(acc_account_name):
  values["provider"] = "aws." + accepter["region"]
  else:
@@ -1075,7 +1075,7 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  + identifier
  + ".id}",
  }
- if connection_provider in ["account-vpc", "account-vpc-mesh"]:
+ if connection_provider in {"account-vpc", "account-vpc-mesh"}:
  if self._multiregion_account(acc_account_name):
  values["provider"] = "aws." + accepter["region"]
  else:
@@ -1796,9 +1796,9 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  common_values.get("server_side_encryption_configuration")
  or DEFAULT_S3_SSE_CONFIGURATION
  )
- values[
- "server_side_encryption_configuration"
- ] = server_side_encryption_configuration
+ values["server_side_encryption_configuration"] = (
+ server_side_encryption_configuration
+ )
  # Support static website hosting [rosa-authenticator]
  website = common_values.get("website")
  if website:
@@ -2432,9 +2432,9 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  specs = common_values.get("specs")
  all_queues_per_spec = []
  kms_keys = set()
- for spec in specs:
- defaults = self.get_values(spec["defaults"])
- queues = spec.pop("queues", [])
+ for _spec in specs:
+ defaults = self.get_values(_spec["defaults"])
+ queues = _spec.pop("queues", [])
  all_queues = []
  for queue_kv in queues:
  queue_key = queue_kv["key"]
@@ -2624,10 +2624,10 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  region = common_values.get("region") or self.default_regions.get(account)
  specs = common_values.get("specs")
  all_tables = []
- for spec in specs:
- defaults = self.get_values(spec["defaults"])
+ for _spec in specs:
+ defaults = self.get_values(_spec["defaults"])
  attributes = defaults.pop("attributes")
- tables = spec["tables"]
+ tables = _spec["tables"]
  for table_kv in tables:
  table_key = table_kv["key"]
  table = table_kv["value"]
@@ -3604,9 +3604,9 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  "starting_position_timestamp", None
  )
  if not starting_position_timestamp:
- source_vaules[
- "starting_position_timestamp"
- ] = starting_position_timestamp
+ source_vaules["starting_position_timestamp"] = (
+ starting_position_timestamp
+ )

  batch_size = common_values.get("batch_size", 100)
  source_vaules["batch_size"] = batch_size
@@ -3796,7 +3796,7 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods

  for name, ts in self.tss.items():
  if print_to_file:
- with open(print_to_file, "a") as f:
+ with open(print_to_file, "a", encoding="locale") as f:
  f.write(f"##### {name} #####\n")
  f.write(str(ts))
  f.write("\n")
@@ -3804,7 +3804,7 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  wd = tempfile.mkdtemp(prefix=TMP_DIR_PREFIX)
  else:
  wd = working_dirs[name]
- with open(wd + "/config.tf.json", "w") as f:
+ with open(wd + "/config.tf.json", "w", encoding="locale") as f:
  f.write(str(ts))
  working_dirs[name] = wd

@@ -4353,19 +4353,19 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  auth_options = values.get("auth", {})
  # TODO: @fishi0x01 make mandatory after migration APPSRE-3409
  if auth_options:
- es_values[
- "advanced_security_options"
- ] = self._build_es_advanced_security_options(auth_options)
+ es_values["advanced_security_options"] = (
+ self._build_es_advanced_security_options(auth_options)
+ )

  # TODO: @fishi0x01 remove after migration APPSRE-3409
  # ++++++++ START: REMOVE +++++++++
  else:
  advanced_security_options = values.get("advanced_security_options", {})
  if advanced_security_options:
- es_values[
- "advanced_security_options"
- ] = self._build_es_advanced_security_options_deprecated(
- advanced_security_options
+ es_values["advanced_security_options"] = (
+ self._build_es_advanced_security_options_deprecated(
+ advanced_security_options
+ )
  )
  # ++++++++ END: REMOVE ++++++++++

@@ -4624,12 +4624,12 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  account = self.accounts[account_name]
  cluster = namespace_info["cluster"]
  ocm = ocm_map.get(cluster["name"])
- account[
- "assume_role"
- ] = ocm.get_aws_infrastructure_access_terraform_assume_role(
- cluster["name"],
- account["uid"],
- account["terraformUsername"],
+ account["assume_role"] = (
+ ocm.get_aws_infrastructure_access_terraform_assume_role(
+ cluster["name"],
+ account["uid"],
+ account["terraformUsername"],
+ )
  )
  account["assume_region"] = cluster["spec"]["region"]
  service_name = f"{namespace_info['name']}/{openshift_service}"
@@ -5198,9 +5198,9 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  if instance_requirements:
  override += [{"instance_requirements": instance_requirements}]
  if override:
- asg_value["mixed_instances_policy"]["launch_template"][
- "override"
- ] = override
+ asg_value["mixed_instances_policy"]["launch_template"]["override"] = (
+ override
+ )

  asg_value["tags"] = [
  {"key": k, "value": v, "propagate_at_launch": True} for k, v in tags.items()
@@ -5281,7 +5281,7 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  lambda_managed_policy_arn = (
  "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
  )
- if region in ("us-gov-west-1", "us-gov-east-1"):
+ if region in {"us-gov-west-1", "us-gov-east-1"}:
  lambda_managed_policy_arn = "arn:aws-us-gov:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
  vpc_id = common_values.get("vpc_id")
  subnet_ids = common_values.get("subnet_ids")
@@ -6219,9 +6219,9 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
  del values["logging_info"]["broker_logs"]["cloudwatch_logs"][
  "retention_in_days"
  ]
- values["logging_info"]["broker_logs"]["cloudwatch_logs"][
- "log_group"
- ] = log_group_tf_resource.name
+ values["logging_info"]["broker_logs"]["cloudwatch_logs"]["log_group"] = (
+ log_group_tf_resource.name
+ )

  # resource - secret manager for SCRAM client credentials
  if scram_enabled and scram_users:
reconcile/utils/three_way_diff_strategy.py CHANGED
@@ -95,7 +95,7 @@ def is_empty_env_value(current: OR, desired: OR, patch: Mapping[str, Any]) -> bo
  pointer = patch["path"]
  if (
  patch["op"] == "add"
- and patch["value"] == ""
+ and not patch["value"]
  and re.match(EMPTY_ENV_VALUE, pointer)
  ):
  return True
@@ -105,7 +105,7 @@ def is_empty_env_value(current: OR, desired: OR, patch: Mapping[str, Any]) -> bo

  def is_valid_change(current: OR, desired: OR, patch: Mapping[str, Any]) -> bool:
  # Only consider added or replaced values on the Desired object
- if patch["op"] not in ["add", "replace"]:
+ if patch["op"] not in {"add", "replace"}:
  return False

  # Check known mutations. Replaced values can happen if values have been
reconcile/utils/unleash.py CHANGED
@@ -65,7 +65,7 @@ class EnableClusterStrategy(ClusterStrategy):


  def _get_unleash_api_client(api_url: str, auth_head: str) -> UnleashClient:
- global client
+ global client # noqa: PLW0603
  with client_lock:
  if client is None:
  logging.getLogger("apscheduler").setLevel(logging.ERROR)
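
The `# noqa: PLW0603` suppresses ruff's global-statement rule: this module caches a lazily created UnleashClient at module level behind a lock, and rebinding that module-level name requires `global`. A generic sketch of the pattern with a stand-in client type:

```python
# Lazy, lock-guarded module-level singleton; object() stands in for an
# expensive client constructor.
import threading
from typing import Optional

client: Optional[object] = None
client_lock = threading.Lock()


def get_client() -> object:
    global client  # noqa: PLW0603 - the module-level cache is rebound here
    with client_lock:
        if client is None:
            client = object()
    return client


assert get_client() is get_client()  # same instance on every call
```
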
reconcile/utils/vault.py CHANGED
@@ -151,7 +151,7 @@ class _VaultClient:
  def _refresh_client_auth(self):
  if self.kube_auth_enabled:
  # must read each time to account for sa token refresh
- with open(self.kube_sa_token_path) as f:
+ with open(self.kube_sa_token_path, encoding="locale") as f:
  try:
  self._client.auth_kubernetes(
  role=self.kube_auth_role,
reconcile/vpc_peerings_validator.py CHANGED
@@ -58,10 +58,10 @@ def validate_no_cidr_overlap(
  "cidr_block": cidr_block,
  }
  peerings_entries.append(vpc_peering_info)
- if peering.provider in (
+ if peering.provider in {
  "cluster-vpc-requester",
  "cluster-vpc-accepter",
- ):
+ }:
  vpc_peering_info = {
  "provider": peering.provider,
  "vpc_name": peering.cluster.name, # type: ignore[union-attr]
@@ -101,10 +101,10 @@ def validate_no_internal_to_public_peerings(
  if not cluster.internal or not cluster.peering:
  continue
  for connection in cluster.peering.connections:
- if connection.provider not in [
+ if connection.provider not in {
  "cluster-vpc-accepter",
  "cluster-vpc-requester",
- ]:
+ }:
  continue
  connection = cast(
  Union[
@@ -144,10 +144,10 @@ def validate_no_public_to_public_peerings(
  ):
  continue
  for connection in cluster.peering.connections:
- if connection.provider not in [
+ if connection.provider not in {
  "cluster-vpc-accepter",
  "cluster-vpc-requester",
- ]:
+ }:
  continue
  connection = cast(
  Union[
release/version.py CHANGED
@@ -1,5 +1,6 @@
  #!/usr/bin/env python3

+ import locale
  import os
  import re
  import subprocess
@@ -25,13 +26,17 @@ def git() -> str:
  # tox is running setup.py sdist from the git repo, and then runs again outside
  # of the git repo. At this second step, we cannot run git commands.
  # So we save the git version in a file and include it in the source distribution
- with open(GIT_VERSION_FILE, "w") as f:
+ with open(
+ GIT_VERSION_FILE, "w", encoding=locale.getpreferredencoding(False)
+ ) as f:
  f.write(v)
  return v
  except subprocess.CalledProcessError as e:
  # if we're not in a git repo, try reading out from the GIT_VERSION file
  if os.path.exists(GIT_VERSION_FILE):
- with open(GIT_VERSION_FILE, "r") as f:
+ with open(
+ GIT_VERSION_FILE, "r", encoding=locale.getpreferredencoding(False)
+ ) as f:
  return f.read()
  print(e.stderr)
  raise e
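
Unlike the other files in this diff, version.py spells the encoding out as `locale.getpreferredencoding(False)` rather than using the `"locale"` shorthand; a plausible reason (an assumption, not stated in the diff) is that this script must keep working on interpreters older than Python 3.10, where `"locale"` is not a recognized encoding name. A minimal sketch of the same write/read round-trip with a stand-in filename:

```python
# Stand-in version-file round-trip using the explicit locale encoding.
import locale
from pathlib import Path
from tempfile import TemporaryDirectory

enc = locale.getpreferredencoding(False)

with TemporaryDirectory() as tmp:
    version_file = Path(tmp) / "GIT_VERSION"  # stand-in for the real file
    version_file.write_text("0.10.1rc462\n", encoding=enc)
    print(version_file.read_text(encoding=enc).strip())
```
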
tools/app_interface_reporter.py CHANGED
@@ -387,9 +387,9 @@ def get_build_history(job):

  def get_build_history_pool(jenkins_map, jobs, timestamp_limit, thread_pool_size):
  history_to_get = []
- for instance, jobs in jobs.items():
+ for instance, _jobs in jobs.items():
  jenkins = jenkins_map[instance]
- for job in jobs:
+ for job in _jobs:
  job["jenkins"] = jenkins
  job["timestamp_limit"] = timestamp_limit
  history_to_get.append(job)
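
Renaming the loop targets to `_jobs` stops the loop variable from shadowing the `jobs` mapping it iterates over (the pattern flake8-bugbear flags as B020). The old code happened to work because `jobs.items()` is evaluated once up front, but the rename keeps the two meanings of the name apart. A contrived sketch of the hazard:

```python
# The loop target rebinds the name of the mapping being iterated. Iteration
# still completes, but any later use of `jobs` sees a per-instance list, not
# the dict.
jobs = {"jenkins-a": [{"name": "build"}], "jenkins-b": [{"name": "deploy"}]}

for instance, jobs in jobs.items():  # B020-style shadowing
    pass

print(type(jobs))  # <class 'list'> - the dict binding is gone

# The renamed version keeps both names distinct:
jobs = {"jenkins-a": [{"name": "build"}], "jenkins-b": [{"name": "deploy"}]}
for instance, _jobs in jobs.items():
    for job in _jobs:
        print(instance, job["name"])
```
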
@@ -443,7 +443,7 @@ def main(
  except FileExistsError:
  pass

- with open(report_file, "w") as f:
+ with open(report_file, "w", encoding="locale") as f:
  f.write(report["content"])

  if not dry_run:
tools/cli_commands/gpg_encrypt.py CHANGED
@@ -100,7 +100,7 @@ class GPGEncryptCommand:
  return GPGEncryptCommand._format(secret)

  def _fetch_local_file_secret(self) -> str:
- with open(self._command_data.secret_file_path) as f:
+ with open(self._command_data.secret_file_path, encoding="locale") as f:
  return f.read()

  def _fetch_secret(self) -> str:
@@ -140,7 +140,7 @@ class GPGEncryptCommand:
  if not output:
  print(content)
  return
- with open(output, "w") as f:
+ with open(output, "w", encoding="locale") as f:
  f.write(content)

  def execute(self):
tools/qontract_cli.py CHANGED
@@ -1,4 +1,5 @@
  #!/usr/bin/env python3
+ # ruff: noqa: PLC0415 - `import` should be at the top-level of a file

  import base64
  import json
@@ -1781,10 +1782,10 @@ def app_interface_review_queue(ctx) -> None:
  continue
  if len(mr.commits()) == 0:
  continue
- if mr.merge_status in [
+ if mr.merge_status in {
  MRStatus.CANNOT_BE_MERGED,
  MRStatus.CANNOT_BE_MERGED_RECHECK,
- ]:
+ }:
  continue

  labels = mr.attributes.get("labels")
@@ -2207,9 +2208,9 @@ def slo_document_services(ctx, status_board_instance):
  print(f"Status-board instance '{status_board_instance}' not found.")
  sys.exit(1)

- desired_product_apps: dict[
- str, set[str]
- ] = StatusBoardExporterIntegration.get_product_apps(sb)
+ desired_product_apps: dict[str, set[str]] = (
+ StatusBoardExporterIntegration.get_product_apps(sb)
+ )

  slodocs = []
  for slodoc in get_slo_documents():
@@ -2620,7 +2621,7 @@ def alert_to_receiver(
  @click.option("--env-name", default=None, help="environment to use for parameters.")
  @click.pass_context
  def saas_dev(ctx, app_name=None, saas_file_name=None, env_name=None) -> None:
- if env_name in [None, ""]:
+ if not env_name:
  print("env-name must be defined")
  return
  saas_files = get_saas_files(saas_file_name, env_name, app_name)