qontract-reconcile 0.10.2.dev256__py3-none-any.whl → 0.10.2.dev258__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev258.dist-info}/METADATA +1 -1
  2. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev258.dist-info}/RECORD +96 -95
  3. reconcile/aus/advanced_upgrade_service.py +1 -1
  4. reconcile/aus/base.py +2 -2
  5. reconcile/aus/version_gates/sts_version_gate_handler.py +2 -2
  6. reconcile/aws_account_manager/reconciler.py +22 -20
  7. reconcile/aws_iam_keys.py +5 -5
  8. reconcile/aws_iam_password_reset.py +5 -5
  9. reconcile/aws_saml_roles/integration.py +5 -5
  10. reconcile/aws_version_sync/integration.py +4 -3
  11. reconcile/cli.py +16 -12
  12. reconcile/closedbox_endpoint_monitoring_base.py +1 -0
  13. reconcile/database_access_manager.py +4 -4
  14. reconcile/dynatrace_token_provider/integration.py +2 -2
  15. reconcile/external_resources/manager.py +2 -2
  16. reconcile/external_resources/model.py +1 -1
  17. reconcile/external_resources/secrets_sync.py +2 -2
  18. reconcile/gabi_authorized_users.py +3 -3
  19. reconcile/github_org.py +2 -2
  20. reconcile/gitlab_housekeeping.py +1 -1
  21. reconcile/gitlab_mr_sqs_consumer.py +1 -1
  22. reconcile/glitchtip/integration.py +2 -2
  23. reconcile/jenkins_worker_fleets.py +5 -5
  24. reconcile/ldap_groups/integration.py +3 -3
  25. reconcile/ocm_clusters.py +2 -2
  26. reconcile/ocm_internal_notifications/integration.py +2 -2
  27. reconcile/ocm_labels/integration.py +3 -2
  28. reconcile/openshift_base.py +12 -11
  29. reconcile/openshift_cluster_bots.py +2 -2
  30. reconcile/openshift_resources_base.py +3 -3
  31. reconcile/openshift_rhcs_certs.py +2 -2
  32. reconcile/openshift_saas_deploy.py +1 -1
  33. reconcile/quay_membership.py +4 -4
  34. reconcile/rhidp/common.py +3 -2
  35. reconcile/run_integration.py +7 -4
  36. reconcile/saas_auto_promotions_manager/dependencies.py +95 -0
  37. reconcile/saas_auto_promotions_manager/integration.py +85 -165
  38. reconcile/skupper_network/integration.py +3 -3
  39. reconcile/slack_usergroups.py +4 -4
  40. reconcile/status_board.py +3 -3
  41. reconcile/terraform_cloudflare_dns.py +5 -5
  42. reconcile/terraform_cloudflare_users.py +15 -17
  43. reconcile/terraform_resources.py +6 -6
  44. reconcile/terraform_vpc_peerings.py +9 -9
  45. reconcile/unleash_feature_toggles/integration.py +1 -1
  46. reconcile/utils/aggregated_list.py +2 -2
  47. reconcile/utils/aws_api_typed/iam.py +2 -2
  48. reconcile/utils/aws_api_typed/organization.py +4 -4
  49. reconcile/utils/aws_api_typed/service_quotas.py +4 -4
  50. reconcile/utils/aws_api_typed/support.py +9 -9
  51. reconcile/utils/aws_helper.py +1 -1
  52. reconcile/utils/config.py +8 -4
  53. reconcile/utils/deadmanssnitch_api.py +2 -4
  54. reconcile/utils/glitchtip/models.py +18 -12
  55. reconcile/utils/gql.py +4 -4
  56. reconcile/utils/internal_groups/client.py +2 -2
  57. reconcile/utils/jinja2/utils.py +7 -3
  58. reconcile/utils/jjb_client.py +2 -2
  59. reconcile/utils/models.py +2 -1
  60. reconcile/utils/mr/__init__.py +3 -3
  61. reconcile/utils/mr/app_interface_reporter.py +2 -2
  62. reconcile/utils/mr/aws_access.py +5 -2
  63. reconcile/utils/mr/base.py +3 -3
  64. reconcile/utils/mr/user_maintenance.py +1 -1
  65. reconcile/utils/oc.py +11 -11
  66. reconcile/utils/oc_connection_parameters.py +4 -4
  67. reconcile/utils/ocm/base.py +3 -3
  68. reconcile/utils/ocm/products.py +8 -8
  69. reconcile/utils/ocm/search_filters.py +2 -2
  70. reconcile/utils/openshift_resource.py +21 -18
  71. reconcile/utils/pagerduty_api.py +5 -5
  72. reconcile/utils/quay_api.py +2 -2
  73. reconcile/utils/rosa/rosa_cli.py +1 -1
  74. reconcile/utils/rosa/session.py +2 -2
  75. reconcile/utils/runtime/desired_state_diff.py +7 -7
  76. reconcile/utils/saasherder/interfaces.py +1 -0
  77. reconcile/utils/saasherder/models.py +1 -1
  78. reconcile/utils/saasherder/saasherder.py +1 -1
  79. reconcile/utils/secret_reader.py +20 -20
  80. reconcile/utils/slack_api.py +5 -5
  81. reconcile/utils/slo_document_manager.py +6 -6
  82. reconcile/utils/state.py +8 -8
  83. reconcile/utils/terraform_client.py +3 -3
  84. reconcile/utils/terrascript/cloudflare_client.py +2 -2
  85. reconcile/utils/terrascript/cloudflare_resources.py +1 -0
  86. reconcile/utils/terrascript_aws_client.py +12 -11
  87. reconcile/utils/vault.py +22 -22
  88. reconcile/vault_replication.py +15 -15
  89. tools/cli_commands/erv2.py +3 -2
  90. tools/cli_commands/gpg_encrypt.py +9 -9
  91. tools/cli_commands/systems_and_tools.py +1 -1
  92. tools/qontract_cli.py +13 -14
  93. tools/saas_promotion_state/saas_promotion_state.py +4 -4
  94. tools/template_validation.py +5 -5
  95. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev258.dist-info}/WHEEL +0 -0
  96. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev258.dist-info}/entry_points.txt +0 -0
@@ -33,12 +33,12 @@ __all__ = [
33
33
  "MergeRequestProcessingError",
34
34
  "PromoteQontractReconcileCommercial",
35
35
  "PromoteQontractSchemas",
36
- "UnknownMergeRequestType",
36
+ "UnknownMergeRequestTypeError",
37
37
  "init_from_sqs_message",
38
38
  ]
39
39
 
40
40
 
41
- class UnknownMergeRequestType(Exception):
41
+ class UnknownMergeRequestTypeError(Exception):
42
42
  """
43
43
  Used when the message type from the SQS message is unknown
44
44
  """
@@ -68,7 +68,7 @@ def init_from_sqs_message(message) -> MergeRequestBase:
68
68
  # and fail early if that type is not on the map.
69
69
  msg_type = message.pop("pr_type")
70
70
  if msg_type not in types_map:
71
- raise UnknownMergeRequestType(f"type {msg_type} no supported")
71
+ raise UnknownMergeRequestTypeError(f"type {msg_type} no supported")
72
72
 
73
73
  # Finally, get the class mapped to the type
74
74
  # and create an instance with all the remaining
@@ -1,7 +1,7 @@
1
1
  from datetime import datetime
2
2
  from pathlib import Path
3
3
 
4
- from ruamel.yaml.scalarstring import PreservedScalarString as pss
4
+ from ruamel.yaml.scalarstring import PreservedScalarString
5
5
 
6
6
  from reconcile.utils.gitlab_api import GitLabApi
7
7
  from reconcile.utils.mr.base import (
@@ -50,7 +50,7 @@ class CreateAppInterfaceReporter(MergeRequestBase):
50
50
  name=f"app-interface-reporter-{self.ts}",
51
51
  subject=self.title,
52
52
  aliases=["all-service-owners"],
53
- body=pss(self.email_body),
53
+ body=PreservedScalarString(self.email_body),
54
54
  )
55
55
 
56
56
  email_path = Path("data") / "app-interface" / "emails" / f"{self.ts}.yml"
@@ -2,7 +2,7 @@ from pathlib import Path
2
2
 
3
3
  from jinja2 import Template
4
4
  from ruamel import yaml
5
- from ruamel.yaml.scalarstring import PreservedScalarString as pss
5
+ from ruamel.yaml.scalarstring import PreservedScalarString
6
6
 
7
7
  from reconcile.utils.constants import PROJ_ROOT
8
8
  from reconcile.utils.gitlab_api import GitLabApi
@@ -68,7 +68,10 @@ class CreateDeleteAwsAccessKey(MergeRequestBase):
68
68
  email_name = f"{self.account}-{self.key}"
69
69
  ref = self.path.removeprefix("data")
70
70
  content = app_interface_email(
71
- name=email_name, subject=self.title, aws_accounts=[ref], body=pss(body)
71
+ name=email_name,
72
+ subject=self.title,
73
+ aws_accounts=[ref],
74
+ body=PreservedScalarString(body),
72
75
  )
73
76
 
74
77
  email_path = Path("data") / "app-interface" / "emails" / f"{email_name}.yml"
@@ -23,7 +23,7 @@ EMAIL_TEMPLATE = PROJ_ROOT / "templates" / "email.yml.j2"
23
23
  LOG = logging.getLogger(__name__)
24
24
 
25
25
 
26
- class CancelMergeRequest(Exception):
26
+ class CancelMergeRequestError(Exception):
27
27
  """
28
28
  Used when the Merge Request processing is canceled.
29
29
  """
@@ -64,7 +64,7 @@ class MergeRequestBase(ABC):
64
64
 
65
65
  def cancel(self, message: str) -> None:
66
66
  self.cancelled = True
67
- raise CancelMergeRequest(
67
+ raise CancelMergeRequestError(
68
68
  f"{self.name} MR canceled for branch {self.branch}. Reason: {message}"
69
69
  )
70
70
 
@@ -185,7 +185,7 @@ class MergeRequestBase(ABC):
185
185
  return gitlab_cli.project.mergerequests.create(
186
186
  self.gitlab_data(target_branch=gitlab_cli.main_branch)
187
187
  )
188
- except CancelMergeRequest as mr_cancel:
188
+ except CancelMergeRequestError as mr_cancel:
189
189
  # cancellation is a valid behaviour. it indicates, that the
190
190
  # operation is not required, therefore we will not signal
191
191
  # a problem back to the caller
@@ -24,7 +24,7 @@ class PathSpec(BaseModel):
24
24
  path: str
25
25
 
26
26
  @validator("path")
27
- def prepend_data_to_path(cls, v):
27
+ def prepend_data_to_path(cls, v): # noqa: N805
28
28
  return "data" + v
29
29
 
30
30
 
reconcile/utils/oc.py CHANGED
@@ -56,7 +56,7 @@ from reconcile.utils.metrics import reconcile_time
56
56
  from reconcile.utils.oc_connection_parameters import OCConnectionParameters
57
57
  from reconcile.utils.openshift_resource import OpenshiftResource as OR
58
58
  from reconcile.utils.secret_reader import (
59
- SecretNotFound,
59
+ SecretNotFoundError,
60
60
  SecretReader,
61
61
  )
62
62
  from reconcile.utils.unleash import get_feature_toggle_state
@@ -105,7 +105,7 @@ class UnsupportedMediaTypeError(Exception):
105
105
  pass
106
106
 
107
107
 
108
- class StatefulSetUpdateForbidden(Exception):
108
+ class StatefulSetUpdateForbiddenError(Exception):
109
109
  pass
110
110
 
111
111
 
@@ -125,7 +125,7 @@ class RecyclePodsUnsupportedKindError(Exception):
125
125
  pass
126
126
 
127
127
 
128
- class RecyclePodsInvalidAnnotationValue(Exception):
128
+ class RecyclePodsInvalidAnnotationValueError(Exception):
129
129
  pass
130
130
 
131
131
 
@@ -551,19 +551,19 @@ class OCCli: # pylint: disable=too-many-public-methods
551
551
  @OCDecorators.process_reconcile_time
552
552
  def apply(self, namespace, resource):
553
553
  cmd = ["apply", "-n", namespace, "-f", "-"]
554
- self._run(cmd, stdin=resource.toJSON(), apply=True)
554
+ self._run(cmd, stdin=resource.to_json(), apply=True)
555
555
  return self._msg_to_process_reconcile_time(namespace, resource)
556
556
 
557
557
  @OCDecorators.process_reconcile_time
558
558
  def create(self, namespace, resource):
559
559
  cmd = ["create", "-n", namespace, "-f", "-"]
560
- self._run(cmd, stdin=resource.toJSON(), apply=True)
560
+ self._run(cmd, stdin=resource.to_json(), apply=True)
561
561
  return self._msg_to_process_reconcile_time(namespace, resource)
562
562
 
563
563
  @OCDecorators.process_reconcile_time
564
564
  def replace(self, namespace, resource):
565
565
  cmd = ["replace", "-n", namespace, "-f", "-"]
566
- self._run(cmd, stdin=resource.toJSON(), apply=True)
566
+ self._run(cmd, stdin=resource.to_json(), apply=True)
567
567
  return self._msg_to_process_reconcile_time(namespace, resource)
568
568
 
569
569
  @OCDecorators.process_reconcile_time
@@ -902,7 +902,7 @@ class OCCli: # pylint: disable=too-many-public-methods
902
902
  dep_annotations = dep_resource.body["metadata"].get("annotations") or {}
903
903
  qontract_recycle = dep_annotations.get("qontract.recycle")
904
904
  if qontract_recycle is True:
905
- raise RecyclePodsInvalidAnnotationValue('should be "true"')
905
+ raise RecyclePodsInvalidAnnotationValueError('should be "true"')
906
906
  if qontract_recycle != "true":
907
907
  logging.debug([
908
908
  "skipping_pod_recycle_no_annotation",
@@ -1119,7 +1119,7 @@ class OCCli: # pylint: disable=too-many-public-methods
1119
1119
  if "UnsupportedMediaType" in err:
1120
1120
  raise UnsupportedMediaTypeError(f"[{self.server}]: {err}")
1121
1121
  if "updates to statefulset spec for fields other than" in err:
1122
- raise StatefulSetUpdateForbidden(f"[{self.server}]: {err}")
1122
+ raise StatefulSetUpdateForbiddenError(f"[{self.server}]: {err}")
1123
1123
  if "the object has been modified" in err:
1124
1124
  raise ObjectHasBeenModifiedError(f"[{self.server}]: {err}")
1125
1125
  if "Request entity too large" in err:
@@ -1468,7 +1468,7 @@ class OC:
1468
1468
  )
1469
1469
 
1470
1470
 
1471
- class OC_Map:
1471
+ class OC_Map: # noqa: N801
1472
1472
  """
1473
1473
  DEPRECATED! Use reconcile.utils.oc_map.OCMap instead.
1474
1474
 
@@ -1612,7 +1612,7 @@ class OC_Map:
1612
1612
 
1613
1613
  try:
1614
1614
  token_secret = secret_reader.read_all(automation_token)
1615
- except SecretNotFound:
1615
+ except SecretNotFoundError:
1616
1616
  self.set_oc(
1617
1617
  cluster,
1618
1618
  OCLogMsg(
@@ -1714,7 +1714,7 @@ class OC_Map:
1714
1714
  oc.cleanup()
1715
1715
 
1716
1716
 
1717
- class OCLogMsg(Exception):
1717
+ class OCLogMsg(Exception): # noqa: N818
1718
1718
  """
1719
1719
  Track log messages associated with initializing OC clients in OC_Map.
1720
1720
  """
@@ -12,7 +12,7 @@ from sretoolbox.utils import threaded
12
12
 
13
13
  from reconcile.utils.secret_reader import (
14
14
  HasSecret,
15
- SecretNotFound,
15
+ SecretNotFoundError,
16
16
  SecretReaderBase,
17
17
  )
18
18
 
@@ -142,7 +142,7 @@ class OCConnectionParameters:
142
142
  cluster,
143
143
  )
144
144
  )
145
- except SecretNotFound:
145
+ except SecretNotFoundError:
146
146
  logging.error(
147
147
  f"[{cluster.name}] admin token {cluster.cluster_admin_automation_token} not found"
148
148
  )
@@ -157,7 +157,7 @@ class OCConnectionParameters:
157
157
  automation_token = OCConnectionParameters._get_automation_token(
158
158
  secret_reader, cluster.automation_token, cluster
159
159
  )
160
- except SecretNotFound:
160
+ except SecretNotFoundError:
161
161
  logging.error(
162
162
  f"[{cluster.name}] automation token {cluster.automation_token} not found"
163
163
  )
@@ -186,7 +186,7 @@ class OCConnectionParameters:
186
186
 
187
187
  try:
188
188
  jumphost_key = secret_reader.read_secret(cluster.jump_host.identity)
189
- except SecretNotFound as e:
189
+ except SecretNotFoundError as e:
190
190
  logging.error(
191
191
  f"[{cluster.name}] jumphost secret {cluster.jump_host.identity} not found"
192
192
  )
@@ -174,11 +174,11 @@ class OCMClusterAWSSettings(BaseModel):
174
174
 
175
175
  @property
176
176
  def account_role_prefix(self) -> str | None:
177
- INSTALLER_ROLE_BASE_NAME = "-Installer-Role"
177
+ installer_role_base_name = "-Installer-Role"
178
178
  installer_role_arn = self.sts.role_arn if self.sts else None
179
- if installer_role_arn and installer_role_arn.endswith(INSTALLER_ROLE_BASE_NAME):
179
+ if installer_role_arn and installer_role_arn.endswith(installer_role_base_name):
180
180
  installer_role_name = get_role_name_from_arn(installer_role_arn)
181
- return installer_role_name.removesuffix(INSTALLER_ROLE_BASE_NAME)
181
+ return installer_role_name.removesuffix(installer_role_base_name)
182
182
  return None
183
183
 
184
184
  @property
@@ -23,7 +23,7 @@ from reconcile.ocm.types import (
23
23
  from reconcile.utils.exceptions import ParameterError
24
24
  from reconcile.utils.ocm.clusters import get_provisioning_shard_id
25
25
  from reconcile.utils.ocm_base_client import OCMBaseClient
26
- from reconcile.utils.rosa.rosa_cli import RosaCliException
26
+ from reconcile.utils.rosa.rosa_cli import RosaCliError
27
27
  from reconcile.utils.rosa.session import RosaSessionBuilder
28
28
 
29
29
  CS_API_BASE = "/api/clusters_mgmt"
@@ -61,7 +61,7 @@ OCM_PRODUCT_ROSA = "rosa"
61
61
  OCM_PRODUCT_HYPERSHIFT = "hypershift"
62
62
 
63
63
 
64
- class OCMValidationException(Exception):
64
+ class OCMValidationError(Exception):
65
65
  pass
66
66
 
67
67
 
@@ -216,7 +216,7 @@ class OCMProductOsd(OCMProduct):
216
216
  None,
217
217
  )
218
218
  if default_machine_pool is None:
219
- raise OCMValidationException(
219
+ raise OCMValidationError(
220
220
  f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
221
221
  )
222
222
 
@@ -350,10 +350,10 @@ class OCMProductRosa(OCMProduct):
350
350
  )
351
351
  logging.info("cluster creation kicked off...")
352
352
  result.write_logs_to_logger(logging.info)
353
- except RosaCliException as e:
353
+ except RosaCliError as e:
354
354
  logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
355
355
  e.cleanup()
356
- raise OCMValidationException(
356
+ raise OCMValidationError(
357
357
  f"last 10 lines from failed cluster creation job...\n\n{logs}"
358
358
  ) from None
359
359
 
@@ -459,7 +459,7 @@ class OCMProductRosa(OCMProduct):
459
459
  None,
460
460
  )
461
461
  if default_machine_pool is None:
462
- raise OCMValidationException(
462
+ raise OCMValidationError(
463
463
  f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
464
464
  )
465
465
 
@@ -625,10 +625,10 @@ class OCMProductHypershift(OCMProduct):
625
625
  )
626
626
  logging.info("cluster creation kicked off...")
627
627
  result.write_logs_to_logger(logging.info)
628
- except RosaCliException as e:
628
+ except RosaCliError as e:
629
629
  logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
630
630
  e.cleanup()
631
- raise OCMValidationException(
631
+ raise OCMValidationError(
632
632
  f"last 10 lines from failed cluster creation job...\n\n{logs}"
633
633
  ) from None
634
634
 
@@ -182,7 +182,7 @@ class InvalidFilterError(Exception):
182
182
  pass
183
183
 
184
184
 
185
- class InvalidChunkRequest(Exception):
185
+ class InvalidChunkRequestError(Exception):
186
186
  """
187
187
  Is raised for various reasons, when a chunk request on a filter is invalid
188
188
  """
@@ -344,7 +344,7 @@ class Filter:
344
344
  if ignore_missing:
345
345
  return [self]
346
346
 
347
- raise InvalidChunkRequest(
347
+ raise InvalidChunkRequestError(
348
348
  f"cannot chunk by {key} because it is not a list condition"
349
349
  )
350
350
 
@@ -375,7 +375,7 @@ class OpenshiftResource:
375
375
  annotations = body["metadata"]["annotations"]
376
376
  return annotations["qontract.sha256sum"]
377
377
 
378
- def toJSON(self):
378
+ def to_json(self):
379
379
  return self.serialize(self.body)
380
380
 
381
381
  @staticmethod
@@ -445,13 +445,12 @@ class OpenshiftResource:
445
445
  if body["kind"] == "ServiceAccount":
446
446
  if "imagePullSecrets" in body:
447
447
  # remove default pull secrets added by k8s
448
- imagePullSecrets = [
448
+ if imagepullsecrets := [
449
449
  s
450
450
  for s in body.pop("imagePullSecrets")
451
451
  if "-dockercfg-" not in s["name"]
452
- ]
453
- if imagePullSecrets:
454
- body["imagePullSecrets"] = imagePullSecrets
452
+ ]:
453
+ body["imagePullSecrets"] = imagepullsecrets
455
454
  if "secrets" in body:
456
455
  body.pop("secrets")
457
456
 
@@ -478,13 +477,15 @@ class OpenshiftResource:
478
477
  if "userNames" in body:
479
478
  body.pop("userNames")
480
479
  if "roleRef" in body:
481
- roleRef = body["roleRef"]
482
- if "namespace" in roleRef:
483
- roleRef.pop("namespace")
484
- if "apiGroup" in roleRef and roleRef["apiGroup"] in body["apiVersion"]:
485
- roleRef.pop("apiGroup")
486
- if "kind" in roleRef:
487
- roleRef.pop("kind")
480
+ if "namespace" in body["roleRef"]:
481
+ body["roleRef"].pop("namespace")
482
+ if (
483
+ "apiGroup" in body["roleRef"]
484
+ and body["roleRef"]["apiGroup"] in body["apiVersion"]
485
+ ):
486
+ body["roleRef"].pop("apiGroup")
487
+ if "kind" in body["roleRef"]:
488
+ body["roleRef"].pop("kind")
488
489
  for subject in body["subjects"]:
489
490
  if "namespace" in subject:
490
491
  subject.pop("namespace")
@@ -497,11 +498,13 @@ class OpenshiftResource:
497
498
  if "userNames" in body:
498
499
  body.pop("userNames")
499
500
  if "roleRef" in body:
500
- roleRef = body["roleRef"]
501
- if "apiGroup" in roleRef and roleRef["apiGroup"] in body["apiVersion"]:
502
- roleRef.pop("apiGroup")
503
- if "kind" in roleRef:
504
- roleRef.pop("kind")
501
+ if (
502
+ "apiGroup" in body["roleRef"]
503
+ and body["roleRef"]["apiGroup"] in body["apiVersion"]
504
+ ):
505
+ body["roleRef"].pop("apiGroup")
506
+ if "kind" in body["roleRef"]:
507
+ body["roleRef"].pop("kind")
505
508
  if "groupNames" in body:
506
509
  body.pop("groupNames")
507
510
  if body["kind"] == "Service":
@@ -532,7 +535,7 @@ class OpenshiftResource:
532
535
 
533
536
  def fully_qualified_kind(kind: str, api_version: str) -> str:
534
537
  if "/" in api_version:
535
- group = api_version.split("/")[0]
538
+ group = api_version.split("/")[0] # noqa: PLC0207
536
539
  return f"{kind}.{group}"
537
540
  return kind
538
541
 
@@ -20,11 +20,11 @@ from reconcile.utils.secret_reader import (
20
20
  )
21
21
 
22
22
 
23
- class PagerDutyTargetException(Exception):
23
+ class PagerDutyTargetError(Exception):
24
24
  """This exception is raised when PagerDutyTarget is not configured correctly."""
25
25
 
26
26
 
27
- class PagerDutyApiException(Exception):
27
+ class PagerDutyApiError(Exception):
28
28
  """This exception is raised when PagerDuty API call fails."""
29
29
 
30
30
 
@@ -89,7 +89,7 @@ class PagerDutyApi:
89
89
  users = self.get_escalation_policy_users(resource_id, now)
90
90
  except requests.exceptions.HTTPError as e:
91
91
  logging.error(str(e))
92
- raise PagerDutyApiException(str(e)) from e
92
+ raise PagerDutyApiError(str(e)) from e
93
93
 
94
94
  return users
95
95
 
@@ -189,7 +189,7 @@ def get_pagerduty_name(user: PagerDutyUser) -> str:
189
189
  return user.pagerduty_username or user.org_username
190
190
 
191
191
 
192
- @retry(no_retry_exceptions=PagerDutyTargetException)
192
+ @retry(no_retry_exceptions=PagerDutyTargetError)
193
193
  def get_usernames_from_pagerduty(
194
194
  pagerduties: Iterable[PagerDutyTarget],
195
195
  users: Iterable[PagerDutyUser],
@@ -202,7 +202,7 @@ def get_usernames_from_pagerduty(
202
202
  all_pagerduty_names = [get_pagerduty_name(u) for u in users]
203
203
  for pagerduty in pagerduties:
204
204
  if pagerduty.schedule_id is None and pagerduty.escalation_policy_id is None:
205
- raise PagerDutyTargetException(
205
+ raise PagerDutyTargetError(
206
206
  f"pagerduty {pagerduty.name}: Either schedule_id or escalation_policy_id must be set!"
207
207
  )
208
208
  if pagerduty.schedule_id is not None:
@@ -1,7 +1,7 @@
1
1
  import requests
2
2
 
3
3
 
4
- class QuayTeamNotFoundException(Exception):
4
+ class QuayTeamNotFoundError(Exception):
5
5
  pass
6
6
 
7
7
 
@@ -34,7 +34,7 @@ class QuayApi:
34
34
 
35
35
  r = requests.get(url, headers=self.auth_header, timeout=self._timeout)
36
36
  if r.status_code == 404:
37
- raise QuayTeamNotFoundException(
37
+ raise QuayTeamNotFoundError(
38
38
  f"team {team} is not found in "
39
39
  f"org {self.organization}. "
40
40
  f"contact org owner to create the "
@@ -91,7 +91,7 @@ class RosaCliResult:
91
91
  self.log_handle.cleanup()
92
92
 
93
93
 
94
- class RosaCliException(Exception, RosaCliResult):
94
+ class RosaCliError(Exception, RosaCliResult):
95
95
  """
96
96
  Represents an exception that occurred during a ROSA CLI execution.
97
97
  """
@@ -11,7 +11,7 @@ from reconcile.utils.jobcontroller.models import JobConcurrencyPolicy, JobStatus
11
11
  from reconcile.utils.ocm_base_client import OCMBaseClient
12
12
  from reconcile.utils.rosa.rosa_cli import (
13
13
  LogHandle,
14
- RosaCliException,
14
+ RosaCliError,
15
15
  RosaCliResult,
16
16
  RosaJob,
17
17
  )
@@ -106,7 +106,7 @@ class RosaSession:
106
106
  log_dir = tempfile.mkdtemp()
107
107
  log_file_name = self.job_controller.store_job_logs(job.name(), log_dir)
108
108
  if status != JobStatus.SUCCESS:
109
- raise RosaCliException(status, cmd, LogHandle(log_file_name))
109
+ raise RosaCliError(status, cmd, LogHandle(log_file_name))
110
110
  return RosaCliResult(status, cmd, LogHandle(log_file_name))
111
111
 
112
112
  def create_hcp_cluster(
@@ -97,13 +97,13 @@ def _extract_diffs_task(
97
97
  return_value[EXTRACT_TASK_RESULT_KEY_ERROR] = e
98
98
 
99
99
 
100
- class DiffDetectionTimeout(Exception):
100
+ class DiffDetectionTimeoutError(Exception):
101
101
  """
102
102
  Raised when the fine grained diff detection takes too long.
103
103
  """
104
104
 
105
105
 
106
- class DiffDetectionFailure(Exception):
106
+ class DiffDetectionFailureError(Exception):
107
107
  """
108
108
  Raised when the fine grained diff detection fails.
109
109
  """
@@ -155,20 +155,20 @@ def extract_diffs_with_timeout(
155
155
  )
156
156
  process.terminate()
157
157
  process.join()
158
- raise DiffDetectionTimeout()
158
+ raise DiffDetectionTimeoutError()
159
159
 
160
160
  if EXTRACT_TASK_RESULT_KEY_DIFFS in result_value:
161
161
  return result_value[EXTRACT_TASK_RESULT_KEY_DIFFS]
162
162
 
163
163
  original_error = result_value.get(EXTRACT_TASK_RESULT_KEY_ERROR)
164
164
  if original_error:
165
- raise DiffDetectionFailure() from original_error
165
+ raise DiffDetectionFailureError() from original_error
166
166
 
167
167
  # not every error situation of the diff extraction process
168
168
  # will result in an exception. the lack of a result is an error
169
169
  # indicator as well. in those cases, we raise at least
170
170
  # a generic exception to indicate that something went wrong
171
- raise DiffDetectionFailure("unknown error during fine grained diff detection")
171
+ raise DiffDetectionFailureError("unknown error during fine grained diff detection")
172
172
 
173
173
 
174
174
  def build_desired_state_diff(
@@ -211,12 +211,12 @@ def build_desired_state_diff(
211
211
  ShardedRunProposal(proposed_shards=changed_shards)
212
212
  ):
213
213
  shards = changed_shards
214
- except DiffDetectionTimeout:
214
+ except DiffDetectionTimeoutError:
215
215
  logging.warning(
216
216
  f"unable to extract fine grained diffs for shard extraction "
217
217
  f"within {exract_diff_timeout_seconds} seconds. continue without sharding"
218
218
  )
219
- except DiffDetectionFailure as e:
219
+ except DiffDetectionFailureError as e:
220
220
  logging.warning(
221
221
  f"unable to extract fine grained diffs for shard extraction: {e}"
222
222
  )
@@ -1,3 +1,4 @@
1
+ # ruff: noqa: N801
1
2
  from __future__ import annotations
2
3
 
3
4
  from collections.abc import Mapping, Sequence, Set
@@ -271,7 +271,7 @@ class ImageAuth:
271
271
  auth_server: str | None = None
272
272
  docker_config: dict[str, dict[str, dict[str, str]]] | None = None
273
273
 
274
- def getDockerConfigJson(self) -> dict:
274
+ def get_docker_config_json(self) -> dict:
275
275
  if self.docker_config:
276
276
  return self.docker_config
277
277
  else:
@@ -726,7 +726,7 @@ class SaasHerder: # pylint: disable=too-many-public-methods
726
726
  saas_file: SaasFile,
727
727
  trigger_reason: str,
728
728
  ) -> tuple[str, str]:
729
- [url, sha] = trigger_reason.split(" ")[0].split("/commit/")
729
+ [url, sha] = trigger_reason.split(" ")[0].split("/commit/") # noqa: PLC0207
730
730
  repo_info = VCS.parse_repo_url(url)
731
731
  repo_name = repo_info.name
732
732
  file_name = f"{repo_name.replace('/', '-')}-{sha}.tar.gz"