qontract-reconcile 0.10.2.dev256__py3-none-any.whl → 0.10.2.dev257__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/METADATA +1 -1
  2. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/RECORD +94 -94
  3. reconcile/aus/advanced_upgrade_service.py +1 -1
  4. reconcile/aus/base.py +2 -2
  5. reconcile/aus/version_gates/sts_version_gate_handler.py +2 -2
  6. reconcile/aws_account_manager/reconciler.py +22 -20
  7. reconcile/aws_iam_keys.py +5 -5
  8. reconcile/aws_iam_password_reset.py +5 -5
  9. reconcile/aws_saml_roles/integration.py +5 -5
  10. reconcile/aws_version_sync/integration.py +4 -3
  11. reconcile/cli.py +5 -5
  12. reconcile/closedbox_endpoint_monitoring_base.py +1 -0
  13. reconcile/database_access_manager.py +4 -4
  14. reconcile/dynatrace_token_provider/integration.py +2 -2
  15. reconcile/external_resources/manager.py +2 -2
  16. reconcile/external_resources/model.py +1 -1
  17. reconcile/external_resources/secrets_sync.py +2 -2
  18. reconcile/gabi_authorized_users.py +3 -3
  19. reconcile/github_org.py +2 -2
  20. reconcile/gitlab_housekeeping.py +1 -1
  21. reconcile/gitlab_mr_sqs_consumer.py +1 -1
  22. reconcile/glitchtip/integration.py +2 -2
  23. reconcile/jenkins_worker_fleets.py +5 -5
  24. reconcile/ldap_groups/integration.py +3 -3
  25. reconcile/ocm_clusters.py +2 -2
  26. reconcile/ocm_internal_notifications/integration.py +2 -2
  27. reconcile/ocm_labels/integration.py +3 -2
  28. reconcile/openshift_base.py +12 -11
  29. reconcile/openshift_cluster_bots.py +2 -2
  30. reconcile/openshift_resources_base.py +3 -3
  31. reconcile/openshift_rhcs_certs.py +2 -2
  32. reconcile/openshift_saas_deploy.py +1 -1
  33. reconcile/quay_membership.py +4 -4
  34. reconcile/rhidp/common.py +3 -2
  35. reconcile/run_integration.py +7 -4
  36. reconcile/skupper_network/integration.py +3 -3
  37. reconcile/slack_usergroups.py +4 -4
  38. reconcile/status_board.py +3 -3
  39. reconcile/terraform_cloudflare_dns.py +5 -5
  40. reconcile/terraform_cloudflare_users.py +15 -17
  41. reconcile/terraform_resources.py +6 -6
  42. reconcile/terraform_vpc_peerings.py +9 -9
  43. reconcile/unleash_feature_toggles/integration.py +1 -1
  44. reconcile/utils/aggregated_list.py +2 -2
  45. reconcile/utils/aws_api_typed/iam.py +2 -2
  46. reconcile/utils/aws_api_typed/organization.py +4 -4
  47. reconcile/utils/aws_api_typed/service_quotas.py +4 -4
  48. reconcile/utils/aws_api_typed/support.py +9 -9
  49. reconcile/utils/aws_helper.py +1 -1
  50. reconcile/utils/config.py +8 -4
  51. reconcile/utils/deadmanssnitch_api.py +2 -4
  52. reconcile/utils/glitchtip/models.py +18 -12
  53. reconcile/utils/gql.py +4 -4
  54. reconcile/utils/internal_groups/client.py +2 -2
  55. reconcile/utils/jinja2/utils.py +7 -3
  56. reconcile/utils/jjb_client.py +2 -2
  57. reconcile/utils/models.py +2 -1
  58. reconcile/utils/mr/__init__.py +3 -3
  59. reconcile/utils/mr/app_interface_reporter.py +2 -2
  60. reconcile/utils/mr/aws_access.py +5 -2
  61. reconcile/utils/mr/base.py +3 -3
  62. reconcile/utils/mr/user_maintenance.py +1 -1
  63. reconcile/utils/oc.py +11 -11
  64. reconcile/utils/oc_connection_parameters.py +4 -4
  65. reconcile/utils/ocm/base.py +3 -3
  66. reconcile/utils/ocm/products.py +8 -8
  67. reconcile/utils/ocm/search_filters.py +2 -2
  68. reconcile/utils/openshift_resource.py +21 -18
  69. reconcile/utils/pagerduty_api.py +5 -5
  70. reconcile/utils/quay_api.py +2 -2
  71. reconcile/utils/rosa/rosa_cli.py +1 -1
  72. reconcile/utils/rosa/session.py +2 -2
  73. reconcile/utils/runtime/desired_state_diff.py +7 -7
  74. reconcile/utils/saasherder/interfaces.py +1 -0
  75. reconcile/utils/saasherder/models.py +1 -1
  76. reconcile/utils/saasherder/saasherder.py +1 -1
  77. reconcile/utils/secret_reader.py +20 -20
  78. reconcile/utils/slack_api.py +5 -5
  79. reconcile/utils/slo_document_manager.py +6 -6
  80. reconcile/utils/state.py +8 -8
  81. reconcile/utils/terraform_client.py +3 -3
  82. reconcile/utils/terrascript/cloudflare_client.py +2 -2
  83. reconcile/utils/terrascript/cloudflare_resources.py +1 -0
  84. reconcile/utils/terrascript_aws_client.py +12 -11
  85. reconcile/utils/vault.py +22 -22
  86. reconcile/vault_replication.py +15 -15
  87. tools/cli_commands/erv2.py +3 -2
  88. tools/cli_commands/gpg_encrypt.py +9 -9
  89. tools/cli_commands/systems_and_tools.py +1 -1
  90. tools/qontract_cli.py +13 -14
  91. tools/saas_promotion_state/saas_promotion_state.py +4 -4
  92. tools/template_validation.py +5 -5
  93. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/WHEEL +0 -0
  94. {qontract_reconcile-0.10.2.dev256.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/entry_points.txt +0 -0
reconcile/utils/aws_api_typed/support.py CHANGED
@@ -15,7 +15,7 @@ class AWSCase(BaseModel):
     status: str
 
 
-class SUPPORT_PLAN(Enum):
+class SupportPlan(Enum):
     BASIC = "basic"
     DEVELOPER = "developer"
     BUSINESS = "business"
@@ -53,27 +53,27 @@ class AWSApiSupport:
         case = self.client.describe_cases(caseIdList=[case_id])["cases"][0]
         return AWSCase(**case)
 
-    def get_support_level(self) -> SUPPORT_PLAN:
+    def get_support_level(self) -> SupportPlan:
         """Return the support level of the account."""
 
         try:
             response = self.client.describe_severity_levels(language="en")
         except self.client.exceptions.ClientError as err:
             if err.response["Error"]["Code"] == "SubscriptionRequiredException":
-                return SUPPORT_PLAN.BASIC
+                return SupportPlan.BASIC
             raise err
 
         severity_levels = {
             level["code"].lower() for level in response["severityLevels"]
         }
         if "critical" in severity_levels:
-            return SUPPORT_PLAN.ENTERPRISE
+            return SupportPlan.ENTERPRISE
         if "urgent" in severity_levels:
-            return SUPPORT_PLAN.BUSINESS
+            return SupportPlan.BUSINESS
         if "high" in severity_levels:
-            return SUPPORT_PLAN.BUSINESS
+            return SupportPlan.BUSINESS
         if "normal" in severity_levels:
-            return SUPPORT_PLAN.DEVELOPER
+            return SupportPlan.DEVELOPER
         if "low" in severity_levels:
-            return SUPPORT_PLAN.DEVELOPER
-        return SUPPORT_PLAN.BASIC
+            return SupportPlan.DEVELOPER
+        return SupportPlan.BASIC
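Note: the SUPPORT_PLAN to SupportPlan rename is cosmetic (CapWords class naming); the enum values and the severity-level mapping are unchanged, so callers only update the imported name. A minimal sketch of how the renamed API reads, assuming AWSApiSupport wraps a boto3 "support" client (its constructor is not shown in this diff):

# Sketch only: the AWSApiSupport constructor wiring below is an assumption;
# the enum and method names come from the hunk above.
import boto3

from reconcile.utils.aws_api_typed.support import AWSApiSupport, SupportPlan

support_client = boto3.client("support", region_name="us-east-1")  # AWS Support API lives in us-east-1
api = AWSApiSupport(support_client)  # assumed constructor signature
if api.get_support_level() is SupportPlan.BASIC:
    print("account has no paid AWS support plan")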
reconcile/utils/aws_helper.py CHANGED
@@ -27,7 +27,7 @@ def get_account_uid_from_arn(arn: str) -> str:
 
 def get_role_name_from_arn(arn: str) -> str:
     # arn:aws:iam::12345:role/role-1 --> role-1
-    return arn.split("/")[-1]
+    return arn.split("/")[-1]  # noqa: PLC0207
 
 
 def is_aws_managed_resource(arn: str) -> bool:
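The behavior of get_role_name_from_arn is unchanged; the added noqa only silences a lint suggestion about how split is used. A quick usage example taken straight from the inline comment in the hunk above:

# Usage example mirroring the comment above; the ARN value is illustrative.
from reconcile.utils.aws_helper import get_role_name_from_arn

assert get_role_name_from_arn("arn:aws:iam::12345:role/role-1") == "role-1"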
reconcile/utils/config.py CHANGED
@@ -6,11 +6,11 @@ import toml
 _config: dict = {}
 
 
-class ConfigNotFound(Exception):
+class ConfigNotFoundError(Exception):
     pass
 
 
-class SecretNotFound(Exception):
+class SecretNotFoundError(Exception):
     pass
 
 
@@ -38,7 +38,9 @@ def read(secret: Mapping[str, Any]) -> str:
             config = config[t]
         return config[field]
     except Exception as e:
-        raise SecretNotFound(f"key not found in config file {path}: {e!s}") from None
+        raise SecretNotFoundError(
+            f"key not found in config file {path}: {e!s}"
+        ) from None
 
 
 def read_all(secret: Mapping[str, Any]) -> dict:
@@ -50,4 +52,6 @@ def read_all(secret: Mapping[str, Any]) -> dict:
             config = config[t]
         return config
     except Exception as e:
-        raise SecretNotFound(f"secret {path} not found in config file: {e!s}") from None
+        raise SecretNotFoundError(
+            f"secret {path} not found in config file: {e!s}"
+        ) from None
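Callers that caught ConfigNotFound or SecretNotFound must switch to the new Error-suffixed names; the old names no longer exist. A minimal sketch of the caller-side change, where the "path"/"field" keys of the secret mapping are assumed from the error messages above rather than shown in this diff:

# Sketch only: the secret mapping keys are assumed; the renamed exception is
# the only thing this hunk actually changes.
from reconcile.utils import config

try:
    token = config.read({"path": "app-sre/example-creds", "field": "token"})
except config.SecretNotFoundError:
    token = None  # key or section missing in the local config file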
reconcile/utils/deadmanssnitch_api.py CHANGED
@@ -11,7 +11,7 @@ BASE_URL = "https://api.deadmanssnitch.com/v1/snitches"
 REQUEST_TIMEOUT = 60
 
 
-class DeadManssnitchException(Exception):
+class DeadManssnitchError(Exception):
     pass
 
 
@@ -61,9 +61,7 @@ class DeadMansSnitchApi:
 
     def create_snitch(self, payload: dict) -> Snitch:
         if payload.get("name") is None or payload.get("interval") is None:
-            raise DeadManssnitchException(
-                "Invalid payload,name and interval are mandatory"
-            )
+            raise DeadManssnitchError("Invalid payload,name and interval are mandatory")
         headers = {"Content-Type": "application/json"}
         logging.debug("Creating new snitch with name:: %s ", payload["name"])
         response = self.session.post(
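Only the exception name changes here; create_snitch still requires both "name" and "interval" in the payload. A sketch of the failure path, with the API constructor assumed since it is not part of this diff:

# Sketch only: DeadMansSnitchApi's constructor arguments are assumed; the
# payload validation and the renamed DeadManssnitchError come from the hunk above.
from reconcile.utils.deadmanssnitch_api import DeadManssnitchError, DeadMansSnitchApi

api = DeadMansSnitchApi(token="dummy-token")  # assumed constructor signature
try:
    api.create_snitch({"name": "cluster-liveness"})  # "interval" intentionally missing
except DeadManssnitchError as e:
    print(f"rejected payload: {e}")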
reconcile/utils/glitchtip/models.py CHANGED
@@ -48,8 +48,9 @@ class Team(BaseModel):
     users: list[User] = []
 
     @root_validator(pre=True)
-    def name_xor_slug_must_be_set(  # pylint: disable=no-self-argument
-        cls, values: MutableMapping[str, Any]
+    def name_xor_slug_must_be_set(
+        cls,  # noqa: N805
+        values: MutableMapping[str, Any],
     ) -> MutableMapping[str, Any]:
         assert ("name" in values or "slug" in values) and not (
             "name" in values and "slug" in values
@@ -57,8 +58,9 @@ class Team(BaseModel):
         return values
 
     @root_validator
-    def slugify(  # pylint: disable=no-self-argument
-        cls, values: MutableMapping[str, Any]
+    def slugify(
+        cls,  # noqa: N805
+        values: MutableMapping[str, Any],
     ) -> MutableMapping[str, Any]:
         values["slug"] = values.get("slug") or slugify(values.get("name", ""))
         values["name"] = slugify(values.get("name", "")) or values.get("slug")
@@ -96,8 +98,9 @@ class ProjectAlertRecipient(BaseModel):
         use_enum_values = True
 
     @validator("recipient_type")
-    def recipient_type_enforce_enum_type(  # pylint: disable=no-self-argument
-        cls, v: str | RecipientType
+    def recipient_type_enforce_enum_type(
+        cls,  # noqa: N805
+        v: str | RecipientType,
     ) -> RecipientType:
         if isinstance(v, RecipientType):
             return v
@@ -126,8 +129,9 @@ class ProjectAlert(BaseModel):
         allow_population_by_field_name = True
 
     @root_validator
-    def empty_name(  # pylint: disable=no-self-argument
-        cls, values: MutableMapping[str, Any]
+    def empty_name(
+        cls,  # noqa: N805
+        values: MutableMapping[str, Any],
     ) -> MutableMapping[str, Any]:
         # name is an empty string if the alert was created manually because it can't be set via UI
         # use the pk instead.
@@ -159,8 +163,9 @@ class Project(BaseModel):
         allow_population_by_field_name = True
 
     @root_validator
-    def slugify(  # pylint: disable=no-self-argument
-        cls, values: MutableMapping[str, Any]
+    def slugify(
+        cls,  # noqa: N805
+        values: MutableMapping[str, Any],
     ) -> MutableMapping[str, Any]:
         values["slug"] = values.get("slug") or slugify(values["name"])
         return values
@@ -202,8 +207,9 @@ class Organization(BaseModel):
     users: list[User] = []
 
     @root_validator
-    def slugify(  # pylint: disable=no-self-argument
-        cls, values: MutableMapping[str, Any]
+    def slugify(
+        cls,  # noqa: N805
+        values: MutableMapping[str, Any],
    ) -> MutableMapping[str, Any]:
         values["slug"] = values.get("slug") or slugify(values["name"])
         return values
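Every validator in this file follows the same pattern: pydantic v1 passes the class, not an instance, as the first argument, which trips ruff's N805 check, so the old pylint disables become per-line noqa markers. A standalone illustration of the pattern, not the package's actual model:

# Standalone illustration, not code from the package: pydantic v1 root
# validators receive `cls` as their first argument, hence the noqa: N805.
from pydantic import BaseModel, root_validator  # pydantic v1 API


class ExampleTeam(BaseModel):
    name: str = ""
    slug: str = ""

    @root_validator
    def ensure_slug(
        cls,  # noqa: N805
        values: dict,
    ) -> dict:
        values["slug"] = values.get("slug") or values.get("name", "").lower().replace(" ", "-")
        return values


print(ExampleTeam(name="App SRE").slug)  # prints "app-sre"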
reconcile/utils/gql.py CHANGED
@@ -53,7 +53,7 @@ class GqlApiError(Exception):
     pass
 
 
-class GqlApiIntegrationNotFound(Exception):
+class GqlApiIntegrationNotFoundError(Exception):
     def __init__(self, integration: str):
         msg = f"""
         Integration not found: {integration}
@@ -64,7 +64,7 @@ class GqlApiIntegrationNotFound(Exception):
         super().__init__(textwrap.dedent(msg).strip())
 
 
-class GqlApiErrorForbiddenSchema(Exception):
+class GqlApiErrorForbiddenSchemaError(Exception):
     def __init__(self, schemas: list):
         msg = f"""
         Forbidden schemas: {schemas}
@@ -115,7 +115,7 @@ class GqlApi:
                 break
 
         if validate_schemas and not self._valid_schemas:
-            raise GqlApiIntegrationNotFound(int_name)
+            raise GqlApiIntegrationNotFoundError(int_name)
 
     def _init_gql_client(self) -> Client:
         req_headers = None
@@ -170,7 +170,7 @@ class GqlApi:
            schema for schema in query_schemas if schema not in self._valid_schemas
        ]
        if forbidden_schemas:
-            raise GqlApiErrorForbiddenSchema(forbidden_schemas)
+            raise GqlApiErrorForbiddenSchemaError(forbidden_schemas)
 
        # This is to appease mypy. This exception won't be thrown as this condition
        # is already handled above with AssertionError
reconcile/utils/internal_groups/client.py CHANGED
@@ -17,7 +17,7 @@ from reconcile.utils.internal_groups.models import Group
 REQUEST_TIMEOUT = 30
 
 
-class NotFound(Exception):
+class NotFoundError(Exception):
     """Not found exception."""
 
 
@@ -48,7 +48,7 @@ class InternalGroupsApi:
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response is not None and e.response.status_code == 404:
-                raise NotFound(e.response.text) from e
+                raise NotFoundError(e.response.text) from e
            raise
 
    def __enter__(self) -> Self:
reconcile/utils/jinja2/utils.py CHANGED
@@ -29,8 +29,12 @@ from reconcile.utils.jinja2.filters import (
     urlunescape,
     yaml_to_dict,
 )
-from reconcile.utils.secret_reader import SecretNotFound, SecretReader, SecretReaderBase
-from reconcile.utils.vault import SecretFieldNotFound
+from reconcile.utils.secret_reader import (
+    SecretNotFoundError,
+    SecretReader,
+    SecretReaderBase,
+)
+from reconcile.utils.vault import SecretFieldNotFoundError
 
 
 class Jinja2TemplateError(Exception):
@@ -209,7 +213,7 @@ def lookup_secret(
        if not secret_reader:
            secret_reader = SecretReader(settings)
        return secret_reader.read(secret)
-    except (SecretNotFound, SecretFieldNotFound) as e:
+    except (SecretNotFoundError, SecretFieldNotFoundError) as e:
        if allow_not_found:
            return None
        raise FetchSecretError(e) from None
reconcile/utils/jjb_client.py CHANGED
@@ -7,7 +7,7 @@ import re
 import shutil
 import subprocess
 import tempfile
-import xml.etree.ElementTree as et
+import xml.etree.ElementTree as ET
 from os import path
 from subprocess import (
     PIPE,
@@ -192,7 +192,7 @@ class JJB: # pylint: disable=too-many-public-methods
                name = "/".join(items)
                raise ValueError(f"Invalid job name contains '/' in {instance}: {name}")
            item = items[0]
-            item_type = et.parse(f).getroot().tag
+            item_type = ET.parse(f).getroot().tag
            item_type = item_type.replace("hudson.model.ListView", "view")
            item_type = item_type.replace("project", "job")
            logging.info([action, item_type, instance, item])
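The lowercase et alias becomes the conventional ET for xml.etree.ElementTree; behavior is unchanged. A standalone sketch of the parsing step above, with an illustrative file path:

# Standalone sketch: read the root tag of a Jenkins item config to tell views
# ("hudson.model.ListView") from jobs ("project"). The path is illustrative.
import xml.etree.ElementTree as ET

item_type = ET.parse("config.xml").getroot().tag
item_type = item_type.replace("hudson.model.ListView", "view")
item_type = item_type.replace("project", "job")
print(item_type)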
reconcile/utils/models.py CHANGED
@@ -1,3 +1,4 @@
+from collections import UserList
 from collections.abc import (
     Callable,
     Generator,
@@ -81,7 +82,7 @@ def data_default_none(
     return data
 
 
-class CSV(list[str]):
+class CSV(UserList[str]):
    """
    A pydantic custom type that converts a CSV into a list of strings. It
    also supports basic validation of length constraints.
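CSV keeps its documented behavior but now derives from collections.UserList, the standard-library base class intended for list-like custom types, instead of subclassing the builtin list directly. A small standalone illustration of UserList, not the package's CSV type:

# Standalone illustration of collections.UserList (not the package's CSV type):
# items live in self.data, and PEP 585 allows subscripting it since Python 3.9.
from collections import UserList


class CommaSeparated(UserList[str]):
    @classmethod
    def from_string(cls, value: str) -> "CommaSeparated":
        return cls(value.split(","))


parts = CommaSeparated.from_string("a,b,c")
print(parts.data)  # ['a', 'b', 'c']
print(len(parts))  # 3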
reconcile/utils/mr/__init__.py CHANGED
@@ -33,12 +33,12 @@
     "MergeRequestProcessingError",
     "PromoteQontractReconcileCommercial",
     "PromoteQontractSchemas",
-    "UnknownMergeRequestType",
+    "UnknownMergeRequestTypeError",
     "init_from_sqs_message",
 ]
 
 
-class UnknownMergeRequestType(Exception):
+class UnknownMergeRequestTypeError(Exception):
    """
    Used when the message type from the SQS message is unknown
    """
@@ -68,7 +68,7 @@ def init_from_sqs_message(message) -> MergeRequestBase:
     # and fail early if that type is not on the map.
     msg_type = message.pop("pr_type")
     if msg_type not in types_map:
-        raise UnknownMergeRequestType(f"type {msg_type} no supported")
+        raise UnknownMergeRequestTypeError(f"type {msg_type} no supported")
 
     # Finally, get the class mapped to the type
     # and create an instance with all the remaining
reconcile/utils/mr/app_interface_reporter.py CHANGED
@@ -1,7 +1,7 @@
 from datetime import datetime
 from pathlib import Path
 
-from ruamel.yaml.scalarstring import PreservedScalarString as pss
+from ruamel.yaml.scalarstring import PreservedScalarString
 
 from reconcile.utils.gitlab_api import GitLabApi
 from reconcile.utils.mr.base import (
@@ -50,7 +50,7 @@ class CreateAppInterfaceReporter(MergeRequestBase):
            name=f"app-interface-reporter-{self.ts}",
            subject=self.title,
            aliases=["all-service-owners"],
-            body=pss(self.email_body),
+            body=PreservedScalarString(self.email_body),
        )
 
        email_path = Path("data") / "app-interface" / "emails" / f"{self.ts}.yml"
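Dropping the pss alias in favor of the full PreservedScalarString name is cosmetic; the class still makes the email body render as a YAML literal block. A quick standalone demonstration with ruamel.yaml:

# Standalone demonstration of ruamel's PreservedScalarString (formerly aliased
# as "pss" here): it serializes a multi-line string as a literal block scalar.
import sys

from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import PreservedScalarString

yaml = YAML()
yaml.dump({"body": PreservedScalarString("line one\nline two\n")}, sys.stdout)
# body: |
#   line one
#   line two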
reconcile/utils/mr/aws_access.py CHANGED
@@ -2,7 +2,7 @@ from pathlib import Path
 
 from jinja2 import Template
 from ruamel import yaml
-from ruamel.yaml.scalarstring import PreservedScalarString as pss
+from ruamel.yaml.scalarstring import PreservedScalarString
 
 from reconcile.utils.constants import PROJ_ROOT
 from reconcile.utils.gitlab_api import GitLabApi
@@ -68,7 +68,10 @@ class CreateDeleteAwsAccessKey(MergeRequestBase):
        email_name = f"{self.account}-{self.key}"
        ref = self.path.removeprefix("data")
        content = app_interface_email(
-            name=email_name, subject=self.title, aws_accounts=[ref], body=pss(body)
+            name=email_name,
+            subject=self.title,
+            aws_accounts=[ref],
+            body=PreservedScalarString(body),
        )
 
        email_path = Path("data") / "app-interface" / "emails" / f"{email_name}.yml"
reconcile/utils/mr/base.py CHANGED
@@ -23,7 +23,7 @@ EMAIL_TEMPLATE = PROJ_ROOT / "templates" / "email.yml.j2"
 LOG = logging.getLogger(__name__)
 
 
-class CancelMergeRequest(Exception):
+class CancelMergeRequestError(Exception):
    """
    Used when the Merge Request processing is canceled.
    """
@@ -64,7 +64,7 @@ class MergeRequestBase(ABC):
 
    def cancel(self, message: str) -> None:
        self.cancelled = True
-        raise CancelMergeRequest(
+        raise CancelMergeRequestError(
            f"{self.name} MR canceled for branch {self.branch}. Reason: {message}"
        )
 
@@ -185,7 +185,7 @@ class MergeRequestBase(ABC):
            return gitlab_cli.project.mergerequests.create(
                self.gitlab_data(target_branch=gitlab_cli.main_branch)
            )
-        except CancelMergeRequest as mr_cancel:
+        except CancelMergeRequestError as mr_cancel:
            # cancellation is a valid behaviour. it indicates, that the
            # operation is not required, therefore we will not signal
            # a problem back to the caller
reconcile/utils/mr/user_maintenance.py CHANGED
@@ -24,7 +24,7 @@ class PathSpec(BaseModel):
    path: str
 
    @validator("path")
-    def prepend_data_to_path(cls, v):
+    def prepend_data_to_path(cls, v):  # noqa: N805
        return "data" + v
 
 
reconcile/utils/oc.py CHANGED
@@ -56,7 +56,7 @@ from reconcile.utils.metrics import reconcile_time
 from reconcile.utils.oc_connection_parameters import OCConnectionParameters
 from reconcile.utils.openshift_resource import OpenshiftResource as OR
 from reconcile.utils.secret_reader import (
-    SecretNotFound,
+    SecretNotFoundError,
     SecretReader,
 )
 from reconcile.utils.unleash import get_feature_toggle_state
@@ -105,7 +105,7 @@ class UnsupportedMediaTypeError(Exception):
     pass
 
 
-class StatefulSetUpdateForbidden(Exception):
+class StatefulSetUpdateForbiddenError(Exception):
     pass
 
 
@@ -125,7 +125,7 @@ class RecyclePodsUnsupportedKindError(Exception):
     pass
 
 
-class RecyclePodsInvalidAnnotationValue(Exception):
+class RecyclePodsInvalidAnnotationValueError(Exception):
     pass
 
 
@@ -551,19 +551,19 @@ class OCCli: # pylint: disable=too-many-public-methods
    @OCDecorators.process_reconcile_time
    def apply(self, namespace, resource):
        cmd = ["apply", "-n", namespace, "-f", "-"]
-        self._run(cmd, stdin=resource.toJSON(), apply=True)
+        self._run(cmd, stdin=resource.to_json(), apply=True)
        return self._msg_to_process_reconcile_time(namespace, resource)
 
    @OCDecorators.process_reconcile_time
    def create(self, namespace, resource):
        cmd = ["create", "-n", namespace, "-f", "-"]
-        self._run(cmd, stdin=resource.toJSON(), apply=True)
+        self._run(cmd, stdin=resource.to_json(), apply=True)
        return self._msg_to_process_reconcile_time(namespace, resource)
 
    @OCDecorators.process_reconcile_time
    def replace(self, namespace, resource):
        cmd = ["replace", "-n", namespace, "-f", "-"]
-        self._run(cmd, stdin=resource.toJSON(), apply=True)
+        self._run(cmd, stdin=resource.to_json(), apply=True)
        return self._msg_to_process_reconcile_time(namespace, resource)
 
    @OCDecorators.process_reconcile_time
@@ -902,7 +902,7 @@ class OCCli: # pylint: disable=too-many-public-methods
        dep_annotations = dep_resource.body["metadata"].get("annotations") or {}
        qontract_recycle = dep_annotations.get("qontract.recycle")
        if qontract_recycle is True:
-            raise RecyclePodsInvalidAnnotationValue('should be "true"')
+            raise RecyclePodsInvalidAnnotationValueError('should be "true"')
        if qontract_recycle != "true":
            logging.debug([
                "skipping_pod_recycle_no_annotation",
@@ -1119,7 +1119,7 @@ class OCCli: # pylint: disable=too-many-public-methods
        if "UnsupportedMediaType" in err:
            raise UnsupportedMediaTypeError(f"[{self.server}]: {err}")
        if "updates to statefulset spec for fields other than" in err:
-            raise StatefulSetUpdateForbidden(f"[{self.server}]: {err}")
+            raise StatefulSetUpdateForbiddenError(f"[{self.server}]: {err}")
        if "the object has been modified" in err:
            raise ObjectHasBeenModifiedError(f"[{self.server}]: {err}")
        if "Request entity too large" in err:
@@ -1468,7 +1468,7 @@ class OC:
    )
 
 
-class OC_Map:
+class OC_Map:  # noqa: N801
    """
    DEPRECATED! Use reconcile.utils.oc_map.OCMap instead.
 
@@ -1612,7 +1612,7 @@ class OC_Map:
 
        try:
            token_secret = secret_reader.read_all(automation_token)
-        except SecretNotFound:
+        except SecretNotFoundError:
            self.set_oc(
                cluster,
                OCLogMsg(
@@ -1714,7 +1714,7 @@ class OC_Map:
        oc.cleanup()
 
 
-class OCLogMsg(Exception):
+class OCLogMsg(Exception):  # noqa: N818
    """
    Track log messages associated with initializing OC clients in OC_Map.
    """
reconcile/utils/oc_connection_parameters.py CHANGED
@@ -12,7 +12,7 @@ from sretoolbox.utils import threaded
 
 from reconcile.utils.secret_reader import (
     HasSecret,
-    SecretNotFound,
+    SecretNotFoundError,
     SecretReaderBase,
 )
 
@@ -142,7 +142,7 @@ class OCConnectionParameters:
                    cluster,
                )
            )
-        except SecretNotFound:
+        except SecretNotFoundError:
            logging.error(
                f"[{cluster.name}] admin token {cluster.cluster_admin_automation_token} not found"
            )
@@ -157,7 +157,7 @@ class OCConnectionParameters:
            automation_token = OCConnectionParameters._get_automation_token(
                secret_reader, cluster.automation_token, cluster
            )
-        except SecretNotFound:
+        except SecretNotFoundError:
            logging.error(
                f"[{cluster.name}] automation token {cluster.automation_token} not found"
            )
@@ -186,7 +186,7 @@ class OCConnectionParameters:
 
        try:
            jumphost_key = secret_reader.read_secret(cluster.jump_host.identity)
-        except SecretNotFound as e:
+        except SecretNotFoundError as e:
            logging.error(
                f"[{cluster.name}] jumphost secret {cluster.jump_host.identity} not found"
            )
reconcile/utils/ocm/base.py CHANGED
@@ -174,11 +174,11 @@ class OCMClusterAWSSettings(BaseModel):
 
    @property
    def account_role_prefix(self) -> str | None:
-        INSTALLER_ROLE_BASE_NAME = "-Installer-Role"
+        installer_role_base_name = "-Installer-Role"
        installer_role_arn = self.sts.role_arn if self.sts else None
-        if installer_role_arn and installer_role_arn.endswith(INSTALLER_ROLE_BASE_NAME):
+        if installer_role_arn and installer_role_arn.endswith(installer_role_base_name):
            installer_role_name = get_role_name_from_arn(installer_role_arn)
-            return installer_role_name.removesuffix(INSTALLER_ROLE_BASE_NAME)
+            return installer_role_name.removesuffix(installer_role_base_name)
        return None
 
    @property
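The constant-to-local rename does not change the account_role_prefix logic: strip the "-Installer-Role" suffix from the installer role's name to recover the account role prefix. A worked example with an illustrative ARN:

# Worked example of the property above; the ARN value is illustrative.
role_arn = "arn:aws:iam::12345:role/ManagedOpenShift-Installer-Role"
role_name = role_arn.split("/")[-1]                 # "ManagedOpenShift-Installer-Role"
prefix = role_name.removesuffix("-Installer-Role")  # "ManagedOpenShift"
print(prefix)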
reconcile/utils/ocm/products.py CHANGED
@@ -23,7 +23,7 @@ from reconcile.ocm.types import (
 from reconcile.utils.exceptions import ParameterError
 from reconcile.utils.ocm.clusters import get_provisioning_shard_id
 from reconcile.utils.ocm_base_client import OCMBaseClient
-from reconcile.utils.rosa.rosa_cli import RosaCliException
+from reconcile.utils.rosa.rosa_cli import RosaCliError
 from reconcile.utils.rosa.session import RosaSessionBuilder
 
 CS_API_BASE = "/api/clusters_mgmt"
@@ -61,7 +61,7 @@ OCM_PRODUCT_ROSA = "rosa"
 OCM_PRODUCT_HYPERSHIFT = "hypershift"
 
 
-class OCMValidationException(Exception):
+class OCMValidationError(Exception):
     pass
 
 
@@ -216,7 +216,7 @@ class OCMProductOsd(OCMProduct):
                None,
            )
            if default_machine_pool is None:
-                raise OCMValidationException(
+                raise OCMValidationError(
                    f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
                )
 
@@ -350,10 +350,10 @@ class OCMProductRosa(OCMProduct):
            )
            logging.info("cluster creation kicked off...")
            result.write_logs_to_logger(logging.info)
-        except RosaCliException as e:
+        except RosaCliError as e:
            logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
            e.cleanup()
-            raise OCMValidationException(
+            raise OCMValidationError(
                f"last 10 lines from failed cluster creation job...\n\n{logs}"
            ) from None
 
@@ -459,7 +459,7 @@ class OCMProductRosa(OCMProduct):
                None,
            )
            if default_machine_pool is None:
-                raise OCMValidationException(
+                raise OCMValidationError(
                    f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
                )
 
@@ -625,10 +625,10 @@ class OCMProductHypershift(OCMProduct):
            )
            logging.info("cluster creation kicked off...")
            result.write_logs_to_logger(logging.info)
-        except RosaCliException as e:
+        except RosaCliError as e:
            logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
            e.cleanup()
-            raise OCMValidationException(
+            raise OCMValidationError(
                f"last 10 lines from failed cluster creation job...\n\n{logs}"
            ) from None
 
reconcile/utils/ocm/search_filters.py CHANGED
@@ -182,7 +182,7 @@ class InvalidFilterError(Exception):
     pass
 
 
-class InvalidChunkRequest(Exception):
+class InvalidChunkRequestError(Exception):
    """
    Is raised for various reasons, when a chunk request on a filter is invalid
    """
@@ -344,7 +344,7 @@ class Filter:
        if ignore_missing:
            return [self]
 
-        raise InvalidChunkRequest(
+        raise InvalidChunkRequestError(
            f"cannot chunk by {key} because it is not a list condition"
        )