qontract-reconcile 0.10.2.dev255__py3-none-any.whl → 0.10.2.dev257__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. {qontract_reconcile-0.10.2.dev255.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/METADATA +3 -3
  2. {qontract_reconcile-0.10.2.dev255.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/RECORD +102 -102
  3. reconcile/aus/advanced_upgrade_service.py +1 -1
  4. reconcile/aus/base.py +2 -2
  5. reconcile/aus/version_gates/sts_version_gate_handler.py +2 -2
  6. reconcile/aws_account_manager/reconciler.py +22 -20
  7. reconcile/aws_iam_keys.py +5 -5
  8. reconcile/aws_iam_password_reset.py +5 -5
  9. reconcile/aws_saml_roles/integration.py +5 -5
  10. reconcile/aws_version_sync/integration.py +4 -3
  11. reconcile/cli.py +5 -5
  12. reconcile/closedbox_endpoint_monitoring_base.py +1 -0
  13. reconcile/database_access_manager.py +4 -4
  14. reconcile/dynatrace_token_provider/integration.py +2 -2
  15. reconcile/external_resources/manager.py +2 -2
  16. reconcile/external_resources/model.py +1 -1
  17. reconcile/external_resources/secrets_sync.py +2 -2
  18. reconcile/gabi_authorized_users.py +3 -3
  19. reconcile/github_org.py +2 -2
  20. reconcile/gitlab_housekeeping.py +1 -1
  21. reconcile/gitlab_mr_sqs_consumer.py +1 -1
  22. reconcile/glitchtip/integration.py +2 -2
  23. reconcile/gql_definitions/common/users.py +0 -1
  24. reconcile/gql_definitions/fragments/user.py +0 -1
  25. reconcile/gql_definitions/membershipsources/roles.py +0 -2
  26. reconcile/gql_definitions/openshift_groups/managed_roles.py +0 -1
  27. reconcile/gql_definitions/slack_usergroups/permissions.py +0 -1
  28. reconcile/gql_definitions/slack_usergroups/users.py +0 -1
  29. reconcile/jenkins_worker_fleets.py +5 -5
  30. reconcile/ldap_groups/integration.py +3 -3
  31. reconcile/ocm_clusters.py +2 -2
  32. reconcile/ocm_internal_notifications/integration.py +2 -2
  33. reconcile/ocm_labels/integration.py +3 -2
  34. reconcile/openshift_base.py +12 -11
  35. reconcile/openshift_cluster_bots.py +2 -2
  36. reconcile/openshift_resources_base.py +3 -3
  37. reconcile/openshift_rhcs_certs.py +2 -2
  38. reconcile/openshift_saas_deploy.py +1 -1
  39. reconcile/quay_membership.py +4 -4
  40. reconcile/queries.py +0 -2
  41. reconcile/rhidp/common.py +3 -2
  42. reconcile/run_integration.py +7 -4
  43. reconcile/skupper_network/integration.py +3 -3
  44. reconcile/slack_usergroups.py +5 -5
  45. reconcile/status_board.py +3 -3
  46. reconcile/terraform_cloudflare_dns.py +5 -5
  47. reconcile/terraform_cloudflare_users.py +15 -17
  48. reconcile/terraform_resources.py +6 -6
  49. reconcile/terraform_vpc_peerings.py +9 -9
  50. reconcile/unleash_feature_toggles/integration.py +1 -1
  51. reconcile/utils/aggregated_list.py +2 -2
  52. reconcile/utils/aws_api_typed/iam.py +2 -2
  53. reconcile/utils/aws_api_typed/organization.py +4 -4
  54. reconcile/utils/aws_api_typed/service_quotas.py +4 -4
  55. reconcile/utils/aws_api_typed/support.py +9 -9
  56. reconcile/utils/aws_helper.py +1 -1
  57. reconcile/utils/config.py +8 -4
  58. reconcile/utils/deadmanssnitch_api.py +2 -4
  59. reconcile/utils/glitchtip/models.py +18 -12
  60. reconcile/utils/gql.py +4 -4
  61. reconcile/utils/internal_groups/client.py +2 -2
  62. reconcile/utils/jinja2/utils.py +7 -3
  63. reconcile/utils/jjb_client.py +2 -2
  64. reconcile/utils/membershipsources/models.py +0 -1
  65. reconcile/utils/models.py +2 -1
  66. reconcile/utils/mr/__init__.py +3 -3
  67. reconcile/utils/mr/app_interface_reporter.py +2 -2
  68. reconcile/utils/mr/aws_access.py +5 -2
  69. reconcile/utils/mr/base.py +3 -3
  70. reconcile/utils/mr/user_maintenance.py +1 -1
  71. reconcile/utils/oc.py +11 -11
  72. reconcile/utils/oc_connection_parameters.py +4 -4
  73. reconcile/utils/ocm/base.py +3 -3
  74. reconcile/utils/ocm/products.py +8 -8
  75. reconcile/utils/ocm/search_filters.py +2 -2
  76. reconcile/utils/openshift_resource.py +21 -18
  77. reconcile/utils/pagerduty_api.py +5 -5
  78. reconcile/utils/quay_api.py +2 -2
  79. reconcile/utils/rosa/rosa_cli.py +1 -1
  80. reconcile/utils/rosa/session.py +2 -2
  81. reconcile/utils/runtime/desired_state_diff.py +7 -7
  82. reconcile/utils/saasherder/interfaces.py +1 -0
  83. reconcile/utils/saasherder/models.py +1 -1
  84. reconcile/utils/saasherder/saasherder.py +1 -1
  85. reconcile/utils/secret_reader.py +20 -20
  86. reconcile/utils/slack_api.py +17 -8
  87. reconcile/utils/slo_document_manager.py +6 -6
  88. reconcile/utils/state.py +8 -8
  89. reconcile/utils/terraform_client.py +3 -3
  90. reconcile/utils/terrascript/cloudflare_client.py +2 -2
  91. reconcile/utils/terrascript/cloudflare_resources.py +1 -0
  92. reconcile/utils/terrascript_aws_client.py +12 -11
  93. reconcile/utils/vault.py +22 -22
  94. reconcile/vault_replication.py +15 -15
  95. tools/cli_commands/erv2.py +3 -2
  96. tools/cli_commands/gpg_encrypt.py +9 -9
  97. tools/cli_commands/systems_and_tools.py +1 -1
  98. tools/qontract_cli.py +13 -14
  99. tools/saas_promotion_state/saas_promotion_state.py +4 -4
  100. tools/template_validation.py +5 -5
  101. {qontract_reconcile-0.10.2.dev255.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/WHEEL +0 -0
  102. {qontract_reconcile-0.10.2.dev255.dist-info → qontract_reconcile-0.10.2.dev257.dist-info}/entry_points.txt +0 -0
@@ -23,7 +23,7 @@ from reconcile.ocm.types import (
23
23
  from reconcile.utils.exceptions import ParameterError
24
24
  from reconcile.utils.ocm.clusters import get_provisioning_shard_id
25
25
  from reconcile.utils.ocm_base_client import OCMBaseClient
26
- from reconcile.utils.rosa.rosa_cli import RosaCliException
26
+ from reconcile.utils.rosa.rosa_cli import RosaCliError
27
27
  from reconcile.utils.rosa.session import RosaSessionBuilder
28
28
 
29
29
  CS_API_BASE = "/api/clusters_mgmt"
@@ -61,7 +61,7 @@ OCM_PRODUCT_ROSA = "rosa"
61
61
  OCM_PRODUCT_HYPERSHIFT = "hypershift"
62
62
 
63
63
 
64
- class OCMValidationException(Exception):
64
+ class OCMValidationError(Exception):
65
65
  pass
66
66
 
67
67
 
@@ -216,7 +216,7 @@ class OCMProductOsd(OCMProduct):
216
216
  None,
217
217
  )
218
218
  if default_machine_pool is None:
219
- raise OCMValidationException(
219
+ raise OCMValidationError(
220
220
  f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
221
221
  )
222
222
 
@@ -350,10 +350,10 @@ class OCMProductRosa(OCMProduct):
350
350
  )
351
351
  logging.info("cluster creation kicked off...")
352
352
  result.write_logs_to_logger(logging.info)
353
- except RosaCliException as e:
353
+ except RosaCliError as e:
354
354
  logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
355
355
  e.cleanup()
356
- raise OCMValidationException(
356
+ raise OCMValidationError(
357
357
  f"last 10 lines from failed cluster creation job...\n\n{logs}"
358
358
  ) from None
359
359
 
@@ -459,7 +459,7 @@ class OCMProductRosa(OCMProduct):
459
459
  None,
460
460
  )
461
461
  if default_machine_pool is None:
462
- raise OCMValidationException(
462
+ raise OCMValidationError(
463
463
  f"No default machine pool found, id: {DEFAULT_OCM_MACHINE_POOL_ID}"
464
464
  )
465
465
 
@@ -625,10 +625,10 @@ class OCMProductHypershift(OCMProduct):
625
625
  )
626
626
  logging.info("cluster creation kicked off...")
627
627
  result.write_logs_to_logger(logging.info)
628
- except RosaCliException as e:
628
+ except RosaCliError as e:
629
629
  logs = "".join(e.get_log_lines(max_lines=10, from_file_end=True))
630
630
  e.cleanup()
631
- raise OCMValidationException(
631
+ raise OCMValidationError(
632
632
  f"last 10 lines from failed cluster creation job...\n\n{logs}"
633
633
  ) from None
634
634
 
@@ -182,7 +182,7 @@ class InvalidFilterError(Exception):
182
182
  pass
183
183
 
184
184
 
185
- class InvalidChunkRequest(Exception):
185
+ class InvalidChunkRequestError(Exception):
186
186
  """
187
187
  Is raised for various reasons, when a chunk request on a filter is invalid
188
188
  """
@@ -344,7 +344,7 @@ class Filter:
344
344
  if ignore_missing:
345
345
  return [self]
346
346
 
347
- raise InvalidChunkRequest(
347
+ raise InvalidChunkRequestError(
348
348
  f"cannot chunk by {key} because it is not a list condition"
349
349
  )
350
350
 
@@ -375,7 +375,7 @@ class OpenshiftResource:
375
375
  annotations = body["metadata"]["annotations"]
376
376
  return annotations["qontract.sha256sum"]
377
377
 
378
- def toJSON(self):
378
+ def to_json(self):
379
379
  return self.serialize(self.body)
380
380
 
381
381
  @staticmethod
@@ -445,13 +445,12 @@ class OpenshiftResource:
445
445
  if body["kind"] == "ServiceAccount":
446
446
  if "imagePullSecrets" in body:
447
447
  # remove default pull secrets added by k8s
448
- imagePullSecrets = [
448
+ if imagepullsecrets := [
449
449
  s
450
450
  for s in body.pop("imagePullSecrets")
451
451
  if "-dockercfg-" not in s["name"]
452
- ]
453
- if imagePullSecrets:
454
- body["imagePullSecrets"] = imagePullSecrets
452
+ ]:
453
+ body["imagePullSecrets"] = imagepullsecrets
455
454
  if "secrets" in body:
456
455
  body.pop("secrets")
457
456
 
@@ -478,13 +477,15 @@ class OpenshiftResource:
478
477
  if "userNames" in body:
479
478
  body.pop("userNames")
480
479
  if "roleRef" in body:
481
- roleRef = body["roleRef"]
482
- if "namespace" in roleRef:
483
- roleRef.pop("namespace")
484
- if "apiGroup" in roleRef and roleRef["apiGroup"] in body["apiVersion"]:
485
- roleRef.pop("apiGroup")
486
- if "kind" in roleRef:
487
- roleRef.pop("kind")
480
+ if "namespace" in body["roleRef"]:
481
+ body["roleRef"].pop("namespace")
482
+ if (
483
+ "apiGroup" in body["roleRef"]
484
+ and body["roleRef"]["apiGroup"] in body["apiVersion"]
485
+ ):
486
+ body["roleRef"].pop("apiGroup")
487
+ if "kind" in body["roleRef"]:
488
+ body["roleRef"].pop("kind")
488
489
  for subject in body["subjects"]:
489
490
  if "namespace" in subject:
490
491
  subject.pop("namespace")
@@ -497,11 +498,13 @@ class OpenshiftResource:
497
498
  if "userNames" in body:
498
499
  body.pop("userNames")
499
500
  if "roleRef" in body:
500
- roleRef = body["roleRef"]
501
- if "apiGroup" in roleRef and roleRef["apiGroup"] in body["apiVersion"]:
502
- roleRef.pop("apiGroup")
503
- if "kind" in roleRef:
504
- roleRef.pop("kind")
501
+ if (
502
+ "apiGroup" in body["roleRef"]
503
+ and body["roleRef"]["apiGroup"] in body["apiVersion"]
504
+ ):
505
+ body["roleRef"].pop("apiGroup")
506
+ if "kind" in body["roleRef"]:
507
+ body["roleRef"].pop("kind")
505
508
  if "groupNames" in body:
506
509
  body.pop("groupNames")
507
510
  if body["kind"] == "Service":
@@ -532,7 +535,7 @@ class OpenshiftResource:
532
535
 
533
536
  def fully_qualified_kind(kind: str, api_version: str) -> str:
534
537
  if "/" in api_version:
535
- group = api_version.split("/")[0]
538
+ group = api_version.split("/")[0] # noqa: PLC0207
536
539
  return f"{kind}.{group}"
537
540
  return kind
538
541
 
@@ -20,11 +20,11 @@ from reconcile.utils.secret_reader import (
20
20
  )
21
21
 
22
22
 
23
- class PagerDutyTargetException(Exception):
23
+ class PagerDutyTargetError(Exception):
24
24
  """This exception is raised when PagerDutyTarget is not configured correctly."""
25
25
 
26
26
 
27
- class PagerDutyApiException(Exception):
27
+ class PagerDutyApiError(Exception):
28
28
  """This exception is raised when PagerDuty API call fails."""
29
29
 
30
30
 
@@ -89,7 +89,7 @@ class PagerDutyApi:
89
89
  users = self.get_escalation_policy_users(resource_id, now)
90
90
  except requests.exceptions.HTTPError as e:
91
91
  logging.error(str(e))
92
- raise PagerDutyApiException(str(e)) from e
92
+ raise PagerDutyApiError(str(e)) from e
93
93
 
94
94
  return users
95
95
 
@@ -189,7 +189,7 @@ def get_pagerduty_name(user: PagerDutyUser) -> str:
189
189
  return user.pagerduty_username or user.org_username
190
190
 
191
191
 
192
- @retry(no_retry_exceptions=PagerDutyTargetException)
192
+ @retry(no_retry_exceptions=PagerDutyTargetError)
193
193
  def get_usernames_from_pagerduty(
194
194
  pagerduties: Iterable[PagerDutyTarget],
195
195
  users: Iterable[PagerDutyUser],
@@ -202,7 +202,7 @@ def get_usernames_from_pagerduty(
202
202
  all_pagerduty_names = [get_pagerduty_name(u) for u in users]
203
203
  for pagerduty in pagerduties:
204
204
  if pagerduty.schedule_id is None and pagerduty.escalation_policy_id is None:
205
- raise PagerDutyTargetException(
205
+ raise PagerDutyTargetError(
206
206
  f"pagerduty {pagerduty.name}: Either schedule_id or escalation_policy_id must be set!"
207
207
  )
208
208
  if pagerduty.schedule_id is not None:
@@ -1,7 +1,7 @@
1
1
  import requests
2
2
 
3
3
 
4
- class QuayTeamNotFoundException(Exception):
4
+ class QuayTeamNotFoundError(Exception):
5
5
  pass
6
6
 
7
7
 
@@ -34,7 +34,7 @@ class QuayApi:
34
34
 
35
35
  r = requests.get(url, headers=self.auth_header, timeout=self._timeout)
36
36
  if r.status_code == 404:
37
- raise QuayTeamNotFoundException(
37
+ raise QuayTeamNotFoundError(
38
38
  f"team {team} is not found in "
39
39
  f"org {self.organization}. "
40
40
  f"contact org owner to create the "
@@ -91,7 +91,7 @@ class RosaCliResult:
91
91
  self.log_handle.cleanup()
92
92
 
93
93
 
94
- class RosaCliException(Exception, RosaCliResult):
94
+ class RosaCliError(Exception, RosaCliResult):
95
95
  """
96
96
  Represents an exception that occurred during a ROSA CLI execution.
97
97
  """
@@ -11,7 +11,7 @@ from reconcile.utils.jobcontroller.models import JobConcurrencyPolicy, JobStatus
11
11
  from reconcile.utils.ocm_base_client import OCMBaseClient
12
12
  from reconcile.utils.rosa.rosa_cli import (
13
13
  LogHandle,
14
- RosaCliException,
14
+ RosaCliError,
15
15
  RosaCliResult,
16
16
  RosaJob,
17
17
  )
@@ -106,7 +106,7 @@ class RosaSession:
106
106
  log_dir = tempfile.mkdtemp()
107
107
  log_file_name = self.job_controller.store_job_logs(job.name(), log_dir)
108
108
  if status != JobStatus.SUCCESS:
109
- raise RosaCliException(status, cmd, LogHandle(log_file_name))
109
+ raise RosaCliError(status, cmd, LogHandle(log_file_name))
110
110
  return RosaCliResult(status, cmd, LogHandle(log_file_name))
111
111
 
112
112
  def create_hcp_cluster(
@@ -97,13 +97,13 @@ def _extract_diffs_task(
97
97
  return_value[EXTRACT_TASK_RESULT_KEY_ERROR] = e
98
98
 
99
99
 
100
- class DiffDetectionTimeout(Exception):
100
+ class DiffDetectionTimeoutError(Exception):
101
101
  """
102
102
  Raised when the fine grained diff detection takes too long.
103
103
  """
104
104
 
105
105
 
106
- class DiffDetectionFailure(Exception):
106
+ class DiffDetectionFailureError(Exception):
107
107
  """
108
108
  Raised when the fine grained diff detection fails.
109
109
  """
@@ -155,20 +155,20 @@ def extract_diffs_with_timeout(
155
155
  )
156
156
  process.terminate()
157
157
  process.join()
158
- raise DiffDetectionTimeout()
158
+ raise DiffDetectionTimeoutError()
159
159
 
160
160
  if EXTRACT_TASK_RESULT_KEY_DIFFS in result_value:
161
161
  return result_value[EXTRACT_TASK_RESULT_KEY_DIFFS]
162
162
 
163
163
  original_error = result_value.get(EXTRACT_TASK_RESULT_KEY_ERROR)
164
164
  if original_error:
165
- raise DiffDetectionFailure() from original_error
165
+ raise DiffDetectionFailureError() from original_error
166
166
 
167
167
  # not every error situation of the diff extraction process
168
168
  # will result in an exception. the lack of a result is an error
169
169
  # indicator as well. in those cases, we raise at least
170
170
  # a generic exception to indicate that something went wrong
171
- raise DiffDetectionFailure("unknown error during fine grained diff detection")
171
+ raise DiffDetectionFailureError("unknown error during fine grained diff detection")
172
172
 
173
173
 
174
174
  def build_desired_state_diff(
@@ -211,12 +211,12 @@ def build_desired_state_diff(
211
211
  ShardedRunProposal(proposed_shards=changed_shards)
212
212
  ):
213
213
  shards = changed_shards
214
- except DiffDetectionTimeout:
214
+ except DiffDetectionTimeoutError:
215
215
  logging.warning(
216
216
  f"unable to extract fine grained diffs for shard extraction "
217
217
  f"within {exract_diff_timeout_seconds} seconds. continue without sharding"
218
218
  )
219
- except DiffDetectionFailure as e:
219
+ except DiffDetectionFailureError as e:
220
220
  logging.warning(
221
221
  f"unable to extract fine grained diffs for shard extraction: {e}"
222
222
  )
@@ -1,3 +1,4 @@
1
+ # ruff: noqa: N801
1
2
  from __future__ import annotations
2
3
 
3
4
  from collections.abc import Mapping, Sequence, Set
@@ -271,7 +271,7 @@ class ImageAuth:
271
271
  auth_server: str | None = None
272
272
  docker_config: dict[str, dict[str, dict[str, str]]] | None = None
273
273
 
274
- def getDockerConfigJson(self) -> dict:
274
+ def get_docker_config_json(self) -> dict:
275
275
  if self.docker_config:
276
276
  return self.docker_config
277
277
  else:
@@ -726,7 +726,7 @@ class SaasHerder: # pylint: disable=too-many-public-methods
726
726
  saas_file: SaasFile,
727
727
  trigger_reason: str,
728
728
  ) -> tuple[str, str]:
729
- [url, sha] = trigger_reason.split(" ")[0].split("/commit/")
729
+ [url, sha] = trigger_reason.split(" ")[0].split("/commit/") # noqa: PLC0207
730
730
  repo_info = VCS.parse_repo_url(url)
731
731
  repo_name = repo_info.name
732
732
  file_name = f"{repo_name.replace('/', '-')}-{sha}.tar.gz"
@@ -18,11 +18,11 @@ from reconcile.utils import (
18
18
  from reconcile.utils.vault import VaultClient
19
19
 
20
20
 
21
- class VaultForbidden(Exception):
21
+ class VaultForbiddenError(Exception):
22
22
  pass
23
23
 
24
24
 
25
- class SecretNotFound(Exception):
25
+ class SecretNotFoundError(Exception):
26
26
  pass
27
27
 
28
28
 
@@ -162,11 +162,11 @@ class VaultSecretReader(SecretReaderBase):
162
162
  )
163
163
  )
164
164
  except Forbidden:
165
- raise VaultForbidden(
165
+ raise VaultForbiddenError(
166
166
  f"permission denied reading vault secret at {path}"
167
167
  ) from None
168
- except vault.SecretNotFound as e:
169
- raise SecretNotFound(*e.args) from e
168
+ except vault.SecretNotFoundError as e:
169
+ raise SecretNotFoundError(*e.args) from e
170
170
  return data
171
171
 
172
172
  def _read(
@@ -181,8 +181,8 @@ class VaultSecretReader(SecretReaderBase):
181
181
  version=version,
182
182
  )
183
183
  )
184
- except vault.SecretNotFound as e:
185
- raise SecretNotFound(*e.args) from e
184
+ except vault.SecretNotFoundError as e:
185
+ raise SecretNotFoundError(*e.args) from e
186
186
  return data
187
187
 
188
188
 
@@ -203,8 +203,8 @@ class ConfigSecretReader(SecretReaderBase):
203
203
  version=version,
204
204
  )
205
205
  )
206
- except config.SecretNotFound as e:
207
- raise SecretNotFound(*e.args) from e
206
+ except config.SecretNotFoundError as e:
207
+ raise SecretNotFoundError(*e.args) from e
208
208
  return data
209
209
 
210
210
  def _read_all(
@@ -219,8 +219,8 @@ class ConfigSecretReader(SecretReaderBase):
219
219
  version=version,
220
220
  )
221
221
  )
222
- except config.SecretNotFound as e:
223
- raise SecretNotFound(*e.args) from e
222
+ except config.SecretNotFoundError as e:
223
+ raise SecretNotFoundError(*e.args) from e
224
224
  return data
225
225
 
226
226
 
@@ -279,13 +279,13 @@ class SecretReader(SecretReaderBase):
279
279
  if self.settings and self.settings.get("vault"):
280
280
  try:
281
281
  data = self.vault_client.read(params) # type: ignore[attr-defined] # mypy doesn't recognize the VaultClient.__new__ method
282
- except vault.SecretNotFound as e:
283
- raise SecretNotFound(*e.args) from e
282
+ except vault.SecretNotFoundError as e:
283
+ raise SecretNotFoundError(*e.args) from e
284
284
  else:
285
285
  try:
286
286
  data = config.read(params)
287
- except config.SecretNotFound as e:
288
- raise SecretNotFound(*e.args) from e
287
+ except config.SecretNotFoundError as e:
288
+ raise SecretNotFoundError(*e.args) from e
289
289
 
290
290
  return data
291
291
 
@@ -314,15 +314,15 @@ class SecretReader(SecretReaderBase):
314
314
  try:
315
315
  data = self.vault_client.read_all(params) # type: ignore[attr-defined] # mypy doesn't recognize the VaultClient.__new__ method
316
316
  except Forbidden:
317
- raise VaultForbidden(
317
+ raise VaultForbiddenError(
318
318
  f"permission denied reading vault secret at {path}"
319
319
  ) from None
320
- except vault.SecretNotFound as e:
321
- raise SecretNotFound(*e.args) from e
320
+ except vault.SecretNotFoundError as e:
321
+ raise SecretNotFoundError(*e.args) from e
322
322
  else:
323
323
  try:
324
324
  data = config.read_all(params)
325
- except config.SecretNotFound as e:
326
- raise SecretNotFound(*e.args) from e
325
+ except config.SecretNotFoundError as e:
326
+ raise SecretNotFoundError(*e.args) from e
327
327
 
328
328
  return data
@@ -28,11 +28,11 @@ MAX_RETRIES = 5
28
28
  TIMEOUT = 30
29
29
 
30
30
 
31
- class UserNotFoundException(Exception):
31
+ class UserNotFoundError(Exception):
32
32
  pass
33
33
 
34
34
 
35
- class UsergroupNotFoundException(Exception):
35
+ class UsergroupNotFoundError(Exception):
36
36
  pass
37
37
 
38
38
 
@@ -296,7 +296,7 @@ class SlackApi:
296
296
  def get_usergroup_id(self, handle: str) -> str | None:
297
297
  try:
298
298
  return self.get_usergroup(handle)["id"]
299
- except UsergroupNotFoundException:
299
+ except UsergroupNotFoundError:
300
300
  return None
301
301
 
302
302
  def _initiate_usergroups(self) -> None:
@@ -317,7 +317,7 @@ class SlackApi:
317
317
  self._initiate_usergroups()
318
318
  usergroup = [g for g in self.usergroups if g["handle"] == handle]
319
319
  if len(usergroup) != 1:
320
- raise UsergroupNotFoundException(handle)
320
+ raise UsergroupNotFoundError(handle)
321
321
  return usergroup[0]
322
322
 
323
323
  def create_usergroup(self, handle: str) -> str:
@@ -398,7 +398,7 @@ class SlackApi:
398
398
  result = self._sc.users_lookupByEmail(email=f"{user_name}@{mail_address}")
399
399
  except SlackApiError as e:
400
400
  if e.response["error"] == "users_not_found":
401
- raise UserNotFoundException(e.response["error"]) from None
401
+ raise UserNotFoundError(e.response["error"]) from None
402
402
  raise
403
403
 
404
404
  return result["user"]["id"]
@@ -415,11 +415,19 @@ class SlackApi:
415
415
  k: v["name"] for k, v in self._get("channels").items() if k in channels_ids
416
416
  }
417
417
 
418
+ @staticmethod
419
+ def extract_name_from_user(user: dict[str, Any]) -> str | None:
420
+ if email := user["profile"].get("email"):
421
+ return email.split("@")[0]
422
+ return None
423
+
418
424
  def get_active_users_by_names(self, user_names: Iterable[str]) -> dict[str, str]:
419
425
  return {
420
- k: v["name"]
426
+ k: name
421
427
  for k, v in self._get("users").items()
422
- if v["name"] in user_names and not v["deleted"]
428
+ if not v["deleted"]
429
+ and (name := self.extract_name_from_user(v))
430
+ and name in user_names
423
431
  }
424
432
 
425
433
  def get_users_by_ids(self, users_ids: Iterable[str]) -> dict[str, str]:
@@ -428,9 +436,10 @@ class SlackApi:
428
436
  self._translate_user_id(user_id) for user_id in users_ids
429
437
  )
430
438
  return {
431
- user_id: user["name"]
439
+ user_id: name
432
440
  for user_id in translated_user_ids
433
441
  if (user := users.get(user_id))
442
+ and (name := self.extract_name_from_user(user))
434
443
  }
435
444
 
436
445
  def _get(self, resource: str) -> dict[str, Any]:
@@ -24,15 +24,15 @@ DEFAULT_RETRIES = 3
24
24
  DEFAULT_THREAD_POOL_SIZE = 10
25
25
 
26
26
 
27
- class EmptySLOResult(Exception):
27
+ class EmptySLOResultError(Exception):
28
28
  pass
29
29
 
30
30
 
31
- class EmptySLOValue(Exception):
31
+ class EmptySLOValueError(Exception):
32
32
  pass
33
33
 
34
34
 
35
- class InvalidSLOValue(Exception):
35
+ class InvalidSLOValueError(Exception):
36
36
  pass
37
37
 
38
38
 
@@ -104,13 +104,13 @@ class PrometheusClient(ApiBase):
104
104
  def _extract_current_slo_value(self, data: dict[str, Any]) -> float:
105
105
  result = data["data"]["result"]
106
106
  if not result:
107
- raise EmptySLOResult("prometheus returned empty result")
107
+ raise EmptySLOResultError("prometheus returned empty result")
108
108
  slo_value = result[0]["value"]
109
109
  if not slo_value:
110
- raise EmptySLOValue("prometheus returned empty SLO value")
110
+ raise EmptySLOValueError("prometheus returned empty SLO value")
111
111
  slo_value = float(slo_value[1])
112
112
  if isnan(slo_value):
113
- raise InvalidSLOValue("slo value should be a number")
113
+ raise InvalidSLOValueError("slo value should be a number")
114
114
  return slo_value
115
115
 
116
116
 
reconcile/utils/state.py CHANGED
@@ -39,7 +39,7 @@ from reconcile.utils.secret_reader import (
39
39
  )
40
40
 
41
41
 
42
- class StateInaccessibleException(Exception):
42
+ class StateInaccessibleError(Exception):
43
43
  pass
44
44
 
45
45
 
@@ -180,7 +180,7 @@ def acquire_state_settings(
180
180
  state_bucket_account_name, query_func=query_func
181
181
  )
182
182
  if not account:
183
- raise StateInaccessibleException(
183
+ raise StateInaccessibleError(
184
184
  f"The AWS account {state_bucket_account_name} that holds the state bucket can't be found in app-interface."
185
185
  )
186
186
  secret = secret_reader.read_all_secret(account.automation_token)
@@ -203,11 +203,11 @@ def acquire_state_settings(
203
203
  access_key_id=secret["aws_access_key_id"],
204
204
  secret_access_key=secret["aws_secret_access_key"],
205
205
  )
206
- raise StateInaccessibleException(
206
+ raise StateInaccessibleError(
207
207
  f"The app-interface state provider {ai_settings.provider} is not supported."
208
208
  )
209
209
 
210
- raise StateInaccessibleException(
210
+ raise StateInaccessibleError(
211
211
  "app-interface state must be configured to use stateful integrations. "
212
212
  "use one of the following options to provide state config: "
213
213
  "* env vars APP_INTERFACE_STATE_BUCKET, APP_INTERFACE_STATE_BUCKET_REGION, APP_INTERFACE_STATE_AWS_PROFILE and AWS_CONFIG (hosting the requested profile) \n"
@@ -218,7 +218,7 @@ def acquire_state_settings(
218
218
  )
219
219
 
220
220
 
221
- class AbortStateTransaction(Exception):
221
+ class AbortStateTransactionError(Exception):
222
222
  """Raise to abort a state transaction."""
223
223
 
224
224
 
@@ -249,7 +249,7 @@ class State:
249
249
  try:
250
250
  self.client.head_bucket(Bucket=self.bucket)
251
251
  except ClientError as details:
252
- raise StateInaccessibleException(
252
+ raise StateInaccessibleError(
253
253
  f"Bucket {self.bucket} is not accessible - {details!s}"
254
254
  ) from None
255
255
 
@@ -299,7 +299,7 @@ class State:
299
299
  if error_code == "404":
300
300
  return False, {}
301
301
 
302
- raise StateInaccessibleException(
302
+ raise StateInaccessibleError(
303
303
  f"Can not access state key {key_path} "
304
304
  f"in bucket {self.bucket} - {details!s}"
305
305
  ) from None
@@ -436,7 +436,7 @@ class State:
436
436
  state_obj = TransactionStateObj(key, value=current_value)
437
437
  try:
438
438
  yield state_obj
439
- except AbortStateTransaction:
439
+ except AbortStateTransactionError:
440
440
  return
441
441
  else:
442
442
  if state_obj.changed and state_obj.value != current_value:
@@ -913,13 +913,13 @@ class TerraformClient: # pylint: disable=too-many-public-methods
913
913
  )
914
914
 
915
915
 
916
- class TerraformPlanFailed(Exception):
916
+ class TerraformPlanFailedError(Exception):
917
917
  pass
918
918
 
919
919
 
920
- class TerraformApplyFailed(Exception):
920
+ class TerraformApplyFailedError(Exception):
921
921
  pass
922
922
 
923
923
 
924
- class TerraformDeletionDetected(Exception):
924
+ class TerraformDeletionDetectedError(Exception):
925
925
  pass
@@ -302,9 +302,9 @@ def _get_terraform_s3_state_key_name(
302
302
  return sharding_strategy.get_object_key(integration)
303
303
 
304
304
 
305
- class IntegrationUndefined(Exception):
305
+ class IntegrationUndefinedError(Exception):
306
306
  pass
307
307
 
308
308
 
309
- class InvalidTerraformState(Exception):
309
+ class InvalidTerraformStateError(Exception):
310
310
  pass
@@ -1,3 +1,4 @@
1
+ # ruff: noqa: N801
1
2
  from collections.abc import (
2
3
  Iterable,
3
4
  MutableMapping,