qontract-reconcile 0.10.1rc857__py3-none-any.whl → 0.10.1rc858__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qontract-reconcile
- Version: 0.10.1rc857
+ Version: 0.10.1rc858
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Home-page: https://github.com/app-sre/qontract-reconcile
  Author: Red Hat App-SRE Team
@@ -70,8 +70,8 @@ reconcile/openshift_namespace_labels.py,sha256=dLkQgtgsD51WtDHiQOc-lF2yaaFzkiUAZ
  reconcile/openshift_namespaces.py,sha256=nHW1e3dyUWw3JPAzeQeZQ6s2-RuQYaNR7_DUfTP8KOg,5830
  reconcile/openshift_network_policies.py,sha256=_qqv7yj17OM1J8KJPsFmzFZ85gzESJeBocC672z4_WU,4231
  reconcile/openshift_resourcequotas.py,sha256=yUi56PiOn3inMMfq_x_FEHmaW-reGipzoorjdar372g,2415
- reconcile/openshift_resources.py,sha256=kwsY5cko7udEKNlhL2oKiKv_5wzEw9wmmwROE016ng8,1400
- reconcile/openshift_resources_base.py,sha256=TuR8GgAB1KlQQlHY_E7FHNyhfHB-X65_tk3-THbFy0s,39716
+ reconcile/openshift_resources.py,sha256=WPnSTftrCCHaCDfwSD0CLvs-7GQqay5B7AtM6Swxy7c,1537
+ reconcile/openshift_resources_base.py,sha256=FW3gMwji2cdM8MPTL0JDXHHz3jWa50NQyhpvU78oeus,40029
  reconcile/openshift_rolebindings.py,sha256=LlImloBisEqzc36jaatic-TeM3hzqMEfxogF-dM4Yhw,6599
  reconcile/openshift_routes.py,sha256=fXvuPSjcjVw1X3j2EQvUAdbOepmIFdKk-M3qP8QzPiw,1075
  reconcile/openshift_saas_deploy.py,sha256=fmhopPEbyZsGQHRPzyzpKEvoBXEGN3aPxFi7Utq0emU,12788
@@ -834,8 +834,8 @@ tools/test/test_app_interface_metrics_exporter.py,sha256=SX7qL3D1SIRKFo95FoQztvf
  tools/test/test_qontract_cli.py,sha256=_D61RFGAN5x44CY1tYbouhlGXXABwYfxKSWSQx3Jrss,4941
  tools/test/test_sd_app_sre_alert_report.py,sha256=v363r9zM7__0kR5K6mvJoGFcM9BvE33fWAayrqkpojA,2116
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.10.1rc857.dist-info/METADATA,sha256=y5SBuYTlPDgO4AIat2E18IJErGFgADPtsZaWmhGNUqI,2273
- qontract_reconcile-0.10.1rc857.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- qontract_reconcile-0.10.1rc857.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
- qontract_reconcile-0.10.1rc857.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
- qontract_reconcile-0.10.1rc857.dist-info/RECORD,,
+ qontract_reconcile-0.10.1rc858.dist-info/METADATA,sha256=bj-1Gb96R96k2hOHPvpFebr_2irBGjI2LdMDUwliOZ8,2273
+ qontract_reconcile-0.10.1rc858.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ qontract_reconcile-0.10.1rc858.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
+ qontract_reconcile-0.10.1rc858.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+ qontract_reconcile-0.10.1rc858.dist-info/RECORD,,
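
Each RECORD line above follows the wheel convention path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>, so the two changed hashes and sizes correspond exactly to the two rewritten source files below. A minimal sketch for recomputing an entry from a file on disk (the path is illustrative):

    # Sketch: recompute a wheel RECORD entry; the file path is illustrative.
    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path: str) -> str:
        data = Path(path).read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
        return f"{path},sha256={digest.decode()},{len(data)}"

    print(record_entry("reconcile/openshift_resources.py"))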
--- a/reconcile/openshift_resources.py
+++ b/reconcile/openshift_resources.py
@@ -1,3 +1,4 @@
+ from collections.abc import Iterable
  from typing import Any

  import reconcile.openshift_base as ob
@@ -11,15 +12,14 @@ PROVIDERS = ["resource", "resource-template", "prometheus-rule"]


  def run(
-     dry_run,
-     thread_pool_size=10,
-     internal=None,
-     use_jump_host=True,
-     cluster_name=None,
-     exclude_cluster=None,
-     namespace_name=None,
-     defer=None,
- ):
+     dry_run: bool,
+     thread_pool_size: int = 10,
+     internal: bool | None = None,
+     use_jump_host: bool = True,
+     cluster_name: Iterable[str] | None = None,
+     exclude_cluster: Iterable[str] | None = None,
+     namespace_name: str | None = None,
+ ) -> None:
      orb.QONTRACT_INTEGRATION = QONTRACT_INTEGRATION
      orb.QONTRACT_INTEGRATION_VERSION = QONTRACT_INTEGRATION_VERSION

@@ -41,7 +41,7 @@ def run(
      ob.check_unused_resource_types(ri)


- def early_exit_desired_state(*args, **kwargs) -> dict[str, Any]:
+ def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
      return orb.early_exit_desired_state(PROVIDERS)


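The new signature above swaps untyped parameters for explicit annotations using PEP 604 unions (Iterable[str] | None instead of Optional[Iterable[str]]); that syntax is evaluated at runtime only on Python 3.10+, or on earlier versions when from __future__ import annotations defers evaluation. A minimal sketch of the two equivalent styles (names and values are illustrative):

    # Sketch: Optional[...] and the PEP 604 union spelling are equivalent annotations.
    from collections.abc import Iterable
    from typing import Optional

    def old_style(cluster_name: Optional[Iterable[str]] = None) -> None:
        print(list(cluster_name or []))

    def new_style(cluster_name: Iterable[str] | None = None) -> None:
        print(list(cluster_name or []))

    old_style(["app-sre-prod"])
    new_style(["app-sre-prod"])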
--- a/reconcile/openshift_resources_base.py
+++ b/reconcile/openshift_resources_base.py
@@ -7,8 +7,11 @@ import re
  import sys
  from collections import defaultdict
  from collections.abc import (
+     Callable,
+     Generator,
      Iterable,
      Mapping,
+     MutableMapping,
      Sequence,
  )
  from contextlib import contextmanager
@@ -21,6 +24,7 @@ from typing import (
      Protocol,
      Tuple,
  )
+ from unittest.mock import DEFAULT, patch

  import anymarkup
  from deepdiff import DeepHash
@@ -29,9 +33,9 @@ from sretoolbox.utils import (
  )

  import reconcile.openshift_base as ob
+ import reconcile.utils.jinja2.utils as jinja2_utils
  from reconcile import queries
  from reconcile.change_owners.diff import IDENTIFIER_FIELD_NAME
- from reconcile.checkpoint import url_makes_sense
  from reconcile.utils import (
      amtool,
      gql,
@@ -41,9 +45,6 @@ from reconcile.utils.defer import defer
  from reconcile.utils.exceptions import FetchResourceError
  from reconcile.utils.jinja2.utils import (
      FetchSecretError,
-     lookup_github_file_content,
-     lookup_s3_object,
-     lookup_secret,
      process_extracurlyjinja2_template,
      process_jinja2_template,
  )
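
Replacing the from-imports of lookup_secret, lookup_github_file_content and lookup_s3_object with a module alias (jinja2_utils) matters for the mock-based early-exit patching later in this file: unittest.mock replaces an attribute on a specific module object, so only call sites that resolve the name through that module see the mock. A self-contained sketch of the difference, using a stdlib module purely for illustration:

    # Sketch: patching replaces a module attribute, so only lookups that go through
    # the module object see the mock; a from-import keeps the original binding.
    import os
    from os import getcwd                 # bound copy, will not see the patch
    from unittest.mock import patch

    def via_module() -> str:
        return os.getcwd()                 # resolved through the os module at call time

    def via_copy() -> str:
        return getcwd()                    # resolved through the local from-import

    with patch.object(os, "getcwd", return_value="/stub"):
        assert via_module() == "/stub"     # patched
        assert via_copy() != "/stub"       # still the real implementation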
@@ -237,23 +238,23 @@ KUBERNETES_SECRET_DATA_KEY_RE = "^[-._a-zA-Z0-9]+$"
  _log_lock = Lock()


- def _locked_info_log(msg: str):
+ def _locked_info_log(msg: str) -> None:
      with _log_lock:
          logging.info(msg)


- def _locked_debug_log(msg: str):
+ def _locked_debug_log(msg: str) -> None:
      with _log_lock:
          logging.debug(msg)


- def _locked_error_log(msg: str):
+ def _locked_error_log(msg: str) -> None:
      with _log_lock:
          logging.error(msg)


  class FetchRouteError(Exception):
-     def __init__(self, msg):
+     def __init__(self, msg: Any):
          super().__init__("error fetching route: " + str(msg))


@@ -266,16 +267,21 @@ class SecretKeyFormatError(Exception):


  class UnknownProviderError(Exception):
-     def __init__(self, msg):
+     def __init__(self, msg: Any):
          super().__init__("unknown provider error: " + str(msg))


  class UnknownTemplateTypeError(Exception):
-     def __init__(self, msg):
+     def __init__(self, msg: Any):
          super().__init__("unknown template type error: " + str(msg))


- def check_alertmanager_config(data, path, alertmanager_config_key, decode_base64=False):
+ def check_alertmanager_config(
+     data: Mapping[str, Any],
+     path: str,
+     alertmanager_config_key: str,
+     decode_base64: bool = False,
+ ) -> None:
      try:
          config = data[alertmanager_config_key]
      except KeyError:
@@ -295,15 +301,15 @@ def check_alertmanager_config(data, path, alertmanager_config_key, decode_base64


  def fetch_provider_resource(
-     resource: dict,
-     tfunc=None,
-     tvars=None,
-     validate_json=False,
-     validate_alertmanager_config=False,
-     alertmanager_config_key="alertmanager.yaml",
-     add_path_to_prom_rules=True,
-     skip_validation=False,
-     settings=None,
+     resource: Mapping,
+     tfunc: Callable | None = None,
+     tvars: Mapping[str, Any] | None = None,
+     validate_json: bool = False,
+     validate_alertmanager_config: bool = False,
+     alertmanager_config_key: str = "alertmanager.yaml",
+     add_path_to_prom_rules: bool = True,
+     skip_validation: bool = False,
+     settings: Mapping[str, Any] | None = None,
  ) -> OR:
      path = resource["path"]
      content = resource["content"]
@@ -383,17 +389,17 @@ def fetch_provider_resource(


  def fetch_provider_vault_secret(
-     path,
-     version,
-     name,
-     labels,
-     annotations,
-     type,
-     integration,
-     integration_version,
-     validate_alertmanager_config=False,
-     alertmanager_config_key="alertmanager.yaml",
-     settings=None,
+     path: str,
+     version: str,
+     name: str,
+     labels: Mapping[str, str] | None,
+     annotations: Mapping[str, str],
+     type: str,
+     integration: str,
+     integration_version: str,
+     validate_alertmanager_config: bool = False,
+     alertmanager_config_key: str = "alertmanager.yaml",
+     settings: Mapping[str, Any] | None = None,
  ) -> OR:
      # get the fields from vault
      secret_reader = SecretReader(settings)
@@ -403,7 +409,7 @@ def fetch_provider_vault_secret(
          check_alertmanager_config(raw_data, path, alertmanager_config_key)

      # construct oc resource
-     body = {
+     body: dict[str, Any] = {
          "apiVersion": "v1",
          "kind": "Secret",
          "type": type,
@@ -433,7 +439,7 @@ def fetch_provider_vault_secret(
  # any white space issues. If any issues are uncovered, an exception will be
  # raised.
  # we're receiving the full key: value information, not simply a list of keys.
- def assert_valid_secret_keys(secrets_data: dict[str, str]):
+ def assert_valid_secret_keys(secrets_data: dict[str, str]) -> None:
      for k in secrets_data:
          matches = re.search(KUBERNETES_SECRET_DATA_KEY_RE, k)
          if not matches:
@@ -442,7 +448,12 @@ def assert_valid_secret_keys(secrets_data: dict[str, str]):
              )


- def fetch_provider_route(resource: dict, tls_path, tls_version, settings=None) -> OR:
+ def fetch_provider_route(
+     resource: Mapping,
+     tls_path: str | None,
+     tls_version: str | None,
+     settings: Mapping | None = None,
+ ) -> OR:
      path = resource["path"]
      openshift_resource = fetch_provider_resource(resource)

@@ -485,7 +496,10 @@ def fetch_provider_route(resource: dict, tls_path, tls_version, settings=None) -


  def fetch_openshift_resource(
-     resource, parent, settings=None, skip_validation=False
+     resource: Mapping,
+     parent: Mapping[str, Any],
+     settings: Mapping | None = None,
+     skip_validation: bool = False,
  ) -> OR:
      provider = resource["provider"]
      if provider == "resource":
@@ -632,8 +646,8 @@ def fetch_current_state(
      cluster: str,
      namespace: str,
      kind: str,
-     resource_names=Iterable[str],
- ):
+     resource_names: Iterable[str] | None,
+ ) -> None:
      _locked_debug_log(f"Fetching {kind} from {cluster}/{namespace}")
      if not oc.is_kind_supported(kind):
          logging.warning(f"[{cluster}] cluster has no API resource {kind}.")
@@ -659,8 +673,8 @@ def fetch_desired_state(
      resource: Mapping[str, Any],
      parent: Mapping[str, Any],
      privileged: bool,
-     settings: Optional[Mapping[str, Any]] = None,
- ):
+     settings: Mapping[str, Any] | None = None,
+ ) -> None:
      try:
          openshift_resource = fetch_openshift_resource(resource, parent, settings)
      except (
@@ -717,7 +731,7 @@ def fetch_desired_state(
  def fetch_states(
      spec: ob.StateSpec,
      ri: ResourceInventory,
-     settings: Optional[Mapping[str, Any]] = None,
+     settings: Mapping[str, Any] | None = None,
  ) -> None:
      try:
          if isinstance(spec, ob.CurrentStateSpec):
@@ -747,13 +761,13 @@ def fetch_states(


  def fetch_data(
-     namespaces,
-     thread_pool_size,
-     internal,
-     use_jump_host,
-     init_api_resources=False,
-     overrides=None,
- ):
+     namespaces: Iterable[Mapping[str, Any]],
+     thread_pool_size: int,
+     internal: bool | None,
+     use_jump_host: bool,
+     init_api_resources: bool = False,
+     overrides: Iterable[str] | None = None,
+ ) -> tuple[OC_Map, ResourceInventory]:
      ri = ResourceInventory()
      settings = queries.get_app_interface_settings()
      logging.debug(f"Overriding keys {overrides}")
@@ -775,11 +789,11 @@ def fetch_data(


  def filter_namespaces_by_cluster_and_namespace(
-     namespaces,
-     cluster_names: Optional[Iterable[str]],
-     exclude_clusters: Optional[Iterable[str]],
-     namespace_name: Optional[str],
- ):
+     namespaces: Sequence[dict[str, Any]],
+     cluster_names: Iterable[str] | None,
+     exclude_clusters: Iterable[str] | None,
+     namespace_name: str | None,
+ ) -> Sequence[dict[str, Any]]:
      if cluster_names:
          namespaces = [n for n in namespaces if n["cluster"]["name"] in cluster_names]
      elif exclude_clusters:
@@ -795,9 +809,9 @@ def filter_namespaces_by_cluster_and_namespace(

  def canonicalize_namespaces(
      namespaces: Iterable[dict[str, Any]],
-     providers: list[str],
-     resource_schema_filter: Optional[str] = None,
- ) -> tuple[list[dict[str, Any]], Optional[list[str]]]:
+     providers: Sequence[str],
+     resource_schema_filter: str | None = None,
+ ) -> tuple[list[dict[str, Any]], list[str] | None]:
      canonicalized_namespaces = []
      override = None
      logging.debug(f"Received providers {providers}")
@@ -829,17 +843,17 @@ def canonicalize_namespaces(


  def get_namespaces(
-     providers: Optional[list[str]] = None,
-     cluster_names: Optional[Iterable[str]] = None,
-     exclude_clusters: Optional[Iterable[str]] = None,
-     namespace_name: Optional[str] = None,
-     resource_schema_filter: Optional[str] = None,
-     filter_by_shard: Optional[bool] = True,
- ) -> tuple[list[dict[str, Any]], Optional[list[str]]]:
+     providers: Sequence[str] | None = None,
+     cluster_names: Iterable[str] | None = None,
+     exclude_clusters: Iterable[str] | None = None,
+     namespace_name: str | None = None,
+     resource_schema_filter: str | None = None,
+     filter_by_shard: bool | None = True,
+ ) -> tuple[list[dict[str, Any]], list[str] | None]:
      if providers is None:
          providers = []
      gqlapi = gql.get_api()
-     namespaces = [
+     namespaces: list[dict[str, Any]] = [
          namespace_info
          for namespace_info in gqlapi.query(NAMESPACES_QUERY)["namespaces"]
          if not ob.is_namespace_deleted(namespace_info)
@@ -850,33 +864,33 @@ def get_namespaces(
              )
          )
      ]
-     namespaces = filter_namespaces_by_cluster_and_namespace(
+     _namespaces = filter_namespaces_by_cluster_and_namespace(
          namespaces, cluster_names, exclude_clusters, namespace_name
      )
-     return canonicalize_namespaces(namespaces, providers, resource_schema_filter)
+     return canonicalize_namespaces(_namespaces, providers, resource_schema_filter)


  @defer
  def run(
-     dry_run,
-     thread_pool_size=10,
-     internal=None,
-     use_jump_host=True,
-     providers=None,
-     cluster_name: Optional[Sequence[str]] = None,
-     exclude_cluster: Optional[Sequence[str]] = None,
-     namespace_name=None,
-     init_api_resources=False,
-     defer=None,
- ):
+     dry_run: bool,
+     thread_pool_size: int = 10,
+     internal: bool | None = None,
+     use_jump_host: bool = True,
+     providers: Sequence[str] | None = None,
+     cluster_name: Sequence[str] | None = None,
+     exclude_cluster: Sequence[str] | None = None,
+     namespace_name: str | None = None,
+     init_api_resources: bool = False,
+     defer: Callable | None = None,
+ ) -> ResourceInventory | None:
      # https://click.palletsprojects.com/en/8.1.x/options/#multiple-options
      cluster_names = cluster_name
      exclude_clusters = exclude_cluster

-     if exclude_cluster and not dry_run:
+     if exclude_clusters and not dry_run:
          raise RuntimeError("--exclude-cluster is only supported in dry-run mode")

-     if exclude_cluster and cluster_name:
+     if exclude_clusters and cluster_names:
          raise RuntimeError(
              "--cluster-name and --exclude-cluster can not be used together"
          )
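
The rename to _namespaces in get_namespaces avoids re-binding a name that mypy has already inferred as list[dict[str, Any]] to the Sequence[dict[str, Any]] returned by filter_namespaces_by_cluster_and_namespace; strict type checkers reject assignments that change a variable's inferred type. A minimal sketch of the same pattern (names and types are illustrative):

    # Sketch: re-binding a list-typed variable to a Sequence fails strict type checking.
    from collections.abc import Sequence

    def keep_even(items: Sequence[int]) -> Sequence[int]:
        return tuple(i for i in items if i % 2 == 0)

    values: list[int] = [1, 2, 3, 4]
    # values = keep_even(values)   # mypy: expression has type Sequence[int], variable has type list[int]
    _values = keep_even(values)    # a new name keeps both inferred types consistent
    print(list(_values))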
@@ -894,7 +908,7 @@ def run(
      if not namespaces:
          logging.debug(
              "No namespaces found when filtering for "
-             f"cluster={cluster_name}, namespace={namespace_name}. "
+             f"cluster={cluster_names}, namespace={namespace_name}. "
              "Exiting."
          )
          return None
@@ -906,7 +920,8 @@ def run(
          init_api_resources=init_api_resources,
          overrides=overrides,
      )
-     defer(oc_map.cleanup)
+     if defer:
+         defer(oc_map.cleanup)
      if dry_run and QONTRACT_INTEGRATION == "openshift-resources":
          error = check_cluster_scoped_resources(oc_map, ri, namespaces, None)
          if error:
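
Because defer is now annotated as Callable | None (the @defer decorator injects it at runtime, but the declared default is None), the unconditional defer(oc_map.cleanup) becomes a guarded call. A generic sketch of the injection pattern, assuming a simplified decorator rather than the project's actual reconcile.utils.defer:

    # Generic sketch of a defer-style decorator; simplified, not the project's implementation.
    from collections.abc import Callable
    from functools import wraps

    def defer(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args, **kwargs):
            cleanups: list[Callable[[], None]] = []
            try:
                return func(*args, defer=cleanups.append, **kwargs)
            finally:
                for cleanup in reversed(cleanups):   # run registered cleanups on exit
                    cleanup()
        return wrapper

    @defer
    def run(dry_run: bool, defer: Callable | None = None) -> None:
        if defer:                                    # satisfies the Callable | None annotation
            defer(lambda: print("cleanup"))
        print(f"run dry_run={dry_run}")

    run(True)                                        # prints "run dry_run=True" then "cleanup"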
@@ -1130,7 +1145,7 @@ def early_exit_desired_state(
          settings=settings,
      )

-     def post_process_ns(ns):
+     def post_process_ns(ns: MutableMapping) -> MutableMapping:
          # the sharedResources have been aggreated into the openshiftResources
          # and are no longer needed - speeds up diffing process
          del ns["sharedResources"]
@@ -1151,7 +1166,7 @@ def early_exit_desired_state(
      }


- def _early_exit_fetch_resource(spec, settings):
+ def _early_exit_fetch_resource(spec: Sequence, settings: Mapping) -> dict[str, str]:
      resource = spec[0]
      ns_info = spec[1]
      cluster_name = ns_info["cluster"]["name"]
@@ -1179,58 +1194,44 @@ def _early_exit_fetch_resource(spec, settings):


  @contextmanager
- def early_exit_monkey_patch():
+ def early_exit_monkey_patch() -> Generator:
      """Avoid looking outside of app-interface on early-exit pr-check."""
-     orig_lookup_secret = lookup_secret
-     orig_lookup_github_file_content = lookup_github_file_content
-     orig_url_makes_sense = url_makes_sense
-     orig_check_alertmanager_config = check_alertmanager_config
-     orig_lookup_s3_object = lookup_s3_object
-
-     try:
-         yield _early_exit_monkey_patch_assign(
+     with patch.multiple(
+         jinja2_utils,
+         lookup_secret=DEFAULT,
+         lookup_github_file_content=DEFAULT,
+         url_makes_sense=DEFAULT,
+         lookup_s3_object=DEFAULT,
+     ) as mocks:
+         mocks["lookup_secret"].side_effect = (
              lambda path,
              key,
              version=None,
              tvars=None,
              allow_not_found=False,
              settings=None,
-             secret_reader=None: f"vault({path}, {key}, {version}, {allow_not_found})",
+             secret_reader=None: f"vault({path}, {key}, {version}"
+         )
+         mocks["lookup_github_file_content"].side_effect = (
              lambda repo,
              path,
              ref,
              tvars=None,
              settings=None,
-             secret_reader=None: f"github({repo}, {path}, {ref})",
-             lambda url: False,
-             lambda data, path, alertmanager_config_key, decode_base64=False: True,
+             secret_reader=None: f"github({repo}, {path}, {ref})"
+         )
+         mocks["url_makes_sense"].return_value = False
+         mocks["lookup_s3_object"].side_effect = (
              lambda account_name,
              bucket_name,
              path,
-             region_name=None: f"lookup_s3_object({account_name}, {bucket_name}, {path}, {region_name})",
+             region_name=None: f"lookup_s3_object({account_name}, {bucket_name}, {path}, {region_name})"
          )
-     finally:
-         _early_exit_monkey_patch_assign(
-             orig_lookup_secret,
-             orig_lookup_github_file_content,
-             orig_url_makes_sense,
-             orig_check_alertmanager_config,
-             orig_lookup_s3_object,
-         )
-
-
- def _early_exit_monkey_patch_assign(
-     lookup_secret,
-     lookup_github_file_content,
-     url_makes_sense,
-     check_alertmanager_config,
-     lookup_s3_object,
- ):
-     sys.modules[__name__].lookup_secret = lookup_secret
-     sys.modules[__name__].lookup_github_file_content = lookup_github_file_content
-     sys.modules[__name__].url_makes_sense = url_makes_sense
-     sys.modules[__name__].check_alertmanager_config = check_alertmanager_config
-     sys.modules[__name__].lookup_s3_object = lookup_s3_object
+         with patch(
+             "reconcile.openshift_resources_base.check_alertmanager_config",
+             return_value=True,
+         ):
+             yield


  def desired_state_shard_config() -> DesiredStateShardConfig:
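
The rewritten early_exit_monkey_patch above swaps hand-rolled save-and-restore of module attributes for unittest.mock.patch.multiple: passing DEFAULT makes patch.multiple create a MagicMock per named attribute and return them in a dict keyed by name, and the originals are restored automatically when the with-block exits; the nested patch(...) covers check_alertmanager_config, which lives in this module rather than in jinja2_utils. A self-contained sketch of the same technique against a throwaway in-memory module standing in for jinja2_utils:

    # Sketch: patch.multiple with DEFAULT creates MagicMocks and returns them by name;
    # "helpers" is a throwaway module standing in for reconcile.utils.jinja2.utils.
    import types
    from collections.abc import Generator
    from contextlib import contextmanager
    from unittest.mock import DEFAULT, patch

    helpers = types.ModuleType("helpers")
    helpers.lookup_secret = lambda path, key: "real-secret"
    helpers.url_makes_sense = lambda url: True

    @contextmanager
    def offline_patch() -> Generator:
        with patch.multiple(
            helpers,
            lookup_secret=DEFAULT,        # DEFAULT -> replace with a MagicMock
            url_makes_sense=DEFAULT,
        ) as mocks:
            mocks["lookup_secret"].side_effect = lambda path, key: f"vault({path}, {key})"
            mocks["url_makes_sense"].return_value = False
            yield                          # originals are restored when the block exits

    with offline_patch():
        print(helpers.lookup_secret("app-sre/creds", "token"))  # vault(app-sre/creds, token)
        print(helpers.url_makes_sense("https://example.com"))   # False
    print(helpers.lookup_secret("app-sre/creds", "token"))      # real-secret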