qontract-reconcile 0.9.1rc273__py3-none-any.whl → 0.9.1rc275__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
--- qontract_reconcile-0.9.1rc273.dist-info/METADATA
+++ qontract_reconcile-0.9.1rc275.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: qontract-reconcile
-Version: 0.9.1rc273
+Version: 0.9.1rc275
 Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
 Home-page: https://github.com/app-sre/qontract-reconcile
 Author: Red Hat App-SRE Team

--- qontract_reconcile-0.9.1rc273.dist-info/RECORD
+++ qontract_reconcile-0.9.1rc275.dist-info/RECORD
@@ -123,7 +123,7 @@ reconcile/terraform_cloudflare_dns.py,sha256=cCwDaFZ9KGNsCjVnz89LMWi5V8mhkbAvQE4
 reconcile/terraform_cloudflare_resources.py,sha256=BQg12mHm1iaxf086FFPZutPbWKUMaddqu-nREPR8ptA,14887
 reconcile/terraform_cloudflare_users.py,sha256=lTbrxi8OtW9Pfcr7Yp-70ihldMQKx9dJ7ZgbGHey1XE,13627
 reconcile/terraform_resources.py,sha256=gQ-LT0TGwf9OR4RF5EWDmNHUnKWnbhrIMtyIdUgP4D4,16782
-reconcile/terraform_tgw_attachments.py,sha256=WDrLveeCe1hjAMecvLipWGMHRVJ1GLEBWUP6QIs631M,7477
+reconcile/terraform_tgw_attachments.py,sha256=C7Z_QoWXbqQ3XeDRYi6a5c2X3ceptwTiTk-aNOR5ku4,9544
 reconcile/terraform_users.py,sha256=AzDvEQCdLpsXoS3nLbIQRraQvJHa8JmL40lZFv8YXMk,9321
 reconcile/terraform_vpc_peerings.py,sha256=l83dp3LO6WeBa_he0s-m4o182BaQX_0IRNd4MTvsAg8,21025
 reconcile/unleash_watcher.py,sha256=xNLUFpIr66XESEyXUkmHTTmHghVWHiMtnS_k0OC7gd8,4145
@@ -258,9 +258,9 @@ reconcile/jenkins/types.py,sha256=-WMs4TsCEcbJNF_n-615Fealk08boBYZcTzVUk3Wlns,26
 reconcile/ocm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reconcile/ocm/types.py,sha256=gCuVra66cIk2Wvoj8PQ-hvsZiozFDFXxUOilPdC0w-c,3138
 reconcile/saas_auto_promotions_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reconcile/saas_auto_promotions_manager/integration.py,sha256=EAQipdFjlwbKUENTlnd_UKoUYy3LHjRD9K-LXFOvFe0,6208
+reconcile/saas_auto_promotions_manager/integration.py,sha256=hmRWva3_ZEO845wMkttEt4X6Izl5p8ZKs09NXWp8mkY,5609
 reconcile/saas_auto_promotions_manager/publisher.py,sha256=gdzR7jNDKy130pO6J-UNwXI__EXgiREzdaXvNkE3Bhg,1845
-reconcile/saas_auto_promotions_manager/subscriber.py,sha256=JsS_wrCGZC3-86kHSUTq8veLpdGH6XFL_zqlXKx-eNk,5891
+reconcile/saas_auto_promotions_manager/subscriber.py,sha256=PAncXY973uGMAHZRkV7QqSYSHhyZO_SuLGhwCn0pukI,6177
 reconcile/saas_auto_promotions_manager/merge_request_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reconcile/saas_auto_promotions_manager/merge_request_manager/merge_request.py,sha256=diDb4BkErloZF6MfzQbMkqC1gx8tglYkw10Vuf_4_Ho,1147
 reconcile/saas_auto_promotions_manager/merge_request_manager/merge_request_manager.py,sha256=QVaaL0hQn0fy-NPmoKyJMp3NxNfE8umYjReaTztxCps,9763
@@ -342,7 +342,7 @@ reconcile/test/test_terraform_cloudflare_dns.py,sha256=aQTXX8Vr4h9aWvJZTnpZEhMGY
 reconcile/test/test_terraform_cloudflare_resources.py,sha256=cWNE2UIhz19rLSWdpJG8xRwuEEYoIZWEkDZY7e2QN_g,3426
 reconcile/test/test_terraform_cloudflare_users.py,sha256=luuSAWflLaLiTipy1CFsuxb6WDkD8PUFqsiBVXergp4,27448
 reconcile/test/test_terraform_resources.py,sha256=dEpJwaTzE_FzkRjCozDtGzE4egBrb-VrwSoWr2Benv4,7955
-reconcile/test/test_terraform_tgw_attachments.py,sha256=VMV6IdEXHj5qFxlr0BCCVVNzxe_j9Nk3ybeOMLMPqJU,9165
+reconcile/test/test_terraform_tgw_attachments.py,sha256=MiohVv2YT6e05y-yGwt9pcGytxB_zN0dKIdJbs4UcGs,15019
 reconcile/test/test_terraform_users.py,sha256=Yt4iN5FMtn7cfVlVqBJ1MMH94Z0DGchyByhpfNUJFxM,1570
 reconcile/test/test_terraform_vpc_peerings.py,sha256=-NXgufo1GCYhbWyVWbtK4KfPNIY6_02GQR53ynwMJZ4,18135
 reconcile/test/test_terraform_vpc_peerings_build_desired_state.py,sha256=43EIGnrm5xAdtuSL6tKDh-nepk4WOKkONMNUOU1BN_Y,37236
@@ -406,12 +406,13 @@ reconcile/test/saas_auto_promotions_manager/merge_request_manager/renderer/test_
 reconcile/test/saas_auto_promotions_manager/merge_request_manager/renderer/test_content_single_namespace.py,sha256=lpMMllzRYzq-WI5JAyX6B9-VOK8N5IBFJn7KUMuvKxk,2008
 reconcile/test/saas_auto_promotions_manager/merge_request_manager/renderer/test_content_single_target.py,sha256=cwAPrQ_v3DR_CHU7Nt2xBGutC-1XslyJ5mXM8FXW-3o,1111
 reconcile/test/saas_auto_promotions_manager/subscriber/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reconcile/test/saas_auto_promotions_manager/subscriber/conftest.py,sha256=IShISmgW0FECL82wIy6wFUpCxIKlVIg2WngtpY5Dxxg,2843
+reconcile/test/saas_auto_promotions_manager/subscriber/conftest.py,sha256=R4xSKYtRDeCs2c2d5WTfRnqW9br8p54H8jtP0K-WMmg,2428
 reconcile/test/saas_auto_promotions_manager/subscriber/data_keys.py,sha256=0MPM188d-7kUcqFvOEsjMEhJpSlPZua8rQvlto_50PE,360
 reconcile/test/saas_auto_promotions_manager/subscriber/test_content_hash.py,sha256=6Nkyix3Uyf7Zd9t2C6RnYlQG_y3NsZ6v6zu3G-qF4to,5186
-reconcile/test/saas_auto_promotions_manager/subscriber/test_multiple_channels_config_hash.py,sha256=5D9EHJ83304sFZqIQqCFsXI8icAD1bVhhlHfLZfgBag,5805
-reconcile/test/saas_auto_promotions_manager/subscriber/test_multiple_channels_moving_ref.py,sha256=gB2u4DshlrbiD9r8zw5CNQhOP_bJw10VHu-sEoJudts,5567
-reconcile/test/saas_auto_promotions_manager/subscriber/test_single_channel_with_single_publisher.py,sha256=QPhzVw5dCMj0ZzjbkN8G-MF_39DefBaY9Zo_tLsnt6o,8040
+reconcile/test/saas_auto_promotions_manager/subscriber/test_diff.py,sha256=GoCl8m63ikzd0fqlqS0z5VwgSVAZSkKk8d0lDbOoS08,2521
+reconcile/test/saas_auto_promotions_manager/subscriber/test_multiple_channels_config_hash.py,sha256=1xdM2gJ4Q6ew1xeHijMrrZx_R7UYbIchlg-bSDLO_x4,6955
+reconcile/test/saas_auto_promotions_manager/subscriber/test_multiple_channels_moving_ref.py,sha256=Q0YLTz-tTyYcyS0OeDuy5EKwdbIwr2tcoaDK6YnXwWo,6717
+reconcile/test/saas_auto_promotions_manager/subscriber/test_single_channel_with_single_publisher.py,sha256=UaKEgI_yMaILEiD-f9sl7FmlEiTICptSC4roapwE8tY,8642
 reconcile/test/saas_auto_promotions_manager/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reconcile/test/saas_auto_promotions_manager/utils/saas_files_inventory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reconcile/test/saas_auto_promotions_manager/utils/saas_files_inventory/test_multiple_publishers_for_single_channel.py,sha256=A2bw4U2jcwpQpTd8nab2Pe7QeF-__2CEJp8kWocVHVc,2414
@@ -501,7 +502,7 @@ reconcile/utils/state.py,sha256=_SmE7fOEReET3iy9jRQ1pyuaJebg5962Zs9Iy1dzTJk,9530
 reconcile/utils/structs.py,sha256=B05uQsZLxl4a-wLqiUTJ8ccr1dkjG_UbleMY2jrqoe0,296
 reconcile/utils/template.py,sha256=wTvRU4AnAV_o042tD4Mwls2dwWMuk7MKnde3MaCjaYg,331
 reconcile/utils/terraform_client.py,sha256=AYnA1pW7JgNuroHzhrLW8gZ8yN-pLKwH_aAnRj9JcEA,27679
-reconcile/utils/terrascript_aws_client.py,sha256=rivA5maobRrUJ6km4FMA7JDEgd2j8PlF4reZJUBXEjg,252139
+reconcile/utils/terrascript_aws_client.py,sha256=aT4KeWG1PPJbYuKRV505iDxBJFz2gYig6q4JXVrHVzQ,252143
 reconcile/utils/throughput.py,sha256=iP4UWAe2LVhDo69mPPmgo9nQ7RxHD6_GS8MZe-aSiuM,344
 reconcile/utils/unleash.py,sha256=QGANGA8BHG7oC_bt39c2M7uRa2ycjzmahN8_m7Zovos,3094
 reconcile/utils/vault.py,sha256=CnhNu0pZfqS14kD1dQmBldITvTcSJHaHfk-KPNNDC7k,14471
@@ -563,8 +564,8 @@ tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y
 tools/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tools/test/test_qontract_cli.py,sha256=awwTHEc2DWlykuqGIYM0WOBoSL0KRnOraCLk3C7izis,1401
 tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
-qontract_reconcile-0.9.1rc273.dist-info/METADATA,sha256=YM9eACxTszm5yZNp-01JcMueOh-CqE8qXR5-tVdSb-E,2287
-qontract_reconcile-0.9.1rc273.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-qontract_reconcile-0.9.1rc273.dist-info/entry_points.txt,sha256=aIVvB7OTCxYu0QkONzBPfFEyg68Pr8KUVKEEm4ChDVc,333
-qontract_reconcile-0.9.1rc273.dist-info/top_level.txt,sha256=j0CHPIc8TsVRB50wOz_jhxjjaRyCJB3NOQeXhuHS67c,34
-qontract_reconcile-0.9.1rc273.dist-info/RECORD,,
+qontract_reconcile-0.9.1rc275.dist-info/METADATA,sha256=_flJtRC5kagWx1H5IKnB2RlUXZWqepaSiNJnfWvtUbY,2287
+qontract_reconcile-0.9.1rc275.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+qontract_reconcile-0.9.1rc275.dist-info/entry_points.txt,sha256=aIVvB7OTCxYu0QkONzBPfFEyg68Pr8KUVKEEm4ChDVc,333
+qontract_reconcile-0.9.1rc275.dist-info/top_level.txt,sha256=j0CHPIc8TsVRB50wOz_jhxjjaRyCJB3NOQeXhuHS67c,34
+qontract_reconcile-0.9.1rc275.dist-info/RECORD,,

--- reconcile/saas_auto_promotions_manager/integration.py
+++ reconcile/saas_auto_promotions_manager/integration.py
@@ -13,10 +13,7 @@ from reconcile.saas_auto_promotions_manager.merge_request_manager.renderer impor
     Renderer,
 )
 from reconcile.saas_auto_promotions_manager.publisher import Publisher
-from reconcile.saas_auto_promotions_manager.subscriber import (
-    ConfigHash,
-    Subscriber,
-)
+from reconcile.saas_auto_promotions_manager.subscriber import Subscriber
 from reconcile.saas_auto_promotions_manager.utils.saas_files_inventory import (
     SaasFilesInventory,
 )
@@ -77,20 +74,7 @@ class SaasAutoPromotionsManager:
             subscriber.compute_desired_state()
 
     def _get_subscribers_with_diff(self) -> list[Subscriber]:
-        subscribers_with_diff: list[Subscriber] = []
-        for subscriber in self._saas_file_inventory.subscribers:
-            current_hashes: list[ConfigHash] = []
-            for s in subscriber.config_hashes_by_channel_name.values():
-                for el in s:
-                    current_hashes.append(el)
-            if (
-                set(subscriber.desired_hashes) == set(current_hashes)
-                and subscriber.desired_ref == subscriber.ref
-            ):
-                # There is no change that requires a promotion
-                continue
-            subscribers_with_diff.append(subscriber)
-        return subscribers_with_diff
+        return [s for s in self._saas_file_inventory.subscribers if s.has_diff()]
 
     def reconcile(self) -> None:
         self._deployment_state.cache_commit_shas_from_s3()

--- reconcile/saas_auto_promotions_manager/subscriber.py
+++ reconcile/saas_auto_promotions_manager/subscriber.py
@@ -45,6 +45,15 @@ class Subscriber:
         self.namespace_file_path = namespace_file_path
         self._content_hash = ""
 
+    def has_diff(self) -> bool:
+        current_hashes = {
+            el for s in self.config_hashes_by_channel_name.values() for el in s
+        }
+        return not (
+            set(self.desired_hashes) == set(current_hashes)
+            and self.desired_ref == self.ref
+        )
+
     def compute_desired_state(self) -> None:
         self._compute_desired_ref()
         self._compute_desired_config_hashes()
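
The new Subscriber.has_diff() predicate is what lets integration.py collapse its filtering loop into a comprehension: a subscriber needs a promotion exactly when its desired config hashes or its desired ref drift from the current ones. A minimal standalone sketch of the comparison (the Hash dataclass is a hypothetical stand-in for the package's ConfigHash):

from dataclasses import dataclass


@dataclass(frozen=True)
class Hash:
    # frozen=True generates __hash__, so instances can live in sets,
    # which the set-equality comparison below relies on
    channel: str
    parent_saas: str
    target_config_hash: str


def has_diff(ref, desired_ref, current_by_channel, desired_hashes):
    # flatten the per-channel lists into one set, then compare both axes
    current = {h for hashes in current_by_channel.values() for h in hashes}
    return not (set(desired_hashes) == current and desired_ref == ref)


h = Hash("prod", "saas-a", "abc123")
assert not has_diff("sha1", "sha1", {"prod": [h]}, [h])  # fully in sync
assert has_diff("sha1", "sha2", {"prod": [h]}, [h])      # ref moved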

--- reconcile/terraform_tgw_attachments.py
+++ reconcile/terraform_tgw_attachments.py
@@ -1,12 +1,23 @@
 import json
 import logging
-import sys
-from typing import Any
+from collections.abc import (
+    Callable,
+    Generator,
+    Iterable,
+    Mapping,
+)
+from typing import (
+    Any,
+    Optional,
+)
 
 from reconcile import queries
 from reconcile.utils.aws_api import AWSApi
 from reconcile.utils.defer import defer
-from reconcile.utils.ocm import OCMMap
+from reconcile.utils.ocm import (
+    OCM,
+    OCMMap,
+)
 from reconcile.utils.semver_helper import make_semver
 from reconcile.utils.terraform_client import TerraformClient as Terraform
 from reconcile.utils.terrascript_aws_client import TerrascriptClient as Terrascript
@@ -14,8 +25,18 @@ from reconcile.utils.terrascript_aws_client import TerrascriptClient as Terrascr
 QONTRACT_INTEGRATION = "terraform_tgw_attachments"
 QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
+TGW_CONNECTION_PROVIDER = "account-tgw"
 
-def build_desired_state_tgw_attachments(clusters, ocm_map: OCMMap, awsapi: AWSApi):
+
+class ValidationError(Exception):
+    pass
+
+
+def build_desired_state_tgw_attachments(
+    clusters: Iterable[Mapping],
+    ocm_map: Optional[OCMMap],
+    awsapi: AWSApi,
+) -> tuple[list[dict], bool]:
     """
     Fetch state for TGW attachments between a cluster and all TGWs
     in an account in the same region as the cluster
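
The ValidationError introduced above is raised further down by _validate_tgw_connection_names, which detects duplicate connection names by comparing the length of a list with the length of its set. A self-contained sketch of that check, with hypothetical inputs:

class ValidationError(Exception):
    pass


def validate_unique(connection_names: list) -> None:
    # a set discards repeats, so a shorter set means at least one duplicate
    if len(set(connection_names)) != len(connection_names):
        raise ValidationError("duplicate tgw connection names found")


validate_unique(["conn_acct-tgw-1", "conn_acct-tgw-2"])  # passes silently
try:
    validate_unique(["conn_acct-tgw-1", "conn_acct-tgw-1"])
except ValidationError as e:
    print(e)  # duplicate tgw connection names found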
@@ -23,110 +44,207 @@ def build_desired_state_tgw_attachments(clusters, ocm_map: OCMMap, awsapi: AWSAp
     desired_state = []
     error = False
 
+    for item in _build_desired_state_tgw_attachments(clusters, ocm_map, awsapi):
+        if item is None:
+            error = True
+        else:
+            desired_state.append(item)
+    return desired_state, error
+
+
+def _build_desired_state_tgw_attachments(
+    clusters: Iterable[Mapping],
+    ocm_map: Optional[OCMMap],
+    awsapi: AWSApi,
+) -> Generator[Optional[dict], Any, None]:
     for cluster_info in clusters:
-        cluster = cluster_info["name"]
-        ocm = ocm_map.get(cluster)
-        peering_info = cluster_info["peering"]
-        peer_connections = peering_info["connections"]
-        for peer_connection in peer_connections:
-            # We only care about account-tgw peering providers
-            peer_connection_provider = peer_connection["provider"]
-            if not peer_connection_provider == "account-tgw":
-                continue
-            # accepter is the cluster's AWS account
-            cluster_region = cluster_info["spec"]["region"]
-            cluster_cidr_block = cluster_info["network"]["vpc"]
-            accepter = {"cidr_block": cluster_cidr_block, "region": cluster_region}
-
-            account = peer_connection["account"]
-            # assume_role is the role to assume to provision the
-            # peering connection request, through the accepter AWS account.
-            provided_assume_role = peer_connection.get("assumeRole")
-            # if an assume_role is provided, it means we don't need
-            # to get the information from OCM. it likely means that
-            # there is no OCM at all.
-            if provided_assume_role:
-                account["assume_role"] = provided_assume_role
-            else:
-                account[
-                    "assume_role"
-                ] = ocm.get_aws_infrastructure_access_terraform_assume_role(
-                    cluster, account["uid"], account["terraformUsername"]
-                )
-            account["assume_region"] = accepter["region"]
-            account["assume_cidr"] = accepter["cidr_block"]
-            (
-                accepter_vpc_id,
-                accepter_route_table_ids,
-                accepter_subnets_id_az,
-            ) = awsapi.get_cluster_vpc_details(
-                account,
-                route_tables=peer_connection.get("manageRoutes"),
-                subnets=True,
-            )
-
-            if accepter_vpc_id is None:
-                logging.error(f"[{cluster} could not find VPC ID for cluster")
-                error = True
-                continue
-            accepter["vpc_id"] = accepter_vpc_id
-            accepter["route_table_ids"] = accepter_route_table_ids
-            accepter["subnets_id_az"] = accepter_subnets_id_az
-            accepter["account"] = account
-
-            account_tgws = awsapi.get_tgws_details(
-                account,
-                cluster_region,
-                cluster_cidr_block,
-                tags=json.loads(peer_connection.get("tags") or "{}"),
-                route_tables=peer_connection.get("manageRoutes"),
-                security_groups=peer_connection.get("manageSecurityGroups"),
-                route53_associations=peer_connection.get("manageRoute53Associations"),
-            )
-            for tgw in account_tgws:
-                tgw_id = tgw["tgw_id"]
-                connection_name = (
-                    f"{peer_connection['name']}_" + f"{account['name']}-{tgw_id}"
+        ocm = (
+            ocm_map.get(cluster_info["name"])
+            if ocm_map and cluster_info.get("ocm")
+            else None
+        )
+        for peer_connection in cluster_info["peering"]["connections"]:
+            if peer_connection["provider"] == TGW_CONNECTION_PROVIDER:
+                yield from _build_desired_state_tgw_connection(
+                    peer_connection, cluster_info, ocm, awsapi
                 )
-                requester = {
-                    "tgw_id": tgw_id,
-                    "tgw_arn": tgw["tgw_arn"],
-                    "region": tgw["region"],
-                    "routes": tgw.get("routes"),
-                    "rules": tgw.get("rules"),
-                    "hostedzones": tgw.get("hostedzones"),
-                    "cidr_block": peer_connection.get("cidrBlock"),
-                    "account": account,
-                }
-                item = {
-                    "connection_provider": peer_connection_provider,
-                    "connection_name": connection_name,
-                    "requester": requester,
-                    "accepter": accepter,
-                    "deleted": peer_connection.get("delete", False),
-                }
-                desired_state.append(item)
 
-    return desired_state, error
 
+def _build_desired_state_tgw_connection(
+    peer_connection: Mapping,
+    cluster_info: Mapping,
+    ocm: Optional[OCM],
+    awsapi: AWSApi,
+) -> Generator[Optional[dict], Any, None]:
+    cluster_name = cluster_info["name"]
+    cluster_region = cluster_info["spec"]["region"]
+    cluster_cidr_block = cluster_info["network"]["vpc"]
 
-@defer
-def run(
-    dry_run, print_to_file=None, enable_deletion=False, thread_pool_size=10, defer=None
-):
-    settings = queries.get_secret_reader_settings()
-    clusters = queries.get_clusters_with_peering_settings()
-    with_ocm = any(c.get("ocm") for c in clusters)
-    if with_ocm:
-        ocm_map = OCMMap(
-            clusters=clusters, integration=QONTRACT_INTEGRATION, settings=settings
-        )
+    account = _account_with_assume_role_data(
+        peer_connection, cluster_name, cluster_region, cluster_cidr_block, ocm
+    )
+
+    # accepter is the cluster's AWS account
+    accepter = _build_accepter(
+        peer_connection,
+        account,
+        cluster_region,
+        cluster_cidr_block,
+        awsapi,
+    )
+    if accepter["vpc_id"] is None:
+        logging.error(f"[{cluster_name}] could not find VPC ID for cluster")
+        yield None
+
+    account_tgws = awsapi.get_tgws_details(
+        account,
+        cluster_region,
+        cluster_cidr_block,
+        tags=json.loads(peer_connection.get("tags") or "{}"),
+        route_tables=peer_connection.get("manageRoutes"),
+        security_groups=peer_connection.get("manageSecurityGroups"),
+        route53_associations=peer_connection.get("manageRoute53Associations"),
+    )
+    for tgw in account_tgws:
+        connection_name = f"{peer_connection['name']}_{account['name']}-{tgw['tgw_id']}"
+        requester = _build_requester(peer_connection, account, tgw)
+        item = {
+            "connection_provider": TGW_CONNECTION_PROVIDER,
+            "connection_name": connection_name,
+            "requester": requester,
+            "accepter": accepter,
+            "deleted": peer_connection.get("delete", False),
+        }
+        yield item
+
+
+def _account_with_assume_role_data(
+    peer_connection: Mapping,
+    cluster_name: str,
+    region: str,
+    cidr_block: str,
+    ocm: Optional[OCM],
+) -> dict[str, Any]:
+    account = peer_connection["account"]
+    # assume_role is the role to assume to provision the
+    # peering connection request, through the accepter AWS account.
+    provided_assume_role = peer_connection.get("assumeRole")
+    # if an assume_role is provided, it means we don't need
+    # to get the information from OCM. it likely means that
+    # there is no OCM at all.
+    if provided_assume_role:
+        account["assume_role"] = provided_assume_role
     else:
+        if not ocm:
+            raise ValueError("OCM is required to get assume_role data")
+        account[
+            "assume_role"
+        ] = ocm.get_aws_infrastructure_access_terraform_assume_role(
+            cluster_name, account["uid"], account["terraformUsername"]
+        )
+    account["assume_region"] = region
+    account["assume_cidr"] = cidr_block
+    return account
+
+
+def _build_accepter(
+    peer_connection: Mapping,
+    account: Mapping,
+    region: str,
+    cidr_block: str,
+    awsapi: AWSApi,
+) -> dict[str, Any]:
+    (vpc_id, route_table_ids, subnets_id_az) = awsapi.get_cluster_vpc_details(
+        account,
+        route_tables=peer_connection.get("manageRoutes"),
+        subnets=True,
+    )
+    return {
+        "cidr_block": cidr_block,
+        "region": region,
+        "vpc_id": vpc_id,
+        "route_table_ids": route_table_ids,
+        "subnets_id_az": subnets_id_az,
+        "account": account,
+    }
+
+
+def _build_requester(
+    peer_connection: Mapping,
+    account: Mapping,
+    tgw: Mapping,
+) -> dict[str, Any]:
+    return {
+        "tgw_id": tgw["tgw_id"],
+        "tgw_arn": tgw["tgw_arn"],
+        "region": tgw["region"],
+        "routes": tgw.get("routes"),
+        "rules": tgw.get("rules"),
+        "hostedzones": tgw.get("hostedzones"),
+        "cidr_block": peer_connection.get("cidrBlock"),
+        "account": account,
+    }
+
+
+def _build_ocm_map(
+    clusters: Iterable[Mapping],
+    settings: Optional[Mapping[str, Any]],
+) -> Optional[OCMMap]:
+    ocm_clusters = [c for c in clusters if c.get("ocm")]
+    return (
+        OCMMap(
+            clusters=ocm_clusters, integration=QONTRACT_INTEGRATION, settings=settings
+        )
+        if ocm_clusters
         # this is a case for an OCP cluster which is not provisioned
         # through OCM. it is expected that an 'assume_role' is provided
-        # on the tgw defition in the cluster file.
-        ocm_map = {}
+        # on the tgw definition in the cluster file.
+        else None
+    )
+
+
+def _validate_tgw_connection_names(desired_state: Iterable[Mapping]) -> None:
+    connection_names = [c["connection_name"] for c in desired_state]
+    if len(set(connection_names)) != len(connection_names):
+        raise ValidationError("duplicate tgw connection names found")
+
 
+def _filter_accounts(
+    accounts: Iterable[Mapping],
+    participating_accounts: Iterable[Mapping],
+) -> list:
+    participating_account_names = {a["name"] for a in participating_accounts}
+    return [a for a in accounts if a["name"] in participating_account_names]
+
+
+def _populate_tgw_attachments_working_dirs(
+    desired_state: Iterable,
+    accounts: Iterable,
+    settings: Optional[Mapping[str, Any]],
+    participating_accounts: Iterable,
+    print_to_file: Optional[str],
+    thread_pool_size: int,
+) -> dict[str, str]:
+    ts = Terrascript(
+        QONTRACT_INTEGRATION, "", thread_pool_size, accounts, settings=settings
+    )
+    ts.populate_additional_providers(participating_accounts)
+    ts.populate_tgw_attachments(desired_state)
+    working_dirs = ts.dump(print_to_file=print_to_file)
+    return working_dirs
+
+
+@defer
+def run(
+    dry_run: bool,
+    print_to_file: Optional[str] = None,
+    enable_deletion: bool = False,
+    thread_pool_size: int = 10,
+    defer: Optional[Callable] = None,
+) -> None:
+    settings = queries.get_secret_reader_settings()
+    clusters = queries.get_clusters_with_peering_settings()
+    ocm_map = _build_ocm_map(clusters, settings)
     accounts = queries.get_aws_accounts(terraform_state=True, ecrs=False)
 
     # Fetch desired state for cluster-to-vpc(account) VPCs
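
The refactored builder keeps the public (desired_state, error) contract while pushing the traversal into generators that yield None as a per-item error sentinel. A minimal sketch of the same pattern, under assumed simplified inputs (the names here are illustrative, not the module's):

from collections.abc import Generator, Iterable
from typing import Optional


def _items(raw: Iterable[dict]) -> Generator[Optional[dict], None, None]:
    # yield None to signal a per-item failure without aborting the sweep
    for r in raw:
        if "id" not in r:
            yield None
        else:
            yield {"id": r["id"]}


def build(raw: Iterable[dict]) -> tuple[list[dict], bool]:
    desired: list[dict] = []
    error = False
    for item in _items(raw):
        if item is None:
            error = True
        else:
            desired.append(item)
    return desired, error


assert build([{"id": 1}, {}]) == ([{"id": 1}], True)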
@@ -135,66 +253,59 @@ def run(
         clusters, ocm_map, awsapi
     )
     if err:
-        sys.exit(1)
+        raise RuntimeError("Could not find VPC ID for cluster")
 
-    # check there are no repeated vpc connection names
-    connection_names = [c["connection_name"] for c in desired_state]
-    if len(set(connection_names)) != len(connection_names):
-        logging.error("duplicate vpc connection names found")
-        sys.exit(1)
+    # check there are no repeated tgw connection names
+    _validate_tgw_connection_names(desired_state)
 
     participating_accounts = [item["requester"]["account"] for item in desired_state]
-    participating_accounts += [item["accepter"]["account"] for item in desired_state]
-    participating_account_names = [a["name"] for a in participating_accounts]
-    accounts = [
-        a
-        for a in queries.get_aws_accounts(terraform_state=True, ecrs=False)
-        if a["name"] in participating_account_names
-    ]
+    filtered_accounts = _filter_accounts(accounts, participating_accounts)
 
-    ts = Terrascript(
-        QONTRACT_INTEGRATION, "", thread_pool_size, accounts, settings=settings
+    working_dirs = _populate_tgw_attachments_working_dirs(
+        desired_state,
+        filtered_accounts,
+        settings,
+        participating_accounts,
+        print_to_file,
+        thread_pool_size,
     )
-    ts.populate_additional_providers(participating_accounts)
-    ts.populate_tgw_attachments(desired_state)
-    working_dirs = ts.dump(print_to_file=print_to_file)
 
     if print_to_file:
-        sys.exit()
+        return
 
-    aws_api = AWSApi(1, accounts, settings=settings, init_users=False)
+    aws_api = AWSApi(1, filtered_accounts, settings=settings, init_users=False)
 
     tf = Terraform(
         QONTRACT_INTEGRATION,
         QONTRACT_INTEGRATION_VERSION,
         "",
-        accounts,
+        filtered_accounts,
         working_dirs,
         thread_pool_size,
        aws_api,
     )
 
-    if tf is None:
-        sys.exit(1)
-
-    defer(tf.cleanup)
+    if defer:
+        defer(tf.cleanup)
 
     disabled_deletions_detected, err = tf.plan(enable_deletion)
     if err:
-        sys.exit(1)
+        raise RuntimeError("Error running terraform plan")
     if disabled_deletions_detected:
-        sys.exit(1)
+        raise RuntimeError("Disabled deletions detected running terraform plan")
 
     if dry_run:
         return
 
     err = tf.apply()
     if err:
-        sys.exit(1)
+        raise RuntimeError("Error running terraform apply")
 
 
 def early_exit_desired_state(
-    print_to_file=None, enable_deletion=False, thread_pool_size=10
+    print_to_file: Optional[str] = None,
+    enable_deletion: bool = False,
+    thread_pool_size: int = 10,
 ) -> dict[str, Any]:
     return {
         "clusters": queries.get_clusters_with_peering_settings(),

--- reconcile/test/saas_auto_promotions_manager/subscriber/conftest.py
+++ reconcile/test/saas_auto_promotions_manager/subscriber/conftest.py
@@ -1,7 +1,6 @@
 from collections import defaultdict
 from collections.abc import (
     Callable,
-    Iterable,
     Mapping,
 )
 from typing import Any
@@ -23,6 +22,8 @@ from .data_keys import (
     CONFIG_HASH,
     CUR_CONFIG_HASHES,
     CUR_SUBSCRIBER_REF,
+    DESIRED_REF,
+    DESIRED_TARGET_HASHES,
     NAMESPACE_REF,
     REAL_WORLD_SHA,
     SUCCESSFUL_DEPLOYMENT,
@@ -30,23 +31,6 @@
 )
 
 
-@pytest.fixture
-def config_hashes_builder() -> Callable[
-    [Iterable[tuple[str, str, str]]], list[ConfigHash]
-]:
-    def builder(data: Iterable[tuple[str, str, str]]) -> list[ConfigHash]:
-        return [
-            ConfigHash(
-                channel=d[0],
-                parent_saas=d[1],
-                target_config_hash=d[2],
-            )
-            for d in data
-        ]
-
-    return builder
-
-
 @pytest.fixture
 def subscriber_builder() -> Callable[[Mapping[str, Any]], Subscriber]:
     def builder(data: Mapping[str, Any]) -> Subscriber:
@@ -69,12 +53,8 @@ def subscriber_builder() -> Callable[[Mapping[str, Any]], Subscriber]:
             channels.append(channel)
         cur_config_hashes_by_channel: dict[str, list[ConfigHash]] = defaultdict(list)
         for cur_config_hash in data.get(CUR_CONFIG_HASHES, []):
-            cur_config_hashes_by_channel[cur_config_hash[0]].append(
-                ConfigHash(
-                    channel=cur_config_hash[0],
-                    parent_saas=cur_config_hash[1],
-                    target_config_hash=cur_config_hash[2],
-                )
+            cur_config_hashes_by_channel[cur_config_hash.channel].append(
+                cur_config_hash
             )
         subscriber = Subscriber(
             namespace_file_path=data.get(NAMESPACE_REF, ""),
@@ -85,6 +65,8 @@ def subscriber_builder() -> Callable[[Mapping[str, Any]], Subscriber]:
         )
         subscriber.channels = channels
         subscriber.config_hashes_by_channel_name = cur_config_hashes_by_channel
+        subscriber.desired_ref = data.get(DESIRED_REF, "")
+        subscriber.desired_hashes = data.get(DESIRED_TARGET_HASHES, [])
         return subscriber
 
     return builder
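
The conftest keeps its builder-fixture style: the fixture returns a factory so a single fixture can produce arbitrarily many Subscriber variants per test. A generic sketch of the pattern with hypothetical names:

from collections.abc import Callable

import pytest


@pytest.fixture
def user_builder() -> Callable[[dict], dict]:
    # returning a factory lets each test build as many variants as it needs
    def builder(data: dict) -> dict:
        return {"name": data.get("name", "anonymous"), "active": True}

    return builder


def test_default_name(user_builder):
    assert user_builder({})["name"] == "anonymous"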