qontract-reconcile 0.10.1rc41__py3-none-any.whl → 0.10.1rc43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qontract-reconcile
- Version: 0.10.1rc41
+ Version: 0.10.1rc43
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Home-page: https://github.com/app-sre/qontract-reconcile
  Author: Red Hat App-SRE Team
@@ -7,7 +7,7 @@ reconcile/aws_iam_password_reset.py,sha256=NwErtrqgBiXr7eGCAHdtGGOx0S7-4JnSc29Ie
  reconcile/aws_support_cases_sos.py,sha256=i6bSWnlH9fh14P14PjVhFLwNl-q3fD733_rXKM_O51c,2992
  reconcile/blackbox_exporter_endpoint_monitoring.py,sha256=W_VJagnsJR1v5oqjlI3RJJE0_nhtJ0m81RS8zWA5u5c,3538
  reconcile/checkpoint.py,sha256=figtZRuWUvdpdSnkhAqeGvO5dI02TT6J3heyeFhlwqM,5016
- reconcile/cli.py,sha256=bPn61Jm35bH2FZfD2fXj_ITmbK6SdHQZjL-3ixjpBIY,70379
+ reconcile/cli.py,sha256=b9m9QgNakXJOS-b9MQgHEESFF5-qc4J1sE4iUuMMVfA,71194
  reconcile/closedbox_endpoint_monitoring_base.py,sha256=0xg_d8dwd36Y8GY1mE-LLO1LQpPEMM77bzAfc_KdgzU,4870
  reconcile/cluster_deployment_mapper.py,sha256=2Ah-nu-Mdig0pjuiZl_XLrmVAjYzFjORR3dMlCgkmw0,2352
  reconcile/dashdotdb_base.py,sha256=Ca75-OQiu5HeA8Q6zQpEYuhyCSjeuWe99K4y9ipTORM,4032
@@ -111,6 +111,7 @@ reconcile/terraform_aws_route53.py,sha256=06VIlIb95BzVkxV_1TPiaY9sQO-TkvQXL4V_qz
  reconcile/terraform_cloudflare_dns.py,sha256=auU4bzeLwd4S8D8oqpqJbrCUoEdELXrgi7vHOedjYFk,13332
  reconcile/terraform_cloudflare_resources.py,sha256=BQg12mHm1iaxf086FFPZutPbWKUMaddqu-nREPR8ptA,14887
  reconcile/terraform_cloudflare_users.py,sha256=Bv0f9lOO_wTM7st8iltb8FR8gu4KpKu3qavMzAYcoMc,13965
+ reconcile/terraform_repo.py,sha256=9Gs5Xbt6qNR_Q_78evgvYWlRkyDQK_4v-_7mS6GQw0k,11112
  reconcile/terraform_resources.py,sha256=gQ-LT0TGwf9OR4RF5EWDmNHUnKWnbhrIMtyIdUgP4D4,16782
  reconcile/terraform_tgw_attachments.py,sha256=ootT8zPxcm3-VHy9OiG0zBP0X7wzrvTCh53eYbxJvfI,13725
  reconcile/terraform_users.py,sha256=AzDvEQCdLpsXoS3nLbIQRraQvJHa8JmL40lZFv8YXMk,9321
@@ -119,8 +120,8 @@ reconcile/unleash_watcher.py,sha256=xNLUFpIr66XESEyXUkmHTTmHghVWHiMtnS_k0OC7gd8,
  reconcile/vault_replication.py,sha256=xobxnsOfUcwvdQ-RZ7JH_sZCDh8rpEY7MJ36nkvfFqE,17262
  reconcile/vpc_peerings_validator.py,sha256=10igLYTQpBMGXO9mTO7sJBzgr4jXQ2hf1OH5r5DKugE,3586
  reconcile/aus/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reconcile/aus/advanced_upgrade_service.py,sha256=X3_mjNsocBEwgyGHs1O2hG6lTL0BOlM2DX_cvsmXtZ8,10953
- reconcile/aus/base.py,sha256=z76_aJrxJZHu8XZmVeGHlVKF3SjMwE8Ayrr5NL1VRqc,26221
+ reconcile/aus/advanced_upgrade_service.py,sha256=R0V-APkx12JOQNySLSM1HvJDJSzKp3QyBO0hBBoVzlM,11316
+ reconcile/aus/base.py,sha256=qnO8kmnesMho6a-EgFdrVejjPtwS_jkdlSPHaXYB1Rs,26259
  reconcile/aus/models.py,sha256=Qj4hmJr6J3fsH1acOaudqvGnXIuk5pXakckOf3L_qHA,4536
  reconcile/aus/ocm_addons_upgrade_scheduler_org.py,sha256=b9qGFWwY8aSEzXeTtw89dqQeK_dJGBEfopOOkdjO8V8,6026
  reconcile/aus/ocm_upgrade_scheduler.py,sha256=Wh2ZbODeOF7_hEbXfsFUGnAJ9CLSy76lNKEmir-GHuM,3447
@@ -244,6 +245,8 @@ reconcile/gql_definitions/terraform_cloudflare_resources/terraform_cloudflare_re
  reconcile/gql_definitions/terraform_cloudflare_users/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/terraform_cloudflare_users/app_interface_setting_cloudflare_and_vault.py,sha256=OHfIzX9qAePtRwARYNERuvafwVv0Zy0YUbT83Frt3eA,1984
  reconcile/gql_definitions/terraform_cloudflare_users/terraform_cloudflare_roles.py,sha256=CopEDfqnz6M-rW4kwkbFK_5FvAj7t8NzzffGZUhCTuo,4059
+ reconcile/gql_definitions/terraform_repo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reconcile/gql_definitions/terraform_repo/terraform_repo.py,sha256=lr9a9pCbhrrwrZCodCzmzQmFTS8gqfry1sGtQ-4v7Z4,2423
  reconcile/gql_definitions/terraform_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/terraform_resources/terraform_resources_namespaces.py,sha256=_v6Grk8TtqYcIquwgfiE_Ex24QIxKRPUZaoYn3HsAoc,39688
  reconcile/gql_definitions/terraform_tgw_attachments/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -288,7 +291,7 @@ reconcile/templates/aws_access_key_email.j2,sha256=2MUr1ERmyISzKgHqsWYLd-1Wbl-pe
  reconcile/templates/email.yml.j2,sha256=OZgczNRgXPj2gVYTgwQyHAQrMGu7xp-e4W1rX19GcrU,690
  reconcile/templates/jira-checkpoint-missinginfo.j2,sha256=c_Vvg-lEENsB3tgxm9B6Y9igCUQhCnFDYh6xw-zcIbU,570
  reconcile/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reconcile/test/conftest.py,sha256=_pvENzQ-5vCvWi40PK70iVVDRrkKUfbC9bTQsjXSj0A,3046
+ reconcile/test/conftest.py,sha256=dBWwQMkcdONERlnBJg4J6Td5Fexpbvme5y-UuoI-c9M,3185
  reconcile/test/fixtures.py,sha256=VhvLXH0AWXEyu3FgPp7bcSTPmDPfMEa2v-_9cd8dCmw,572
  reconcile/test/test_aggregated_list.py,sha256=iiWitQuNYC58aimWaiBoE4NROHjr1NCgQ91MnHEG_Ro,6412
  reconcile/test/test_amtool.py,sha256=vxRhGieeydMBOb9UI2ziMHjJa8puMeGNsUhGhy-yMnk,1032
@@ -346,6 +349,7 @@ reconcile/test/test_sql_query.py,sha256=l0QyIflcErIrAwSP8kOIub0jO6oi0Ncuns5IJtnu
  reconcile/test/test_terraform_cloudflare_dns.py,sha256=aQTXX8Vr4h9aWvJZTnpZEhMGYoBpT2d45ZxU_ECIQ6o,3425
  reconcile/test/test_terraform_cloudflare_resources.py,sha256=cWNE2UIhz19rLSWdpJG8xRwuEEYoIZWEkDZY7e2QN_g,3426
  reconcile/test/test_terraform_cloudflare_users.py,sha256=8iAFjz-zbUW4xLS10Lk1XvYSk4B_W__YT9rgrBuigcQ,27482
+ reconcile/test/test_terraform_repo.py,sha256=fCr14via4GHmuzAuGIr53PZwNQ2hq4Ys1Iv8Pgro398,6039
  reconcile/test/test_terraform_resources.py,sha256=dEpJwaTzE_FzkRjCozDtGzE4egBrb-VrwSoWr2Benv4,7955
  reconcile/test/test_terraform_tgw_attachments.py,sha256=ddf04h_uKYroJOWKOFGZxuJNL-1PSjW5EyddQB3CLSw,33744
  reconcile/test/test_terraform_users.py,sha256=Yt4iN5FMtn7cfVlVqBJ1MMH94Z0DGchyByhpfNUJFxM,1570
@@ -537,13 +541,13 @@ reconcile/utils/mr/user_maintenance.py,sha256=_4VwAMJsBxD7maM7AZsMl_GjYRgQtZb_rl
  reconcile/utils/ocm/__init__.py,sha256=5Pcf5cyftDWT5XRi1EzvNklOVxGplJi-v12HN3TDarc,57
  reconcile/utils/ocm/base.py,sha256=8pCZB_V6pZhc-qZQFTuIr1kM5nxrzz9kT86-DW3rgq0,168
  reconcile/utils/ocm/cluster_groups.py,sha256=TBb3mIzw68BHBcCzacmAN8IsNPo8FfOnTMphuctwuU0,2679
- reconcile/utils/ocm/clusters.py,sha256=0QuvFUQIRGXlPBvVdQrUeKbVj67eL2KcATwOpoOp12c,7407
+ reconcile/utils/ocm/clusters.py,sha256=4Ddc0Ah-s5gmNUK0K_4e2zaHyip50fyCCSFuAirPO2s,7766
  reconcile/utils/ocm/labels.py,sha256=5Edjk9F3o4AnhzYc-YUiwDkG_pnEPcXrIXT9g1uE3x0,4883
  reconcile/utils/ocm/ocm.py,sha256=pp-T6cT0RQf91oqrWFm5QSd_LyH8n5gJmPaetDtMioI,64455
  reconcile/utils/ocm/search_filters.py,sha256=zExZpYBh7_tucG-xKoPHUxz1b_6l9qwbEMpMihQg7nA,15043
  reconcile/utils/ocm/service_log.py,sha256=6_RNLG6KlnXNmJW6xBrlqGdyyz-fuaSeFmiJFOLN51g,2535
  reconcile/utils/ocm/sre_capability_labels.py,sha256=HY5v3ndMu1hwbLipX24l1R-dvm0SFKFettnV7detVBI,1530
- reconcile/utils/ocm/subscriptions.py,sha256=kuHster01yLy0_s5DWFLXh9zse3NIpbGTuKW78q_b90,2322
+ reconcile/utils/ocm/subscriptions.py,sha256=PO5Rp440NEmZxIVVvhQpPzDRcKL1jYoQq5j2N9AkM1c,2344
  reconcile/utils/runtime/__init__.py,sha256=l9o8APZxyED5Q6ylGoLIESksQF4f3O8cdke3IdMFOTQ,108
  reconcile/utils/runtime/desired_state_diff.py,sha256=AQhJmq3CP2YOWP-KpmVtYKnhZ46sxERfbk_R6PHO-zc,8272
  reconcile/utils/runtime/environment.py,sha256=cJgCMRBeschdeKJuk_N6BhDWaOCZbo-41i2a9L9DpBE,1328
@@ -570,7 +574,7 @@ tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tools/app_interface_reporter.py,sha256=M_5C7ySACJG6Hp0UcY-w9ZY0sTVE7oQ48qoijp10jag,21933
  tools/glitchtip_access_reporter.py,sha256=oPBnk_YoDuljU3v0FaChzOwwnk4vap1xEE67QEjzdqs,2948
  tools/glitchtip_access_revalidation.py,sha256=PXN5wxl6OX8sxddPaakDF3X79nFLvpm-lz0mWLVelw0,2806
- tools/qontract_cli.py,sha256=GLnFJS2FQs1yX_bjAVR4PNgEDlCPcNqDv1i4pEOiqVU,90439
+ tools/qontract_cli.py,sha256=pGBwz3TjvfSDCYlKSISTzCG5ab1ljxJQIfTh3lkljOY,90671
  tools/cli_commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tools/cli_commands/gpg_encrypt.py,sha256=JryinrDdvztN931enUY3FuDeLVnfs6y58mnK7itNK6Y,4940
  tools/sre_checkpoints/__init__.py,sha256=CDaDaywJnmRCLyl_NCcvxi-Zc0hTi_3OdwKiFOyS39I,145
@@ -578,8 +582,8 @@ tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y
  tools/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tools/test/test_qontract_cli.py,sha256=awwTHEc2DWlykuqGIYM0WOBoSL0KRnOraCLk3C7izis,1401
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.10.1rc41.dist-info/METADATA,sha256=FcNoC48l0wdeWYF2QclApVirXpYew_Ew1bJtzZD_n38,2288
- qontract_reconcile-0.10.1rc41.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
- qontract_reconcile-0.10.1rc41.dist-info/entry_points.txt,sha256=Af70EWPJxsTiCNF6gA-pWdw1A0Heqn-PZF-oBc5NmiU,302
- qontract_reconcile-0.10.1rc41.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
- qontract_reconcile-0.10.1rc41.dist-info/RECORD,,
+ qontract_reconcile-0.10.1rc43.dist-info/METADATA,sha256=wdqh9jrOR8RDo-xKPaCMIoa3MzNi2zTn_Jliv0jWgsQ,2288
+ qontract_reconcile-0.10.1rc43.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+ qontract_reconcile-0.10.1rc43.dist-info/entry_points.txt,sha256=Af70EWPJxsTiCNF6gA-pWdw1A0Heqn-PZF-oBc5NmiU,302
+ qontract_reconcile-0.10.1rc43.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+ qontract_reconcile-0.10.1rc43.dist-info/RECORD,,
reconcile/aus/advanced_upgrade_service.py CHANGED
@@ -75,7 +75,11 @@ class AdvancedUpgradeServiceIntegration(OCMClusterUpgradeSchedulerOrgIntegration
  self, ocm_env: OCMEnvironment, org_ids: Optional[set[str]]
  ) -> dict[str, OrganizationUpgradeSpec]:
  ocm_api = init_ocm_base_client(ocm_env, self.secret_reader)
- clusters_by_org = _discover_clusters(ocm_api=ocm_api, org_ids=org_ids)
+ clusters_by_org = discover_clusters(
+ ocm_api=ocm_api,
+ org_ids=org_ids,
+ ignore_sts_clusters=self.params.ignore_sts_clusters,
+ )
  labels_by_org = _get_org_labels(ocm_api=ocm_api, org_ids=org_ids)

  return _build_org_upgrade_specs_for_ocm_env(
@@ -96,8 +100,10 @@ class AdvancedUpgradeServiceIntegration(OCMClusterUpgradeSchedulerOrgIntegration
  )


- def _discover_clusters(
- ocm_api: OCMBaseClient, org_ids: Optional[set[str]] = None
+ def discover_clusters(
+ ocm_api: OCMBaseClient,
+ org_ids: Optional[set[str]] = None,
+ ignore_sts_clusters: bool = False,
  ) -> dict[str, list[ClusterDetails]]:
  """
  Discover all clusters that are part of the AUS service.
@@ -111,7 +117,10 @@ def _discover_clusters(
  # group by org and filter if org_id is specified
  clusters_by_org: dict[str, list[ClusterDetails]] = defaultdict(list)
  for c in clusters:
- if org_ids is None or c.organization_id in org_ids:
+ is_sts_cluster = c.ocm_cluster.aws and c.ocm_cluster.aws.sts_enabled
+ passed_sts_filter = not ignore_sts_clusters or not is_sts_cluster
+ passed_ocm_filters = org_ids is None or c.organization_id in org_ids
+ if passed_ocm_filters and passed_sts_filter:
  clusters_by_org[c.organization_id].append(c)

  return clusters_by_org
reconcile/aus/base.py CHANGED
@@ -59,6 +59,7 @@ class AdvancedUpgradeSchedulerBaseIntegrationParams(PydanticRunParams):

  ocm_environment: Optional[str] = None
  ocm_organization_ids: Optional[set[str]] = None
+ ignore_sts_clusters: bool = False


  class AdvancedUpgradeSchedulerBaseIntegration(
reconcile/cli.py CHANGED
@@ -1546,6 +1546,26 @@ def ldap_users(ctx, infra_project_id, app_interface_project_id):
  )


+ @integration.command(short_help="Manages raw HCL Terraform from a separate repository.")
+ @click.option(
+ "-d",
+ "--output-dir",
+ help="Specify a directory to individually output each repo plan to for the executor",
+ )
+ @click.pass_context
+ def terraform_repo(ctx, output_dir):
+ from reconcile import terraform_repo
+
+ run_class_integration(
+ integration=terraform_repo.TerraformRepoIntegration(
+ terraform_repo.TerraformRepoIntegrationParams(
+ output_dir=output_dir, validate_git=True
+ )
+ ),
+ ctx=ctx.obj,
+ )
+
+
  @integration.command(short_help="Manage AWS Resources using Terraform.")
  @print_to_file
  @vault_output_path
@@ -1996,8 +2016,14 @@ def ocm_addons_upgrade_scheduler_org(ctx):
  required=False,
  envvar="AUS_OCM_ORG_IDS",
  )
+ @click.option(
+ "--ignore-sts-clusters",
+ is_flag=True,
+ default=os.environ.get("IGNORE_STS_CLUSTERS", False),
+ help="Ignore STS clusters",
+ )
  @click.pass_context
- def aus_upgrade_scheduler_org(ctx, ocm_env, ocm_org_ids):
+ def aus_upgrade_scheduler_org(ctx, ocm_env, ocm_org_ids, ignore_sts_clusters):
  from reconcile.aus.advanced_upgrade_service import AdvancedUpgradeServiceIntegration
  from reconcile.aus.base import AdvancedUpgradeSchedulerBaseIntegrationParams

@@ -2007,6 +2033,7 @@ def aus_upgrade_scheduler_org(ctx, ocm_env, ocm_org_ids):
  AdvancedUpgradeSchedulerBaseIntegrationParams(
  ocm_environment=ocm_env,
  ocm_organization_ids=parsed_ocm_org_ids,
+ ignore_sts_clusters=ignore_sts_clusters,
  )
  ),
  ctx=ctx.obj,
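
The two reconcile/cli.py hunks above register the new terraform-repo integration command and thread a new --ignore-sts-clusters flag (also settable through the IGNORE_STS_CLUSTERS environment variable) into the advanced upgrade scheduler command. A rough, hedged sketch of what the flag ultimately feeds, building the same parameters object programmatically; all values below are placeholders:

    from reconcile.aus.advanced_upgrade_service import AdvancedUpgradeServiceIntegration
    from reconcile.aus.base import AdvancedUpgradeSchedulerBaseIntegrationParams

    # ignore_sts_clusters mirrors --ignore-sts-clusters / IGNORE_STS_CLUSTERS; when set,
    # discover_clusters() skips clusters whose AWS settings report STS as enabled.
    params = AdvancedUpgradeSchedulerBaseIntegrationParams(
        ocm_environment=None,        # or a specific OCM environment name
        ocm_organization_ids=None,   # or a set of organization IDs
        ignore_sts_clusters=True,
    )
    integration = AdvancedUpgradeServiceIntegration(params)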
reconcile/gql_definitions/terraform_repo/terraform_repo.py ADDED
@@ -0,0 +1,91 @@
+ """
+ Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
+ """
+ from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
+ from datetime import datetime # noqa: F401 # pylint: disable=W0611
+ from enum import Enum # noqa: F401 # pylint: disable=W0611
+ from typing import ( # noqa: F401 # pylint: disable=W0611
+ Any,
+ Optional,
+ Union,
+ )
+
+ from pydantic import ( # noqa: F401 # pylint: disable=W0611
+ BaseModel,
+ Extra,
+ Field,
+ Json,
+ )
+
+ from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
+
+
+ DEFINITION = """
+ fragment VaultSecret on VaultSecret_v1 {
+ path
+ field
+ version
+ format
+ }
+
+ query TerraformRepo {
+ repos: terraform_repo_v1 {
+ account {
+ name
+ uid
+ automationToken {
+ ...VaultSecret
+ }
+ }
+ name
+ repository
+ ref
+ projectPath
+ delete
+ }
+ }
+ """
+
+
+ class ConfiguredBaseModel(BaseModel):
+ class Config:
+ smart_union = True
+ extra = Extra.forbid
+
+
+ class AWSAccountV1(ConfiguredBaseModel):
+ name: str = Field(..., alias="name")
+ uid: str = Field(..., alias="uid")
+ automation_token: VaultSecret = Field(..., alias="automationToken")
+
+
+ class TerraformRepoV1(ConfiguredBaseModel):
+ account: AWSAccountV1 = Field(..., alias="account")
+ name: str = Field(..., alias="name")
+ repository: str = Field(..., alias="repository")
+ ref: str = Field(..., alias="ref")
+ project_path: str = Field(..., alias="projectPath")
+ delete: Optional[bool] = Field(..., alias="delete")
+
+
+ class TerraformRepoQueryData(ConfiguredBaseModel):
+ repos: Optional[list[TerraformRepoV1]] = Field(..., alias="repos")
+
+
+ def query(query_func: Callable, **kwargs: Any) -> TerraformRepoQueryData:
+ """
+ This is a convenience function which queries and parses the data into
+ concrete types. It should be compatible with most GQL clients.
+ You do not have to use it to consume the generated data classes.
+ Alternatively, you can also mime and alternate the behavior
+ of this function in the caller.
+
+ Parameters:
+ query_func (Callable): Function which queries your GQL Server
+ kwargs: optional arguments that will be passed to the query function
+
+ Returns:
+ TerraformRepoQueryData: queried data parsed into generated classes
+ """
+ raw_data: dict[Any, Any] = query_func(DEFINITION, **kwargs)
+ return TerraformRepoQueryData(**raw_data)
reconcile/terraform_repo.py ADDED
@@ -0,0 +1,302 @@
+ import logging
+ from collections.abc import Callable
+ from typing import (
+ Any,
+ Optional,
+ )
+
+ import yaml
+ from pydantic import (
+ BaseModel,
+ ValidationError,
+ )
+
+ from reconcile import queries
+ from reconcile.gql_definitions.terraform_repo.terraform_repo import (
+ TerraformRepoV1,
+ query,
+ )
+ from reconcile.utils import gql
+ from reconcile.utils.defer import defer
+ from reconcile.utils.differ import (
+ DiffResult,
+ diff_iterables,
+ )
+ from reconcile.utils.exceptions import ParameterError
+ from reconcile.utils.gitlab_api import GitLabApi
+ from reconcile.utils.runtime.integration import (
+ PydanticRunParams,
+ QontractReconcileIntegration,
+ )
+ from reconcile.utils.semver_helper import make_semver
+ from reconcile.utils.state import (
+ State,
+ init_state,
+ )
+
+
+ class RepoSecret(BaseModel):
+ path: str
+ version: Optional[int]
+
+
+ class RepoOutput(BaseModel):
+ """
+ Output of the QR terraform-repo integration and input to the executor
+ which removes some information that is unnecessary for the executor to parse
+ """
+
+ dry_run: bool
+ repository: str
+ name: str
+ ref: str
+ project_path: str
+ delete: bool
+ secret: RepoSecret
+
+
+ class TerraformRepoIntegrationParams(PydanticRunParams):
+ output_dir: Optional[str]
+ validate_git: bool
+
+
+ class TerraformRepoIntegration(
+ QontractReconcileIntegration[TerraformRepoIntegrationParams]
+ ):
+ def __init__(self, params: TerraformRepoIntegrationParams) -> None:
+ super().__init__(params)
+ self.qontract_integration = "terraform_repo"
+ self.qontract_integration_version = make_semver(0, 1, 0)
+ self.qontract_tf_prefix = "qrtfrepo"
+
+ @property
+ def name(self) -> str:
+ return self.qontract_integration.replace("_", "-")
+
+ @defer
+ def run(
+ self,
+ dry_run: bool,
+ defer: Optional[Callable] = None,
+ ) -> None:
+
+ gqlapi = gql.get_api()
+
+ state = init_state(integration=self.name)
+ if defer:
+ defer(state.cleanup)
+
+ desired = self.get_repos(query_func=gqlapi.query)
+ existing = self.get_existing_state(state)
+
+ repo_diff = self.calculate_diff(
+ existing_state=existing, desired_state=desired, dry_run=dry_run, state=state
+ )
+
+ for repo in repo_diff:
+ # format each repo into the input the executor expects
+ repo_output = RepoOutput(
+ dry_run=dry_run,
+ repository=repo.repository,
+ name=repo.name,
+ ref=repo.ref,
+ project_path=repo.project_path,
+ delete=repo.delete or False,
+ secret=RepoSecret(
+ path=repo.account.automation_token.path,
+ version=repo.account.automation_token.version,
+ ),
+ )
+
+ if self.params.output_dir:
+ try:
+ output_filename = f"{self.params.output_dir}/{repo.name}.yaml"
+ with open(output_filename, "w") as output_file:
+ yaml.safe_dump(
+ data=repo_output.dict(),
+ stream=output_file,
+ explicit_start=True,
+ )
+ except FileNotFoundError:
+ raise ParameterError(f"Unable to write to '{output_filename}'")
+ else:
+ print(yaml.safe_dump(data=repo_output.dict(), explicit_start=True))
+
+ def get_repos(self, query_func: Callable) -> list[TerraformRepoV1]:
+ """Gets a list of terraform repos defined in App Interface
+
+ :param query_func: function which queries GQL server
+ :type query_func: Callable
+ :return: list of Terraform repos or empty list if none are defined in A-I
+ :rtype: list[TerraformRepoV1]
+ """
+ query_results = query(query_func=query_func).repos
+ if query_results:
+ return query_results
+ return []
+
+ def get_existing_state(self, state: State) -> list[TerraformRepoV1]:
+ """Gets the state of terraform infrastructure currently deployed (stored in S3)
+
+ :param state: S3 state class to retrieve from
+ :type state: State
+ :return: list of terraform repos or empty list if state is unparsable or no repos are deployed
+ :rtype: list[TerraformRepoV1]
+ """
+ repo_list: list[TerraformRepoV1] = []
+ keys = state.ls()
+ for key in keys:
+ if value := state.get(key.lstrip("/"), None):
+ try:
+ repo = TerraformRepoV1.parse_raw(value)
+ repo_list.append(repo)
+ except ValidationError as err:
+ logging.error(
+ f"{err}\nUnable to parse existing state for repo: '{key}', skipping"
+ )
+
+ return repo_list
+
+ def check_ref(self, repo_url: str, ref: str) -> None:
+ """Validates that a Git SHA exists
+
+ :param repo_url: full project URL including https/http
+ :type repo_url: str
+ :param ref: git SHA
+ :type ref: str
+ :raises ParameterError: if the Git ref is invalid or project is not reachable
+ """
+ instance = queries.get_gitlab_instance()
+ with GitLabApi(
+ instance,
+ settings=queries.get_secret_reader_settings(),
+ project_url=repo_url,
+ ) as gl:
+ try:
+ gl.get_commit_sha(ref=ref, repo_url=repo_url)
+ except (KeyError, AttributeError):
+ raise ParameterError(
+ f'Invalid ref: "{ref}" on repo: "{repo_url}". Or the project repo is not reachable'
+ )
+
+ def merge_results(
+ self,
+ diff_result: DiffResult[TerraformRepoV1, TerraformRepoV1, str],
+ ) -> list[TerraformRepoV1]:
+ """Transforms the diff or repos into a list of repos that need to be changed or deleted
+
+ :param diff_result: diff result of existing and desired state
+ :type diff_result: DiffResult[TerraformRepoV1, TerraformRepoV1, str]
+ :return: list of repos that need to be changed or deleted
+ :rtype: list[TerraformRepoV1]
+ """
+ output: list[TerraformRepoV1] = []
+ for add_key, add_val in diff_result.add.items():
+ logging.info(["create_repo", add_val.account.name, add_key])
+ output.append(add_val)
+ for change_key, change_val in diff_result.change.items():
+ if change_val.desired.delete:
+ logging.info(
+ ["delete_repo", change_val.desired.account.name, change_key]
+ )
+ output.append(change_val.desired)
+ else:
+ logging.info(
+ ["update_repo", change_val.desired.account.name, change_key]
+ )
+ output.append(change_val.desired)
+ return output
+
+ def update_state(
+ self,
+ diff_result: DiffResult[TerraformRepoV1, TerraformRepoV1, str],
+ state: State,
+ ) -> None:
+ """The state of deployed terraform infrastructure is tracked using AWS S3.
+ Each repo is saved as a JSON dump of a TerraformRepoV1 object meaning that it can
+ be easily compared against the GQL representation in App Interface
+
+ :param diff_result: diff of existing and desired state
+ :type diff_result: DiffResult[TerraformRepoV1, TerraformRepoV1, str]
+ :param state: S3 state class
+ :type state: State
+ """
+ try:
+ for add_key, add_val in diff_result.add.items():
+ # state.add already performs a json.dumps(key) so we export the
+ # pydantic model as a dict to avoid a double json dump with extra quotes
+ state.add(add_key, add_val.dict(by_alias=True), force=True)
+ for delete_key in diff_result.delete.keys():
+ state.rm(delete_key)
+ for change_key, change_val in diff_result.change.items():
+ if change_val.desired.delete:
+ state.rm(change_key)
+ else:
+ state.add(
+ change_key, change_val.desired.dict(by_alias=True), force=True
+ )
+ except KeyError:
+ pass
+
+ def calculate_diff(
+ self,
+ existing_state: list[TerraformRepoV1],
+ desired_state: list[TerraformRepoV1],
+ dry_run: bool,
+ state: Optional[State],
+ ) -> list[TerraformRepoV1]:
+ """Calculated the difference between existing and desired state
+ to determine what actions the executor will need to take
+
+ :param existing_state: list of Terraform infrastructure that is currently applied
+ :type existing_state: list[TerraformRepoV1]
+ :param desired_state: list of Terraform infrastructure we want
+ :type desired_state: list[TerraformRepoV1]
+ :param dry_run: determines whether State should be updated
+ :type dry_run: bool
+ :param state: AWS S3 state
+ :type state: Optional[State]
+ :raises ParameterError: if there is an invalid operation performed like trying to delete
+ a representation in A-I before setting the delete flag
+ :return: list of Terraform Repos for the executor to act on
+ :rtype: list[TerraformRepoV1]
+ """
+ diff = diff_iterables(existing_state, desired_state, lambda x: x.name)
+
+ # added repos: do standard validation that SHA is valid
+ if self.params.validate_git:
+ for add_repo in diff.add.values():
+ self.check_ref(add_repo.repository, add_repo.ref)
+ # removed repos: ensure that delete = true already
+ for delete_repo in diff.delete.values():
+ if not delete_repo.delete:
+ raise ParameterError(
+ f'To delete the terraform repo "{delete_repo.name}", you must set delete: true in the repo definition'
+ )
+ # changed repos: prevent non deterministic terraform behavior by disabling updating key parameters
+ # also do SHA verification
+ for changes in diff.change.values():
+ c = changes.current
+ d = changes.desired
+ if (
+ c.account != d.account
+ or c.name != d.name
+ or c.project_path != d.project_path
+ or c.repository != d.repository
+ ):
+ raise ParameterError(
+ f'Only the `ref` and `delete` parameters for a terraform repo may be updated in merge requests on repo: "{d.name}"'
+ )
+ if self.params.validate_git:
+ self.check_ref(d.repository, d.ref)
+
+ if not dry_run and state:
+ self.update_state(diff, state)
+
+ return self.merge_results(diff)
+
+ def early_exit_desired_state(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
+ gqlapi = gql.get_api()
+ return {
+ "repos": [repo.dict() for repo in self.get_repos(query_func=gqlapi.query)]
+ }
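
For context on the new reconcile/terraform_repo.py module above: when --output-dir is not set, run() prints one YAML document per repo, built from the RepoOutput model. A minimal sketch of that output path, using illustrative values borrowed from the test fixtures further below (not real repositories):

    import yaml

    from reconcile.terraform_repo import RepoOutput, RepoSecret

    example = RepoOutput(
        dry_run=True,
        repository="https://git-example/tf-repo-example",
        name="a_repo",
        ref="a390f5cb20322c90861d6d80e9b70c6a579be1d0",
        project_path="tf",
        delete=False,
        secret=RepoSecret(path="aws-secrets/terraform/foo", version=1),
    )
    # mirrors the else-branch of TerraformRepoIntegration.run()
    print(yaml.safe_dump(data=example.dict(), explicit_start=True))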
reconcile/test/conftest.py CHANGED
@@ -65,8 +65,13 @@ def s3_state_builder() -> Callable[[Mapping], State]:
  """

  def builder(data: Mapping) -> State:
- def get(key: str) -> dict:
- return data["get"][key]
+ def get(key: str, *args) -> dict:
+ try:
+ return data["get"][key]
+ except KeyError:
+ if args:
+ return args[0]
+ raise

  state = create_autospec(spec=State)
  state.get = get
reconcile/test/test_terraform_repo.py ADDED
@@ -0,0 +1,215 @@
+ from unittest.mock import MagicMock
+
+ import pytest
+
+ from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
+ from reconcile.gql_definitions.terraform_repo.terraform_repo import (
+ AWSAccountV1,
+ TerraformRepoV1,
+ )
+ from reconcile.terraform_repo import (
+ TerraformRepoIntegration,
+ TerraformRepoIntegrationParams,
+ )
+ from reconcile.utils.exceptions import ParameterError
+ from reconcile.utils.state import State
+
+ A_REPO = "https://git-example/tf-repo-example"
+ A_REPO_SHA = "a390f5cb20322c90861d6d80e9b70c6a579be1d0"
+ B_REPO = "https://git-example/tf-repo-example2"
+ B_REPO_SHA = "94edb90815e502b387c25358f5ec602e52d0bfbb"
+ AWS_UID = "000000000000"
+ AUTOMATION_TOKEN_PATH = "aws-secrets/terraform/foo"
+
+
+ @pytest.fixture
+ def existing_repo(aws_account) -> TerraformRepoV1:
+ return TerraformRepoV1(
+ name="a_repo",
+ repository=A_REPO,
+ ref=A_REPO_SHA,
+ account=aws_account,
+ projectPath="tf",
+ delete=False,
+ )
+
+
+ @pytest.fixture
+ def new_repo(aws_account) -> TerraformRepoV1:
+ return TerraformRepoV1(
+ name="b_repo",
+ repository=B_REPO,
+ ref=B_REPO_SHA,
+ account=aws_account,
+ projectPath="tf",
+ delete=False,
+ )
+
+
+ @pytest.fixture()
+ def automation_token() -> VaultSecret:
+ return VaultSecret(path=AUTOMATION_TOKEN_PATH, version=1, field="all", format=None)
+
+
+ @pytest.fixture
+ def aws_account(automation_token) -> AWSAccountV1:
+ return AWSAccountV1(
+ name="foo",
+ uid="000000000000",
+ automationToken=automation_token,
+ )
+
+
+ @pytest.fixture
+ def int_params() -> TerraformRepoIntegrationParams:
+ return TerraformRepoIntegrationParams(print_to_file=None, validate_git=False)
+
+
+ @pytest.fixture()
+ def a_repo_json() -> str:
+ # terraform repo expects a JSON string not a dict so we have to encode a multi-line JSON string
+ return f"""
+ {{
+ "name": "a_repo",
+ "repository": "{A_REPO}",
+ "ref": "{A_REPO_SHA}",
+ "projectPath": "tf",
+ "delete": false,
+ "account": {{
+ "name": "foo",
+ "uid": "{AWS_UID}",
+ "automationToken": {{
+ "path": "{AUTOMATION_TOKEN_PATH}",
+ "field": "all",
+ "version": 1,
+ "format": null
+ }}
+ }}
+ }}
+ """
+
+
+ @pytest.fixture()
+ def state_mock() -> MagicMock:
+ return MagicMock(spec=State)
+
+
+ def test_addition_to_existing_repo(existing_repo, new_repo, int_params, state_mock):
+ existing = [existing_repo]
+ desired = [existing_repo, new_repo]
+
+ integration = TerraformRepoIntegration(params=int_params)
+ diff = integration.calculate_diff(
+ existing_state=existing, desired_state=desired, dry_run=False, state=state_mock
+ )
+
+ assert diff == [new_repo]
+
+ # ensure that the state is saved for the new repo
+ state_mock.add.assert_called_once_with(
+ new_repo.name, new_repo.dict(by_alias=True), force=True
+ )
+
+
+ def test_updating_repo_ref(existing_repo, int_params, state_mock):
+ existing = [existing_repo]
+ updated_repo = TerraformRepoV1.copy(existing_repo)
+ updated_repo.ref = B_REPO_SHA
+
+ integration = TerraformRepoIntegration(params=int_params)
+ diff = integration.calculate_diff(
+ existing_state=existing,
+ desired_state=[updated_repo],
+ dry_run=False,
+ state=state_mock,
+ )
+
+ assert diff == [updated_repo]
+
+ state_mock.add.assert_called_once_with(
+ updated_repo.name, updated_repo.dict(by_alias=True), force=True
+ )
+
+
+ def test_fail_on_update_invalid_repo_params(existing_repo, int_params):
+ existing = [existing_repo]
+ updated_repo = TerraformRepoV1.copy(existing_repo)
+ updated_repo.name = "c_repo"
+ updated_repo.project_path = "c_repo"
+ updated_repo.repository = B_REPO
+ updated_repo.ref = B_REPO_SHA
+ updated_repo.delete = True
+
+ integration = TerraformRepoIntegration(params=int_params)
+
+ with pytest.raises(ParameterError):
+ integration.calculate_diff(
+ existing_state=existing,
+ desired_state=[updated_repo],
+ dry_run=True,
+ state=None,
+ )
+
+
+ def test_delete_repo(existing_repo, int_params, state_mock):
+ existing = [existing_repo]
+ updated_repo = TerraformRepoV1.copy(existing_repo)
+ updated_repo.delete = True
+
+ integration = TerraformRepoIntegration(params=int_params)
+
+ diff = integration.calculate_diff(
+ existing_state=existing,
+ desired_state=[updated_repo],
+ dry_run=False,
+ state=state_mock,
+ )
+
+ assert diff == [updated_repo]
+
+ state_mock.rm.assert_called_once_with(updated_repo.name)
+
+
+ def test_delete_repo_without_flag(existing_repo, int_params):
+ existing = [existing_repo]
+
+ integration = TerraformRepoIntegration(params=int_params)
+
+ with pytest.raises(ParameterError):
+ integration.calculate_diff(
+ existing_state=existing, desired_state=[], dry_run=True, state=None
+ )
+
+
+ def test_get_repo_state(s3_state_builder, int_params, existing_repo, a_repo_json):
+ state = s3_state_builder(
+ {
+ "ls": [
+ "/a_repo",
+ ],
+ "get": {"a_repo": a_repo_json},
+ }
+ )
+
+ integration = TerraformRepoIntegration(params=int_params)
+
+ existing_state = integration.get_existing_state(state=state)
+ assert existing_state == [existing_repo]
+
+
+ def test_update_repo_state(int_params, existing_repo, state_mock):
+ integration = TerraformRepoIntegration(params=int_params)
+
+ existing_state: list = []
+ desired_state = [existing_repo]
+
+ integration.calculate_diff(
+ existing_state=existing_state,
+ desired_state=desired_state,
+ dry_run=False,
+ state=state_mock,
+ )
+
+ state_mock.add.assert_called_once_with(
+ existing_repo.name, existing_repo.dict(by_alias=True), force=True
+ )
reconcile/utils/ocm/clusters.py CHANGED
@@ -24,6 +24,8 @@ from reconcile.utils.ocm.subscriptions import (
  )
  from reconcile.utils.ocm_base_client import OCMBaseClient

+ ACTIVE_SUBSCRIPTION_STATES = {"Active", "Reserved"}
+

  class OCMClusterState(Enum):
  ERROR = "error"
@@ -39,6 +41,19 @@ class OCMClusterState(Enum):
  WAITING = "waiting"


+ class OCMClusterFlag(BaseModel):
+
+ enabled: bool
+
+
+ class OCMClusterAWSSettings(BaseModel):
+ sts: Optional[OCMClusterFlag]
+
+ @property
+ def sts_enabled(self) -> bool:
+ return self.sts is not None and self.sts.enabled
+
+
  class OCMCluster(BaseModel):

  kind: str = "Cluster"
@@ -52,8 +67,6 @@ class OCMCluster(BaseModel):
  display_name: str

  managed: bool
-
- openshift_version: str
  state: OCMClusterState

  subscription: OCMModelLink
@@ -61,6 +74,8 @@ class OCMCluster(BaseModel):
  cloud_provider: OCMModelLink
  product: OCMModelLink

+ aws: Optional[OCMClusterAWSSettings]
+

  class ClusterDetails(BaseModel):

@@ -188,7 +203,8 @@ def get_cluster_details_for_subscriptions(
  # get subscription details
  subscriptions = get_subscriptions(
  ocm_api=ocm_api,
- filter=(subscription_filter or Filter()) & build_subscription_filter(),
+ filter=(subscription_filter or Filter())
+ & build_subscription_filter(states=ACTIVE_SUBSCRIPTION_STATES, managed=True),
  )
  if not subscriptions:
  return
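
The reconcile/utils/ocm/clusters.py hunks above add an optional aws section to OCMCluster plus an sts_enabled helper, which is what the AUS STS filtering keys off. A small illustrative check of the helper's behavior, assumed to hold from the property definition shown above:

    from reconcile.utils.ocm.clusters import OCMClusterAWSSettings, OCMClusterFlag

    # sts_enabled is True only when an STS block is present and enabled
    assert OCMClusterAWSSettings(sts=OCMClusterFlag(enabled=True)).sts_enabled
    assert not OCMClusterAWSSettings(sts=OCMClusterFlag(enabled=False)).sts_enabled
    assert not OCMClusterAWSSettings(sts=None).sts_enabled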
reconcile/utils/ocm/subscriptions.py CHANGED
@@ -77,9 +77,11 @@ def get_subscriptions(
  return subscriptions


- def build_subscription_filter(state: str = "Active", managed: bool = True) -> Filter:
+ def build_subscription_filter(
+ states: Optional[set[str]] = None, managed: bool = True
+ ) -> Filter:
  """
  Helper function to create a subscription search filer for two very common
  fields: status and managed.
  """
- return Filter().eq("status", state).eq("managed", str(managed).lower())
+ return Filter().is_in("status", states).eq("managed", str(managed).lower())
tools/qontract_cli.py CHANGED
@@ -630,8 +630,14 @@ def ocm_fleet_upgrade_policies(
  required=False,
  envvar="AUS_OCM_ORG_IDS",
  )
+ @click.option(
+ "--ignore-sts-clusters",
+ is_flag=True,
+ default=os.environ.get("IGNORE_STS_CLUSTERS", False),
+ help="Ignore STS clusters",
+ )
  @click.pass_context
- def aus_fleet_upgrade_policies(ctx, ocm_env, ocm_org_ids):
+ def aus_fleet_upgrade_policies(ctx, ocm_env, ocm_org_ids, ignore_sts_clusters):
  from reconcile.aus.advanced_upgrade_service import AdvancedUpgradeServiceIntegration

  parsed_ocm_org_ids = set(ocm_org_ids.split(",")) if ocm_org_ids else None
@@ -641,6 +647,7 @@ def aus_fleet_upgrade_policies(ctx, ocm_env, ocm_org_ids):
  AdvancedUpgradeSchedulerBaseIntegrationParams(
  ocm_environment=ocm_env,
  ocm_organization_ids=parsed_ocm_org_ids,
+ ignore_sts_clusters=ignore_sts_clusters,
  )
  ),
  )