qontract-reconcile 0.10.1rc990-py3-none-any.whl → 0.10.1rc992-py3-none-any.whl

This diff shows the changes between two publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
--- qontract_reconcile-0.10.1rc990.dist-info/METADATA
+++ qontract_reconcile-0.10.1rc992.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qontract-reconcile
- Version: 0.10.1rc990
+ Version: 0.10.1rc992
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Home-page: https://github.com/app-sre/qontract-reconcile
  Author: Red Hat App-SRE Team
--- qontract_reconcile-0.10.1rc990.dist-info/RECORD
+++ qontract_reconcile-0.10.1rc992.dist-info/RECORD
@@ -112,7 +112,7 @@ reconcile/terraform_aws_route53.py,sha256=1C1_xd_xjysPLSYKvohS6bfSsjSMP0U3sWVcRN
  reconcile/terraform_cloudflare_dns.py,sha256=-aLEe2QnH5cJPu7HWqs-R9NmQ1NlFbcVUm0v7alVL3I,13431
  reconcile/terraform_cloudflare_resources.py,sha256=41Mj1WkuS75slCDpmhG2GGf1nh3BwfxcdNC73-PNadc,15000
  reconcile/terraform_cloudflare_users.py,sha256=iyTG5sj20Jg4J4qWJ144KVptfIHGOSfH8wQKxu0imq0,13942
- reconcile/terraform_repo.py,sha256=GN_wQytiXqL7SbSKpwm2TLtoLPOkN6atHsS1uynzN28,16269
+ reconcile/terraform_repo.py,sha256=E-tVE62H5-B8CStVOi_DwqQ5l3qIYGd-l8MBFSo41mQ,16414
  reconcile/terraform_resources.py,sha256=-sgMMHDtNvnQyNR05-MKebI_pSiyxSWAg8LmeA2_Ntk,19326
  reconcile/terraform_tgw_attachments.py,sha256=EucuF4p3RWKTS4GTPd8oZmR79GpIW_grQl2PAeeNQeI,18665
  reconcile/terraform_users.py,sha256=HqSm3ev3b8dZ9J6F_phDZB-FQsnlsdeKp9RPoY1cU94,10188
@@ -179,10 +179,11 @@ reconcile/cna/assets/asset.py,sha256=KWgA4fuDAEGsJwmR52WwK_YgSJMW-1cV2la3lmNf4iE
  reconcile/cna/assets/asset_factory.py,sha256=7T7X_J6xIsoGETqBRI45_EyIKEdQcnRPt_GAuVuLQcc,785
  reconcile/cna/assets/null.py,sha256=85mVh97atCoC0aLuX47poTZiyOthmziJeBsUw0c924w,1658
  reconcile/dynatrace_token_provider/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reconcile/dynatrace_token_provider/dependencies.py,sha256=uJLvR48kxfqjnBuP60XQx5RbkWPL3__FCgWjqwhKEjo,2160
- reconcile/dynatrace_token_provider/integration.py,sha256=n_t_x-9USDtkT_8koOn7SxxXXxc3lAYbkZENlVm6t5c,14909
+ reconcile/dynatrace_token_provider/dependencies.py,sha256=41q05A4C_eS3E8-MR4veeMxtQNsPoGdxmEa3d-OKxq4,2814
+ reconcile/dynatrace_token_provider/integration.py,sha256=ffH4BpMNb3AafwengwzurZ-aiztGP1MUlnVU4FnO3IY,21540
  reconcile/dynatrace_token_provider/metrics.py,sha256=xiKkl8fTEBQaXJelGCPNTZhHAWdO1M3pCXNr_Tei63c,1285
- reconcile/dynatrace_token_provider/ocm.py,sha256=IwksRMyGcJnamV88ORlBoyOr7uRENhMaHBoSXaGfwDY,2784
+ reconcile/dynatrace_token_provider/model.py,sha256=gkpqo5rRRueBXnIMjp4EEHqBUBuU65TRI8zpdb8GJ0A,241
+ reconcile/dynatrace_token_provider/ocm.py,sha256=iHMsgbsLs-dlrB9UXmWNDF7E4UDe49JOsLa9rnowKfo,4282
  reconcile/external_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/external_resources/aws.py,sha256=JvjKaABy2Pg8u8Lq82Acv4zMvpE3_qGKes7OG-zlHOM,2956
  reconcile/external_resources/factories.py,sha256=DXgaLxoO87zZ76VOpRpu2GeYGhsbfOnOx5mrzgo4Gf4,4767
@@ -275,6 +276,7 @@ reconcile/gql_definitions/dashdotdb_slo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5J
  reconcile/gql_definitions/dashdotdb_slo/slo_documents_query.py,sha256=zUa-CmpOwiymVmOV6KwDHH5mMl06p000320FcOas6hU,4315
  reconcile/gql_definitions/dynatrace_token_provider/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/dynatrace_token_provider/dynatrace_bootstrap_tokens.py,sha256=5gTuAnR2rnx2k6Rn7FMEAzw6GCZ6F5HZbqkmJ9-3NI4,2244
+ reconcile/gql_definitions/dynatrace_token_provider/token_specs.py,sha256=XGsMuB8gowRpqJjkD_KRomx-1OswzyWbF4qjVdhionk,2555
  reconcile/gql_definitions/external_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/external_resources/aws_accounts.py,sha256=XR69j9dpTQ0gv8y-AZN7AJ0dPvO-wbHscyCDgrax6Bk,2046
  reconcile/gql_definitions/external_resources/external_resources_modules.py,sha256=g2KB2wRnb8zF7xCmDJJFmiRdE4z4aYa9HtY3vCBVwMA,2441
@@ -337,6 +339,7 @@ reconcile/gql_definitions/ocm_labels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeu
  reconcile/gql_definitions/ocm_labels/clusters.py,sha256=K0DaTnOEdCZAziDbmq3Ktrxzzmzy6sdb3-9kIDk6yao,2932
  reconcile/gql_definitions/ocm_labels/organizations.py,sha256=ylNna62pG3XidrLQtMwu0LIOsKh6qyC2QImCbur2tV4,2252
  reconcile/gql_definitions/ocm_oidc_idp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reconcile/gql_definitions/ocm_subscription_labels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/openshift_cluster_bots/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/openshift_cluster_bots/clusters.py,sha256=QshhCQeFRu_o0DLpD-4ltT5X_xZHjsCLc5jB3an3UXs,3688
  reconcile/gql_definitions/openshift_groups/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -382,7 +385,7 @@ reconcile/gql_definitions/terraform_cloudflare_users/terraform_cloudflare_roles.
  reconcile/gql_definitions/terraform_init/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/terraform_init/aws_accounts.py,sha256=OJ0hDbRachRaDkL-OGT6-byr9cKdBiQDnNCpwUe3oJ8,2674
  reconcile/gql_definitions/terraform_repo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- reconcile/gql_definitions/terraform_repo/terraform_repo.py,sha256=_rdq3efy5Q3QFpI-vcs3-wacsXo_1fu1kVix_E83h5Q,3599
+ reconcile/gql_definitions/terraform_repo/terraform_repo.py,sha256=nm4CH7Vog4aabdvCKmhVSUvoUb7dxSLx8nwAEJAVqG0,3706
  reconcile/gql_definitions/terraform_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/terraform_resources/database_access_manager.py,sha256=yv0_YC-LmhaKD_gyGG3le1w5BtypBjlsO894-Zgdg4U,4813
  reconcile/gql_definitions/terraform_resources/terraform_resources_namespaces.py,sha256=1PQBvcJjErRaG529kAzMwfw-ShNaM92IQovlwEiYrV8,42491
@@ -551,7 +554,7 @@ reconcile/test/test_terraform_aws_route53.py,sha256=xHggb8K1P76OyCfFcogbkmyKle-N
  reconcile/test/test_terraform_cloudflare_dns.py,sha256=aQTXX8Vr4h9aWvJZTnpZEhMGYoBpT2d45ZxU_ECIQ6o,3425
  reconcile/test/test_terraform_cloudflare_resources.py,sha256=1mdSZS-38mtTSg7teJgDCJU1b_yKbwnrr3N5m8kwV4k,13595
  reconcile/test/test_terraform_cloudflare_users.py,sha256=2VGBtMUhckLPtUnQlHIzpGsCnyVJZPNLFf-ABELkxbQ,27456
- reconcile/test/test_terraform_repo.py,sha256=j9mLfwiK707U2KRxYpvzAbOYywk__pL9SXAH3xbP1t8,12184
+ reconcile/test/test_terraform_repo.py,sha256=INfl-VlUtpV87J0neQt4wliptnX7PKvxLPF-ZgweTFA,12960
  reconcile/test/test_terraform_resources.py,sha256=8C97yXIEihaQ3DZrtjxLNt4y4G12IOhD01ydm7JjliY,15359
  reconcile/test/test_terraform_tgw_attachments.py,sha256=SM6QwogMZNLh0BkUyaxzFafuOLp23-hBtYTu_F53C4I,40922
  reconcile/test/test_terraform_users.py,sha256=XOAfGvITCJPI1LTlISmHbA4ONMQMkxYUMTsny7pQCFw,4319
@@ -598,6 +601,7 @@ reconcile/typed_queries/clusters_with_dms.py,sha256=JDSKZXwO3QxT-uA1FaHxP8d4XiYA
  reconcile/typed_queries/clusters_with_peering.py,sha256=lIai7SJJD0bqIJbe7virgrbYRqjLouSL2OpJD0itpAY,330
  reconcile/typed_queries/dynatrace.py,sha256=8vXDXDIDf9_vN_efYwysDr4gLN7SCx4I2bOoNxQhbio,312
  reconcile/typed_queries/dynatrace_environments.py,sha256=VV_7KzKG9wqGDV9wZLbcCJtfuzPhTV1wdg0YwAOaq3A,413
+ reconcile/typed_queries/dynatrace_token_provider_token_specs.py,sha256=x41KG6JRDNYw5QGJYtIFNwSeejUUgxrL-agS8qFf6q0,433
  reconcile/typed_queries/external_resources.py,sha256=h1uzZzmtEGzoqSFhDMSAdxauGJoGy0stPuWbA0rkVKE,1503
  reconcile/typed_queries/get_state_aws_account.py,sha256=CSJjVPWsUZ2rkGIt8ehoQt7hokFqrUDgG9HFlg2lVD8,492
  reconcile/typed_queries/github_orgs.py,sha256=UZhoPl8qvA_tcO7CZlN8GuMKckt3ywd47Suu61rgHsc,258
@@ -776,7 +780,7 @@ reconcile/utils/ocm/clusters.py,sha256=Fn4swizm1qq-XiNlIZ9SvahkftWAyNT8hF4kqRBpK
  reconcile/utils/ocm/identity_providers.py,sha256=dKed09N8iWmn39tI_MpwgVe47x23eLsknGbjMUxtwr4,2175
  reconcile/utils/ocm/label_sources.py,sha256=ES_5VP4X6gsRxMFZ95WgbwE_HqqIUo_JRjHjdGYw6Ss,1846
  reconcile/utils/ocm/labels.py,sha256=aCsL5QkRk32hZeJwsSJuCCT9sbojWMn8LL5Zo-aoFb4,5916
- reconcile/utils/ocm/manifests.py,sha256=8kCVwTiaYHyjiKfP2DrkoT9eFxROy_M3rLom_hdsEIU,1193
+ reconcile/utils/ocm/manifests.py,sha256=Q6kgOeiAwLbJY_vO_BEW2oePvbLDZcMZk20YpJJGpOA,1195
  reconcile/utils/ocm/ocm.py,sha256=EwhCymt7r8cL8UF2XbwmQ6IiRE016AUuPEiMAtYMepE,36707
  reconcile/utils/ocm/products.py,sha256=XDmTkVv4eWEifloz_f2I8GmdM97tY33PLf2p4d5GMI0,25972
  reconcile/utils/ocm/search_filters.py,sha256=jdj2sMGArcQrZLluzxeypPSbMFX_5zSE3ACvhNpsnOc,14814
@@ -850,8 +854,8 @@ tools/test/test_qontract_cli.py,sha256=_D61RFGAN5x44CY1tYbouhlGXXABwYfxKSWSQx3Jr
  tools/test/test_saas_promotion_state.py,sha256=dy4kkSSAQ7bC0Xp2CociETGN-2aABEfL6FU5D9Jl00Y,6056
  tools/test/test_sd_app_sre_alert_report.py,sha256=v363r9zM7__0kR5K6mvJoGFcM9BvE33fWAayrqkpojA,2116
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.10.1rc990.dist-info/METADATA,sha256=gPHQgGX8QfVXyoDNJ5Lx-SBWvxs9-idwSy5huEDo0GE,2262
- qontract_reconcile-0.10.1rc990.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- qontract_reconcile-0.10.1rc990.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
- qontract_reconcile-0.10.1rc990.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
- qontract_reconcile-0.10.1rc990.dist-info/RECORD,,
+ qontract_reconcile-0.10.1rc992.dist-info/METADATA,sha256=DRFJMM8ZzS7zsrDv77wTXC9PngnJiNg-SphE3U7qjrQ,2262
+ qontract_reconcile-0.10.1rc992.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ qontract_reconcile-0.10.1rc992.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
+ qontract_reconcile-0.10.1rc992.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+ qontract_reconcile-0.10.1rc992.dist-info/RECORD,,
--- reconcile/dynatrace_token_provider/dependencies.py
+++ reconcile/dynatrace_token_provider/dependencies.py
@@ -1,5 +1,13 @@
+ from collections.abc import Mapping
+
  from reconcile.dynatrace_token_provider.ocm import OCMClient
+ from reconcile.gql_definitions.dynatrace_token_provider.token_specs import (
+ DynatraceTokenProviderTokenSpecV1,
+ )
  from reconcile.typed_queries.dynatrace_environments import get_dynatrace_environments
+ from reconcile.typed_queries.dynatrace_token_provider_token_specs import (
+ get_dynatrace_token_provider_token_specs,
+ )
  from reconcile.typed_queries.ocm import get_ocm_environments
  from reconcile.utils.dynatrace.client import DynatraceClient
  from reconcile.utils.ocm_base_client import (
@@ -17,18 +25,25 @@ class Dependencies:
  def __init__(
  self,
  secret_reader: SecretReaderBase,
- dynatrace_client_by_tenant_id: dict[str, DynatraceClient],
- ocm_client_by_env_name: dict[str, OCMClient],
+ dynatrace_client_by_tenant_id: Mapping[str, DynatraceClient],
+ ocm_client_by_env_name: Mapping[str, OCMClient],
+ token_spec_by_name: Mapping[str, DynatraceTokenProviderTokenSpecV1],
  ):
  self.secret_reader = secret_reader
- self.dynatrace_client_by_tenant_id: dict[str, DynatraceClient] = (
+ self.dynatrace_client_by_tenant_id: dict[str, DynatraceClient] = dict(
  dynatrace_client_by_tenant_id
  )
- self.ocm_client_by_env_name: dict[str, OCMClient] = ocm_client_by_env_name
+ self.ocm_client_by_env_name: dict[str, OCMClient] = dict(ocm_client_by_env_name)
+ self.token_spec_by_name = dict(token_spec_by_name)

  def populate(self) -> None:
  self._populate_dynatrace_client_map()
  self._populate_ocm_clients()
+ self._populate_token_specs()
+
+ def _populate_token_specs(self) -> None:
+ token_specs = get_dynatrace_token_provider_token_specs()
+ self.token_spec_by_name = {spec.name: spec for spec in token_specs}

  def _populate_dynatrace_client_map(self) -> None:
  dynatrace_environments = get_dynatrace_environments()
--- reconcile/dynatrace_token_provider/integration.py
+++ reconcile/dynatrace_token_provider/integration.py
@@ -9,28 +9,33 @@ from reconcile.dynatrace_token_provider.metrics import (
  DTPClustersManagedGauge,
  DTPOrganizationErrorRate,
  )
- from reconcile.dynatrace_token_provider.ocm import Cluster, OCMClient
+ from reconcile.dynatrace_token_provider.model import DynatraceAPIToken, K8sSecret
+ from reconcile.dynatrace_token_provider.ocm import (
+ DTP_LABEL_SEARCH,
+ DTP_TENANT_LABEL,
+ Cluster,
+ OCMClient,
+ )
+ from reconcile.gql_definitions.dynatrace_token_provider.token_specs import (
+ DynatraceAPITokenV1,
+ DynatraceTokenProviderTokenSpecV1,
+ )
  from reconcile.utils import (
  metrics,
  )
- from reconcile.utils.dynatrace.client import DynatraceAPITokenCreated, DynatraceClient
+ from reconcile.utils.dynatrace.client import DynatraceClient
  from reconcile.utils.ocm.base import (
  OCMClusterServiceLogCreateModel,
  OCMServiceLogSeverity,
  )
  from reconcile.utils.ocm.labels import subscription_label_filter
- from reconcile.utils.ocm.sre_capability_labels import sre_capability_label_key
  from reconcile.utils.runtime.integration import (
  PydanticRunParams,
  QontractReconcileIntegration,
  )

  QONTRACT_INTEGRATION = "dynatrace-token-provider"
- SYNCSET_ID = "ext-dynatrace-tokens-dtp"
- SECRET_NAME = "dynatrace-token-dtp"
- SECRET_NAMESPACE = "dynatrace"
- DYNATRACE_INGESTION_TOKEN_NAME = "dynatrace-ingestion-token"
- DYNATRACE_OPERATOR_TOKEN_NAME = "dynatrace-operator-token"
+ SYNCSET_AND_MANIFEST_ID = "ext-dynatrace-tokens-dtp"


  class DynatraceTokenProviderIntegrationParams(PydanticRunParams):
@@ -58,6 +63,7 @@ class DynatraceTokenProviderIntegration(
  secret_reader=self.secret_reader,
  dynatrace_client_by_tenant_id={},
  ocm_client_by_env_name={},
+ token_spec_by_name={},
  )
  dependencies.populate()
  self.reconcile(dry_run=dry_run, dependencies=dependencies)
@@ -70,7 +76,7 @@ class DynatraceTokenProviderIntegration(
  try:
  clusters = ocm_client.discover_clusters_by_labels(
  label_filter=subscription_label_filter().like(
- "key", dtp_label_key("%")
+ "key", DTP_LABEL_SEARCH
  ),
  )
  except Exception as e:
@@ -90,7 +96,6 @@ class DynatraceTokenProviderIntegration(
  for cluster in clusters
  if cluster.organization_id in self.params.ocm_organization_ids
  ]
- dtp_tenant_label_key = f"{dtp_label_key(None)}.tenant"
  existing_dtp_tokens = {}

  for cluster in clusters:
@@ -105,7 +110,7 @@ class DynatraceTokenProviderIntegration(
  _expose_errors_as_service_log(
  ocm_client,
  cluster_uuid=cluster.external_id,
- error=f"Missing label {dtp_tenant_label_key}",
+ error=f"Missing label {DTP_TENANT_LABEL}",
  )
  continue
  if (
@@ -122,6 +127,16 @@ class DynatraceTokenProviderIntegration(
  tenant_id
  ]

+ token_spec = dependencies.token_spec_by_name.get(
+ cluster.token_spec_name
+ )
+ if not token_spec:
+ _expose_errors_as_service_log(
+ ocm_client,
+ cluster_uuid=cluster.external_id,
+ error=f"Token spec {cluster.token_spec_name} does not exist",
+ )
+ continue
  if tenant_id not in existing_dtp_tokens:
  existing_dtp_tokens[tenant_id] = (
  dt_client.get_token_ids_for_name_prefix(
@@ -129,13 +144,19 @@ class DynatraceTokenProviderIntegration(
  )
  )

+ """
+ Note, that we consciously do not parallelize cluster processing
+ for now. We want to keep stress on OCM at a minimum. The amount
+ of tagged clusters is currently feasible to be processed sequentially.
+ """
  self.process_cluster(
- dry_run,
- cluster,
- dt_client,
- ocm_client,
- existing_dtp_tokens[tenant_id],
- tenant_id,
+ dry_run=dry_run,
+ cluster=cluster,
+ dt_client=dt_client,
+ ocm_client=ocm_client,
+ existing_dtp_tokens=existing_dtp_tokens[tenant_id],
+ tenant_id=tenant_id,
+ token_spec=token_spec,
  )
  except Exception as e:
  unhandled_exceptions.append(
@@ -153,89 +174,192 @@ class DynatraceTokenProviderIntegration(
  ocm_client: OCMClient,
  existing_dtp_tokens: Iterable[str],
  tenant_id: str,
+ token_spec: DynatraceTokenProviderTokenSpecV1,
  ) -> None:
- existing_syncset = self.get_syncset(ocm_client, cluster)
+ if cluster.organization_id not in token_spec.ocm_org_ids:
+ logging.info(
+ f"[{token_spec.name=}] Cluster {cluster.external_id} is not part of ocm orgs defined in {token_spec.ocm_org_ids=}"
+ )
+ return
+ existing_data = {}
+ if cluster.is_hcp:
+ existing_data = self.get_manifest(ocm_client=ocm_client, cluster=cluster)
+ else:
+ existing_data = self.get_syncset(ocm_client=ocm_client, cluster=cluster)
  dt_api_url = f"https://{tenant_id}.live.dynatrace.com/api"
- if not existing_syncset:
+ if not existing_data:
  if not dry_run:
  try:
- (ingestion_token, operator_token) = self.create_dynatrace_tokens(
- dt_client, cluster.external_id
- )
- ocm_client.create_syncset(
- cluster.id,
- self.construct_syncset(
- ingestion_token, operator_token, dt_api_url
- ),
+ k8s_secrets = self.construct_secrets(
+ token_spec=token_spec,
+ dt_client=dt_client,
+ cluster_uuid=cluster.external_id,
  )
+ if cluster.is_hcp:
+ ocm_client.create_manifest(
+ cluster_id=cluster.id,
+ manifest_map=self.construct_manifest(
+ with_id=True,
+ dt_api_url=dt_api_url,
+ secrets=k8s_secrets,
+ ),
+ )
+ else:
+ ocm_client.create_syncset(
+ cluster_id=cluster.id,
+ syncset_map=self.construct_syncset(
+ with_id=True,
+ dt_api_url=dt_api_url,
+ secrets=k8s_secrets,
+ ),
+ )
  except Exception as e:
  _expose_errors_as_service_log(
  ocm_client,
  cluster.external_id,
- f"DTP can't create Syncset with the tokens {str(e.args)}",
+ f"DTP can't create {token_spec.name=} {str(e.args)}",
  )
  logging.info(
- f"Ingestion and operator tokens created in Dynatrace for cluster {cluster.external_id}."
+ f"{token_spec.name=} created in {dt_api_url} for {cluster.external_id=}."
  )
  logging.info(
- f"SyncSet {SYNCSET_ID} created in cluster {cluster.external_id}."
+ f"{SYNCSET_AND_MANIFEST_ID} created for {cluster.external_id=}."
  )
  else:
- tokens = self.get_tokens_from_syncset(existing_syncset)
- need_patching = False
- for token_name, token in tokens.items():
- if token.id not in existing_dtp_tokens:
- need_patching = True
- logging.info(f"{token_name} missing in Dynatrace.")
- if token_name == DYNATRACE_INGESTION_TOKEN_NAME:
- if not dry_run:
- ingestion_token = self.create_dynatrace_ingestion_token(
- dt_client, cluster.external_id
- )
- token.id = ingestion_token.id
- token.token = ingestion_token.token
- logging.info(
- f"Ingestion token created in Dynatrace for cluster {cluster.external_id}."
- )
- elif token_name == DYNATRACE_OPERATOR_TOKEN_NAME:
- if not dry_run:
- operator_token = self.create_dynatrace_operator_token(
- dt_client, cluster.external_id
- )
- token.id = operator_token.id
- token.token = operator_token.token
- logging.info(
- f"Operator token created in Dynatrace for cluster {cluster.external_id}."
- )
- elif token_name == DYNATRACE_INGESTION_TOKEN_NAME:
- ingestion_token = token
- elif token_name == DYNATRACE_OPERATOR_TOKEN_NAME:
- operator_token = token
- if need_patching:
+ current_k8s_secrets: list[K8sSecret] = []
+ if cluster.is_hcp:
+ current_k8s_secrets = self.get_secrets_from_manifest(
+ manifest=existing_data, token_spec=token_spec
+ )
+ else:
+ current_k8s_secrets = self.get_secrets_from_syncset(
+ syncset=existing_data, token_spec=token_spec
+ )
+ has_diff, desired_secrets = self.generate_desired(
+ dry_run=dry_run,
+ current_k8s_secrets=current_k8s_secrets,
+ desired_spec=token_spec,
+ existing_dtp_tokens=existing_dtp_tokens,
+ dt_client=dt_client,
+ cluster_uuid=cluster.external_id,
+ )
+ if has_diff:
  if not dry_run:
- patch_syncset_payload = self.construct_base_syncset(
- ingestion_token=ingestion_token,
- operator_token=operator_token,
- dt_api_url=dt_api_url,
- )
  try:
- logging.info(f"Patching syncset {SYNCSET_ID}.")
- ocm_client.patch_syncset(
- cluster_id=cluster.id,
- syncset_id=SYNCSET_ID,
- syncset_map=patch_syncset_payload,
- )
+ if cluster.is_hcp:
+ ocm_client.patch_manifest(
+ cluster_id=cluster.id,
+ manifest_id=SYNCSET_AND_MANIFEST_ID,
+ manifest_map=self.construct_manifest(
+ dt_api_url=dt_api_url,
+ secrets=desired_secrets,
+ with_id=False,
+ ),
+ )
+ else:
+ ocm_client.patch_syncset(
+ cluster_id=cluster.id,
+ syncset_id=SYNCSET_AND_MANIFEST_ID,
+ syncset_map=self.construct_syncset(
+ dt_api_url=dt_api_url,
+ secrets=desired_secrets,
+ with_id=False,
+ ),
+ )
  except Exception as e:
  _expose_errors_as_service_log(
  ocm_client,
  cluster.external_id,
- f"DTP can't patch Syncset {SYNCSET_ID} due to {str(e.args)}",
+ f"DTP can't patch {token_spec.name=} for {SYNCSET_AND_MANIFEST_ID} due to {str(e.args)}",
  )
- logging.info(f"Syncset {SYNCSET_ID} patched.")
+ logging.info(
+ f"Patched {token_spec.name=} for {SYNCSET_AND_MANIFEST_ID} in {cluster.external_id=}."
+ )
+
+ def generate_desired(
+ self,
+ dry_run: bool,
+ current_k8s_secrets: Iterable[K8sSecret],
+ desired_spec: DynatraceTokenProviderTokenSpecV1,
+ existing_dtp_tokens: Iterable[str],
+ dt_client: DynatraceClient,
+ cluster_uuid: str,
+ ) -> tuple[bool, Iterable[K8sSecret]]:
+ has_diff = False
+ desired: list[K8sSecret] = []
+
+ current_secrets_by_name = {
+ secret.secret_name: secret for secret in current_k8s_secrets
+ }
+
+ for secret in desired_spec.secrets:
+ desired_tokens: list[DynatraceAPIToken] = []
+ current_secret = current_secrets_by_name.get(secret.name)
+ current_tokens_by_name = (
+ {token.name: token for token in current_secret.tokens}
+ if current_secret
+ else {}
+ )
+ for desired_token in secret.tokens:
+ new_token = current_tokens_by_name.get(desired_token.name)
+ if not new_token or new_token.id not in existing_dtp_tokens:
+ has_diff = True
+ if not dry_run:
+ new_token = self.create_dynatrace_token(
+ dt_client, cluster_uuid, desired_token
+ )
+ if new_token:
+ desired_tokens.append(new_token)
+ desired.append(
+ K8sSecret(
+ secret_name=secret.name,
+ namespace_name=secret.namespace,
+ tokens=desired_tokens,
+ )
+ )
+
+ return (has_diff, desired)
+
+ def create_dynatrace_token(
+ self, dt_client: DynatraceClient, cluster_uuid: str, token: DynatraceAPITokenV1
+ ) -> DynatraceAPIToken:
+ token_name = f"dtp-{token.name}-{cluster_uuid}"
+ new_token = dt_client.create_api_token(
+ name=token_name,
+ scopes=token.scopes,
+ )
+ secret_key = token.key_name_in_secret or token.name
+ return DynatraceAPIToken(
+ id=new_token.id,
+ token=new_token.token,
+ name=token_name,
+ secret_key=secret_key,
+ )
+
+ def construct_secrets(
+ self,
+ token_spec: DynatraceTokenProviderTokenSpecV1,
+ dt_client: DynatraceClient,
+ cluster_uuid: str,
+ ) -> list[K8sSecret]:
+ secrets: list[K8sSecret] = []
+ for secret in token_spec.secrets:
+ new_tokens: list[DynatraceAPIToken] = []
+ for token in secret.tokens:
+ new_token = self.create_dynatrace_token(dt_client, cluster_uuid, token)
+ new_tokens.append(new_token)
+ secrets.append(
+ K8sSecret(
+ secret_name=secret.name,
+ namespace_name=secret.namespace,
+ tokens=new_tokens,
+ )
+ )
+ return secrets

  def get_syncset(self, ocm_client: OCMClient, cluster: Cluster) -> dict[str, Any]:
  try:
- syncset = ocm_client.get_syncset(cluster.id, SYNCSET_ID)
+ syncset = ocm_client.get_syncset(cluster.id, SYNCSET_AND_MANIFEST_ID)
  except Exception as e:
  if "Not Found" in e.args[0]:
  syncset = None
@@ -243,50 +367,136 @@ class DynatraceTokenProviderIntegration(
  raise e
  return syncset

- def get_tokens_from_syncset(
- self, syncset: Mapping[str, Any]
- ) -> dict[str, DynatraceAPITokenCreated]:
- tokens: dict[str, Any] = {}
- for resource in syncset["resources"]:
- if resource["kind"] == "Secret":
- operator_token_id = self.base64_decode(resource["data"]["apiTokenId"])
- operator_token = self.base64_decode(resource["data"]["apiToken"])
- ingest_token_id = self.base64_decode(
- resource["data"]["dataIngestTokenId"]
+ def get_manifest(self, ocm_client: OCMClient, cluster: Cluster) -> dict[str, Any]:
+ try:
+ manifest = ocm_client.get_manifest(cluster.id, SYNCSET_AND_MANIFEST_ID)
+ except Exception as e:
+ if "Not Found" in e.args[0]:
+ manifest = None
+ else:
+ raise e
+ return manifest
+
+ def get_secrets_from_syncset(
+ self, syncset: Mapping[str, Any], token_spec: DynatraceTokenProviderTokenSpecV1
+ ) -> list[K8sSecret]:
+ secrets: list[K8sSecret] = []
+ secret_data_by_name = {
+ resource.get("metadata", {}).get("name"): resource.get("data", {})
+ for resource in syncset.get("resources", [])
+ if resource.get("kind") == "Secret"
+ }
+ for secret in token_spec.secrets:
+ secret_data = secret_data_by_name.get(secret.name)
+ if secret_data:
+ tokens = []
+ for token in secret.tokens:
+ token_id = self.base64_decode(
+ secret_data.get(f"{token.key_name_in_secret}Id", "")
+ )
+ token_value = self.base64_decode(
+ secret_data.get(token.key_name_in_secret, "")
+ )
+ tokens.append(
+ DynatraceAPIToken(
+ id=token_id,
+ token=token_value,
+ name=token.name,
+ secret_key=token.key_name_in_secret,
+ )
+ )
+ secrets.append(
+ K8sSecret(
+ secret_name=secret.name,
+ namespace_name=secret.namespace,
+ tokens=tokens,
+ )
  )
- ingest_token = self.base64_decode(resource["data"]["dataIngestToken"])
- tokens[DYNATRACE_INGESTION_TOKEN_NAME] = DynatraceAPITokenCreated(
- id=ingest_token_id,
- token=ingest_token,
- )
- tokens[DYNATRACE_OPERATOR_TOKEN_NAME] = DynatraceAPITokenCreated(
- id=operator_token_id,
- token=operator_token,
- )
- return tokens
+ return secrets
+
+ def get_secrets_from_manifest(
+ self, manifest: Mapping[str, Any], token_spec: DynatraceTokenProviderTokenSpecV1
+ ) -> list[K8sSecret]:
+ secrets: list[K8sSecret] = []
+ secret_data_by_name = {
+ resource.get("metadata", {}).get("name"): resource.get("data", {})
+ for resource in manifest.get("workloads", [])
+ if resource.get("kind") == "Secret"
+ }
+ for secret in token_spec.secrets:
+ secret_data = secret_data_by_name.get(secret.name)
+ if secret_data:
+ tokens = []
+ for token in secret.tokens:
+ token_id = self.base64_decode(
+ secret_data.get(f"{token.key_name_in_secret}Id", "")
+ )
+ token_value = self.base64_decode(
+ secret_data.get(token.key_name_in_secret, "")
+ )
+ tokens.append(
+ DynatraceAPIToken(
+ id=token_id,
+ token=token_value,
+ name=token.name,
+ secret_key=token.key_name_in_secret,
+ )
+ )
+ secrets.append(
+ K8sSecret(
+ secret_name=secret.name,
+ namespace_name=secret.namespace,
+ tokens=tokens,
+ )
+ )
+ return secrets
+
+ def construct_secrets_data(
+ self,
+ secrets: Iterable[K8sSecret],
+ dt_api_url: str,
+ ) -> list[dict[str, Any]]:
+ secrets_data: list[dict[str, Any]] = []
+ for secret in secrets:
+ data: dict[str, str] = {
+ "apiUrl": f"{self.base64_encode_str(dt_api_url)}",
+ }
+ for token in secret.tokens:
+ data[token.secret_key] = f"{self.base64_encode_str(token.token)}"
+ data[f"{token.secret_key}Id"] = f"{self.base64_encode_str(token.id)}"
+ secrets_data.append({
+ "apiVersion": "v1",
+ "kind": "Secret",
+ "metadata": {
+ "name": secret.secret_name,
+ "namespace": secret.namespace_name,
+ },
+ "data": data,
+ })
+ return secrets_data

  def construct_base_syncset(
  self,
- ingestion_token: DynatraceAPITokenCreated,
- operator_token: DynatraceAPITokenCreated,
+ secrets: Iterable[K8sSecret],
  dt_api_url: str,
  ) -> dict[str, Any]:
  return {
  "kind": "SyncSet",
- "resources": [
- {
- "apiVersion": "v1",
- "kind": "Secret",
- "metadata": {"name": SECRET_NAME, "namespace": SECRET_NAMESPACE},
- "data": {
- "apiUrl": f"{self.base64_encode_str(dt_api_url)}",
- "dataIngestTokenId": f"{self.base64_encode_str(ingestion_token.id)}",
- "dataIngestToken": f"{self.base64_encode_str(ingestion_token.token)}",
- "apiTokenId": f"{self.base64_encode_str(operator_token.id)}",
- "apiToken": f"{self.base64_encode_str(operator_token.token)}",
- },
- },
- ],
+ "resources": self.construct_secrets_data(
+ secrets=secrets, dt_api_url=dt_api_url
+ ),
+ }
+
+ def construct_base_manifest(
+ self,
+ secrets: Iterable[K8sSecret],
+ dt_api_url: str,
+ ) -> dict[str, Any]:
+ return {
+ "kind": "Manifest",
+ "workloads": self.construct_secrets_data(
+ secrets=secrets, dt_api_url=dt_api_url
+ ),
  }

  def base64_decode(self, encoded: str) -> str:
@@ -300,51 +510,31 @@ class DynatraceTokenProviderIntegration(

  def construct_syncset(
  self,
- ingestion_token: DynatraceAPITokenCreated,
- operator_token: DynatraceAPITokenCreated,
+ secrets: Iterable[K8sSecret],
  dt_api_url: str,
+ with_id: bool,
  ) -> dict[str, Any]:
  syncset = self.construct_base_syncset(
- ingestion_token=ingestion_token,
- operator_token=operator_token,
+ secrets=secrets,
  dt_api_url=dt_api_url,
  )
- syncset["id"] = SYNCSET_ID
+ if with_id:
+ syncset["id"] = SYNCSET_AND_MANIFEST_ID
  return syncset

- def create_dynatrace_ingestion_token(
- self, dt_client: DynatraceClient, cluster_uuid: str
- ) -> DynatraceAPITokenCreated:
- return dt_client.create_api_token(
- name=f"dtp-ingestion-token-{cluster_uuid}",
- scopes=["metrics.ingest", "logs.ingest", "events.ingest"],
- )
-
- def create_dynatrace_operator_token(
- self, dt_client: DynatraceClient, cluster_uuid: str
- ) -> DynatraceAPITokenCreated:
- return dt_client.create_api_token(
- name=f"dtp-operator-token-{cluster_uuid}",
- scopes=[
- "activeGateTokenManagement.create",
- "entities.read",
- "settings.write",
- "settings.read",
- "DataExport",
- "InstallerDownload",
- ],
+ def construct_manifest(
+ self,
+ secrets: Iterable[K8sSecret],
+ dt_api_url: str,
+ with_id: bool,
+ ) -> dict[str, Any]:
+ manifest = self.construct_base_manifest(
+ secrets=secrets,
+ dt_api_url=dt_api_url,
  )
-
- def create_dynatrace_tokens(
- self, dt_client: DynatraceClient, cluster_uuid: str
- ) -> tuple[DynatraceAPITokenCreated, DynatraceAPITokenCreated]:
- ingestion_token = self.create_dynatrace_ingestion_token(dt_client, cluster_uuid)
- operation_token = self.create_dynatrace_operator_token(dt_client, cluster_uuid)
- return (ingestion_token, operation_token)
-
-
- def dtp_label_key(config_atom: str | None) -> str:
- return sre_capability_label_key("dtp", config_atom)
+ if with_id:
+ manifest["id"] = SYNCSET_AND_MANIFEST_ID
+ return manifest


  def _expose_errors_as_service_log(
--- /dev/null
+++ reconcile/dynatrace_token_provider/model.py
@@ -0,0 +1,14 @@
+ from pydantic import BaseModel
+
+
+ class DynatraceAPIToken(BaseModel):
+ token: str
+ id: str
+ name: str
+ secret_key: str
+
+
+ class K8sSecret(BaseModel):
+ namespace_name: str
+ secret_name: str
+ tokens: list[DynatraceAPIToken]
--- reconcile/dynatrace_token_provider/ocm.py
+++ reconcile/dynatrace_token_provider/ocm.py
@@ -14,6 +14,11 @@ from reconcile.utils.ocm.clusters import (
  discover_clusters_by_labels,
  )
  from reconcile.utils.ocm.labels import Filter
+ from reconcile.utils.ocm.manifests import (
+ create_manifest,
+ get_manifest,
+ patch_manifest,
+ )
  from reconcile.utils.ocm.service_log import create_service_log
  from reconcile.utils.ocm.sre_capability_labels import sre_capability_label_key
  from reconcile.utils.ocm.syncsets import (
@@ -29,23 +34,39 @@ from reconcile.utils.ocm_base_client import (
  Thin abstractions of reconcile.ocm module to reduce coupling.
  """

+ DTP_LABEL = sre_capability_label_key("dtp", None)
+ DTP_TENANT_LABEL = sre_capability_label_key("dtp", "tenant")
+ DTP_LABEL_SEARCH = sre_capability_label_key("dtp", "%")
+

  class Cluster(BaseModel):
  id: str
  external_id: str
  organization_id: str
  dt_tenant: str
+ token_spec_name: str
+ is_hcp: bool

  @staticmethod
  def from_cluster_details(cluster: ClusterDetails) -> Cluster:
- dt_tenant = cluster.labels.get_label_value(
- f"{sre_capability_label_key('dtp', None)}.tenant"
- )
+ dt_tenant = cluster.labels.get_label_value(DTP_TENANT_LABEL)
+ token_spec_name = cluster.labels.get_label_value(DTP_LABEL)
+ if not token_spec_name:
+ """
+ We want to stay backwards compatible.
+ Earlier version of DTP did not set a value for the label.
+ We fall back to a default token in that case.
+
+ Long-term, we want to remove this behavior.
+ """
+ token_spec_name = "default"
  return Cluster(
  id=cluster.ocm_cluster.id,
  external_id=cluster.ocm_cluster.external_id,
  organization_id=cluster.organization_id,
  dt_tenant=dt_tenant,
+ token_spec_name=token_spec_name,
+ is_hcp=cluster.ocm_cluster.is_rosa_hypershift(),
  )


@@ -77,6 +98,28 @@ class OCMClient:
  syncset_map=syncset_map,
  )

+ def create_manifest(self, cluster_id: str, manifest_map: Mapping) -> None:
+ create_manifest(
+ ocm_client=self._ocm_client,
+ cluster_id=cluster_id,
+ manifest_map=manifest_map,
+ )
+
+ def get_manifest(self, cluster_id: str, manifest_id: str) -> Any:
+ return get_manifest(
+ ocm_client=self._ocm_client, cluster_id=cluster_id, manifest_id=manifest_id
+ )
+
+ def patch_manifest(
+ self, cluster_id: str, manifest_id: str, manifest_map: Mapping
+ ) -> None:
+ patch_manifest(
+ ocm_client=self._ocm_client,
+ cluster_id=cluster_id,
+ manifest_id=manifest_id,
+ manifest_map=manifest_map,
+ )
+
  def discover_clusters_by_labels(self, label_filter: Filter) -> list[Cluster]:
  return [
  Cluster.from_cluster_details(cluster)
--- /dev/null
+++ reconcile/gql_definitions/dynatrace_token_provider/token_specs.py
@@ -0,0 +1,84 @@
+ """
+ Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
+ """
+ from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
+ from datetime import datetime # noqa: F401 # pylint: disable=W0611
+ from enum import Enum # noqa: F401 # pylint: disable=W0611
+ from typing import ( # noqa: F401 # pylint: disable=W0611
+ Any,
+ Optional,
+ Union,
+ )
+
+ from pydantic import ( # noqa: F401 # pylint: disable=W0611
+ BaseModel,
+ Extra,
+ Field,
+ Json,
+ )
+
+
+ DEFINITION = """
+ query DynatraceTokenProviderTokenSpecs {
+ token_specs: dynatrace_token_provider_token_spec_v1 {
+ name
+ ocm_org_ids
+ secrets {
+ name
+ namespace
+ tokens {
+ name
+ keyNameInSecret
+ scopes
+ }
+ }
+ }
+ }
+ """
+
+
+ class ConfiguredBaseModel(BaseModel):
+ class Config:
+ smart_union=True
+ extra=Extra.forbid
+
+
+ class DynatraceAPITokenV1(ConfiguredBaseModel):
+ name: str = Field(..., alias="name")
+ key_name_in_secret: Optional[str] = Field(..., alias="keyNameInSecret")
+ scopes: list[str] = Field(..., alias="scopes")
+
+
+ class DynatraceTokenProviderTokenSecretV1(ConfiguredBaseModel):
+ name: str = Field(..., alias="name")
+ namespace: str = Field(..., alias="namespace")
+ tokens: list[DynatraceAPITokenV1] = Field(..., alias="tokens")
+
+
+ class DynatraceTokenProviderTokenSpecV1(ConfiguredBaseModel):
+ name: str = Field(..., alias="name")
+ ocm_org_ids: list[str] = Field(..., alias="ocm_org_ids")
+ secrets: list[DynatraceTokenProviderTokenSecretV1] = Field(..., alias="secrets")
+
+
+ class DynatraceTokenProviderTokenSpecsQueryData(ConfiguredBaseModel):
+ token_specs: Optional[list[DynatraceTokenProviderTokenSpecV1]] = Field(..., alias="token_specs")
+
+
+ def query(query_func: Callable, **kwargs: Any) -> DynatraceTokenProviderTokenSpecsQueryData:
+ """
+ This is a convenience function which queries and parses the data into
+ concrete types. It should be compatible with most GQL clients.
+ You do not have to use it to consume the generated data classes.
+ Alternatively, you can also mime and alternate the behavior
+ of this function in the caller.
+
+ Parameters:
+ query_func (Callable): Function which queries your GQL Server
+ kwargs: optional arguments that will be passed to the query function
+
+ Returns:
+ DynatraceTokenProviderTokenSpecsQueryData: queried data parsed into generated classes
+ """
+ raw_data: dict[Any, Any] = query_func(DEFINITION, **kwargs)
+ return DynatraceTokenProviderTokenSpecsQueryData(**raw_data)
--- reconcile/gql_definitions/terraform_repo/terraform_repo.py
+++ reconcile/gql_definitions/terraform_repo/terraform_repo.py
@@ -53,6 +53,7 @@ query TerraformRepo {
  delete
  requireFips
  tfVersion
+ forceRerunTimestamp
  variables {
  inputs {
  ...VaultSecret
@@ -105,6 +106,7 @@ class TerraformRepoV1(ConfiguredBaseModel):
  delete: Optional[bool] = Field(..., alias="delete")
  require_fips: Optional[bool] = Field(..., alias="requireFips")
  tf_version: str = Field(..., alias="tfVersion")
+ force_rerun_timestamp: Optional[str] = Field(..., alias="forceRerunTimestamp")
  variables: Optional[TerraformRepoVariablesV1] = Field(..., alias="variables")


--- reconcile/terraform_repo.py
+++ reconcile/terraform_repo.py
@@ -361,6 +361,8 @@ class TerraformRepoIntegration(
  )
  if self.params.validate_git:
  self.check_ref(d.repository, d.ref)
+ if c.force_rerun_timestamp != d.force_rerun_timestamp:
+ logging.info("user has forced a re-run of tf-repo execution")

  if len(merged) != 0:
  if not dry_run and state:
--- reconcile/test/test_terraform_repo.py
+++ reconcile/test/test_terraform_repo.py
@@ -1,3 +1,4 @@
+ from datetime import datetime
  from unittest.mock import MagicMock

  import pytest
@@ -46,6 +47,7 @@ def existing_repo(aws_account, tf_variables) -> TerraformRepoV1:
  requireFips=True,
  tfVersion=A_REPO_VERSION,
  variables=tf_variables,
+ forceRerunTimestamp=None,
  )


@@ -86,6 +88,7 @@ def new_repo(aws_account_no_state) -> TerraformRepoV1:
  requireFips=False,
  tfVersion=B_REPO_VERSION,
  variables=None,
+ forceRerunTimestamp=None,
  )


@@ -220,6 +223,27 @@ def test_updating_repo_ref(existing_repo, int_params, state_mock):
  )


+ def test_force_rerun(existing_repo, int_params, state_mock):
+ existing = [existing_repo]
+ updated_repo = TerraformRepoV1.copy(existing_repo)
+ updated_repo.force_rerun_timestamp = datetime.now().isoformat()
+
+ integration = TerraformRepoIntegration(params=int_params)
+ diff = integration.calculate_diff(
+ existing_state=existing,
+ desired_state=[updated_repo],
+ dry_run=False,
+ state=state_mock,
+ recreate_state=False,
+ )
+
+ assert diff == [updated_repo]
+
+ state_mock.add.assert_called_once_with(
+ updated_repo.name, updated_repo.dict(by_alias=True), force=True
+ )
+
+
  def test_fail_on_update_invalid_repo_params(existing_repo, int_params):
  existing = [existing_repo]
  updated_repo = TerraformRepoV1.copy(existing_repo)
@@ -291,6 +315,7 @@ def test_get_repo_state(s3_state_builder, int_params, existing_repo, tf_variable
  "requireFips": True,
  "tfVersion": A_REPO_VERSION,
  "variables": tf_variables,
+ "forceRerunTimestamp": None,
  "account": {
  "name": "foo",
  "uid": AWS_UID,
--- /dev/null
+++ reconcile/typed_queries/dynatrace_token_provider_token_specs.py
@@ -0,0 +1,14 @@
+ from reconcile.gql_definitions.dynatrace_token_provider.token_specs import (
+ DynatraceTokenProviderTokenSpecV1,
+ query,
+ )
+ from reconcile.utils import gql
+ from reconcile.utils.gql import GqlApi
+
+
+ def get_dynatrace_token_provider_token_specs(
+ api: GqlApi | None = None,
+ ) -> list[DynatraceTokenProviderTokenSpecV1]:
+ api = api if api else gql.get_api()
+ data = query(api.query)
+ return list(data.token_specs or [])
--- reconcile/utils/ocm/manifests.py
+++ reconcile/utils/ocm/manifests.py
@@ -33,7 +33,7 @@ def create_manifest(


  def patch_manifest(
- ocm_client: OCMBaseClient, cluster_id: str, syncset_id: str, manifest_map: Mapping
+ ocm_client: OCMBaseClient, cluster_id: str, manifest_id: str, manifest_map: Mapping
  ) -> None:
  manifest = Manifest(cluster_id)
- ocm_client.patch(api_path=manifest.href + "/" + syncset_id, data=manifest_map)
+ ocm_client.patch(api_path=manifest.href + "/" + manifest_id, data=manifest_map)