qontract-reconcile 0.10.2.dev192__py3-none-any.whl → 0.10.2.dev194__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- {qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/METADATA +1 -1
- {qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/RECORD +8 -8
- reconcile/dynatrace_token_provider/integration.py +185 -182
- reconcile/dynatrace_token_provider/model.py +6 -1
- reconcile/openshift_resources_base.py +10 -3
- reconcile/sql_query.py +30 -19
- {qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/entry_points.txt +0 -0
{qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qontract-reconcile
-Version: 0.10.2.
+Version: 0.10.2.dev194
 Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
 Project-URL: homepage, https://github.com/app-sre/qontract-reconcile
 Project-URL: repository, https://github.com/app-sre/qontract-reconcile
{qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/RECORD
RENAMED
@@ -71,7 +71,7 @@ reconcile/openshift_network_policies.py,sha256=p81ShFK1WSEGiWHVURopDpg8YvtA3RE3O
 reconcile/openshift_prometheus_rules.py,sha256=onowXab248zmHH8SbYDTc1W1bl7JiqRFU1xdTkZyLFg,1332
 reconcile/openshift_resourcequotas.py,sha256=yUi56PiOn3inMMfq_x_FEHmaW-reGipzoorjdar372g,2415
 reconcile/openshift_resources.py,sha256=I2nO_C37mG3rfyGrd4cGwN3mVseVGuTAHAyhFzLyqF4,1518
-reconcile/openshift_resources_base.py,sha256=
+reconcile/openshift_resources_base.py,sha256=3HudPdM7EE0HNWUn1eu0O20Ij25fqGisaDBMVvTk1fk,41768
 reconcile/openshift_rolebindings.py,sha256=9mlJ2FjWUoH-rsjtasreA_hV-K5Z_YR00qR_RR60OZM,6555
 reconcile/openshift_routes.py,sha256=fXvuPSjcjVw1X3j2EQvUAdbOepmIFdKk-M3qP8QzPiw,1075
 reconcile/openshift_saas_deploy.py,sha256=T1dvb9zajisaJNjbnR6-AZHU-itscHtr4oCqLj8KCK0,13037
@@ -105,7 +105,7 @@ reconcile/service_dependencies.py,sha256=G2qCuYFc8wQLpRxkdhmibxSAl3nUM3hcan4x50W
 reconcile/signalfx_endpoint_monitoring.py,sha256=Nqgsg1cflSd2nNnm89y_e8c--7xLUqTrKOHkDs-qADE,2868
 reconcile/slack_base.py,sha256=I-msunWxfgu5bSwXYulGbtLjxUB_tRmTCAUCU-3nabI,3484
 reconcile/slack_usergroups.py,sha256=xFkVe67RXSUj8JvpfSFEiRdQzB0TnJJEHW_b5PEwLng,30213
-reconcile/sql_query.py,sha256=
+reconcile/sql_query.py,sha256=auZCWe6dytsDp83Imfo4zqkpMCLRXU007IUlPeUE3j4,26376
 reconcile/status.py,sha256=cY4IJFXemhxptRJqR4qaaOWqei9e4jgLXuVSGajMsjg,544
 reconcile/status_board.py,sha256=kJ0bus_wdyX3zsFJuUPrH4n9BNG_jhDbiQ3waOLVRBE,8538
 reconcile/terraform_aws_route53.py,sha256=dQzzT46YhwRA902_H6pi-f7WlX4EaH187wXSdmJAUkQ,9958
@@ -187,9 +187,9 @@ reconcile/cna/assets/asset_factory.py,sha256=7T7X_J6xIsoGETqBRI45_EyIKEdQcnRPt_G
 reconcile/cna/assets/null.py,sha256=85mVh97atCoC0aLuX47poTZiyOthmziJeBsUw0c924w,1658
 reconcile/dynatrace_token_provider/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reconcile/dynatrace_token_provider/dependencies.py,sha256=lvkdwqHMsn_2kgj-tUIJdTUnUNxVoS6z8k4nPkGglnQ,3129
-reconcile/dynatrace_token_provider/integration.py,sha256=
+reconcile/dynatrace_token_provider/integration.py,sha256=RTGy4A6U4EgE1G4rMdS8gqgw2XIfDcdYd-eF5DL9bo0,27166
 reconcile/dynatrace_token_provider/metrics.py,sha256=oP-6NTZENFdvWiS0krnmX6tq3xyOzQ8e6vS0CZWYUuw,1496
-reconcile/dynatrace_token_provider/model.py,sha256=
+reconcile/dynatrace_token_provider/model.py,sha256=VU2tZT_NAdoCovGFVj5ZoEKhWfMsC1PPPB8Iu9WMSAw,641
 reconcile/dynatrace_token_provider/ocm.py,sha256=EPknDhLXkySs8Nv8jrrl12oRoe2bRFWx_CMiHpPQhmM,3734
 reconcile/dynatrace_token_provider/validate.py,sha256=40_9QmHoB3-KBc0k_0D4QO00PpNNPS-gU9Z6cIcWga8,1920
 reconcile/endpoints_discovery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -809,7 +809,7 @@ tools/saas_promotion_state/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
 tools/saas_promotion_state/saas_promotion_state.py,sha256=UfwwRLS5Ya4_Nh1w5n1dvoYtchQvYE9yj1VANt2IKqI,3925
 tools/sre_checkpoints/__init__.py,sha256=CDaDaywJnmRCLyl_NCcvxi-Zc0hTi_3OdwKiFOyS39I,145
 tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y,894
-qontract_reconcile-0.10.2.
-qontract_reconcile-0.10.2.
-qontract_reconcile-0.10.2.
-qontract_reconcile-0.10.2.
+qontract_reconcile-0.10.2.dev194.dist-info/METADATA,sha256=-RyDt7W0DypwBtTgRaMQj0BLugj2YxX2eC_XlfpkDLk,24555
+qontract_reconcile-0.10.2.dev194.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+qontract_reconcile-0.10.2.dev194.dist-info/entry_points.txt,sha256=5i9l54La3vQrDLAdwDKQWC0iG4sV9RRfOb1BpvzOWLc,698
+qontract_reconcile-0.10.2.dev194.dist-info/RECORD,,
reconcile/dynatrace_token_provider/integration.py
CHANGED
@@ -2,7 +2,7 @@ import base64
 import hashlib
 import logging
 from collections import Counter, defaultdict
-from collections.abc import Iterable, Mapping
+from collections.abc import Iterable, Mapping
 from datetime import timedelta
 from threading import Lock
 from typing import Any
@@ -57,6 +57,9 @@ SYNCSET_AND_MANIFEST_ID = "ext-dynatrace-tokens-dtp"
 DTP_LABEL_SEARCH = sre_capability_label_key("dtp", "%")
 DTP_TENANT_V2_LABEL = sre_capability_label_key("dtp.v2", "tenant")
 DTP_SPEC_V2_LABEL = sre_capability_label_key("dtp.v2", "token-spec")
+DTP_V3_PREFIX = sre_capability_label_key("dtp", "v3")
+DTP_V3_SPEC_SUFFIX = "token-spec"
+DTP_V3_TENANT_SUFFIX = "tenant"


 class ReconcileErrorSummary(Exception):
@@ -110,47 +113,106 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                 cnt,
             )

-    def _parse_ocm_data_to_cluster(
-
-
-
-
-
+    def _parse_ocm_data_to_cluster(
+        self, ocm_cluster: OCMCluster, dependencies: Dependencies
+    ) -> Cluster | None:
+        bindings: dict[str, TokenSpecTenantBinding] = {}
+        for label in ocm_cluster.labels:
+            if not label.startswith(DTP_V3_PREFIX):
+                continue
+            if not (
+                label.endswith(DTP_V3_TENANT_SUFFIX)
+                or label.endswith(DTP_V3_SPEC_SUFFIX)
+            ):
+                logging.warning(
+                    f"[Bad DTPv3 label key] {label=} {ocm_cluster.id=} {ocm_cluster.subscription_id=}"
+                )
+                continue
+            common_prefix = label.rsplit(".", 1)[0]
+            if not (
+                tenant := ocm_cluster.labels.get(
+                    f"{common_prefix}.{DTP_V3_TENANT_SUFFIX}"
+                )
+            ):
+                logging.warning(
+                    f"[Missing {DTP_V3_TENANT_SUFFIX} for common label prefix {common_prefix=}] {ocm_cluster.id=} {ocm_cluster.subscription_id=}"
+                )
+                continue
+            if not (
+                spec_name := ocm_cluster.labels.get(
+                    f"{common_prefix}.{DTP_V3_SPEC_SUFFIX}"
+                )
+            ):
+                logging.warning(
+                    f"[Missing {DTP_V3_SPEC_SUFFIX} for common label prefix {common_prefix=}] {ocm_cluster.id=} {ocm_cluster.subscription_id=}"
+                )
+                continue
+            if not (spec := dependencies.token_spec_by_name.get(spec_name)):
+                logging.warning(
+                    f"[Missing spec '{spec_name}'] {ocm_cluster.id=} {ocm_cluster.subscription_id=}"
+                )
+                continue
+            bindings[common_prefix] = TokenSpecTenantBinding(
+                spec=spec,
+                tenant_id=tenant,
             )
-
+
+        if not bindings:
+            # Stay backwards compatible with v2 for now
+            dt_tenant = ocm_cluster.labels.get(DTP_TENANT_V2_LABEL)
+            token_spec_name = ocm_cluster.labels.get(DTP_SPEC_V2_LABEL)
+            token_spec = dependencies.token_spec_by_name.get(token_spec_name or "")
+            if not dt_tenant or not token_spec:
+                logging.warning(
+                    f"[Missing DTP labels] {ocm_cluster.id=} {ocm_cluster.subscription_id=} {dt_tenant=} {token_spec_name=}"
+                )
+                return None
+            bindings["v2"] = TokenSpecTenantBinding(
+                spec=token_spec,
+                tenant_id=dt_tenant,
+            )
+
+        bindings_list = list(bindings.values())
+
+        for binding in bindings_list:
+            if binding.tenant_id not in dependencies.dynatrace_client_by_tenant_id:
+                logging.warning(
+                    f"[{ocm_cluster.id=}] Dynatrace {binding.tenant_id=} does not exist"
+                )
+                return None
+
         return Cluster(
             id=ocm_cluster.id,
             external_id=ocm_cluster.external_id,
             organization_id=ocm_cluster.organization_id,
             is_hcp=ocm_cluster.is_hcp,
-            dt_token_bindings=
-                TokenSpecTenantBinding(
-                    spec_name=token_spec_name,
-                    tenant_id=dt_tenant,
-                )
-            ],
+            dt_token_bindings=bindings_list,
         )

     def _filter_clusters(
         self,
         clusters: Iterable[Cluster],
-        token_spec_by_name: Mapping[str, DynatraceTokenProviderTokenSpecV1],
     ) -> list[Cluster]:
         filtered_clusters = []
         for cluster in clusters:
+            # Check if any token binding is valid for this cluster
+            has_valid_binding = False
             for token_binding in cluster.dt_token_bindings:
-                token_spec =
-                if not token_spec:
-                    logging.debug(
-                        f"[{cluster.id=}] Skipping cluster. {token_binding.spec_name=} does not exist."
-                    )
-                    continue
+                token_spec = token_binding.spec
                 if cluster.organization_id in token_spec.ocm_org_ids:
-
+                    has_valid_binding = True
+                    break
                 else:
                     logging.debug(
-                        f"[{cluster.id=}] Skipping
+                        f"[{cluster.id=}] Skipping token binding for {token_spec.name=}. {cluster.organization_id=} is not defined in {token_spec.ocm_org_ids=}."
                     )
+
+            if has_valid_binding:
+                filtered_clusters.append(cluster)
+            else:
+                logging.debug(
+                    f"[{cluster.id=}] Skipping cluster as it has no valid token bindings."
+                )
         return filtered_clusters

     def reconcile(self, dry_run: bool, dependencies: Dependencies) -> None:
@@ -176,13 +238,13 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                 for ocm_cluster in ocm_clusters
                 if (
                     cluster := self._parse_ocm_data_to_cluster(
-                        ocm_cluster=ocm_cluster
+                        ocm_cluster=ocm_cluster,
+                        dependencies=dependencies,
                     )
                 )
             ]
             filtered_clusters = self._filter_clusters(
                 clusters=clusters,
-                token_spec_by_name=dependencies.token_spec_by_name,
             )

             existing_dtp_tokens: dict[str, dict[str, str]] = {}
@@ -195,76 +257,21 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                 len(clusters),
             )
             for cluster in filtered_clusters:
-
+                with DTPOrganizationErrorRate(
+                    integration=self.name,
+                    ocm_env=ocm_env_name,
+                    org_id=cluster.organization_id,
+                ):
                     try:
-
-
-
-
-
-
-
-
-
-                                cluster_uuid=cluster.external_id,
-                                error=f"Missing label {DTP_TENANT_V2_LABEL}",
-                            )
-                            logging.warning(
-                                f"[{cluster.id=}] Missing value for label {DTP_TENANT_V2_LABEL}"
-                            )
-                            continue
-                        if (
-                            tenant_id
-                            not in dependencies.dynatrace_client_by_tenant_id
-                        ):
-                            _expose_errors_as_service_log(
-                                ocm_client,
-                                cluster_uuid=cluster.external_id,
-                                error=f"Dynatrace tenant {tenant_id} does not exist",
-                            )
-                            logging.warning(
-                                f"[{cluster.id=}] Dynatrace {tenant_id=} does not exist"
-                            )
-                            continue
-                        dt_client = dependencies.dynatrace_client_by_tenant_id[
-                            tenant_id
-                        ]
-
-                        token_spec = dependencies.token_spec_by_name.get(
-                            token_binding.spec_name
-                        )
-                        if not token_spec:
-                            _expose_errors_as_service_log(
-                                ocm_client,
-                                cluster_uuid=cluster.external_id,
-                                error=f"Token spec {token_binding.spec_name} does not exist",
-                            )
-                            logging.warning(
-                                f"[{cluster.id=}] Token spec '{token_binding.spec_name}' does not exist"
-                            )
-                            continue
-                        if tenant_id not in existing_dtp_tokens:
-                            existing_dtp_tokens[tenant_id] = (
-                                dt_client.get_token_ids_map_for_name_prefix(
-                                    prefix="dtp"
-                                )
-                            )
-
-                        """
-                        Note, that we consciously do not parallelize cluster processing
-                        for now. We want to keep stress on OCM at a minimum. The amount
-                        of tagged clusters is currently feasible to be processed sequentially.
-                        """
-                        self.process_cluster(
-                            dry_run=dry_run,
-                            cluster=cluster,
-                            dt_client=dt_client,
-                            ocm_client=ocm_client,
-                            existing_dtp_tokens=existing_dtp_tokens[tenant_id],
-                            tenant_id=tenant_id,
-                            token_spec=token_spec,
-                            ocm_env_name=ocm_env_name,
-                        )
+                        self.process_cluster(
+                            dry_run=dry_run,
+                            cluster=cluster,
+                            ocm_client=ocm_client,
+                            existing_dtp_tokens=existing_dtp_tokens,
+                            ocm_env_name=ocm_env_name,
+                            dependencies=dependencies,
+                        )
+
                     except Exception as e:
                         unhandled_exceptions.append(
                             f"{ocm_env_name}/{cluster.organization_id}/{cluster.id}: {e}"
@@ -278,34 +285,57 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
         self,
         dry_run: bool,
         cluster: Cluster,
-        dt_client: DynatraceClient,
         ocm_client: OCMClient,
-        existing_dtp_tokens:
-        tenant_id: str,
-        token_spec: DynatraceTokenProviderTokenSpecV1,
+        existing_dtp_tokens: dict[str, dict[str, str]],
         ocm_env_name: str,
+        dependencies: Dependencies,
     ) -> None:
-
+        current_secrets: list[K8sSecret] = []
         if cluster.is_hcp:
-
+            data = self.get_manifest(ocm_client=ocm_client, cluster=cluster)
+            for binding in cluster.dt_token_bindings:
+                current_secrets.extend(
+                    self.get_secrets_from_manifest(
+                        manifest=data, token_spec=binding.spec
+                    )
+                )
         else:
-
-
-
+            data = self.get_syncset(ocm_client=ocm_client, cluster=cluster)
+            for binding in cluster.dt_token_bindings:
+                current_secrets.extend(
+                    self.get_secrets_from_syncset(syncset=data, token_spec=binding.spec)
+                )
+
+        desired_secrets: list[K8sSecret] = []
+        has_diff = False
+        for binding in cluster.dt_token_bindings:
+            dt_client = dependencies.dynatrace_client_by_tenant_id[binding.tenant_id]
+            if binding.tenant_id not in existing_dtp_tokens:
+                existing_dtp_tokens[binding.tenant_id] = (
+                    dt_client.get_token_ids_map_for_name_prefix(prefix="dtp")
+                )
+            cur_diff, cur_desired_secrets = self.generate_desired(
+                dry_run=dry_run,
+                current_k8s_secrets=current_secrets,
+                desired_spec=binding.spec,
+                existing_dtp_tokens=existing_dtp_tokens[binding.tenant_id],
+                dt_client=dt_client,
+                cluster_uuid=cluster.external_id,
+                dt_tenant_id=binding.tenant_id,
+                ocm_env_name=ocm_env_name,
+            )
+            desired_secrets.extend(cur_desired_secrets)
+            has_diff |= cur_diff
+
+        if not current_secrets:
             if not dry_run:
                 try:
-                    k8s_secrets = self.construct_secrets(
-                        token_spec=token_spec,
-                        dt_client=dt_client,
-                        cluster_uuid=cluster.external_id,
-                    )
                     if cluster.is_hcp:
                         ocm_client.create_manifest(
                             cluster_id=cluster.id,
                             manifest_map=self.construct_manifest(
                                 with_id=True,
-
-                                secrets=k8s_secrets,
+                                secrets=desired_secrets,
                             ),
                         )
                     else:
@@ -313,72 +343,47 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                             cluster_id=cluster.id,
                             syncset_map=self.construct_syncset(
                                 with_id=True,
-
-                                secrets=k8s_secrets,
+                                secrets=desired_secrets,
                             ),
                         )
                 except Exception as e:
                     _expose_errors_as_service_log(
                         ocm_client,
                         cluster.external_id,
-                        f"DTP can't create {
+                        f"DTP can't create {SYNCSET_AND_MANIFEST_ID} due to {e.args!s}",
                     )
-                logging.info(
-                    f"{token_spec.name=} created in {dt_api_url} for {cluster.id=}."
-                )
                 logging.info(f"{SYNCSET_AND_MANIFEST_ID} created for {cluster.id=}.")
-
-
-
-
-
-
-
-
-
-
-
-            dry_run=dry_run,
-            current_k8s_secrets=current_k8s_secrets,
-            desired_spec=token_spec,
-            existing_dtp_tokens=existing_dtp_tokens,
-            dt_client=dt_client,
-            cluster_uuid=cluster.external_id,
-            dt_tenant_id=tenant_id,
-            ocm_env_name=ocm_env_name,
-        )
-        if has_diff:
-            if not dry_run:
-                try:
-                    if cluster.is_hcp:
-                        ocm_client.patch_manifest(
-                            cluster_id=cluster.id,
-                            manifest_id=SYNCSET_AND_MANIFEST_ID,
-                            manifest_map=self.construct_manifest(
-                                dt_api_url=dt_api_url,
-                                secrets=desired_secrets,
-                                with_id=False,
-                            ),
-                        )
-                    else:
-                        ocm_client.patch_syncset(
-                            cluster_id=cluster.id,
-                            syncset_id=SYNCSET_AND_MANIFEST_ID,
-                            syncset_map=self.construct_syncset(
-                                dt_api_url=dt_api_url,
-                                secrets=desired_secrets,
-                                with_id=False,
-                            ),
-                        )
-                except Exception as e:
-                    _expose_errors_as_service_log(
-                        ocm_client,
-                        cluster.external_id,
-                        f"DTP can't patch {token_spec.name=} for {SYNCSET_AND_MANIFEST_ID} due to {e.args!s}",
+        elif has_diff:
+            if not dry_run:
+                try:
+                    if cluster.is_hcp:
+                        ocm_client.patch_manifest(
+                            cluster_id=cluster.id,
+                            manifest_id=SYNCSET_AND_MANIFEST_ID,
+                            manifest_map=self.construct_manifest(
+                                secrets=desired_secrets,
+                                with_id=False,
+                            ),
                         )
-
-
-
+                    else:
+                        ocm_client.patch_syncset(
+                            cluster_id=cluster.id,
+                            syncset_id=SYNCSET_AND_MANIFEST_ID,
+                            syncset_map=self.construct_syncset(
+                                secrets=desired_secrets,
+                                with_id=False,
+                            ),
+                        )
+                except Exception as e:
+                    _expose_errors_as_service_log(
+                        ocm_client,
+                        cluster.external_id,
+                        f"DTP can't patch {SYNCSET_AND_MANIFEST_ID} due to {e.args!s}",
+                    )
+                logging.info(f"Patched {SYNCSET_AND_MANIFEST_ID} in {cluster.id=}.")
+
+    def dt_api_url(self, tenant_id: str) -> str:
+        return f"https://{tenant_id}.live.dynatrace.com/api"

     def scopes_hash(self, scopes: Iterable[str], length: int) -> str:
         m = hashlib.sha256()
@@ -428,7 +433,7 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
         dry_run: bool,
         current_k8s_secrets: Iterable[K8sSecret],
         desired_spec: DynatraceTokenProviderTokenSpecV1,
-        existing_dtp_tokens:
+        existing_dtp_tokens: dict[str, str],
         dt_client: DynatraceClient,
         cluster_uuid: str,
         ocm_env_name: str,
@@ -475,6 +480,7 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                     secret_name=secret.name,
                     namespace_name=secret.namespace,
                     tokens=desired_tokens,
+                    dt_api_url=self.dt_api_url(tenant_id=dt_tenant_id),
                 )
             )

@@ -500,6 +506,7 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
         self,
         token_spec: DynatraceTokenProviderTokenSpecV1,
         dt_client: DynatraceClient,
+        dt_api_url: str,
         cluster_uuid: str,
     ) -> list[K8sSecret]:
         secrets: list[K8sSecret] = []
@@ -513,6 +520,7 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                     secret_name=secret.name,
                     namespace_name=secret.namespace,
                     tokens=new_tokens,
+                    dt_api_url=dt_api_url,
                 )
             )
         return secrets
@@ -562,11 +570,13 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
                         secret_key=token.key_name_in_secret,
                     )
                 )
+            dt_api_url = self.base64_decode(secret_data.get("apiUrl", ""))
             secrets.append(
                 K8sSecret(
                     secret_name=secret.name,
                     namespace_name=secret.namespace,
                     tokens=tokens,
+                    dt_api_url=dt_api_url,
                 )
             )
         return secrets
@@ -574,6 +584,8 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def get_secrets_from_syncset(
         self, syncset: Mapping[str, Any], token_spec: DynatraceTokenProviderTokenSpecV1
     ) -> list[K8sSecret]:
+        if not syncset:
+            return []
         secret_data_by_name = {
             resource.get("metadata", {}).get("name"): resource.get("data", {})
             for resource in syncset.get("resources", [])
@@ -586,6 +598,8 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def get_secrets_from_manifest(
         self, manifest: Mapping[str, Any], token_spec: DynatraceTokenProviderTokenSpecV1
     ) -> list[K8sSecret]:
+        if not manifest:
+            return []
         secret_data_by_name = {
             resource.get("metadata", {}).get("name"): resource.get("data", {})
             for resource in manifest.get("workloads", [])
@@ -598,12 +612,11 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def construct_secrets_data(
         self,
         secrets: Iterable[K8sSecret],
-        dt_api_url: str,
     ) -> list[dict[str, Any]]:
         secrets_data: list[dict[str, Any]] = []
         for secret in secrets:
             data: dict[str, str] = {
-                "apiUrl": f"{self.base64_encode_str(dt_api_url)}",
+                "apiUrl": f"{self.base64_encode_str(secret.dt_api_url)}",
             }
             for token in secret.tokens:
                 data[token.secret_key] = f"{self.base64_encode_str(token.token)}"
@@ -626,25 +639,19 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def construct_base_syncset(
         self,
         secrets: Iterable[K8sSecret],
-        dt_api_url: str,
     ) -> dict[str, Any]:
         return {
             "kind": "SyncSet",
-            "resources": self.construct_secrets_data(
-                secrets=secrets, dt_api_url=dt_api_url
-            ),
+            "resources": self.construct_secrets_data(secrets=secrets),
         }

     def construct_base_manifest(
         self,
         secrets: Iterable[K8sSecret],
-        dt_api_url: str,
     ) -> dict[str, Any]:
         return {
             "kind": "Manifest",
-            "workloads": self.construct_secrets_data(
-                secrets=secrets, dt_api_url=dt_api_url
-            ),
+            "workloads": self.construct_secrets_data(secrets=secrets),
         }

     def base64_decode(self, encoded: str) -> str:
@@ -659,12 +666,10 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def construct_syncset(
         self,
         secrets: Iterable[K8sSecret],
-        dt_api_url: str,
         with_id: bool,
     ) -> dict[str, Any]:
         syncset = self.construct_base_syncset(
             secrets=secrets,
-            dt_api_url=dt_api_url,
         )
         if with_id:
             syncset["id"] = SYNCSET_AND_MANIFEST_ID
@@ -673,12 +678,10 @@ class DynatraceTokenProviderIntegration(QontractReconcileIntegration[NoParams]):
     def construct_manifest(
         self,
         secrets: Iterable[K8sSecret],
-        dt_api_url: str,
         with_id: bool,
     ) -> dict[str, Any]:
         manifest = self.construct_base_manifest(
             secrets=secrets,
-            dt_api_url=dt_api_url,
         )
         if with_id:
             manifest["id"] = SYNCSET_AND_MANIFEST_ID
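Illustration (not part of the package): the reworked `_parse_ocm_data_to_cluster` above pairs a `…token-spec` label with a `…tenant` label that share a common prefix. The standalone sketch below mirrors just that pairing step against a plain dict of labels; the literal prefix string is an assumption (the integration derives it via `sre_capability_label_key("dtp", "v3")`), and the real code builds `TokenSpecTenantBinding` models rather than tuples.

```python
# Hedged sketch of the DTP v3 label pairing shown in the integration.py hunks above.
DTP_V3_PREFIX = "sre-capabilities.dtp.v3"  # assumed literal; the real value comes from sre_capability_label_key
DTP_V3_SPEC_SUFFIX = "token-spec"
DTP_V3_TENANT_SUFFIX = "tenant"

labels = {
    f"{DTP_V3_PREFIX}.default.{DTP_V3_TENANT_SUFFIX}": "tenant-a",
    f"{DTP_V3_PREFIX}.default.{DTP_V3_SPEC_SUFFIX}": "spec-a",
}

bindings: dict[str, tuple[str, str]] = {}
for label in labels:
    if not label.startswith(DTP_V3_PREFIX):
        continue
    if not (label.endswith(DTP_V3_TENANT_SUFFIX) or label.endswith(DTP_V3_SPEC_SUFFIX)):
        continue  # malformed key; the integration logs a warning here
    common_prefix = label.rsplit(".", 1)[0]
    tenant = labels.get(f"{common_prefix}.{DTP_V3_TENANT_SUFFIX}")
    spec_name = labels.get(f"{common_prefix}.{DTP_V3_SPEC_SUFFIX}")
    if tenant and spec_name:
        bindings[common_prefix] = (spec_name, tenant)

print(bindings)
# {'sre-capabilities.dtp.v3.default': ('spec-a', 'tenant-a')}
```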
reconcile/dynatrace_token_provider/model.py
CHANGED
@@ -1,5 +1,9 @@
 from pydantic import BaseModel

+from reconcile.gql_definitions.dynatrace_token_provider.token_specs import (
+    DynatraceTokenProviderTokenSpecV1,
+)
+

 class DynatraceAPIToken(BaseModel):
     token: str
@@ -11,11 +15,12 @@ class DynatraceAPIToken(BaseModel):
 class K8sSecret(BaseModel):
     namespace_name: str
     secret_name: str
+    dt_api_url: str
     tokens: list[DynatraceAPIToken]


 class TokenSpecTenantBinding(BaseModel):
-
+    spec: DynatraceTokenProviderTokenSpecV1
     tenant_id: str


reconcile/openshift_resources_base.py
CHANGED
@@ -62,7 +62,7 @@ from reconcile.utils.openshift_resource import (
 )
 from reconcile.utils.openshift_resource import OpenshiftResource as OR
 from reconcile.utils.runtime.integration import DesiredStateShardConfig
-from reconcile.utils.secret_reader import SecretReader
+from reconcile.utils.secret_reader import SecretReader, SecretReaderBase
 from reconcile.utils.semver_helper import make_semver
 from reconcile.utils.sharding import is_in_shard
 from reconcile.utils.vault import (
@@ -402,9 +402,16 @@ def fetch_provider_vault_secret(
     validate_alertmanager_config: bool = False,
     alertmanager_config_key: str = "alertmanager.yaml",
     settings: Mapping[str, Any] | None = None,
+    secret_reader: SecretReaderBase | None = None,
 ) -> OR:
-
-
+    if not secret_reader and not settings:
+        raise Exception(
+            "Parameter settings or secret_reader must be provided to run fetch_provider_vault_secret."
+        )
+
+    if not secret_reader:
+        # get the fields from vault
+        secret_reader = SecretReader(settings)
     raw_data = {
         k: v
         for k, v in secret_reader.read_all({"path": path, "version": version}).items()
reconcile/sql_query.py
CHANGED
@@ -19,6 +19,9 @@ from reconcile import (
 )
 from reconcile import openshift_resources_base as orb
 from reconcile.status import ExitCodes
+from reconcile.typed_queries.app_interface_vault_settings import (
+    get_app_interface_vault_settings,
+)
 from reconcile.utils.defer import defer
 from reconcile.utils.external_resources import get_external_resource_specs
 from reconcile.utils.oc import (
@@ -30,7 +33,10 @@ from reconcile.utils.openshift_resource import (
     ResourceInventory,
 )
 from reconcile.utils.ruamel import create_ruamel_instance
-from reconcile.utils.secret_reader import
+from reconcile.utils.secret_reader import (
+    SecretReaderBase,
+    create_secret_reader,
+)
 from reconcile.utils.semver_helper import make_semver
 from reconcile.utils.smtp_client import (
     DEFAULT_SMTP_TIMEOUT,
@@ -187,14 +193,16 @@ def get_tf_resource_info(


 def collect_queries(
-
+    secret_reader: SecretReaderBase,
+    smtp_client: SmtpClient,
+    query_name: str | None = None,
 ) -> list[dict[str, Any]]:
     """
     Consults the app-interface and constructs the list of queries
     to be executed.

     :param query_name: (optional) query to look for
-    :param
+    :param secret_reader: SecretReaderBase

     :return: List of queries dictionaries
     """
@@ -218,8 +226,8 @@ def collect_queries(
         "",
         1,
         accounts=[],
-        settings=settings,
         prefetch_resources_by_schemas=["/aws/rds-defaults-1.yml"],
+        secret_reader=secret_reader,
     )

     for sql_query in sql_queries:
@@ -499,11 +507,11 @@ def get_service_account(name: str, labels: dict) -> dict[str, Any]:
     }


-def split_long_query(q, size) -> list[str]:
+def split_long_query(q: str, size: int) -> list[str]:
     return [q[i : i + size] for i in range(0, len(q), size)]


-def merge_files_command(directory, file_glob, output_file):
+def merge_files_command(directory: str, file_glob: str, output_file: str) -> str:
     return f"cat ''{directory}''/{file_glob} > ''{output_file}''"


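As a quick standalone illustration of the two helpers that gained type annotations in the hunk above (the query string and paths below are made-up example values):

```python
def split_long_query(q: str, size: int) -> list[str]:
    # Chunk a long SQL string into size-limited pieces (copied from the hunk above).
    return [q[i : i + size] for i in range(0, len(q), size)]


def merge_files_command(directory: str, file_glob: str, output_file: str) -> str:
    # Shell command that concatenates the chunk files back into one file.
    return f"cat ''{directory}''/{file_glob} > ''{output_file}''"


if __name__ == "__main__":
    print(split_long_query("SELECT 1; SELECT 2;", size=8))
    # ['SELECT 1', '; SELECT', ' 2;']
    print(merge_files_command("/tmp/q", "chunk-*", "/tmp/q/full.sql"))
    # cat ''/tmp/q''/chunk-* > ''/tmp/q/full.sql''
```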
@@ -582,8 +590,8 @@ def _build_openshift_resources(
     query: dict[str, Any],
     image_repository: str,
     pull_secret: dict[str, Any] | None,
-
-):
+    secret_reader: SecretReaderBase,
+) -> list[OpenshiftResource]:
     query_name = query["name"]
     common_resource_labels = _build_common_resource_labels(query)
     openshift_resources: list[OpenshiftResource] = []
@@ -600,7 +608,7 @@ def _build_openshift_resources(
            type=pull_secret["type"],
            integration=QONTRACT_INTEGRATION,
            integration_version=QONTRACT_INTEGRATION_VERSION,
-
+            secret_reader=secret_reader,
        )
        openshift_resources.append(secret_resource)
    # ConfigMap gpg
@@ -696,14 +704,14 @@ def _reconstruct_for_metrics(
     query: dict[str, Any],
     image_repository: str,
     pull_secret: dict[str, Any] | None,
-    settings: dict[str, Any],
     ri: ResourceInventory,
+    secret_reader: SecretReaderBase,
 ) -> None:
     openshift_resources = _build_openshift_resources(
         query,
         image_repository,
         pull_secret,
-
+        secret_reader,
     )
     cluster = query["cluster"]
     namespace = query["namespace"]["name"]
@@ -762,6 +770,7 @@ def _process_existing_query(
     image_repository: str,
     pull_secret: dict[str, Any],
     ri: ResourceInventory,
+    secret_reader: SecretReaderBase,
 ) -> None:
     match _get_query_status(query, state):
         case QueryStatus.ACTIVE:
@@ -769,8 +778,8 @@ def _process_existing_query(
                 query,
                 image_repository,
                 pull_secret,
-                settings,
                 ri,
+                secret_reader,
             )
         case QueryStatus.PENDING_DELETION:
             _delete_query_resources(
@@ -797,12 +806,10 @@ def _process_new_query(
     image_repository: str,
     pull_secret: dict[str, Any],
     ri: ResourceInventory,
-
+    secret_reader: SecretReaderBase,
+) -> None:
     openshift_resources = _build_openshift_resources(
-        query,
-        image_repository,
-        pull_secret,
-        settings,
+        query, image_repository, pull_secret, secret_reader
     )

     cluster = query["cluster"]
@@ -835,12 +842,14 @@ def run(
 ) -> None:
     settings = queries.get_app_interface_settings()
     state = init_state(integration=QONTRACT_INTEGRATION)
+    vault_settings = get_app_interface_vault_settings()
+    secret_reader = create_secret_reader(use_vault=vault_settings.vault)
     if defer:
         defer(state.cleanup)
     smtp_settings = typed_queries.smtp.settings()
     smtp_client = SmtpClient(
         server=get_smtp_server_connection(
-            secret_reader=
+            secret_reader=secret_reader,
             secret=smtp_settings.credentials,
         ),
         mail_address=smtp_settings.mail_address,
@@ -854,7 +863,7 @@ def run(
     image_repository = sql_query_settings["imageRepository"]
     pull_secret = sql_query_settings["pullSecret"]

-    queries_list = collect_queries(
+    queries_list = collect_queries(secret_reader=secret_reader, smtp_client=smtp_client)
     query_states = {s.lstrip("/") for s in state.ls()}
     ri = ResourceInventory()
     for query in queries_list:
@@ -868,6 +877,7 @@ def run(
                 image_repository,
                 pull_secret,
                 ri,
+                secret_reader,
             )
         else:
             _process_new_query(
@@ -878,6 +888,7 @@ def run(
                 image_repository,
                 pull_secret,
                 ri,
+                secret_reader,
             )
     openshift_base.publish_metrics(ri, QONTRACT_INTEGRATION)

{qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/WHEEL
RENAMED
File without changes
{qontract_reconcile-0.10.2.dev192.dist-info → qontract_reconcile-0.10.2.dev194.dist-info}/entry_points.txt
RENAMED
File without changes