qontract-reconcile 0.10.1rc707__py3-none-any.whl → 0.10.1rc709__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
{qontract_reconcile-0.10.1rc707.dist-info → qontract_reconcile-0.10.1rc709.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: qontract-reconcile
-Version: 0.10.1rc707
+Version: 0.10.1rc709
 Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
 Home-page: https://github.com/app-sre/qontract-reconcile
 Author: Red Hat App-SRE Team
{qontract_reconcile-0.10.1rc707.dist-info → qontract_reconcile-0.10.1rc709.dist-info}/RECORD
@@ -18,7 +18,7 @@ reconcile/dashdotdb_dora.py,sha256=n9EJXhxCoMYuldj4Fa5s0TqfiiolSrqDEOCaLBV3uag,1
 reconcile/dashdotdb_dvo.py,sha256=YXqpI6fBQAql-ybGI0grj9gWMzmKiAvPE__pNju6obk,8996
 reconcile/dashdotdb_slo.py,sha256=bf1WSh5JP9obHVQsMy0OO71_VTYZgwAopElFZM6DmRo,6714
 reconcile/database_access_manager.py,sha256=42dBJyihdwx4WjEBjwi3lUiDzQ1t_2ZFViJri2c4_aE,25716
-reconcile/deadmanssnitch.py,sha256=lePZWxya1Xz_EqKxyBXoeTeFSWuJEn0-mU91SEHs93g,7707
+reconcile/deadmanssnitch.py,sha256=ET4gUX1WvmWsILI10GgcPDOuJL2f3bDwwvKORWvyFhc,7339
 reconcile/dynatrace_token_provider.py,sha256=P5jvMavremWp64LVknz1kCZI4aagwLrDDfXkmJ9diwY,17212
 reconcile/email_sender.py,sha256=-5L-Ag_jaEYSzYRoMr52KQBRXz1E8yx9GqLbg2X4XFU,3533
 reconcile/gabi_authorized_users.py,sha256=9kpSJGyMe_qYVHIgTFHhYf8E3lKSLO0Ia1WwK9ADNIE,4502
@@ -77,7 +77,7 @@ reconcile/openshift_routes.py,sha256=fXvuPSjcjVw1X3j2EQvUAdbOepmIFdKk-M3qP8QzPiw
 reconcile/openshift_saas_deploy.py,sha256=fmhopPEbyZsGQHRPzyzpKEvoBXEGN3aPxFi7Utq0emU,12788
 reconcile/openshift_saas_deploy_change_tester.py,sha256=fMmPDz-ZRO-WH8OIiDo4e4sBK4-zzpvFXmVCKGY1l-I,8837
 reconcile/openshift_saas_deploy_trigger_base.py,sha256=p1Mv7dbw_K7TVFv-M-DDq6r1ltuW9VAF6PCNnEng_zM,14291
-reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=aT8wqoXGyZEeXUA7Q6reaL9v5j5UnPLoYVe70D31HUg,2917
+reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=gU226N1JUvgEbTgDf7ouJ6QzZGMI_BLwXdS67UCukRk,3554
 reconcile/openshift_saas_deploy_trigger_configs.py,sha256=uWzUV5D5CW0frdi1ys7BObNg-rA-VZKlefd4TD_Z-pY,959
 reconcile/openshift_saas_deploy_trigger_images.py,sha256=Yl4lMtxqab-c04I2Ju8isAJuYuNTbHN01Bk3dF9nTos,967
 reconcile/openshift_saas_deploy_trigger_moving_commits.py,sha256=VqjwgRhA-yOSq0WFPqGIJVgYkdq_UrTHcVusBzOFgMY,973
@@ -234,7 +234,7 @@ reconcile/gql_definitions/common/ocm_env_telemeter.py,sha256=jW0Q9WazDQVOxh4u0LM
 reconcile/gql_definitions/common/ocm_environments.py,sha256=2Szg8VSIM4a9ZbjktDjSY6I1enih2Cx1tjYXoUFiZX0,2002
 reconcile/gql_definitions/common/pagerduty_instances.py,sha256=qcbOSUNwmeRcBetJ2FktUu6Mn4CPE2iNoevt3fY_xig,1988
 reconcile/gql_definitions/common/pgp_reencryption_settings.py,sha256=NPLmO6J-zSu5B9QiYbDezLHY3TuOO9ihRBV-Zr84R9w,2259
-reconcile/gql_definitions/common/pipeline_providers.py,sha256=AmEgemURPRQhsleaVbwxmEcbuymC6SH_3DO09sHl-b0,9115
+reconcile/gql_definitions/common/pipeline_providers.py,sha256=JJgmmghqLIwjKOdcWYHPnf4PDgAq4GF7046i0ozrqgI,9127
 reconcile/gql_definitions/common/saas_files.py,sha256=B7HE_jV5ky8AFxOCleSg2RYDrtUaV99DYa2If8wyvHs,15926
 reconcile/gql_definitions/common/saas_target_namespaces.py,sha256=gcTU9jrsNq9-HX-oOkj-nEZKYFTRytDHLs4SpEs93aw,2755
 reconcile/gql_definitions/common/saasherder_settings.py,sha256=nqQLcMwYxLseqq0BEcVvmrpIj2eQq0h8XDSpLN6GGCw,1793
@@ -262,7 +262,7 @@ reconcile/gql_definitions/fragments/membership_source.py,sha256=rVjAIAhhAoH0_fUf
 reconcile/gql_definitions/fragments/minimal_ocm_organization.py,sha256=G7uEwGR2qjtXl3yUUMWAzbihNKor0jj1_fcSd9qQOjw,731
 reconcile/gql_definitions/fragments/oc_connection_cluster.py,sha256=GO4v0q7urQt2G6fU6HJSgHmFOPxNCc3oO0zAwz9bU7o,1530
 reconcile/gql_definitions/fragments/ocm_environment.py,sha256=mEbBooD-SAxe7l73sja0ELiByGkTiqxuca9vmxfIISw,1075
-reconcile/gql_definitions/fragments/pipeline_provider_retention.py,sha256=hjqsTf95evcZn3BBKpF9dUJVBhpy2tq6r_rEbaF6cy4,757
+reconcile/gql_definitions/fragments/pipeline_provider_retention.py,sha256=nIGIaeuBDw5emBdDJvajl27pGULQ2fYG5SkQ0AYTr7E,814
 reconcile/gql_definitions/fragments/prometheus_instance.py,sha256=12ltnV9kdEw6LnHgevdM6Ms-lCPY802RC-CrYOoRRgY,1575
 reconcile/gql_definitions/fragments/resource_limits_requirements.py,sha256=ucskQ_a8RxvFl5-IWxz5kk3g4-5Pvh_W4N3nLmuKxi0,744
 reconcile/gql_definitions/fragments/resource_requests_requirements.py,sha256=TFKO4YALFPanSvZvIJFz0dCioBU7i73Q6hkDtGMvs9I,736
@@ -439,7 +439,7 @@ reconcile/test/test_cli.py,sha256=qx_iBwh4Z-YkK3sbjK1wEziPTgn060EN-baf9DNvR3k,10
 reconcile/test/test_closedbox_endpoint_monitoring.py,sha256=isMHYwRWMFARU2nbJgbl69kD6H0eA86noCM4MPVI1fo,7151
 reconcile/test/test_dashdotdb_dora.py,sha256=MfHGAsX2eSQSvBVt9_1Sah3aQKNJBXA9Iu86X0NWD6c,7705
 reconcile/test/test_database_access_manager.py,sha256=-9fYo8wMNhbJUTK_bd7g_fS5zYsAlqQ0rBDDYBMZvZQ,19595
-reconcile/test/test_deadmanssnitch.py,sha256=tSeFRG9JUOR-Y7W4S5adv9ZqfgAhbQOgnqW-EhCXggU,9445
+reconcile/test/test_deadmanssnitch.py,sha256=3AiarCKkSPU1cEkVwooglwU3sQgDifMAiXuJb1P9_vI,9220
 reconcile/test/test_gabi_authorized_users.py,sha256=6XnV5Q9inxP81ktGMVKyWucjBTUj8Imy2L0HG3YHyUE,2496
 reconcile/test/test_gcr_mirror.py,sha256=A0y8auKZzr62-mGoxSQ__JnN0-ijZUltzjwR5miBgso,490
 reconcile/test/test_github_org.py,sha256=j3KeB4OnSln1gm2hidce49xdMru-j75NS3cM-AEgzZc,4511
@@ -468,6 +468,7 @@ reconcile/test/test_openshift_resource.py,sha256=lbTf48jX1q6rGnRiA5pPvfU0uPfY8zh
 reconcile/test/test_openshift_resources_base.py,sha256=LtlR9x3o7KkSEw0JN0fZhinFeAAxBAQlB_9PpBnKwOM,14353
 reconcile/test/test_openshift_saas_deploy.py,sha256=YLJGkc--u5aP0UkQ-b9ZGEFGS2gw25jjcSgknQdI3Ic,5892
 reconcile/test/test_openshift_saas_deploy_change_tester.py,sha256=1yVe54Hx9YdVjn6qdnKge5Sa_s732c-8uZqCnuT1gGI,12871
+reconcile/test/test_openshift_saas_deploy_trigger_cleaner.py,sha256=cha3bUiXAWPCwrp8XwVC3RNJtJHLcsGTE-F8Zn6XxsU,2852
 reconcile/test/test_openshift_tekton_resources.py,sha256=RtRWsdm51S13OSkENC9nY_rOH0QELSCaO5tjF0XqIDI,11222
 reconcile/test/test_openshift_upgrade_watcher.py,sha256=0GDQ_YFHIX8DbkbDYSuLv9uZeeg4NwP1vlOqvSaZvN4,7183
 reconcile/test/test_prometheus_rules_tester.py,sha256=Qc9J4k0lSVie361lGr_cOy0EaBTD5LDqGZf7CYVFKr8,5677
@@ -761,8 +762,8 @@ tools/test/test_app_interface_metrics_exporter.py,sha256=SX7qL3D1SIRKFo95FoQztvf
 tools/test/test_qontract_cli.py,sha256=UEwAW7PA_GIrbqzaLxpkCxbuVjEFLNvnVG-6VyoCGIc,4147
 tools/test/test_sd_app_sre_alert_report.py,sha256=v363r9zM7__0kR5K6mvJoGFcM9BvE33fWAayrqkpojA,2116
 tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
-qontract_reconcile-0.10.1rc707.dist-info/METADATA,sha256=2sMuicUof6ZemdDsl2x77zDnjVZkgPZowU7C4iUrFPk,2382
-qontract_reconcile-0.10.1rc707.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-qontract_reconcile-0.10.1rc707.dist-info/entry_points.txt,sha256=rIxI5zWtHNlfpDeq1a7pZXAPoqf7HG32KMTN3MeWK_8,429
-qontract_reconcile-0.10.1rc707.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
-qontract_reconcile-0.10.1rc707.dist-info/RECORD,,
+qontract_reconcile-0.10.1rc709.dist-info/METADATA,sha256=LWgfuyiC1vMk_M3JnVI9i54gC6OTe2glD6StNzN4X5s,2382
+qontract_reconcile-0.10.1rc709.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+qontract_reconcile-0.10.1rc709.dist-info/entry_points.txt,sha256=rIxI5zWtHNlfpDeq1a7pZXAPoqf7HG32KMTN3MeWK_8,429
+qontract_reconcile-0.10.1rc709.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+qontract_reconcile-0.10.1rc709.dist-info/RECORD,,
reconcile/deadmanssnitch.py
@@ -4,6 +4,8 @@ from typing import (
     cast,
 )
 
+from pydantic import BaseModel
+
 from reconcile.gql_definitions.common.clusters_with_dms import ClusterV1
 from reconcile.typed_queries.app_interface_deadmanssnitch_settings import (
     get_deadmanssnitch_settings,
@@ -32,6 +34,17 @@ QONTRACT_INTEGRATION = "deadmanssnitch"
 SECRET_NOT_FOUND = "SECRET_NOT_FOUND"
 
 
+class SnitchSpec(BaseModel):
+    """Class to hold values from cluster file and settings"""
+
+    name: str
+    alert_email: list[str]
+    alert_type: str
+    interval: str
+    tags: list[str]
+    notes: str
+
+
 class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
     """Integration to automate deadmanssnitch snitch api during cluster dressup."""
 
@@ -45,6 +58,10 @@ class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
         self.settings = get_deadmanssnitch_settings()
         self.vault_client = cast(_VaultClient, VaultClient())
 
+    @staticmethod
+    def get_snitch_name(cluster: ClusterV1) -> str:
+        return cluster.prometheus_url.replace("https://", "")
+
     def write_snitch_to_vault(
         self, cluster_name: str, snitch_url: Optional[str]
     ) -> None:
@@ -71,15 +88,18 @@ class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
         return snitch
 
     def create_snitch(
-        self, cluster_name: str, snitch: Snitch, deadmanssnitch_api: DeadMansSnitchApi
+        self,
+        cluster_name: str,
+        snitch_spec: SnitchSpec,
+        deadmanssnitch_api: DeadMansSnitchApi,
     ) -> None:
         payload = {
-            "name": snitch.name,
-            "alert_type": snitch.alert_type,
-            "interval": snitch.interval,
-            "tags": snitch.tags,
-            "alert_email": snitch.alert_email,
-            "notes": snitch.notes,
+            "name": snitch_spec.name,
+            "alert_type": snitch_spec.alert_type,
+            "interval": snitch_spec.interval,
+            "tags": snitch_spec.tags,
+            "alert_email": snitch_spec.alert_email,
+            "notes": snitch_spec.notes,
         }
         snitch_data = deadmanssnitch_api.create_snitch(payload=payload)
         self.write_snitch_to_vault(
@@ -90,34 +110,40 @@ class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
         self,
         dry_run: bool,
         current_state: dict[str, Snitch],
-        desired_state: dict[str, Snitch],
+        desired_state: dict[str, SnitchSpec],
         deadmanssnitch_api: DeadMansSnitchApi,
     ) -> None:
-        diffs = diff_mappings(current=current_state, desired=desired_state)
+        diffs = diff_mappings(
+            current=current_state,
+            desired=desired_state,
+            equal=lambda current, desired: current.name == desired.name,
+        )
         errors = []
         for cluster_name, snitch in diffs.add.items():
             logging.info("[cluster_name:%s] [Action:create_snitch]", cluster_name)
-            try:
-                if not dry_run:
+            if not dry_run:
+                try:
                     self.create_snitch(cluster_name, snitch, deadmanssnitch_api)
-            except Exception as e:
-                errors.append(e)
-        for cluster_name, snitch in diffs.delete.items():
+                except Exception as e:
+                    errors.append(e)
+        for cluster_name, snitch_value in diffs.delete.items():
             logging.info("[cluster_name:%s] [Action:delete_snitch]", cluster_name)
-            try:
-                if not dry_run:
-                    deadmanssnitch_api.delete_snitch(snitch.token)
-            except Exception as e:
-                errors.append(e)
+            if not dry_run:
+                try:
+                    deadmanssnitch_api.delete_snitch(snitch_value.token)
+                except Exception as e:
+                    errors.append(e)
         for cluster_name, diff_pair in diffs.identical.items():
-            try:
-                if diff_pair.desired.needs_vault_update():
-                    self.write_snitch_to_vault(
-                        cluster_name=cluster_name,
-                        snitch_url=diff_pair.desired.check_in_url,
-                    )
-            except Exception as e:
-                errors.append(e)
+            if diff_pair.current.needs_vault_update():
+                logging.info("[cluster_name:%s] [Action:update_vault]", cluster_name)
+                if not dry_run:
+                    try:
+                        self.write_snitch_to_vault(
+                            cluster_name=cluster_name,
+                            snitch_url=diff_pair.current.check_in_url,
+                        )
+                    except Exception as e:
+                        errors.append(e)
         if errors:
             raise ExceptionGroup("Errors occurred while reconcile", errors)
 
@@ -125,63 +151,40 @@ class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
         self,
         deadmanssnitch_api: DeadMansSnitchApi,
         clusters: list[ClusterV1],
-        snitch_secret_path: str,
-        cluster_to_prometheus_mapping: dict[str, str],
     ) -> dict[str, Snitch]:
+        snitch_name_to_cluster_name_mapping = {
+            self.get_snitch_name(cluster): cluster.name for cluster in clusters
+        }
         # current state includes for deadmanssnithch response and associated secret in vault
         snitches = deadmanssnitch_api.get_snitches(tags=self.settings.tags)
         # create snitch_map only for the desired clusters
-        snitches_with_cluster_mapping = {
-            cluster.name: snitch
-            for snitch in snitches
-            for cluster in clusters
-            if (cluster_to_prometheus_mapping.get(cluster.name) == snitch.name)
-        }
         current_state = {
-            cluster.name: self.add_vault_data(cluster.name, snitch, snitch_secret_path)
-            for cluster in clusters
-            if (snitch := snitches_with_cluster_mapping.get(cluster.name))
+            cluster_name: self.add_vault_data(
+                cluster_name, snitch, self.settings.snitches_path
+            )
+            for snitch in snitches
+            if (cluster_name := snitch_name_to_cluster_name_mapping.get(snitch.name))
         }
         return current_state
 
     def get_desired_state(
         self,
         clusters: list[ClusterV1],
-        current_state: dict[str, Snitch],
-        cluster_to_prometheus_mapping: dict[str, str],
-    ) -> dict[str, Snitch]:
+    ) -> dict[str, SnitchSpec]:
         desired_state = {
-            cluster.name: self.map_desired_snitch_value(
-                cluster.name, current_state, cluster_to_prometheus_mapping
+            cluster.name: SnitchSpec(
+                name=self.get_snitch_name(cluster),
+                alert_email=self.settings.alert_mail_addresses,
+                interval=self.settings.interval,
+                tags=self.settings.tags,
+                notes=self.settings.notes_link,
+                alert_type=self.settings.alert_type,
             )
             for cluster in clusters
            if cluster.enable_dead_mans_snitch
        }
        return desired_state
 
-    # get snitch in case snitch available for desired state cluster
-    # return new snitch object in case of new cluster
-    def map_desired_snitch_value(
-        self,
-        cluster_name: str,
-        current_state: dict[str, Snitch],
-        cluster_to_prometheus_mapping: dict[str, str],
-    ) -> Snitch:
-        if (snitch := current_state.get(cluster_name)) is not None:
-            return snitch
-        return Snitch(
-            name=cluster_to_prometheus_mapping.get(cluster_name),
-            check_in_url="",
-            status="",
-            href="",
-            token="",
-            alert_email=self.settings.alert_mail_addresses,
-            alert_type=self.settings.alert_type,
-            interval=self.settings.interval,
-            tags=self.settings.tags,
-            notes=self.settings.notes_link,
-        )
-
     def run(self, dry_run: bool) -> None:
         # Initialize deadmanssnitch_api
         token = self.secret_reader.read({
@@ -191,18 +194,15 @@ class DeadMansSnitchIntegration(QontractReconcileIntegration[NoParams]):
         with DeadMansSnitchApi(token=token) as deadmanssnitch_api:
             # desired state - get the clusters having enableDeadMansSnitch field
             clusters = get_clusters_with_dms()
-            # create a mapping between prometheus url without the https:// and cluster name
-            cluster_to_prometheus_mapping = {
-                cluster.name: cluster.prometheus_url.replace("https://", "")
-                for cluster in clusters
-            }
+            desired_state = self.get_desired_state(clusters)
+            # create current state from deadmanssnitch and vault
             current_state = self.get_current_state(
                 deadmanssnitch_api,
                 clusters,
-                self.settings.snitches_path,
-                cluster_to_prometheus_mapping,
             )
-            desired_state = self.get_desired_state(
-                clusters, current_state, cluster_to_prometheus_mapping
+            self.reconcile(
+                dry_run,
+                current_state=current_state,
+                desired_state=desired_state,
+                deadmanssnitch_api=deadmanssnitch_api,
             )
-            self.reconcile(dry_run, current_state, desired_state, deadmanssnitch_api)
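The key change in `reconcile()` above is that the two mappings now hold different types: the current state carries full `Snitch` API objects while the desired state carries the lighter `SnitchSpec`, so `diff_mappings` is given an explicit `equal` callback that compares snitch names only. A minimal sketch of the classification this relies on (a toy re-implementation for illustration, not the project's actual `diff_mappings` utility, which also reports changed pairs that this integration does not act on):

```python
from dataclasses import dataclass, field
from typing import Any, Callable


@dataclass
class DiffPair:
    current: Any
    desired: Any


@dataclass
class DiffResult:
    add: dict[str, Any] = field(default_factory=dict)  # desired only -> create snitch
    delete: dict[str, Any] = field(default_factory=dict)  # current only -> delete snitch
    identical: dict[str, DiffPair] = field(default_factory=dict)  # equal by name -> maybe update vault


def diff_mappings_sketch(
    current: dict[str, Any],
    desired: dict[str, Any],
    equal: Callable[[Any, Any], bool],
) -> DiffResult:
    result = DiffResult()
    for key, desired_value in desired.items():
        if key not in current:
            result.add[key] = desired_value
        elif equal(current[key], desired_value):
            result.identical[key] = DiffPair(current[key], desired_value)
    for key, current_value in current.items():
        if key not in desired:
            result.delete[key] = current_value
    return result
```

Because both mappings are keyed by cluster name and equality is reduced to the snitch name, a `Snitch` and a `SnitchSpec` can be compared directly without the old `map_desired_snitch_value` shim that fabricated placeholder `Snitch` objects.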
reconcile/gql_definitions/common/pipeline_providers.py
@@ -39,6 +39,7 @@ fragment CommonJumphostFields on ClusterJumpHost_v1 {
 fragment PipelineProviderRetention on PipelinesProviderRetention_v1 {
   days
   minimum
+  maximum
 }
 
 fragment ResourceLimitsRequirements on ResourceLimitsRequirements_v1 {
reconcile/gql_definitions/fragments/pipeline_provider_retention.py
@@ -27,3 +27,4 @@ class ConfiguredBaseModel(BaseModel):
 class PipelineProviderRetention(ConfiguredBaseModel):
     days: Optional[int] = Field(..., alias="days")
     minimum: Optional[int] = Field(..., alias="minimum")
+    maximum: Optional[int] = Field(..., alias="maximum")
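Together these two hunks add an optional `maximum` retention knob to the GraphQL fragment and its generated Pydantic model. A quick sanity check of the model, using only what is shown above (field names match their aliases, so keyword construction works, exactly as the new tests further below do it):

```python
from reconcile.gql_definitions.fragments.pipeline_provider_retention import (
    PipelineProviderRetention,
)

# All three fields are Optional; "maximum" is the newly added ceiling on the
# number of retained PipelineRuns.
retention = PipelineProviderRetention(days=7, minimum=5, maximum=100)
assert retention.maximum == 100
```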
reconcile/openshift_saas_deploy_trigger_cleaner.py
@@ -12,6 +12,9 @@ from typing import (
 
 from dateutil import parser
 
+from reconcile.gql_definitions.fragments.pipeline_provider_retention import (
+    PipelineProviderRetention,
+)
 from reconcile.typed_queries.app_interface_vault_settings import (
     get_app_interface_vault_settings,
 )
@@ -30,15 +33,36 @@ QONTRACT_INTEGRATION = "openshift-saas-deploy-trigger-cleaner"
 QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
-def within_retention_days(resource: dict[str, Any], days: int) -> bool:
+def within_retention_days(
+    resource: dict[str, Any], days: int, now_date: datetime
+) -> bool:
     metadata = resource["metadata"]
     creation_date = parser.parse(metadata["creationTimestamp"])
-    now_date = datetime.now(timezone.utc)
     interval = now_date.timestamp() - creation_date.timestamp()
 
     return interval < timedelta(days=days).total_seconds()
 
 
+def get_pipeline_runs_to_delete(
+    pipeline_runs: list[dict[str, Any]],
+    retention: PipelineProviderRetention,
+    now_date: datetime,
+) -> list[dict[str, Any]]:
+    pipeline_runs_to_delete = []
+    if retention.minimum:
+        pipeline_runs = pipeline_runs[retention.minimum :]
+    elif retention.maximum:
+        pipeline_runs_to_delete = pipeline_runs[retention.maximum :]
+        pipeline_runs = pipeline_runs[: retention.maximum]
+
+    for pr in pipeline_runs:
+        if retention.days and within_retention_days(pr, retention.days, now_date):
+            continue
+        pipeline_runs_to_delete.append(pr)
+
+    return pipeline_runs_to_delete
+
+
 @defer
 def run(
     dry_run: bool,
@@ -47,6 +71,7 @@ def run(
     use_jump_host: bool = True,
     defer: Optional[Callable] = None,
 ) -> None:
+    now_date = datetime.now(timezone.utc)
     vault_settings = get_app_interface_vault_settings()
     secret_reader = create_secret_reader(use_vault=vault_settings.vault)
     pipeline_providers = get_tekton_pipeline_providers()
@@ -77,14 +102,8 @@ def run(
             reverse=True,
         )
 
-        if pp.retention.minimum:
-            pipeline_runs = pipeline_runs[pp.retention.minimum :]
-
-        for pr in pipeline_runs:
+        for pr in get_pipeline_runs_to_delete(pipeline_runs, pp.retention, now_date):
             name = pr["metadata"]["name"]
-            if pp.retention.days and within_retention_days(pr, pp.retention.days):
-                continue
-
             logging.info([
                 "delete_trigger",
                 pp.namespace.cluster.name,
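A short worked example (hypothetical run names and timestamps, not from the package) showing the precedence rules `get_pipeline_runs_to_delete` encodes: `minimum` is a floor of runs that are always kept, `maximum` is a ceiling above which runs are always deleted, `days` filters whatever remains, and the `elif` means `minimum` wins when both limits are set. Note that the function expects the list already reverse-sorted by `creationTimestamp`, as `run()` sorts it:

```python
from datetime import datetime, timezone

from reconcile.gql_definitions.fragments.pipeline_provider_retention import (
    PipelineProviderRetention,
)
from reconcile.openshift_saas_deploy_trigger_cleaner import get_pipeline_runs_to_delete

now = datetime(2024, 4, 4, tzinfo=timezone.utc)
runs = [  # newest first
    {"metadata": {"name": "r1", "creationTimestamp": "2024-04-03T20:00:00Z"}},  # 4h old
    {"metadata": {"name": "r2", "creationTimestamp": "2024-04-02T20:00:00Z"}},  # 28h old
    {"metadata": {"name": "r3", "creationTimestamp": "2024-03-30T00:00:00Z"}},  # 5d old
]

# "days" alone: r2 and r3 are older than one day and get deleted.
days_only = PipelineProviderRetention(days=1, minimum=None, maximum=None)
assert len(get_pipeline_runs_to_delete(runs, days_only, now)) == 2

# A floor of two: only runs beyond the newest two are even candidates, so
# just r3 goes, although r2 is also older than a day.
keep_two = PipelineProviderRetention(days=1, minimum=2, maximum=None)
assert len(get_pipeline_runs_to_delete(runs, keep_two, now)) == 1

# A ceiling of one: everything beyond the newest run is deleted outright;
# the survivor r1 is still young enough to pass the "days" filter.
cap_one = PipelineProviderRetention(days=1, minimum=None, maximum=1)
assert len(get_pipeline_runs_to_delete(runs, cap_one, now)) == 2
```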
reconcile/test/test_deadmanssnitch.py
@@ -102,11 +102,6 @@ def test_get_current_state(
     current_state = dms_integration.get_current_state(
         deadmanssnitch_api=deadmanssnitch_api,
         clusters=clusters,
-        snitch_secret_path="test_path",
-        cluster_to_prometheus_mapping={
-            "test_cluster_1": "prometheus.test_cluster_1.net",
-            "test_cluster_2": "https://prometheus.test_cluster_2.net",
-        },
     )
     assert current_state["test_cluster_1"].vault_data == "secret"
 
reconcile/test/test_openshift_saas_deploy_trigger_cleaner.py (new file)
@@ -0,0 +1,65 @@
+from datetime import datetime, timezone
+from typing import Any
+
+import pytest
+
+from reconcile.gql_definitions.fragments.pipeline_provider_retention import (
+    PipelineProviderRetention,
+)
+from reconcile.openshift_saas_deploy_trigger_cleaner import get_pipeline_runs_to_delete
+
+from .fixtures import Fixtures
+
+fxt = Fixtures("openshift_saas_deploy_trigger_cleaner")
+
+
+@pytest.fixture
+def now() -> datetime:
+    return datetime(2024, 4, 4, 0, 0, 0, 0, tzinfo=timezone.utc)
+
+
+# A fixture simulating the output of getting PipelineRuns from a namespace,
+# simplified to only the fields relevant for get_pipeline_runs_to_delete,
+# reverse-sorted by creationTimestamp
+@pytest.fixture
+def pipeline_runs() -> list[dict[str, Any]]:
+    return fxt.get_anymarkup("pipeline_runs.yaml")
+
+
+# No min/max settings, we go with whatever "days" says
+def test_days(now: datetime, pipeline_runs: list[dict[str, Any]]) -> None:
+    retention = PipelineProviderRetention(days=1, minimum=None, maximum=None)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 4
+
+    retention = PipelineProviderRetention(days=2, minimum=None, maximum=None)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 2
+
+
+# Minimum set, it takes precedence over "days"
+def test_days_and_minimum(now: datetime, pipeline_runs: list[dict[str, Any]]) -> None:
+    retention = PipelineProviderRetention(days=1, minimum=5, maximum=None)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 1
+    # We would have removed four per the "days" setting, but we may only remove one
+
+    retention = PipelineProviderRetention(days=1, minimum=3, maximum=None)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 3
+    # We would have removed four per the "days" setting, but we may only remove three
+
+    retention = PipelineProviderRetention(days=1, minimum=1, maximum=None)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 4
+    # Removing four still leaves two, so we're fine
+
+
+# Maximum set, it takes precedence over "days"
+def test_days_and_maximum(now: datetime, pipeline_runs: list[dict[str, Any]]) -> None:
+    retention = PipelineProviderRetention(days=1, minimum=None, maximum=1)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 5
+    # We keep at most one, no matter what "days" says
+
+    retention = PipelineProviderRetention(days=1, minimum=None, maximum=3)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 4
+    # By removing four we comply with the max setting of three
+
+    retention = PipelineProviderRetention(days=2, minimum=None, maximum=3)
+    assert len(get_pipeline_runs_to_delete(pipeline_runs, retention, now)) == 3
+    # We would have removed only two following "days", but max tells us otherwise
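The `pipeline_runs.yaml` fixture itself is not part of this diff, so the asserted counts have to be inferred. A hypothetical fixture consistent with every assertion above would hold six runs, reverse-sorted by `creationTimestamp`, with two younger than one day, two more younger than two days, and two older, relative to the fixed `now` of 2024-04-04T00:00:00Z. Sketched here as the parsed Python value that `fxt.get_anymarkup` would return (names and timestamps are illustrative, not the real fixture data):

```python
# Hypothetical parsed content of pipeline_runs.yaml; e.g. days=1 would mark
# run-3 through run-6 (the four runs older than a day) for deletion, matching
# the first assertion in test_days.
pipeline_runs = [
    {"metadata": {"name": "run-1", "creationTimestamp": "2024-04-03T18:00:00Z"}},  # 6h
    {"metadata": {"name": "run-2", "creationTimestamp": "2024-04-03T06:00:00Z"}},  # 18h
    {"metadata": {"name": "run-3", "creationTimestamp": "2024-04-02T18:00:00Z"}},  # 30h
    {"metadata": {"name": "run-4", "creationTimestamp": "2024-04-02T06:00:00Z"}},  # 42h
    {"metadata": {"name": "run-5", "creationTimestamp": "2024-04-01T12:00:00Z"}},  # 2.5d
    {"metadata": {"name": "run-6", "creationTimestamp": "2024-03-31T12:00:00Z"}},  # 3.5d
]
```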