qontract-reconcile 0.9.1rc179-py3-none-any.whl → 0.9.1rc181-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- qontract_reconcile-0.9.1rc179.dist-info/METADATA
+++ qontract_reconcile-0.9.1rc181.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qontract-reconcile
- Version: 0.9.1rc179
+ Version: 0.9.1rc181
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Home-page: https://github.com/app-sre/qontract-reconcile
  Author: Red Hat App-SRE Team
--- qontract_reconcile-0.9.1rc179.dist-info/RECORD
+++ qontract_reconcile-0.9.1rc181.dist-info/RECORD
@@ -75,7 +75,7 @@ reconcile/ocm_upgrade_scheduler.py,sha256=x5AJN3bf2f0Uffg1g9ZlPUh4L-qEobMoDRuCWz
  reconcile/ocm_upgrade_scheduler_org.py,sha256=3g696Y2p3iLNzqXqOD-nQmu6PkiZ13X9sB0GaKvgZ6w,1254
  reconcile/ocm_upgrade_scheduler_org_updater.py,sha256=7j5fxPLZQuhlxfhSRCnSnIJhUoysTwvhJ422vWqfVeo,4015
  reconcile/ocp_release_mirror.py,sha256=5A9rLooZjOqZKQgtJmbjZPvjaDUC0dZq31662189uXY,13386
- reconcile/openshift_base.py,sha256=36kgARd-aPvIgL_mvW_CQrSLbi3-17npDNK_zt3cKwU,42489
+ reconcile/openshift_base.py,sha256=tX_SIMx8s82WSuK5kCY32fL0HZeaDt1xnkMER7F2T84,42495
  reconcile/openshift_clusterrolebindings.py,sha256=cztreWXqrkxLgKvJGGWJg4ZleU1hxvY8Z6rRgCbyahc,5771
  reconcile/openshift_groups.py,sha256=C1L0tIWbXbpvQVCIoc6z9HMGixxWWx5EGmJtWv17s7w,9376
  reconcile/openshift_limitranges.py,sha256=SZkHhGS3m9oBUkzcclRiszfLEB7jQ8z8xh9P2Y8KTzg,3393
@@ -90,7 +90,7 @@ reconcile/openshift_routes.py,sha256=fXvuPSjcjVw1X3j2EQvUAdbOepmIFdKk-M3qP8QzPiw
  reconcile/openshift_saas_deploy.py,sha256=WLMb6MSqMjJzMn88jtAiLSGDqdOiR3oyVL3LrRELdLs,9317
  reconcile/openshift_saas_deploy_change_tester.py,sha256=gdEke-uNKv1v8vuveThCn8_zEdP-BpiltS_OugqDFhg,9105
  reconcile/openshift_saas_deploy_trigger_base.py,sha256=sHsTsyRl6wV6B4gxE8RcHqJlxQP8aj2UQ-TB_WLs2eI,14105
- reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=U1JsDxO2Sr0zY4lPpXJhbnLe-6trjedqtSrRiRnqLio,2740
+ reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=iFIkmDnN-ZKeIInT_5lWPkjSPwhidzw1GCmdziaagUk,2935
  reconcile/openshift_saas_deploy_trigger_configs.py,sha256=uWzUV5D5CW0frdi1ys7BObNg-rA-VZKlefd4TD_Z-pY,959
  reconcile/openshift_saas_deploy_trigger_images.py,sha256=Yl4lMtxqab-c04I2Ju8isAJuYuNTbHN01Bk3dF9nTos,967
  reconcile/openshift_saas_deploy_trigger_moving_commits.py,sha256=VqjwgRhA-yOSq0WFPqGIJVgYkdq_UrTHcVusBzOFgMY,973
@@ -176,6 +176,7 @@ reconcile/gql_definitions/common/namespaces.py,sha256=HNu55aeBQUGr4JfjQFj8sGAE2S
  reconcile/gql_definitions/common/namespaces_minimal.py,sha256=0n9f2ldd_aEN9tzcqKvtzZ53ceH-1v411vVkD8wPaeY,3539
  reconcile/gql_definitions/common/pagerduty_instances.py,sha256=8NBHKRXg_OKG9NsJv6FOj8UVFcjkdJg-9E16ZqZIRPQ,2006
  reconcile/gql_definitions/common/pgp_reencryption_settings.py,sha256=tS68-tBBd7BJYmfTjtdTlxpABF3f_z9eJdtaKnyZc0Q,2305
+ reconcile/gql_definitions/common/pipeline_providers.py,sha256=6GgiGxV4Y25VTlfmhunFO_isU0gHKfid4L0YH9AGf80,9398
  reconcile/gql_definitions/common/saas_files.py,sha256=oMR5w7GHxG5QgAzmbB1ZmtzTglwuxnPLNnerVp1JVwY,14975
  reconcile/gql_definitions/common/saasherder_settings.py,sha256=jxrFr03NmiwV3uegKCxQgB5iveC2IaGZIoguXoiNMgs,1797
  reconcile/gql_definitions/common/smtp_client_settings.py,sha256=Pb8VgTGFqCh4_rI0BOHoXuicfdNyol1kIN8NLONHaxI,2252
@@ -187,6 +188,7 @@ reconcile/gql_definitions/fragments/aws_infra_management_account.py,sha256=IuXBB
  reconcile/gql_definitions/fragments/jumphost_common_fields.py,sha256=yQYTbQs9yQDO9Vs-pv_99e_dSq48_SwUVpotRfFlihM,1038
  reconcile/gql_definitions/fragments/oc_connection_cluster.py,sha256=DoAYQLQXB-TYuQNLpnlNzzHYPzxAW64Fd9-94-1ZTLE,1623
  reconcile/gql_definitions/fragments/ocm_environment.py,sha256=DOfBY4fb02UF5R3ZxtwHBTlxhPYOOBJaqGeENElX7oc,1037
+ reconcile/gql_definitions/fragments/resource_requirements.py,sha256=p7bMDzeIMr008A_4skrZjXAwVXA4PtO2rX93iGBYqvk,732
  reconcile/gql_definitions/fragments/resource_values.py,sha256=DS3KqzgjT_wJ36SWPZ9HyNXOBNvbhhGnh06IMQmrFXQ,746
  reconcile/gql_definitions/fragments/user.py,sha256=6RVsHZVHjOrZKcH8rWGQG9V1OtPpACSZ3wNwca3H8pA,943
  reconcile/gql_definitions/fragments/vault_secret.py,sha256=pXbTPa-ptuT1L5HF83pEGMxhjRVA_b_wfGHS4NoZEW8,837
@@ -376,6 +378,7 @@ reconcile/typed_queries/pagerduty_instances.py,sha256=QCHqEAakiH6eSob0Pnnn3IBd8G
  reconcile/typed_queries/repos.py,sha256=RKBsf7IDS6NsXTtXxJ9Ol9G3bxG9sr3vW9QQ2bahEHo,512
  reconcile/typed_queries/saas_files.py,sha256=ZCf_Zo1mXjpnsoSzogPSzzFATKkHZvJAsJVtZdQkwEk,2112
  reconcile/typed_queries/smtp.py,sha256=aSLglYa5bHKmlGwKkxq2RZqyMWuAf0a4S_mOuhDa084,542
+ reconcile/typed_queries/tekton_pipeline_providers.py,sha256=2mpHBdsNPQB94tw0H9aenGuqj8EEjYolQ03YEq1CpiY,546
  reconcile/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/utils/aggregated_list.py,sha256=svF8qu2U8iAIUPrGbKiMzuzz2RffI0GOJzWDxBDgSIQ,3332
  reconcile/utils/amtool.py,sha256=9p9FYkv4RYPnkDICuN1apcqJyZ5n8WbHF6vC0FIiQIw,2166
@@ -492,8 +495,8 @@ tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y
  tools/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tools/test/test_qontract_cli.py,sha256=awwTHEc2DWlykuqGIYM0WOBoSL0KRnOraCLk3C7izis,1401
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.9.1rc179.dist-info/METADATA,sha256=4nF0xvoKf-lhVWVNsuiiEwBv2JbDyiF1GKqK5DsTcBU,2241
- qontract_reconcile-0.9.1rc179.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
- qontract_reconcile-0.9.1rc179.dist-info/entry_points.txt,sha256=3BPvsRryM1C4S_mb5kXmP5AVv-wJBzVCrOJyv6qUmc0,195
- qontract_reconcile-0.9.1rc179.dist-info/top_level.txt,sha256=j0CHPIc8TsVRB50wOz_jhxjjaRyCJB3NOQeXhuHS67c,34
- qontract_reconcile-0.9.1rc179.dist-info/RECORD,,
+ qontract_reconcile-0.9.1rc181.dist-info/METADATA,sha256=sfaSFXRyUaXopLC2pcz74p7EzvBg4cvhpIdrGR4adYI,2241
+ qontract_reconcile-0.9.1rc181.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+ qontract_reconcile-0.9.1rc181.dist-info/entry_points.txt,sha256=3BPvsRryM1C4S_mb5kXmP5AVv-wJBzVCrOJyv6qUmc0,195
+ qontract_reconcile-0.9.1rc181.dist-info/top_level.txt,sha256=j0CHPIc8TsVRB50wOz_jhxjjaRyCJB3NOQeXhuHS67c,34
+ qontract_reconcile-0.9.1rc181.dist-info/RECORD,,
--- /dev/null
+++ reconcile/gql_definitions/common/pipeline_providers.py
@@ -0,0 +1,318 @@
+ """
+ Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
+ """
+ from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
+ from datetime import datetime # noqa: F401 # pylint: disable=W0611
+ from enum import Enum # noqa: F401 # pylint: disable=W0611
+ from typing import ( # noqa: F401 # pylint: disable=W0611
+     Any,
+     Optional,
+     Union,
+ )
+
+ from pydantic import ( # noqa: F401 # pylint: disable=W0611
+     BaseModel,
+     Extra,
+     Field,
+     Json,
+ )
+
+ from reconcile.gql_definitions.fragments.jumphost_common_fields import (
+     CommonJumphostFields,
+ )
+ from reconcile.gql_definitions.fragments.resource_requirements import (
+     ResourceRequirements,
+ )
+ from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
+
+
+ DEFINITION = """
+ fragment CommonJumphostFields on ClusterJumpHost_v1 {
+   hostname
+   knownHosts
+   user
+   port
+   remotePort
+   identity {
+     ... VaultSecret
+   }
+ }
+
+ fragment ResourceRequirements on ResourceRequirements_v1 {
+   cpu
+   memory
+ }
+
+ fragment VaultSecret on VaultSecret_v1 {
+   path
+   field
+   version
+   format
+ }
+
+ query PipelineProviders {
+   pipelines_providers: pipelines_providers_v1 {
+     name
+     provider
+     ...on PipelinesProviderTekton_v1 {
+       defaults {
+         retention {
+           days
+           minimum
+         }
+         taskTemplates {
+           ...on PipelinesProviderTektonObjectTemplate_v1 {
+             name
+             type
+             path
+             variables
+           }
+         }
+         pipelineTemplates {
+           openshiftSaasDeploy {
+             name
+             type
+             path
+             variables
+           }
+         }
+         deployResources {
+           requests {
+             ... ResourceRequirements
+           }
+           limits {
+             ... ResourceRequirements
+           }
+         }
+       }
+       namespace {
+         name
+         clusterAdmin
+         cluster {
+           name
+           serverUrl
+           insecureSkipTLSVerify
+           jumpHost {
+             ... CommonJumphostFields
+           }
+           automationToken {
+             ... VaultSecret
+           }
+           clusterAdminAutomationToken {
+             ... VaultSecret
+           }
+           internal
+           disable {
+             integrations
+             e2eTests
+           }
+         }
+       }
+       retention {
+         days
+         minimum
+       }
+       taskTemplates {
+         ...on PipelinesProviderTektonObjectTemplate_v1 {
+           name
+           type
+           path
+           variables
+         }
+       }
+       pipelineTemplates {
+         openshiftSaasDeploy {
+           name
+           type
+           path
+           variables
+         }
+       }
+       deployResources {
+         requests {
+           ... ResourceRequirements
+         }
+         limits {
+           ... ResourceRequirements
+         }
+       }
+     }
+   }
+ }
+ """
+
+
+ class ConfiguredBaseModel(BaseModel):
+     class Config:
+         smart_union = True
+         extra = Extra.forbid
+
+
+ class PipelinesProviderV1(ConfiguredBaseModel):
+     name: str = Field(..., alias="name")
+     provider: str = Field(..., alias="provider")
+
+
+ class PipelinesProviderRetentionV1(ConfiguredBaseModel):
+     days: Optional[int] = Field(..., alias="days")
+     minimum: Optional[int] = Field(..., alias="minimum")
+
+
+ class PipelinesProviderTektonObjectTemplateV1(ConfiguredBaseModel):
+     ...
+
+
+ class PipelinesProviderTektonObjectTemplateV1_PipelinesProviderTektonObjectTemplateV1(
+     PipelinesProviderTektonObjectTemplateV1
+ ):
+     name: str = Field(..., alias="name")
+     q_type: str = Field(..., alias="type")
+     path: str = Field(..., alias="path")
+     variables: Optional[Json] = Field(..., alias="variables")
+
+
+ class PipelinesProviderPipelineTemplatesV1_PipelinesProviderTektonObjectTemplateV1(
+     ConfiguredBaseModel
+ ):
+     name: str = Field(..., alias="name")
+     q_type: str = Field(..., alias="type")
+     path: str = Field(..., alias="path")
+     variables: Optional[Json] = Field(..., alias="variables")
+
+
+ class PipelinesProviderPipelineTemplatesV1(ConfiguredBaseModel):
+     openshift_saas_deploy: PipelinesProviderPipelineTemplatesV1_PipelinesProviderTektonObjectTemplateV1 = Field(
+         ..., alias="openshiftSaasDeploy"
+     )
+
+
+ class DeployResourcesV1(ConfiguredBaseModel):
+     requests: ResourceRequirements = Field(..., alias="requests")
+     limits: ResourceRequirements = Field(..., alias="limits")
+
+
+ class PipelinesProviderTektonProviderDefaultsV1(ConfiguredBaseModel):
+     retention: PipelinesProviderRetentionV1 = Field(..., alias="retention")
+     task_templates: list[
+         Union[
+             PipelinesProviderTektonObjectTemplateV1_PipelinesProviderTektonObjectTemplateV1,
+             PipelinesProviderTektonObjectTemplateV1,
+         ]
+     ] = Field(..., alias="taskTemplates")
+     pipeline_templates: PipelinesProviderPipelineTemplatesV1 = Field(
+         ..., alias="pipelineTemplates"
+     )
+     deploy_resources: Optional[DeployResourcesV1] = Field(..., alias="deployResources")
+
+
+ class DisableClusterAutomationsV1(ConfiguredBaseModel):
+     integrations: Optional[list[str]] = Field(..., alias="integrations")
+     e2e_tests: Optional[list[str]] = Field(..., alias="e2eTests")
+
+
+ class ClusterV1(ConfiguredBaseModel):
+     name: str = Field(..., alias="name")
+     server_url: str = Field(..., alias="serverUrl")
+     insecure_skip_tls_verify: Optional[bool] = Field(..., alias="insecureSkipTLSVerify")
+     jump_host: Optional[CommonJumphostFields] = Field(..., alias="jumpHost")
+     automation_token: Optional[VaultSecret] = Field(..., alias="automationToken")
+     cluster_admin_automation_token: Optional[VaultSecret] = Field(
+         ..., alias="clusterAdminAutomationToken"
+     )
+     internal: Optional[bool] = Field(..., alias="internal")
+     disable: Optional[DisableClusterAutomationsV1] = Field(..., alias="disable")
+
+
+ class NamespaceV1(ConfiguredBaseModel):
+     name: str = Field(..., alias="name")
+     cluster_admin: Optional[bool] = Field(..., alias="clusterAdmin")
+     cluster: ClusterV1 = Field(..., alias="cluster")
+
+
+ class PipelinesProviderTektonV1_PipelinesProviderRetentionV1(ConfiguredBaseModel):
+     days: Optional[int] = Field(..., alias="days")
+     minimum: Optional[int] = Field(..., alias="minimum")
+
+
+ class PipelinesProviderTektonV1_PipelinesProviderTektonObjectTemplateV1(
+     ConfiguredBaseModel
+ ):
+     ...
+
+
+ class PipelinesProviderTektonV1_PipelinesProviderTektonObjectTemplateV1_PipelinesProviderTektonObjectTemplateV1(
+     PipelinesProviderTektonV1_PipelinesProviderTektonObjectTemplateV1
+ ):
+     name: str = Field(..., alias="name")
+     q_type: str = Field(..., alias="type")
+     path: str = Field(..., alias="path")
+     variables: Optional[Json] = Field(..., alias="variables")
+
+
+ class PipelinesProviderTektonV1_PipelinesProviderPipelineTemplatesV1_PipelinesProviderTektonObjectTemplateV1(
+     ConfiguredBaseModel
+ ):
+     name: str = Field(..., alias="name")
+     q_type: str = Field(..., alias="type")
+     path: str = Field(..., alias="path")
+     variables: Optional[Json] = Field(..., alias="variables")
+
+
+ class PipelinesProviderTektonV1_PipelinesProviderPipelineTemplatesV1(
+     ConfiguredBaseModel
+ ):
+     openshift_saas_deploy: PipelinesProviderTektonV1_PipelinesProviderPipelineTemplatesV1_PipelinesProviderTektonObjectTemplateV1 = Field(
+         ..., alias="openshiftSaasDeploy"
+     )
+
+
+ class PipelinesProviderTektonV1_DeployResourcesV1(ConfiguredBaseModel):
+     requests: ResourceRequirements = Field(..., alias="requests")
+     limits: ResourceRequirements = Field(..., alias="limits")
+
+
+ class PipelinesProviderTektonV1(PipelinesProviderV1):
+     defaults: PipelinesProviderTektonProviderDefaultsV1 = Field(..., alias="defaults")
+     namespace: NamespaceV1 = Field(..., alias="namespace")
+     retention: Optional[PipelinesProviderTektonV1_PipelinesProviderRetentionV1] = Field(
+         ..., alias="retention"
+     )
+     task_templates: Optional[
+         list[
+             Union[
+                 PipelinesProviderTektonV1_PipelinesProviderTektonObjectTemplateV1_PipelinesProviderTektonObjectTemplateV1,
+                 PipelinesProviderTektonV1_PipelinesProviderTektonObjectTemplateV1,
+             ]
+         ]
+     ] = Field(..., alias="taskTemplates")
+     pipeline_templates: Optional[
+         PipelinesProviderTektonV1_PipelinesProviderPipelineTemplatesV1
+     ] = Field(..., alias="pipelineTemplates")
+     deploy_resources: Optional[PipelinesProviderTektonV1_DeployResourcesV1] = Field(
+         ..., alias="deployResources"
+     )
+
+
+ class PipelineProvidersQueryData(ConfiguredBaseModel):
+     pipelines_providers: Optional[
+         list[Union[PipelinesProviderTektonV1, PipelinesProviderV1]]
+     ] = Field(..., alias="pipelines_providers")
+
+
+ def query(query_func: Callable, **kwargs: Any) -> PipelineProvidersQueryData:
+     """
+     This is a convenience function which queries and parses the data into
+     concrete types. It should be compatible with most GQL clients.
+     You do not have to use it to consume the generated data classes.
+     Alternatively, you can also mime and alternate the behavior
+     of this function in the caller.
+
+     Parameters:
+         query_func (Callable): Function which queries your GQL Server
+         kwargs: optional arguments that will be passed to the query function
+
+     Returns:
+         PipelineProvidersQueryData: queried data parsed into generated classes
+     """
+     raw_data: dict[Any, Any] = query_func(DEFINITION, **kwargs)
+     return PipelineProvidersQueryData(**raw_data)
--- /dev/null
+++ reconcile/gql_definitions/fragments/resource_requirements.py
@@ -0,0 +1,29 @@
+ """
+ Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
+ """
+ from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
+ from datetime import datetime # noqa: F401 # pylint: disable=W0611
+ from enum import Enum # noqa: F401 # pylint: disable=W0611
+ from typing import ( # noqa: F401 # pylint: disable=W0611
+     Any,
+     Optional,
+     Union,
+ )
+
+ from pydantic import ( # noqa: F401 # pylint: disable=W0611
+     BaseModel,
+     Extra,
+     Field,
+     Json,
+ )
+
+
+ class ConfiguredBaseModel(BaseModel):
+     class Config:
+         smart_union = True
+         extra = Extra.forbid
+
+
+ class ResourceRequirements(ConfiguredBaseModel):
+     cpu: str = Field(..., alias="cpu")
+     memory: str = Field(..., alias="memory")
--- reconcile/openshift_base.py
+++ reconcile/openshift_base.py
@@ -353,10 +353,10 @@ def fetch_current_state(
      return ri, oc_map


- @retry(max_attempts=20)
+ @retry(max_attempts=30)
  def wait_for_namespace_exists(oc, namespace):
      if not oc.project_exists(namespace):
-         raise Exception(f"namespace {namespace} does not exist")
+         raise StatusCodeError(f"namespace {namespace} does not exist")


  def apply(
--- reconcile/openshift_saas_deploy_trigger_cleaner.py
+++ reconcile/openshift_saas_deploy_trigger_cleaner.py
@@ -12,10 +12,18 @@ from typing import (

  from dateutil import parser

- from reconcile import queries
+ from reconcile.typed_queries.app_interface_vault_settings import (
+     get_app_interface_vault_settings,
+ )
+ from reconcile.typed_queries.tekton_pipeline_providers import (
+     get_tekton_pipeline_providers,
+ )
  from reconcile.utils.defer import defer
- from reconcile.utils.oc import OC_Map
- from reconcile.utils.saasherder import Providers
+ from reconcile.utils.oc_map import (
+     OCLogMsg,
+     init_oc_map_from_namespaces,
+ )
+ from reconcile.utils.secret_reader import create_secret_reader
  from reconcile.utils.semver_helper import make_semver

  QONTRACT_INTEGRATION = "openshift-saas-deploy-trigger-cleaner"
@@ -39,53 +47,52 @@ def run(
      use_jump_host: bool = True,
      defer: Optional[Callable] = None,
  ) -> None:
-     settings = queries.get_app_interface_settings()
-     pipelines_providers = queries.get_pipelines_providers()
-     tkn_namespaces = [
-         pp["namespace"]
-         for pp in pipelines_providers
-         if pp["provider"] == Providers.TEKTON.value
-     ]
-
-     oc_map = OC_Map(
+     vault_settings = get_app_interface_vault_settings()
+     secret_reader = create_secret_reader(use_vault=vault_settings.vault)
+     pipeline_providers = get_tekton_pipeline_providers()
+     tkn_namespaces = [pp.namespace for pp in pipeline_providers]
+     oc_map = init_oc_map_from_namespaces(
          namespaces=tkn_namespaces,
          integration=QONTRACT_INTEGRATION,
-         settings=settings,
+         secret_reader=secret_reader,
          internal=internal,
          use_jump_host=use_jump_host,
          thread_pool_size=thread_pool_size,
      )
+
      if defer:
          defer(oc_map.cleanup)

-     for pp in pipelines_providers:
-         retention = pp.get("retention")
-         if not retention:
+     for pp in pipeline_providers:
+         if not pp.retention:
              continue

-         if pp["provider"] == Providers.TEKTON.value:
-             ns_info = pp["namespace"]
-             namespace = ns_info["name"]
-             cluster = ns_info["cluster"]["name"]
-             oc = oc_map.get(cluster)
-             pipeline_runs = sorted(
-                 oc.get(namespace, "PipelineRun")["items"],
-                 key=lambda k: k["metadata"]["creationTimestamp"],
-                 reverse=True,
-             )
+         oc = oc_map.get(pp.namespace.cluster.name)
+         if isinstance(oc, OCLogMsg):
+             logging.log(level=oc.log_level, msg=oc.message)
+             continue
+         pipeline_runs = sorted(
+             oc.get(pp.namespace.name, "PipelineRun")["items"],
+             key=lambda k: k["metadata"]["creationTimestamp"],
+             reverse=True,
+         )

-             retention_min = retention.get("minimum")
-             if retention_min:
-                 pipeline_runs = pipeline_runs[retention_min:]
+         if pp.retention.minimum:
+             pipeline_runs = pipeline_runs[pp.retention.minimum :]

-             retention_days = retention.get("days")
-             for pr in pipeline_runs:
-                 name = pr["metadata"]["name"]
-                 if retention_days and within_retention_days(pr, retention_days):
-                     continue
+         for pr in pipeline_runs:
+             name = pr["metadata"]["name"]
+             if pp.retention.days and within_retention_days(pr, pp.retention.days):
+                 continue

-                 logging.info(
-                     ["delete_trigger", cluster, namespace, "PipelineRun", name]
-                 )
-                 if not dry_run:
-                     oc.delete(namespace, "PipelineRun", name)
+             logging.info(
+                 [
+                     "delete_trigger",
+                     pp.namespace.cluster.name,
+                     pp.namespace.name,
+                     "PipelineRun",
+                     name,
+                 ]
+             )
+             if not dry_run:
+                 oc.delete(pp.namespace.name, "PipelineRun", name)
--- /dev/null
+++ reconcile/typed_queries/tekton_pipeline_providers.py
@@ -0,0 +1,17 @@
+ from collections.abc import Callable
+ from typing import Optional
+
+ from reconcile.gql_definitions.common.pipeline_providers import (
+     PipelinesProviderTektonV1,
+     query,
+ )
+ from reconcile.utils import gql
+
+
+ def get_tekton_pipeline_providers(
+     query_func: Optional[Callable] = None,
+ ) -> list[PipelinesProviderTektonV1]:
+     if not query_func:
+         query_func = gql.get_api().query
+     pipeline_providers = query(query_func).pipelines_providers or []
+     return [p for p in pipeline_providers if isinstance(p, PipelinesProviderTektonV1)]
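
Usage sketch (illustrative only, not part of the released diff): the rc181 changes replace the untyped queries module in the trigger cleaner with the typed helper above. The snippet below shows one way the helper could be consumed, using only fields declared in the generated PipelinesProviderTektonV1 model, and assuming the qontract-reconcile GQL client has already been initialised so that gql.get_api() works.

    from reconcile.typed_queries.tekton_pipeline_providers import (
        get_tekton_pipeline_providers,
    )

    # Returns only Tekton providers, already parsed into
    # PipelinesProviderTektonV1 instances (see pipeline_providers.py above).
    for pp in get_tekton_pipeline_providers():
        target = f"{pp.namespace.cluster.name}/{pp.namespace.name}"
        if pp.retention:
            print(
                f"{pp.name}: cleaning PipelineRuns in {target} "
                f"(days={pp.retention.days}, minimum={pp.retention.minimum})"
            )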