qontract-reconcile 0.9.1rc131__py3-none-any.whl → 0.9.1rc133__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- {qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/METADATA +1 -1
- {qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/RECORD +14 -14
- reconcile/cli.py +6 -6
- reconcile/openshift_saas_deploy.py +46 -22
- reconcile/openshift_saas_deploy_trigger_base.py +54 -42
- reconcile/openshift_saas_deploy_trigger_cleaner.py +15 -3
- reconcile/openshift_saas_deploy_trigger_configs.py +7 -6
- reconcile/openshift_saas_deploy_trigger_images.py +7 -6
- reconcile/openshift_saas_deploy_trigger_moving_commits.py +7 -6
- reconcile/openshift_saas_deploy_trigger_upstream_jobs.py +7 -6
- reconcile/openshift_saas_deploy_wrapper.py +13 -8
- {qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/entry_points.txt +0 -0
- {qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/top_level.txt +0 -0
{qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: qontract-reconcile
-Version: 0.9.1rc131
+Version: 0.9.1rc133
 Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
 Home-page: https://github.com/app-sre/qontract-reconcile
 Author: Red Hat App-SRE Team
{qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/RECORD
RENAMED
@@ -16,7 +16,7 @@ reconcile/aws_iam_password_reset.py,sha256=Fequr4Zs7oARnp0rpC4B4rVfyZvl6LXZ3kaDs
 reconcile/aws_support_cases_sos.py,sha256=UJEs3lpmCioUxLu9ek2WuySCI3sYyc2dVP9zU0FIh1s,2888
 reconcile/blackbox_exporter_endpoint_monitoring.py,sha256=4MrHEFUCaHDtSL8DPAo7iJgZ95Nl1HiVzNFvshAJ0_M,3552
 reconcile/checkpoint.py,sha256=figtZRuWUvdpdSnkhAqeGvO5dI02TT6J3heyeFhlwqM,5016
-reconcile/cli.py,sha256=
+reconcile/cli.py,sha256=_CnhmVbwH6d97ud04cX2Y5GqxrKyMg1-pIAI3T502pM,70825
 reconcile/closedbox_endpoint_monitoring_base.py,sha256=Yl1_BzgIQbLS6Y16W-u0I82WMcXADpIUOpfQ4J88umY,4888
 reconcile/cluster_deployment_mapper.py,sha256=2Ah-nu-Mdig0pjuiZl_XLrmVAjYzFjORR3dMlCgkmw0,2352
 reconcile/dashdotdb_base.py,sha256=Ca75-OQiu5HeA8Q6zQpEYuhyCSjeuWe99K4y9ipTORM,4032
@@ -87,15 +87,15 @@ reconcile/openshift_resources.py,sha256=Lkn3KdIqxUCmOzlwoKjM2xVotTbJDfgjMGySvWtB
 reconcile/openshift_resources_base.py,sha256=tWUCzZNEEOTjrl-9Fj2BUwSMt1xNyhqiqwSLBPXlZr4,40756
 reconcile/openshift_rolebindings.py,sha256=1k0o3hb3ZhhlbUjc8cP7IjKFux0oZApT8kLT8Y-pvqI,6579
 reconcile/openshift_routes.py,sha256=fXvuPSjcjVw1X3j2EQvUAdbOepmIFdKk-M3qP8QzPiw,1075
-reconcile/openshift_saas_deploy.py,sha256=
+reconcile/openshift_saas_deploy.py,sha256=URaGXvp5C4rrQS3rgBLS-Czw5eJadByvYh1wvbENajI,8710
 reconcile/openshift_saas_deploy_change_tester.py,sha256=gdEke-uNKv1v8vuveThCn8_zEdP-BpiltS_OugqDFhg,9105
-reconcile/openshift_saas_deploy_trigger_base.py,sha256=
-reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=
-reconcile/openshift_saas_deploy_trigger_configs.py,sha256=
-reconcile/openshift_saas_deploy_trigger_images.py,sha256=
-reconcile/openshift_saas_deploy_trigger_moving_commits.py,sha256=
-reconcile/openshift_saas_deploy_trigger_upstream_jobs.py,sha256=
-reconcile/openshift_saas_deploy_wrapper.py,sha256=
+reconcile/openshift_saas_deploy_trigger_base.py,sha256=WARMn3elQ8TN0Z-9NKQLQZMhtSN2JO6iD3pRaaa44GY,13343
+reconcile/openshift_saas_deploy_trigger_cleaner.py,sha256=fbSO48dnIMPV6XKPDoPD0pEFrhHYWfZcmvQfxAwQ-Ps,2728
+reconcile/openshift_saas_deploy_trigger_configs.py,sha256=uWzUV5D5CW0frdi1ys7BObNg-rA-VZKlefd4TD_Z-pY,959
+reconcile/openshift_saas_deploy_trigger_images.py,sha256=Yl4lMtxqab-c04I2Ju8isAJuYuNTbHN01Bk3dF9nTos,967
+reconcile/openshift_saas_deploy_trigger_moving_commits.py,sha256=VqjwgRhA-yOSq0WFPqGIJVgYkdq_UrTHcVusBzOFgMY,973
+reconcile/openshift_saas_deploy_trigger_upstream_jobs.py,sha256=etfBGj7GDXTOhNHKc40ee5QM0Chq3oHE40HrbWgopc8,971
+reconcile/openshift_saas_deploy_wrapper.py,sha256=worob8HdaQ31kndkQoWXqD-wK2jAIa4IR5um6KRsyf4,1854
 reconcile/openshift_serviceaccount_tokens.py,sha256=UlBRjfAxzf9-h4uxQwHqrYnBWMUGFqqfp6KvW2lM76k,6369
 reconcile/openshift_tekton_resources.py,sha256=nSJDPqL6Rl3jeUQKS6mFIqkWPdbXu_FNCeHER8w_GyY,13552
 reconcile/openshift_upgrade_watcher.py,sha256=4LtXziBS1tGfUiioGGzx6h7ZHXdGk97DSMLNblcGmHk,4843
@@ -482,8 +482,8 @@ tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y
 tools/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tools/test/test_qontract_cli.py,sha256=awwTHEc2DWlykuqGIYM0WOBoSL0KRnOraCLk3C7izis,1401
 tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
-qontract_reconcile-0.9.1rc131.dist-info/METADATA,sha256=
-qontract_reconcile-0.9.1rc131.dist-info/WHEEL,sha256=
-qontract_reconcile-0.9.1rc131.dist-info/entry_points.txt,sha256=
-qontract_reconcile-0.9.1rc131.dist-info/top_level.txt,sha256=
-qontract_reconcile-0.9.1rc131.dist-info/RECORD,,
+qontract_reconcile-0.9.1rc133.dist-info/METADATA,sha256=Y6ZjvDqi2URKKdxZeGYrSJwCyJ2Xv3yYZRFNRJHCR44,2259
+qontract_reconcile-0.9.1rc133.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+qontract_reconcile-0.9.1rc133.dist-info/entry_points.txt,sha256=3BPvsRryM1C4S_mb5kXmP5AVv-wJBzVCrOJyv6qUmc0,195
+qontract_reconcile-0.9.1rc133.dist-info/top_level.txt,sha256=j0CHPIc8TsVRB50wOz_jhxjjaRyCJB3NOQeXhuHS67c,34
+qontract_reconcile-0.9.1rc133.dist-info/RECORD,,
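Each RECORD entry has the form `path,sha256=<digest>,<size>`, where the digest is the URL-safe, unpadded base64 encoding of the file's SHA-256 hash (per the wheel spec). A minimal sketch for recomputing an entry, so a hash like the new `reconcile/cli.py` one above can be verified against an unpacked wheel (the helper name is ours):

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Compute a wheel RECORD entry: path,sha256=<digest>,<size>."""
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    # RECORD uses URL-safe base64 with the trailing "=" padding stripped
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={encoded},{len(data)}"


# Run inside an unpacked 0.9.1rc133 wheel; should print the line shown above:
# reconcile/cli.py,sha256=_CnhmVbwH6d97ud04cX2Y5GqxrKyMg1-pIAI3T502pM,70825
print(record_entry("reconcile/cli.py"))
```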
reconcile/cli.py
CHANGED
@@ -1044,12 +1044,12 @@ def openshift_saas_deploy(
     run_integration(
         reconcile.openshift_saas_deploy,
         ctx.obj,
-        thread_pool_size,
-        io_dir,
-        use_jump_host,
-        saas_file_name,
-        env_name,
-        gitlab_project_id,
+        thread_pool_size=thread_pool_size,
+        io_dir=io_dir,
+        use_jump_host=use_jump_host,
+        saas_file_name=saas_file_name,
+        env_name=env_name,
+        gitlab_project_id=gitlab_project_id,
     )
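The cli.py change above forwards the integration's options to `run_integration` as keyword arguments instead of positional ones, so a reordered `run()` signature can no longer silently bind a value to the wrong parameter, and the call pairs naturally with the type annotations added below. A small self-contained illustration of the failure mode this removes (the `run` signature here is hypothetical):

```python
def run(
    dry_run: bool,
    thread_pool_size: int = 10,
    io_dir: str = "throughput/",
    use_jump_host: bool = True,
) -> None:
    print(thread_pool_size, io_dir, use_jump_host)


# Positional: arguments bind by position, so a reordered or extended
# signature silently shifts every value one slot over.
run(False, 10, "throughput/", True)

# Keyword: order-independent, self-documenting, and checkable by mypy.
run(False, thread_pool_size=10, io_dir="throughput/", use_jump_host=True)
```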
reconcile/openshift_saas_deploy.py
CHANGED
@@ -1,5 +1,10 @@
 import logging
 import sys
+from collections.abc import Callable
+from typing import (
+    Any,
+    Optional,
+)
 
 import reconcile.jenkins_plugins as jenkins_base
 import reconcile.openshift_base as ob
@@ -16,12 +21,15 @@ from reconcile.utils.openshift_resource import ResourceInventory
 from reconcile.utils.saasherder import SaasHerder
 from reconcile.utils.secret_reader import SecretReader
 from reconcile.utils.semver_helper import make_semver
+from reconcile.utils.slack_api import SlackApi
 
 QONTRACT_INTEGRATION = "openshift-saas-deploy"
 QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
-def compose_console_url(saas_file, saas_file_name, env_name):
+def compose_console_url(
+    saas_file: dict[str, Any], saas_file_name: str, env_name: str
+) -> str:
     pp = saas_file["pipelinesProvider"]
     pp_ns = pp["namespace"]
     pp_ns_name = pp_ns["name"]
@@ -46,7 +54,14 @@ def compose_console_url(saas_file, saas_file_name, env_name)
     )
 
 
-def slack_notify(saas_file_name, env_name, slack, ri, console_url, in_progress):
+def slack_notify(
+    saas_file_name: str,
+    env_name: str,
+    slack: SlackApi,
+    ri: ResourceInventory,
+    console_url: str,
+    in_progress: bool,
+) -> None:
     success = not ri.has_error_registered()
     if in_progress:
         icon = ":yellow_jenkins_circle:"
@@ -67,15 +82,15 @@ def slack_notify(saas_file_name, env_name, slack, ri, console_url, in_progress):
 
 @defer
 def run(
-    dry_run,
-    thread_pool_size=10,
-    io_dir="throughput/",
-    use_jump_host=True,
-    saas_file_name=None,
-    env_name=None,
-    gitlab_project_id=None,
-    defer=None,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    io_dir: str = "throughput/",
+    use_jump_host: bool = True,
+    saas_file_name: Optional[str] = None,
+    env_name: Optional[str] = None,
+    gitlab_project_id: Optional[str] = None,
+    defer: Optional[Callable] = None,
+) -> None:
     all_saas_files = queries.get_saas_files()
     saas_files = queries.get_saas_files(saas_file_name, env_name)
     if not saas_files:
@@ -91,6 +106,11 @@ def run(
     saas_file = saas_files[0]
     slack_info = saas_file.get("slack")
     if slack_info:
+        if not saas_file_name or not env_name:
+            raise RuntimeError(
+                "saas_file_name and env_name must be provided "
+                + "when using slack notifications"
+            )
         slack = slackapi_from_slack_workspace(
             slack_info,
             SecretReader(queries.get_secret_reader_settings()),
@@ -99,17 +119,20 @@ def run(
         )
         ri = ResourceInventory()
         console_url = compose_console_url(saas_file, saas_file_name, env_name)
-        # deployment result notification
-        defer(
-            lambda: slack_notify(
-                saas_file_name,
-                env_name,
-                slack,
-                ri,
-                console_url,
-                in_progress=False,
+        if (
+            defer
+        ):  # defer is provided by the method decorator. this makes just mypy happy
+            # deployment result notification
+            defer(
+                lambda: slack_notify(
+                    saas_file_name,
+                    env_name,
+                    slack,
+                    ri,
+                    console_url,
+                    in_progress=False,
+                )
             )
-        )
         # deployment start notification
         slack_notifications = slack_info.get("notifications")
         if slack_notifications and slack_notifications.get("start"):
@@ -163,7 +186,8 @@ def run(
         cluster_admin=saasherder.cluster_admin,
         use_jump_host=use_jump_host,
     )
-    defer(oc_map.cleanup)
+    if defer:  # defer is provided by the method decorator. this makes just mypy happy
+        defer(oc_map.cleanup)
     saasherder.populate_desired_state(ri)
 
     # validate that this deployment is valid
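A recurring pattern in this release: `run()` functions now declare `defer: Optional[Callable] = None`, where the `@defer` decorator injects the real callback at runtime, and call sites gain an `if defer:` guard because mypy only sees the `Optional` type and cannot know the decorator always supplies it. A minimal sketch of how such a decorator can work, an illustration only, not the project's actual `defer` implementation:

```python
import functools
from collections.abc import Callable
from typing import Any, Optional


def defer(func: Callable) -> Callable:
    """Inject a `defer` kwarg that queues cleanup callbacks to run on exit."""

    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        cleanups: list[Callable[[], Any]] = []
        try:
            return func(*args, defer=cleanups.append, **kwargs)
        finally:
            # run cleanups in reverse registration order, like Go's defer
            for cleanup in reversed(cleanups):
                cleanup()

    return wrapper


@defer
def run(dry_run: bool, defer: Optional[Callable] = None) -> None:
    if defer:  # the guard that keeps mypy happy
        defer(lambda: print("cleanup: oc_map.cleanup would run here"))
    print("deploying...")


run(dry_run=True)  # prints "deploying..." then the cleanup line
```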
reconcile/openshift_saas_deploy_trigger_base.py
CHANGED
@@ -1,7 +1,12 @@
 import datetime
 import logging
+from collections.abc import Callable
 from threading import Lock
-from typing import
+from typing import (
+    Any,
+    Optional,
+    cast,
+)
 
 from sretoolbox.utils import threaded
 
@@ -31,16 +36,16 @@ class TektonTimeoutBadValueError(Exception):
 
 @defer
 def run(
-    dry_run,
-    trigger_type,
-    integration,
-    integration_version,
-    thread_pool_size,
-    internal,
-    use_jump_host,
-    include_trigger_trace,
-    defer=None,
-):
+    dry_run: bool,
+    trigger_type: str,
+    integration: str,
+    integration_version: str,
+    thread_pool_size: int,
+    internal: bool,
+    use_jump_host: bool,
+    include_trigger_trace: bool,
+    defer: Optional[Callable] = None,
+) -> bool:
     """Run trigger integration
 
     Args:
@@ -67,12 +72,13 @@ def run(
     )
     if error:
         return error
-    defer(oc_map.cleanup)
+    if defer:  # defer is set by method decorator. this makes just mypy happy
+        defer(oc_map.cleanup)
 
     trigger_specs, diff_err = saasherder.get_diff(trigger_type, dry_run)
     # This will be populated by 'trigger' in the below loop and
     # we need it to be consistent across all iterations
-    already_triggered = set()
+    already_triggered: set[str] = set()
 
     errors = threaded.run(
         trigger,
@@ -91,13 +97,13 @@ def run(
 
 
 def setup(
-    thread_pool_size,
-    internal,
-    use_jump_host,
-    integration,
-    integration_version,
-    include_trigger_trace,
-):
+    thread_pool_size: int,
+    internal: bool,
+    use_jump_host: bool,
+    integration: str,
+    integration_version: str,
+    include_trigger_trace: bool,
+) -> tuple[SaasHerder, OC_Map, bool]:
     """Setup required resources for triggering integrations
 
     Args:
@@ -116,8 +122,7 @@ def setup(
 
     saas_files = queries.get_saas_files()
     if not saas_files:
-        logging.error("no saas files found")
-        return None, None, True
+        raise RuntimeError("no saas files found")
     saas_files = [sf for sf in saas_files if is_in_shard(sf["name"])]
 
     # Remove saas-file targets that are disabled
@@ -215,7 +220,7 @@ def _trigger_tekton(
     already_triggered: set[str],
     integration: str,
     integration_version: str,
-):
+) -> bool:
     saas_file_name = spec.saas_file_name
     env_name = spec.env_name
     timeout = spec.timeout
@@ -292,25 +297,29 @@ def _trigger_tekton(
     return error
 
 
-def _pipeline_exists(name, tkn_cluster_name, tkn_namespace_name, oc_map):
+def _pipeline_exists(
+    name: str, tkn_cluster_name: str, tkn_namespace_name: str, oc_map: OC_Map
+) -> bool:
     oc = oc_map.get(tkn_cluster_name)
-    return oc.get(
+    if oc.get(
         namespace=tkn_namespace_name, kind="Pipeline", name=name, allow_not_found=True
-    )
+    ):
+        return True
+    return False
 
 
 def _construct_tekton_trigger_resource(
-    saas_file_name,
-    env_name,
-    tkn_pipeline_name,
-    timeout,
-    tkn_cluster_console_url,
-    tkn_namespace_name,
-    integration,
-    integration_version,
-    include_trigger_trace,
-    reason,
-):
+    saas_file_name: str,
+    env_name: str,
+    tkn_pipeline_name: str,
+    timeout: Optional[str],
+    tkn_cluster_console_url: str,
+    tkn_namespace_name: str,
+    integration: str,
+    integration_version: str,
+    include_trigger_trace: bool,
+    reason: Optional[str],
+) -> tuple[OR, str]:
     """Construct a resource (PipelineRun) to trigger a deployment via Tekton.
 
     Args:
@@ -319,7 +328,7 @@ def _construct_tekton_trigger_resource(
         tkn_cluster_console_url (string): Cluster console URL of the cluster
             where the pipeline runs
         tkn_namespace_name (string): namespace where the pipeline runs
-        timeout (
+        timeout (str): Timeout in minutes before the PipelineRun fails (must be > 60)
         integration (string): Name of calling integration
         integration_version (string): Version of calling integration
         include_trigger_trace (bool): Should include traces of the triggering integration and reason
@@ -343,13 +352,18 @@ def _construct_tekton_trigger_resource(
         {"name": "tkn_namespace_name", "value": tkn_namespace_name},
     ]
     if include_trigger_trace:
+        if not reason:
+            raise RuntimeError(
+                "reason must be provided if include_trigger_trace is True"
+            )
+
         parameters.extend(
             [
                 {"name": "trigger_integration", "value": integration},
                 {"name": "trigger_reason", "value": reason},
            ]
        )
-    body = {
+    body: dict[str, Any] = {
         "apiVersion": "tekton.dev/v1beta1",
         "kind": "PipelineRun",
         "metadata": {"name": name},
@@ -371,7 +385,7 @@ def _construct_tekton_trigger_resource(
     return OR(body, integration, integration_version, error_details=name), long_name
 
 
-def _register_trigger(name, already_triggered):
+def _register_trigger(name: str, already_triggered: set[str]) -> bool:
     """checks if a trigger should occur and registers as if it did
 
     Args:
@@ -382,8 +396,6 @@ def _register_trigger(name, already_triggered):
     Returns:
         bool: to trigger or not to trigger
     """
-    global _trigger_lock
-
     to_trigger = False
     with _trigger_lock:
         if name not in already_triggered:
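Two details in the trigger_base change are worth spelling out. `already_triggered` is now typed `set[str]` at both ends, and the `global _trigger_lock` statement was deleted because `_register_trigger` only acquires the module-level lock; `global` is required only when a function rebinds the name. A self-contained sketch of the lock-guarded register-once idiom, consistent with the fragment above (the trailing `add` is inferred from the docstring "registers as if it did"):

```python
from threading import Lock

_trigger_lock = Lock()


def _register_trigger(name: str, already_triggered: set[str]) -> bool:
    """Return True exactly once per name, even with concurrent callers."""
    to_trigger = False
    with _trigger_lock:  # reading a module-level name needs no `global`
        if name not in already_triggered:
            to_trigger = True
            already_triggered.add(name)
    return to_trigger


seen: set[str] = set()
print(_register_trigger("saas-a/prod", seen))  # True: first registration
print(_register_trigger("saas-a/prod", seen))  # False: already triggered
```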
reconcile/openshift_saas_deploy_trigger_cleaner.py
CHANGED
@@ -1,9 +1,14 @@
 import logging
+from collections.abc import Callable
 from datetime import (
     datetime,
     timedelta,
     timezone,
 )
+from typing import (
+    Any,
+    Optional,
+)
 
 from dateutil import parser
 
@@ -17,7 +22,7 @@ QONTRACT_INTEGRATION = "openshift-saas-deploy-trigger-cleaner"
 QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
-def within_retention_days(resource, days):
+def within_retention_days(resource: dict[str, Any], days: int) -> bool:
     metadata = resource["metadata"]
     creation_date = parser.parse(metadata["creationTimestamp"])
     now_date = datetime.now(timezone.utc)
@@ -27,7 +32,13 @@ def within_retention_days(resource, days):
 
 
 @defer
-def run(dry_run, thread_pool_size=10, internal=None, use_jump_host=True, defer=None):
+def run(
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    internal: Optional[bool] = None,
+    use_jump_host: bool = True,
+    defer: Optional[Callable] = None,
+) -> None:
     settings = queries.get_app_interface_settings()
     pipelines_providers = queries.get_pipelines_providers()
     tkn_namespaces = [
@@ -44,7 +55,8 @@ def run(dry_run, thread_pool_size=10, internal=None, use_jump_host=True, defer=N
         use_jump_host=use_jump_host,
         thread_pool_size=thread_pool_size,
     )
-    defer(oc_map.cleanup)
+    if defer:
+        defer(oc_map.cleanup)
 
     for pp in pipelines_providers:
         retention = pp.get("retention")
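`within_retention_days` now takes a typed Kubernetes-style resource dict and returns an explicit bool: it parses `metadata.creationTimestamp` with dateutil and compares it against the current UTC time. A runnable sketch of that check, assuming the retention semantics are "created within the last `days` days":

```python
from datetime import datetime, timedelta, timezone
from typing import Any

from dateutil import parser


def within_retention_days(resource: dict[str, Any], days: int) -> bool:
    """True if the resource was created inside the retention window."""
    creation_date = parser.parse(resource["metadata"]["creationTimestamp"])
    now_date = datetime.now(timezone.utc)
    return now_date - creation_date <= timedelta(days=days)


pipeline_run = {"metadata": {"creationTimestamp": "2022-01-01T00:00:00Z"}}
print(within_retention_days(pipeline_run, days=7))  # False for old resources
```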
reconcile/openshift_saas_deploy_trigger_configs.py
CHANGED
@@ -1,4 +1,5 @@
 import sys
+from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -10,12 +11,12 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 
 
 def run(
-    dry_run,
-    thread_pool_size=10,
-    internal=None,
-    use_jump_host=True,
-    include_trigger_trace=False,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    internal: Optional[bool] = None,
+    use_jump_host: bool = True,
+    include_trigger_trace: bool = False,
+) -> None:
     error = osdt_base.run(
         dry_run=dry_run,
         trigger_type=TriggerTypes.CONFIGS,
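trigger_configs and the three sibling modules that follow are thin shims over `osdt_base.run`: each passes its own fixed trigger type, and presumably exits through `sys.exit(ExitCodes.ERROR)` when the base reports an error (the `import sys` / `ExitCodes` header strongly suggests this, though the tail of each file is not shown in the diff). The shape, sketched with stand-ins for the package's own types:

```python
import sys
from enum import IntEnum
from typing import Optional


class TriggerTypes(IntEnum):  # stand-in for reconcile's trigger type constants
    CONFIGS = 0


class ExitCodes(IntEnum):  # stand-in for reconcile.status.ExitCodes
    SUCCESS = 0
    ERROR = 1


def osdt_base_run(dry_run: bool, trigger_type: int, **kwargs: object) -> bool:
    """Stand-in for osdt_base.run: returns True if any trigger errored."""
    return False


def run(
    dry_run: bool,
    thread_pool_size: int = 10,
    internal: Optional[bool] = None,
    use_jump_host: bool = True,
    include_trigger_trace: bool = False,
) -> None:
    error = osdt_base_run(
        dry_run=dry_run,
        trigger_type=TriggerTypes.CONFIGS,  # each sibling passes its own type
        thread_pool_size=thread_pool_size,
        internal=internal,
        use_jump_host=use_jump_host,
        include_trigger_trace=include_trigger_trace,
    )
    if error:
        sys.exit(ExitCodes.ERROR)
```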
reconcile/openshift_saas_deploy_trigger_images.py
CHANGED
@@ -1,4 +1,5 @@
 import sys
+from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -10,12 +11,12 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
 def run(
-    dry_run,
-    thread_pool_size=10,
-    internal=None,
-    use_jump_host=True,
-    include_trigger_trace=False,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    internal: Optional[bool] = None,
+    use_jump_host: bool = True,
+    include_trigger_trace: bool = False,
+) -> None:
     error = osdt_base.run(
         dry_run=dry_run,
         trigger_type=TriggerTypes.CONTAINER_IMAGES,
reconcile/openshift_saas_deploy_trigger_moving_commits.py
CHANGED
@@ -1,4 +1,5 @@
 import sys
+from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -10,12 +11,12 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 
 
 def run(
-    dry_run,
-    thread_pool_size=10,
-    internal=None,
-    use_jump_host=True,
-    include_trigger_trace=False,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    internal: Optional[bool] = None,
+    use_jump_host: bool = True,
+    include_trigger_trace: bool = False,
+) -> None:
     error = osdt_base.run(
         dry_run=dry_run,
         trigger_type=TriggerTypes.MOVING_COMMITS,
reconcile/openshift_saas_deploy_trigger_upstream_jobs.py
CHANGED
@@ -1,4 +1,5 @@
 import sys
+from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -10,12 +11,12 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 
 
 def run(
-    dry_run,
-    thread_pool_size=10,
-    internal=None,
-    use_jump_host=True,
-    include_trigger_trace=False,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    internal: Optional[bool] = None,
+    use_jump_host: bool = True,
+    include_trigger_trace: bool = False,
+) -> None:
     error = osdt_base.run(
         dry_run=dry_run,
         trigger_type=TriggerTypes.UPSTREAM_JOBS,
reconcile/openshift_saas_deploy_wrapper.py
CHANGED
@@ -1,4 +1,5 @@
 import sys
+from typing import Optional
 
 from sretoolbox.utils import threaded
 
@@ -13,8 +14,12 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
 def osd_run_wrapper(
-    diff, dry_run, available_thread_pool_size, use_jump_host, gitlab_project_id
-):
+    diff: dict[str, str],
+    dry_run: bool,
+    available_thread_pool_size: int,
+    use_jump_host: bool,
+    gitlab_project_id: Optional[str],
+) -> int:
     saas_file_name = diff["saas_file_name"]
     env_name = diff["environment"]
     exit_code = 0
@@ -33,12 +38,12 @@ def osd_run_wrapper(
 
 
 def run(
-    dry_run,
-    thread_pool_size=10,
-    io_dir="throughput/",
-    use_jump_host=True,
-    gitlab_project_id=None,
-):
+    dry_run: bool,
+    thread_pool_size: int = 10,
+    io_dir: str = "throughput/",
+    use_jump_host: bool = True,
+    gitlab_project_id: Optional[str] = None,
+) -> None:
     saas_file_owners_diffs = read_saas_file_owners_diffs(io_dir)
     if len(saas_file_owners_diffs) == 0:
         return
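`osd_run_wrapper` now returns an `int` exit code per owner diff, and the wrapper's `run` fans these out via sretoolbox's `threaded.run`. A stdlib-only sketch of the same fan-out-and-aggregate idea (names and the pool-splitting heuristic are ours; the real module uses `sretoolbox.utils.threaded`):

```python
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from typing import Optional


def osd_run_wrapper(
    diff: dict[str, str],
    dry_run: bool,
    available_thread_pool_size: int,
    use_jump_host: bool,
    gitlab_project_id: Optional[str],
) -> int:
    """Deploy one saas-file/environment pair; return 0 on success."""
    print(f"deploying {diff['saas_file_name']} to {diff['environment']}")
    return 0


def run(diffs: list[dict[str, str]], dry_run: bool, thread_pool_size: int = 10) -> int:
    worker = partial(
        osd_run_wrapper,
        dry_run=dry_run,
        # split the pool across concurrent deploys (illustrative heuristic)
        available_thread_pool_size=max(1, thread_pool_size // max(1, len(diffs))),
        use_jump_host=True,
        gitlab_project_id=None,
    )
    with ThreadPoolExecutor(max_workers=thread_pool_size) as pool:
        exit_codes = list(pool.map(worker, diffs))
    return max(exit_codes, default=0)  # non-zero if any deploy failed


diffs = [{"saas_file_name": "saas-a", "environment": "prod"}]
print(run(diffs, dry_run=True))  # 0
```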
{qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/WHEEL
RENAMED
File without changes
{qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/entry_points.txt
RENAMED
File without changes
{qontract_reconcile-0.9.1rc131.dist-info → qontract_reconcile-0.9.1rc133.dist-info}/top_level.txt
RENAMED
File without changes