qontract-reconcile 0.9.1rc162__py3-none-any.whl → 0.9.1rc164__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/METADATA +2 -2
- {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/RECORD +21 -30
- reconcile/glitchtip_project_dsn/integration.py +3 -0
- reconcile/jenkins_job_builder.py +2 -5
- reconcile/openshift_base.py +11 -11
- reconcile/openshift_saas_deploy.py +52 -57
- reconcile/openshift_saas_deploy_trigger_base.py +48 -55
- reconcile/openshift_saas_deploy_trigger_cleaner.py +2 -2
- reconcile/openshift_tekton_resources.py +1 -1
- reconcile/saas_file_validator.py +10 -23
- reconcile/slack_base.py +2 -5
- reconcile/test/conftest.py +0 -11
- reconcile/test/test_auto_promoter.py +42 -199
- reconcile/test/test_saasherder.py +463 -398
- reconcile/test/test_saasherder_allowed_secret_paths.py +36 -87
- reconcile/utils/mr/auto_promoter.py +50 -58
- reconcile/utils/mr/base.py +2 -6
- reconcile/utils/{saasherder/saasherder.py → saasherder.py} +736 -656
- reconcile/gql_definitions/common/app_code_component_repos.py +0 -68
- reconcile/gql_definitions/common/saas_files.py +0 -542
- reconcile/gql_definitions/common/saasherder_settings.py +0 -62
- reconcile/gql_definitions/fragments/oc_connection_cluster.py +0 -47
- reconcile/typed_queries/repos.py +0 -17
- reconcile/typed_queries/saas_files.py +0 -61
- reconcile/utils/saasherder/__init__.py +0 -17
- reconcile/utils/saasherder/interfaces.py +0 -404
- reconcile/utils/saasherder/models.py +0 -203
- {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/entry_points.txt +0 -0
- {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/top_level.txt +0 -0
reconcile/openshift_saas_deploy_trigger_base.py
CHANGED
@@ -5,6 +5,7 @@ from threading import Lock
 from typing import (
     Any,
     Optional,
+    cast,
 )
 
 from sretoolbox.utils import threaded
@@ -13,13 +14,6 @@ import reconcile.jenkins_plugins as jenkins_base
 import reconcile.openshift_base as osb
 from reconcile import queries
 from reconcile.openshift_tekton_resources import build_one_per_saas_file_tkn_object_name
-from reconcile.typed_queries.app_interface_vault_settings import (
-    get_app_interface_vault_settings,
-)
-from reconcile.typed_queries.saas_files import (
-    get_saas_files,
-    get_saasherder_settings,
-)
 from reconcile.utils.defer import defer
 from reconcile.utils.gitlab_api import GitLabApi
 from reconcile.utils.oc import OC_Map
@@ -31,11 +25,7 @@ from reconcile.utils.saasherder import (
     SaasHerder,
     TriggerSpecUnion,
 )
-from reconcile.utils.saasherder.interfaces import SaasPipelinesProviderTekton
-from reconcile.utils.saasherder.models import TriggerTypes
-from reconcile.utils.secret_reader import create_secret_reader
 from reconcile.utils.sharding import is_in_shard
-from reconcile.utils.state import init_state
 
 _trigger_lock = Lock()
 
@@ -47,7 +37,7 @@ class TektonTimeoutBadValueError(Exception):
 @defer
 def run(
     dry_run: bool,
-    trigger_type: TriggerTypes,
+    trigger_type: str,
     integration: str,
     integration_version: str,
     thread_pool_size: int,
@@ -72,7 +62,7 @@ def run(
     Returns:
         bool: True if there was an error, False otherwise
     """
-    saasherder, oc_map = setup(
+    saasherder, oc_map, error = setup(
         thread_pool_size=thread_pool_size,
         internal=internal,
         use_jump_host=use_jump_host,
@@ -80,6 +70,8 @@ def run(
         integration_version=integration_version,
         include_trigger_trace=include_trigger_trace,
     )
+    if error:
+        return error
     if defer:  # defer is set by method decorator. this makes just mypy happy
         defer(oc_map.cleanup)
 
@@ -111,7 +103,7 @@ def setup(
     integration: str,
     integration_version: str,
     include_trigger_trace: bool,
-) -> tuple[SaasHerder, OC_Map]:
+) -> tuple[SaasHerder, OC_Map, bool]:
     """Setup required resources for triggering integrations
 
     Args:
@@ -125,21 +117,22 @@ def setup(
     Returns:
         saasherder (SaasHerder): a SaasHerder instance
         oc_map (OC_Map): a dictionary of OC clients per cluster
+        error (bool): True if one happened, False otherwise
     """
-    vault_settings = get_app_interface_vault_settings()
-    saasherder_settings = get_saasherder_settings()
-    secret_reader = create_secret_reader(use_vault=vault_settings.vault)
-    saas_files = get_saas_files()
+
+    saas_files = queries.get_saas_files()
     if not saas_files:
         raise RuntimeError("no saas files found")
-    saas_files = [sf for sf in saas_files if is_in_shard(sf.name)]
+    saas_files = [sf for sf in saas_files if is_in_shard(sf["name"])]
 
     # Remove saas-file targets that are disabled
     for saas_file in saas_files[:]:
-        for rt in saas_file.resource_templates[:]:
-            for target in rt.targets[:]:
-                if target.disable:
-                    rt.targets.remove(target)
+        resource_templates = saas_file["resourceTemplates"]
+        for rt in resource_templates[:]:
+            targets = rt["targets"]
+            for target in targets[:]:
+                if target["disable"]:
+                    targets.remove(target)
 
     instance = queries.get_gitlab_instance()
     settings = queries.get_app_interface_settings()
@@ -147,9 +140,7 @@ def setup(
     jenkins_map = jenkins_base.get_jenkins_map()
     pipelines_providers = queries.get_pipelines_providers()
     tkn_provider_namespaces = [
-        pp["namespace"]
-        for pp in pipelines_providers
-        if pp["provider"] == Providers.TEKTON.value
+        pp["namespace"] for pp in pipelines_providers if pp["provider"] == "tekton"
     ]
 
     oc_map = OC_Map(
@@ -164,18 +155,16 @@
     saasherder = SaasHerder(
         saas_files,
         thread_pool_size=thread_pool_size,
+        gitlab=gl,
         integration=integration,
         integration_version=integration_version,
-        secret_reader=secret_reader,
-        hash_length=saasherder_settings.hash_length,
-        repo_url=saasherder_settings.repo_url,
-        gitlab=gl,
+        settings=settings,
         jenkins_map=jenkins_map,
-        state=init_state(integration=integration, secret_reader=secret_reader),
+        initialise_state=True,
         include_trigger_trace=include_trigger_trace,
     )
 
-    return saasherder, oc_map
+    return saasherder, oc_map, False
 
 
 def trigger(
@@ -203,8 +192,10 @@ def trigger(
         bool: True if there was an error, False otherwise
     """
     saas_file_name = spec.saas_file_name
+    provider_name = cast(dict, spec.pipelines_provider)["provider"]
+
     error = False
-    if isinstance(spec.pipelines_provider, SaasPipelinesProviderTekton):
+    if provider_name == Providers.TEKTON:
         error = _trigger_tekton(
             spec,
             dry_run,
@@ -216,10 +207,7 @@
         )
     else:
         error = True
-        logging.error(
-            f"[{saas_file_name}] unsupported provider: "
-            + f"{spec.pipelines_provider.provider}"
-        )
+        logging.error(f"[{saas_file_name}] unsupported provider: " + f"{provider_name}")
 
     return error
 
@@ -233,23 +221,28 @@ def _trigger_tekton(
     integration: str,
     integration_version: str,
 ) -> bool:
-
-
-
-
-
-
-
-
-
-
-
+    saas_file_name = spec.saas_file_name
+    env_name = spec.env_name
+    timeout = spec.timeout
+    pipelines_provider = cast(dict, spec.pipelines_provider)
+
+    pipeline_template_name = pipelines_provider["defaults"]["pipelineTemplates"][
+        "openshiftSaasDeploy"
+    ]["name"]
+
+    if pipelines_provider["pipelineTemplates"]:
+        pipeline_template_name = pipelines_provider["pipelineTemplates"][
+            "openshiftSaasDeploy"
+        ]["name"]
+
     tkn_pipeline_name = build_one_per_saas_file_tkn_object_name(
-        pipeline_template_name,
+        pipeline_template_name, saas_file_name
     )
-
-
-
+
+    tkn_namespace_info = pipelines_provider["namespace"]
+    tkn_namespace_name = tkn_namespace_info["name"]
+    tkn_cluster_name = tkn_namespace_info["cluster"]["name"]
+    tkn_cluster_console_url = tkn_namespace_info["cluster"]["consoleUrl"]
 
     # if pipeline does not exist it means that either it hasn't been
     # statically created from app-interface or it hasn't been dynamically
@@ -266,10 +259,10 @@ def _trigger_tekton(
         return False
 
     tkn_trigger_resource, tkn_name = _construct_tekton_trigger_resource(
-        spec.saas_file_name,
-        spec.env_name,
+        saas_file_name,
+        env_name,
         tkn_pipeline_name,
-        spec.timeout,
+        timeout,
         tkn_cluster_console_url,
         tkn_namespace_name,
         integration,
reconcile/openshift_saas_deploy_trigger_cleaner.py
CHANGED
@@ -44,7 +44,7 @@ def run(
     tkn_namespaces = [
         pp["namespace"]
         for pp in pipelines_providers
-        if pp["provider"] == Providers.TEKTON.value
+        if pp["provider"] == Providers.TEKTON
     ]
 
     oc_map = OC_Map(
@@ -63,7 +63,7 @@ def run(
         if not retention:
             continue
 
-        if pp["provider"] == Providers.TEKTON.value:
+        if pp["provider"] == Providers.TEKTON:
             ns_info = pp["namespace"]
             namespace = ns_info["name"]
             cluster = ns_info["cluster"]["name"]
reconcile/openshift_tekton_resources.py
CHANGED
@@ -92,7 +92,7 @@ def fetch_tkn_providers(saas_file_name: Optional[str]) -> dict[str, Any]:
     duplicates: set[str] = set()
     all_tkn_providers = {}
     for pipeline_provider in queries.get_pipelines_providers():
-        if pipeline_provider["provider"] != Providers.TEKTON.value:
+        if pipeline_provider["provider"] != Providers.TEKTON:
             continue
 
         if pipeline_provider["name"] in all_tkn_providers:
reconcile/saas_file_validator.py
CHANGED
@@ -1,45 +1,32 @@
 import logging
 import sys
 
+from reconcile import queries
 from reconcile.jenkins_job_builder import init_jjb
 from reconcile.status import ExitCodes
-from reconcile.typed_queries.app_interface_vault_settings import (
-    get_app_interface_vault_settings,
-)
-from reconcile.typed_queries.repos import get_repos
-from reconcile.typed_queries.saas_files import (
-    get_saas_files,
-    get_saasherder_settings,
-)
 from reconcile.utils.jjb_client import JJB
 from reconcile.utils.saasherder import SaasHerder
-from reconcile.utils.secret_reader import create_secret_reader
+from reconcile.utils.secret_reader import SecretReader
 from reconcile.utils.semver_helper import make_semver
 
 QONTRACT_INTEGRATION = "saas-file-validator"
 QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 
 
-def run(dry_run: bool) -> None:
-    vault_settings = get_app_interface_vault_settings()
-    saasherder_settings = get_saasherder_settings()
-    secret_reader = create_secret_reader(use_vault=vault_settings.vault)
-    saas_files = get_saas_files()
-    if not saas_files:
-        logging.error("no saas files found")
-        raise RuntimeError("no saas files found")
-
+def run(dry_run):
+    saas_files = queries.get_saas_files()
+    settings = queries.get_app_interface_settings()
+    secret_reader = SecretReader(settings)
     saasherder = SaasHerder(
-        saas_files
+        saas_files,
         thread_pool_size=1,
+        gitlab=None,
         integration=QONTRACT_INTEGRATION,
         integration_version=QONTRACT_INTEGRATION_VERSION,
-        secret_reader=secret_reader,
-        hash_length=saasherder_settings.hash_length,
-        repo_url=saasherder_settings.repo_url,
+        settings=settings,
         validate=True,
     )
-    app_int_repos = get_repos()
+    app_int_repos = queries.get_repos()
     missing_repos = [r for r in saasherder.repo_urls if r not in app_int_repos]
     for r in missing_repos:
         logging.error(f"repo is missing from codeComponents: {r}")
reconcile/slack_base.py
CHANGED
@@ -5,10 +5,7 @@ from typing import (
 )
 
 from reconcile import queries
-from reconcile.utils.secret_reader import (
-    SecretReader,
-    SecretReaderBase,
-)
+from reconcile.utils.secret_reader import SecretReader
 from reconcile.utils.slack_api import (
     HasClientConfig,
     SlackApi,
@@ -28,7 +25,7 @@ def slackapi_from_queries(
 
 def slackapi_from_slack_workspace(
     slack_workspace: Mapping[str, Any],
-    secret_reader: SecretReaderBase,
+    secret_reader: SecretReader,
     integration_name: str,
     init_usergroups: bool = True,
     channel: Optional[str] = None,
reconcile/test/conftest.py
CHANGED
@@ -69,17 +69,6 @@ def data_default_none(
                 data_default_none(field.type_, item)
                 for item in data[field.alias]
             ]
-        elif field.sub_fields and all(
-            isinstance(sub_field.type_, type)
-            and issubclass(sub_field.type_, BaseModel)
-            for sub_field in field.sub_fields
-        ):
-            # Union[ClassA, ClassB] field
-            for sub_field in field.sub_fields:
-                if isinstance(data[field.alias], dict):
-                    data[field.alias].update(
-                        data_default_none(sub_field.type_, data[field.alias])
-                    )
 
     return data
 
reconcile/test/test_auto_promoter.py
CHANGED
@@ -1,21 +1,24 @@
-import json
 from unittest import TestCase
 
 from reconcile.utils.mr.auto_promoter import (
+    TARGET_CONFIG_HASH,
     AutoPromoter,
     ParentSaasConfigPromotion,
 )
-
-from reconcile.utils.saasherder.models import Promotion
+
+# from unittest.mock import MagicMock
+
+
+# from .fixtures import Fixtures
 
 
 class TestPromotions(TestCase):
-    def test_init_promotion_data(self) -> None:
-        promotion = Promotion(
-            commit_sha="ahash",
-            saas_file_name="saas_file",
-            target_config_hash="123123123",
-        )
+    def test_init_promotion_data(self):
+        promotion = {
+            "commit_sha": "ahash",
+            "saas_file": "saas_file",
+            "target_config_hash": "123123123",
+        }
 
         expected = {
             "channel": "test-channel",
@@ -30,7 +33,7 @@ class TestPromotions(TestCase):
         ret = AutoPromoter.init_promotion_data("test-channel", promotion)
         self.assertEqual(ret, expected)
 
-    def test_init_parent_saas_config_dataclass(self) -> None:
+    def test_init_parent_saas_config_dataclass(self):
         data = {
             "parent_saas": "saas_file",
             TARGET_CONFIG_HASH: "123123123",
|
|
42
45
|
self.assertEqual(obj.target_config_hash, data[TARGET_CONFIG_HASH])
|
43
46
|
self.assertEqual(obj.parent_saas, data["parent_saas"])
|
44
47
|
|
45
|
-
def test_process_promotion_init_promotion_data(self)
|
46
|
-
promotion =
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
48
|
+
def test_process_promotion_init_promotion_data(self):
|
49
|
+
promotion = {
|
50
|
+
"saas_file_paths": ["destination-saas-file"],
|
51
|
+
"auto": True,
|
52
|
+
"publish": ["test-channel"],
|
53
|
+
"commit_sha": "ahash",
|
54
|
+
"saas_file": "saas_file",
|
55
|
+
TARGET_CONFIG_HASH: "111111111",
|
56
|
+
}
|
54
57
|
|
55
58
|
target_promotion = {
|
56
59
|
"auto": True,
|
@@ -62,19 +65,19 @@ class TestPromotions(TestCase):
|
|
62
65
|
)
|
63
66
|
self.assertTrue(modified)
|
64
67
|
|
65
|
-
tp = target_promotion["promotion_data"][0]
|
68
|
+
tp = target_promotion["promotion_data"][0]
|
66
69
|
tp_hash = tp["data"][0]["target_config_hash"]
|
67
70
|
self.assertEqual(tp_hash, "111111111")
|
68
71
|
|
69
|
-
def test_process_promotion_update_when_config_hash_changes(self)
|
70
|
-
promotion =
|
71
|
-
saas_file_paths
|
72
|
-
auto
|
73
|
-
publish
|
74
|
-
commit_sha
|
75
|
-
|
76
|
-
|
77
|
-
|
72
|
+
def test_process_promotion_update_when_config_hash_changes(self):
|
73
|
+
promotion = {
|
74
|
+
"saas_file_paths": ["destination-saas-file"],
|
75
|
+
"auto": True,
|
76
|
+
"publish": ["test-channel"],
|
77
|
+
"commit_sha": "ahash",
|
78
|
+
"saas_file": "saas_file",
|
79
|
+
TARGET_CONFIG_HASH: "111111111",
|
80
|
+
}
|
78
81
|
|
79
82
|
target_promotion = {
|
80
83
|
"auto": True,
|
@@ -98,19 +101,19 @@ class TestPromotions(TestCase):
|
|
98
101
|
)
|
99
102
|
self.assertTrue(modified)
|
100
103
|
|
101
|
-
tp = target_promotion["promotion_data"][0]
|
104
|
+
tp = target_promotion["promotion_data"][0]
|
102
105
|
tp_hash = tp["data"][0]["target_config_hash"]
|
103
106
|
self.assertEqual(tp_hash, "111111111")
|
104
107
|
|
105
|
-
def test_process_promotion_dont_update_when_equal_config_hashes(self)
|
106
|
-
promotion =
|
107
|
-
saas_file_paths
|
108
|
-
auto
|
109
|
-
publish
|
110
|
-
commit_sha
|
111
|
-
|
112
|
-
|
113
|
-
|
108
|
+
def test_process_promotion_dont_update_when_equal_config_hashes(self):
|
109
|
+
promotion = {
|
110
|
+
"saas_file_paths": ["destination-saas-file"],
|
111
|
+
"auto": True,
|
112
|
+
"publish": ["test-channel"],
|
113
|
+
"commit_sha": "ahash",
|
114
|
+
"saas_file": "saas_file",
|
115
|
+
TARGET_CONFIG_HASH: "111111111",
|
116
|
+
}
|
114
117
|
|
115
118
|
target_promotion = {
|
116
119
|
"auto": True,
|
@@ -133,163 +136,3 @@ class TestPromotions(TestCase):
|
|
133
136
|
promotion, target_promotion, ["test-channel"]
|
134
137
|
)
|
135
138
|
self.assertFalse(modified)
|
136
|
-
|
137
|
-
def test_title_property(self) -> None:
|
138
|
-
promotion = Promotion(
|
139
|
-
saas_file_paths=["destination-saas-file"],
|
140
|
-
auto=True,
|
141
|
-
publish=["test-channel"],
|
142
|
-
commit_sha="ahash",
|
143
|
-
saas_file_name="saas_file",
|
144
|
-
target_config_hash="111111111",
|
145
|
-
)
|
146
|
-
|
147
|
-
ap = AutoPromoter([promotion])
|
148
|
-
self.assertEqual(
|
149
|
-
ap.title, "[auto_promoter] openshift-saas-deploy automated promotion 769891"
|
150
|
-
)
|
151
|
-
|
152
|
-
def test_description_property(self) -> None:
|
153
|
-
promotion = Promotion(
|
154
|
-
saas_file_paths=["destination-saas-file"],
|
155
|
-
auto=True,
|
156
|
-
publish=["test-channel"],
|
157
|
-
commit_sha="ahash",
|
158
|
-
saas_file_name="saas_file",
|
159
|
-
target_config_hash="111111111",
|
160
|
-
)
|
161
|
-
|
162
|
-
ap = AutoPromoter([promotion])
|
163
|
-
self.assertEqual(ap.description, "openshift-saas-deploy automated promotion")
|
164
|
-
|
165
|
-
def test_gitlab_data_property(self) -> None:
|
166
|
-
promotion = Promotion(
|
167
|
-
saas_file_paths=["destination-saas-file"],
|
168
|
-
auto=True,
|
169
|
-
publish=["test-channel"],
|
170
|
-
commit_sha="ahash",
|
171
|
-
saas_file_name="saas_file",
|
172
|
-
target_config_hash="111111111",
|
173
|
-
)
|
174
|
-
|
175
|
-
ap = AutoPromoter([promotion])
|
176
|
-
self.assertTrue(ap.gitlab_data["source_branch"].startswith("auto_promoter-"))
|
177
|
-
self.assertEqual(ap.gitlab_data["target_branch"], "master")
|
178
|
-
self.assertEqual(
|
179
|
-
ap.gitlab_data["title"],
|
180
|
-
"[auto_promoter] openshift-saas-deploy automated promotion 769891",
|
181
|
-
)
|
182
|
-
self.assertEqual(
|
183
|
-
ap.gitlab_data["description"], "openshift-saas-deploy automated promotion"
|
184
|
-
)
|
185
|
-
self.assertEqual(ap.gitlab_data["remove_source_branch"], True)
|
186
|
-
self.assertEqual(ap.gitlab_data["labels"], ["bot/automerge"])
|
187
|
-
|
188
|
-
def test_sqs_data_property(self) -> None:
|
189
|
-
promotion = Promotion(
|
190
|
-
saas_file_paths=["destination-saas-file"],
|
191
|
-
auto=True,
|
192
|
-
publish=["test-channel"],
|
193
|
-
commit_sha="ahash",
|
194
|
-
saas_file_name="saas_file",
|
195
|
-
target_config_hash="111111111",
|
196
|
-
)
|
197
|
-
|
198
|
-
ap = AutoPromoter([promotion])
|
199
|
-
self.assertEqual(
|
200
|
-
ap.sqs_data,
|
201
|
-
{
|
202
|
-
"pr_type": "auto_promoter",
|
203
|
-
"promotions": [
|
204
|
-
{
|
205
|
-
"commit_sha": "ahash",
|
206
|
-
"saas_file_name": "saas_file",
|
207
|
-
"target_config_hash": "111111111",
|
208
|
-
"auto": True,
|
209
|
-
"publish": ["test-channel"],
|
210
|
-
"subscribe": None,
|
211
|
-
"promotion_data": None,
|
212
|
-
"saas_file_paths": ["destination-saas-file"],
|
213
|
-
"target_paths": None,
|
214
|
-
}
|
215
|
-
],
|
216
|
-
},
|
217
|
-
)
|
218
|
-
|
219
|
-
def test_sqs_data_json_serializable(self) -> None:
|
220
|
-
promotion = Promotion(
|
221
|
-
saas_file_paths=["destination-saas-file"],
|
222
|
-
auto=True,
|
223
|
-
publish=["test-channel"],
|
224
|
-
commit_sha="ahash",
|
225
|
-
saas_file_name="saas_file",
|
226
|
-
target_config_hash="111111111",
|
227
|
-
promotion_data=[
|
228
|
-
{
|
229
|
-
"channel": "test-channel",
|
230
|
-
"data": [
|
231
|
-
{
|
232
|
-
"parent_saas": "saas_file",
|
233
|
-
"target_config_hash": "111111111",
|
234
|
-
"type": "parent_saas_config",
|
235
|
-
}
|
236
|
-
],
|
237
|
-
}
|
238
|
-
],
|
239
|
-
)
|
240
|
-
|
241
|
-
ap = AutoPromoter([promotion])
|
242
|
-
sqs_json = '{"pr_type": "auto_promoter", "promotions": [{"commit_sha": "ahash", "saas_file_name": "saas_file", "target_config_hash": "111111111", "auto": true, "publish": ["test-channel"], "subscribe": null, "promotion_data": [{"channel": "test-channel", "data": [{"type": "parent_saas_config", "parent_saas": "saas_file", "target_config_hash": "111111111"}]}], "saas_file_paths": ["destination-saas-file"], "target_paths": null}]}'
|
243
|
-
self.assertEqual(json.dumps(ap.sqs_data), sqs_json)
|
244
|
-
|
245
|
-
def test_init_with_promotion_object(self) -> None:
|
246
|
-
promotion = Promotion(
|
247
|
-
saas_file_paths=["destination-saas-file"],
|
248
|
-
auto=True,
|
249
|
-
publish=["test-channel"],
|
250
|
-
commit_sha="ahash",
|
251
|
-
saas_file_name="saas_file",
|
252
|
-
target_config_hash="111111111",
|
253
|
-
promotion_data=[
|
254
|
-
{
|
255
|
-
"channel": "test-channel",
|
256
|
-
"data": [
|
257
|
-
{
|
258
|
-
"parent_saas": "saas_file",
|
259
|
-
"target_config_hash": "111111111",
|
260
|
-
"type": "parent_saas_config",
|
261
|
-
}
|
262
|
-
],
|
263
|
-
}
|
264
|
-
],
|
265
|
-
)
|
266
|
-
|
267
|
-
ap = AutoPromoter([promotion])
|
268
|
-
self.assertEqual(ap.promotions, [promotion.dict(by_alias=True)])
|
269
|
-
self.assertEqual(ap._promotions, [promotion])
|
270
|
-
|
271
|
-
def test_init_with_dict_object(self) -> None:
|
272
|
-
promotion = Promotion(
|
273
|
-
saas_file_paths=["destination-saas-file"],
|
274
|
-
auto=True,
|
275
|
-
publish=["test-channel"],
|
276
|
-
commit_sha="ahash",
|
277
|
-
saas_file_name="saas_file",
|
278
|
-
target_config_hash="111111111",
|
279
|
-
promotion_data=[
|
280
|
-
{
|
281
|
-
"channel": "test-channel",
|
282
|
-
"data": [
|
283
|
-
{
|
284
|
-
"parent_saas": "saas_file",
|
285
|
-
"target_config_hash": "111111111",
|
286
|
-
"type": "parent_saas_config",
|
287
|
-
}
|
288
|
-
],
|
289
|
-
}
|
290
|
-
],
|
291
|
-
)
|
292
|
-
|
293
|
-
ap = AutoPromoter([promotion.dict(by_alias=True)])
|
294
|
-
self.assertEqual(ap.promotions, [promotion.dict(by_alias=True)])
|
295
|
-
self.assertEqual(ap._promotions, [promotion])
|