qontract-reconcile 0.9.1rc162__py3-none-any.whl → 0.9.1rc164__py3-none-any.whl

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (30)
  1. {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/METADATA +2 -2
  2. {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/RECORD +21 -30
  3. reconcile/glitchtip_project_dsn/integration.py +3 -0
  4. reconcile/jenkins_job_builder.py +2 -5
  5. reconcile/openshift_base.py +11 -11
  6. reconcile/openshift_saas_deploy.py +52 -57
  7. reconcile/openshift_saas_deploy_trigger_base.py +48 -55
  8. reconcile/openshift_saas_deploy_trigger_cleaner.py +2 -2
  9. reconcile/openshift_tekton_resources.py +1 -1
  10. reconcile/saas_file_validator.py +10 -23
  11. reconcile/slack_base.py +2 -5
  12. reconcile/test/conftest.py +0 -11
  13. reconcile/test/test_auto_promoter.py +42 -199
  14. reconcile/test/test_saasherder.py +463 -398
  15. reconcile/test/test_saasherder_allowed_secret_paths.py +36 -87
  16. reconcile/utils/mr/auto_promoter.py +50 -58
  17. reconcile/utils/mr/base.py +2 -6
  18. reconcile/utils/{saasherder/saasherder.py → saasherder.py} +736 -656
  19. reconcile/gql_definitions/common/app_code_component_repos.py +0 -68
  20. reconcile/gql_definitions/common/saas_files.py +0 -542
  21. reconcile/gql_definitions/common/saasherder_settings.py +0 -62
  22. reconcile/gql_definitions/fragments/oc_connection_cluster.py +0 -47
  23. reconcile/typed_queries/repos.py +0 -17
  24. reconcile/typed_queries/saas_files.py +0 -61
  25. reconcile/utils/saasherder/__init__.py +0 -17
  26. reconcile/utils/saasherder/interfaces.py +0 -404
  27. reconcile/utils/saasherder/models.py +0 -203
  28. {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/WHEEL +0 -0
  29. {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/entry_points.txt +0 -0
  30. {qontract_reconcile-0.9.1rc162.dist-info → qontract_reconcile-0.9.1rc164.dist-info}/top_level.txt +0 -0
@@ -13,11 +13,12 @@ from collections.abc import (
  Sequence,
  )
  from contextlib import suppress
+ from dataclasses import dataclass
  from typing import (
  Any,
- Generator,
  Optional,
  Union,
+ cast,
  )

  import yaml
@@ -25,8 +26,6 @@ from github import (
  Github,
  GithubException,
  )
- from github.ContentFile import ContentFile
- from github.Repository import Repository
  from gitlab.exceptions import GitlabError
  from requests import exceptions as rqexc
  from sretoolbox.container import Image
@@ -37,10 +36,8 @@ from sretoolbox.utils import (

  from reconcile.github_org import get_default_config
  from reconcile.status import RunningState
- from reconcile.utils.gitlab_api import GitLabApi
- from reconcile.utils.jenkins_api import JenkinsApi
  from reconcile.utils.jjb_client import JJB
- from reconcile.utils.mr.base import MRClient
+ from reconcile.utils.mr.auto_promoter import AutoPromoter
  from reconcile.utils.oc import (
  OCLocal,
  StatusCodeError,
@@ -51,47 +48,118 @@ from reconcile.utils.openshift_resource import (
  ResourceKeyExistsError,
  fully_qualified_kind,
  )
- from reconcile.utils.saasherder.interfaces import (
- HasParameters,
- HasSecretParameters,
- SaasFile,
- SaasParentSaasPromotion,
- SaasResourceTemplate,
- SaasResourceTemplateTarget,
- SaasResourceTemplateTargetNamespace,
- SaasResourceTemplateTargetPromotion,
- SaasSecretParameters,
- )
- from reconcile.utils.saasherder.models import (
- ImageAuth,
- Namespace,
- Promotion,
- TargetSpec,
- TriggerSpecConfig,
- TriggerSpecContainerImage,
- TriggerSpecMovingCommit,
- TriggerSpecUnion,
- TriggerSpecUpstreamJob,
- TriggerTypes,
- UpstreamJob,
- )
- from reconcile.utils.secret_reader import SecretReaderBase
- from reconcile.utils.state import State
+ from reconcile.utils.secret_reader import SecretReader
+ from reconcile.utils.state import init_state

  TARGET_CONFIG_HASH = "target_config_hash"


- UNIQUE_SAAS_FILE_ENV_COMBO_LEN = 50
+ class Providers:
+ TEKTON = "tekton"
+
+
+ class TriggerTypes:
+ CONFIGS = 0
+ MOVING_COMMITS = 1
+ UPSTREAM_JOBS = 2
+ CONTAINER_IMAGES = 3
+
+
+ @dataclass
+ class UpstreamJob:
+ instance: str
+ job: str
+
+ def __str__(self):
+ return f"{self.instance}/{self.job}"

+ def __repr__(self):
+ return self.__str__()

- def is_commit_sha(ref: str) -> bool:
- """Check if the given ref is a commit sha."""
- return bool(re.search(r"^[0-9a-f]{40}$", ref))

+ @dataclass
+ class TriggerSpecBase:
+ saas_file_name: str
+ env_name: str
+ timeout: Optional[str]
+ pipelines_provider: Optional[dict[str, Any]]
+ resource_template_name: str
+ cluster_name: str
+ namespace_name: str
+ state_content: Any
+
+ @property
+ def state_key(self):
+ raise NotImplementedError("implement this function in inheriting classes")
+
+
+ @dataclass
+ class TriggerSpecConfig(TriggerSpecBase):
+ target_name: Optional[str] = None
+ reason: Optional[str] = None
+
+ @property
+ def state_key(self):
+ key = (
+ f"{self.saas_file_name}/{self.resource_template_name}/{self.cluster_name}/"
+ f"{self.namespace_name}/{self.env_name}"
+ )
+ if self.target_name:
+ key += f"/{self.target_name}"
+ return key
+
+
+ @dataclass
+ class TriggerSpecMovingCommit(TriggerSpecBase):
+ ref: str
+ reason: Optional[str] = None
+
+ @property
+ def state_key(self):
+ key = (
+ f"{self.saas_file_name}/{self.resource_template_name}/{self.cluster_name}/"
+ f"{self.namespace_name}/{self.env_name}/{self.ref}"
+ )
+ return key

- RtRef = tuple[str, str, str]
- Resource = dict[str, Any]
- Resources = list[Resource]
+
+ @dataclass
+ class TriggerSpecUpstreamJob(TriggerSpecBase):
+ instance_name: str
+ job_name: str
+ reason: Optional[str] = None
+
+ @property
+ def state_key(self):
+ key = (
+ f"{self.saas_file_name}/{self.resource_template_name}/{self.cluster_name}/"
+ f"{self.namespace_name}/{self.env_name}/{self.instance_name}/{self.job_name}"
+ )
+ return key
+
+
+ @dataclass
+ class TriggerSpecContainerImage(TriggerSpecBase):
+ image: str
+ reason: Optional[str] = None
+
+ @property
+ def state_key(self):
+ key = (
+ f"{self.saas_file_name}/{self.resource_template_name}/{self.cluster_name}/"
+ f"{self.namespace_name}/{self.env_name}/{self.image}"
+ )
+ return key
+
+
+ TriggerSpecUnion = Union[
+ TriggerSpecConfig,
+ TriggerSpecMovingCommit,
+ TriggerSpecUpstreamJob,
+ TriggerSpecContainerImage,
+ ]
+
+ UNIQUE_SAAS_FILE_ENV_COMBO_LEN = 50


  class SaasHerder:
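Note: the hunk above inlines the trigger-spec models as plain dataclasses instead of importing them from reconcile.utils.saasherder.models. A minimal sketch of how the state_key property composes, assuming the dataclasses above are in scope; the field names come from the diff, the values are made up for illustration:

    spec = TriggerSpecConfig(
        saas_file_name="my-saas-file",      # hypothetical saas file
        env_name="production",
        timeout=None,
        pipelines_provider=None,
        resource_template_name="my-template",
        cluster_name="my-cluster",
        namespace_name="my-namespace",
        state_content=None,
        target_name="blue",
    )
    # -> "my-saas-file/my-template/my-cluster/my-namespace/production/blue"
    print(spec.state_key)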
@@ -99,18 +167,16 @@ class SaasHerder:

  def __init__(
  self,
- saas_files: Sequence[SaasFile],
- thread_pool_size: int,
- integration: str,
- integration_version: str,
- secret_reader: SecretReaderBase,
- hash_length: int,
- repo_url: str,
- gitlab: Optional[GitLabApi] = None,
- jenkins_map: Optional[dict[str, JenkinsApi]] = None,
- state: Optional[State] = None,
- validate: bool = False,
- include_trigger_trace: bool = False,
+ saas_files,
+ thread_pool_size,
+ gitlab,
+ integration,
+ integration_version,
+ settings,
+ jenkins_map=None,
+ initialise_state=False,
+ validate=False,
+ include_trigger_trace=False,
  ):
  self.error_registered = False
  self.saas_files = saas_files
@@ -123,14 +189,11 @@ class SaasHerder:
  self.gitlab = gitlab
  self.integration = integration
  self.integration_version = integration_version
- self.hash_length = hash_length
- self.repo_url = repo_url
- self.secret_reader = secret_reader
+ self.settings = settings
+ self.secret_reader = SecretReader(settings=settings)
  self.namespaces = self._collect_namespaces()
  self.jenkins_map = jenkins_map
  self.include_trigger_trace = include_trigger_trace
- self.state = state
-
  # each namespace is in fact a target,
  # so we can use it to calculate.
  divisor = len(self.namespaces) or 1
@@ -141,40 +204,31 @@ class SaasHerder:
  # specify that it manages resources exclusively.
  self.take_over = self._get_saas_file_feature_enabled("takeover")
  self.compare = self._get_saas_file_feature_enabled("compare", default=True)
- self.publish_job_logs = self._get_saas_file_feature_enabled("publish_job_logs")
- self.cluster_admin = self._get_saas_file_feature_enabled("cluster_admin")
+ self.publish_job_logs = self._get_saas_file_feature_enabled("publishJobLogs")
+ self.cluster_admin = self._get_saas_file_feature_enabled("clusterAdmin")
+ if initialise_state:
+ self.state = init_state(integration=self.integration)

- def _register_error(self) -> None:
+ def _register_error(self):
  self.error_registered = True

  @property
- def has_error_registered(self) -> bool:
+ def has_error_registered(self):
  return self.error_registered

- def __iter__(
- self,
- ) -> Generator[
- tuple[SaasFile, SaasResourceTemplate, SaasResourceTemplateTarget],
- None,
- None,
- ]:
+ def __iter__(self):
  for saas_file in self.saas_files:
- for resource_template in saas_file.resource_templates:
- for target in resource_template.targets:
+ for resource_template in saas_file["resourceTemplates"]:
+ for target in resource_template["targets"]:
  yield (saas_file, resource_template, target)

- def _get_saas_file_feature_enabled(
- self, name: str, default: Optional[bool] = None
- ) -> Optional[bool]:
+ def _get_saas_file_feature_enabled(self, name, default=None):
  """Returns a bool indicating if a feature is enabled in a saas file,
  or a supplied default. Returns False if there are multiple
  saas files in the process.
  All features using this method should assume a single saas file.
  """
- if len(self.saas_files) > 1:
- return False
-
- sf_attribute = getattr(self.saas_files[0], name, None)
+ sf_attribute = len(self.saas_files) == 1 and self.saas_files[0].get(name)
  if sf_attribute is None and default is not None:
  return default
  return sf_attribute
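Note: with this change, __iter__ walks the raw GraphQL dictionaries again ("resourceTemplates" / "targets") rather than typed SaasFile objects. A small sketch of the shape callers can rely on; the dict below is hypothetical and far smaller than a real saas file:

    saas_file = {
        "name": "my-saas-file",
        "resourceTemplates": [
            {
                "name": "my-template",
                "targets": [{"ref": "main", "namespace": {"name": "my-namespace"}}],
            }
        ],
    }
    # mirrors the nested loops in __iter__ above
    for resource_template in saas_file["resourceTemplates"]:
        for target in resource_template["targets"]:
            print(saas_file["name"], resource_template["name"], target["ref"])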
@@ -182,7 +236,7 @@ class SaasHerder:
  def _validate_allowed_secret_parameter_paths(
  self,
  saas_file_name: str,
- secret_parameters: SaasSecretParameters,
+ secret_parameters: Sequence[Mapping[str, Any]],
  allowed_secret_parameter_paths: Sequence[str],
  ) -> None:
  if not secret_parameters:
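Note: the allowed-path check in the hunk below relies on os.path.commonpath, so an allowed entry matches when it is an ancestor of the secret's path. A tiny sketch with hypothetical paths:

    import os

    allowed = ["app-sre/secrets", "app-interface"]
    path = "app-sre/secrets/my-app/prod"
    # same list comprehension shape as in _validate_allowed_secret_parameter_paths
    match = [a for a in allowed if os.path.commonpath([path, a]) == a]
    print(bool(match))  # True: "app-sre/secrets" is a parent of the secret path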
@@ -194,139 +248,149 @@ class SaasHerder:
194
248
  )
195
249
  return
196
250
  for sp in secret_parameters:
251
+ path = sp["secret"]["path"]
197
252
  match = [
198
253
  a
199
254
  for a in allowed_secret_parameter_paths
200
- if (os.path.commonpath([sp.secret.path, a]) == a)
255
+ if (os.path.commonpath([path, a]) == a)
201
256
  ]
202
257
  if not match:
203
258
  self.valid = False
204
259
  logging.error(
205
260
  f"[{saas_file_name}] "
206
- f"secret parameter path '{sp.secret.path}' does not match any of allowedSecretParameterPaths"
261
+ f"secret parameter path '{path}' does not match any of allowedSecretParameterPaths"
207
262
  )
208
263
 
209
- def _validate_saas_files(self) -> None:
264
+ def _validate_saas_files(self):
210
265
  self.valid = True
211
- saas_file_name_path_map: dict[str, list[str]] = {}
212
- tkn_unique_pipelineruns: dict[str, str] = {}
266
+ saas_file_name_path_map = {}
267
+ self.tkn_unique_pipelineruns = {}
213
268
 
214
- publications: dict[str, RtRef] = {}
215
- subscriptions: dict[str, list[RtRef]] = {}
269
+ publications = {}
270
+ subscriptions = {}
216
271
 
217
272
  for saas_file in self.saas_files:
218
- saas_file_name_path_map.setdefault(saas_file.name, [])
219
- saas_file_name_path_map[saas_file.name].append(saas_file.path)
273
+ saas_file_name = saas_file["name"]
274
+ saas_file_path = saas_file["path"]
275
+ saas_file_name_path_map.setdefault(saas_file_name, [])
276
+ saas_file_name_path_map[saas_file_name].append(saas_file_path)
220
277
 
221
278
  saas_file_owners = [
222
- u.org_username
223
- for r in saas_file.self_service_roles or []
224
- for u in list(r.users) + list(r.bots)
279
+ u["org_username"]
280
+ for r in saas_file["selfServiceRoles"]
281
+ for u in r["users"] + r["bots"]
225
282
  ]
226
283
  if not saas_file_owners:
227
- logging.error(
228
- f"saas file {saas_file.name} has no owners: {saas_file.path}"
229
- )
284
+ msg = "saas file {} has no owners: {}"
285
+ logging.error(msg.format(saas_file_name, saas_file_path))
230
286
  self.valid = False
231
287
 
288
+ allowed_secret_parameter_paths = (
289
+ saas_file.get("allowedSecretParameterPaths") or []
290
+ )
232
291
  self._validate_allowed_secret_parameter_paths(
233
- saas_file.name,
234
- saas_file.secret_parameters or [],
235
- saas_file.allowed_secret_parameter_paths or [],
292
+ saas_file_name,
293
+ saas_file.get("secretParameters"),
294
+ allowed_secret_parameter_paths,
236
295
  )
237
296
 
238
- for resource_template in saas_file.resource_templates:
297
+ for resource_template in saas_file["resourceTemplates"]:
298
+ resource_template_name = resource_template["name"]
299
+ resource_template_url = resource_template["url"]
239
300
  self._validate_allowed_secret_parameter_paths(
240
- saas_file.name,
241
- resource_template.secret_parameters or [],
242
- saas_file.allowed_secret_parameter_paths or [],
301
+ saas_file_name,
302
+ resource_template.get("secretParameters"),
303
+ allowed_secret_parameter_paths,
243
304
  )
244
- for target in resource_template.targets:
305
+ for target in resource_template["targets"]:
306
+ target_namespace = target["namespace"]
307
+ namespace_name = target_namespace["name"]
308
+ cluster_name = target_namespace["cluster"]["name"]
309
+ environment = target_namespace["environment"]
310
+ environment_name = environment["name"]
245
311
  # unique saas file and env name combination
246
- tkn_name, tkn_long_name = self._check_saas_file_env_combo_unique(
247
- saas_file.name,
248
- target.namespace.environment.name,
249
- tkn_unique_pipelineruns,
312
+ self._check_saas_file_env_combo_unique(
313
+ saas_file_name, environment_name
250
314
  )
251
- tkn_unique_pipelineruns[tkn_name] = tkn_long_name
252
315
  self._validate_auto_promotion_used_with_commit_sha(
253
- saas_file.name,
254
- resource_template.name,
316
+ saas_file_name,
317
+ resource_template_name,
255
318
  target,
256
319
  )
257
320
  self._validate_upstream_not_used_with_commit_sha(
258
- saas_file.name,
259
- resource_template.name,
321
+ saas_file_name,
322
+ resource_template_name,
260
323
  target,
261
324
  )
262
325
  self._validate_upstream_not_used_with_image(
263
- saas_file.name,
264
- resource_template.name,
326
+ saas_file_name,
327
+ resource_template_name,
265
328
  target,
266
329
  )
267
330
  self._validate_image_not_used_with_commit_sha(
268
- saas_file.name,
269
- resource_template.name,
331
+ saas_file_name,
332
+ resource_template_name,
270
333
  target,
271
334
  )
272
335
  self._validate_allowed_secret_parameter_paths(
273
- saas_file.name,
274
- target.secret_parameters or [],
275
- saas_file.allowed_secret_parameter_paths or [],
336
+ saas_file_name,
337
+ target.get("secretParameters"),
338
+ allowed_secret_parameter_paths,
276
339
  )
277
340
  self._validate_allowed_secret_parameter_paths(
278
- saas_file.name,
279
- target.namespace.environment.secret_parameters or [],
280
- saas_file.allowed_secret_parameter_paths or [],
341
+ saas_file_name,
342
+ environment.get("secretParameters"),
343
+ allowed_secret_parameter_paths,
281
344
  )
282
345
 
283
- if target.promotion:
346
+ promotion = target.get("promotion")
347
+ if promotion:
284
348
  rt_ref = (
285
- saas_file.path,
286
- resource_template.name,
287
- resource_template.url,
349
+ saas_file_path,
350
+ resource_template_name,
351
+ resource_template_url,
288
352
  )
289
353
 
290
354
  # Get publications and subscriptions for the target
291
355
  self._get_promotion_pubs_and_subs(
292
- rt_ref, target.promotion, publications, subscriptions
356
+ rt_ref, promotion, publications, subscriptions
293
357
  )
294
358
  # validate target parameters
295
- if not target.parameters:
359
+ target_parameters = target["parameters"]
360
+ if not target_parameters:
296
361
  continue
362
+ target_parameters = json.loads(target_parameters)
297
363
  self._validate_image_tag_not_equals_ref(
298
- saas_file.name,
299
- resource_template.name,
300
- target.ref,
301
- target.parameters,
364
+ saas_file_name,
365
+ resource_template_name,
366
+ target["ref"],
367
+ target_parameters,
302
368
  )
303
-
304
- if not target.namespace.environment.parameters:
369
+ environment_parameters = environment["parameters"]
370
+ if not environment_parameters:
305
371
  continue
372
+ environment_parameters = json.loads(environment_parameters)
306
373
  msg = (
307
- f"[{saas_file.name}/{resource_template.name}] "
374
+ f"[{saas_file_name}/{resource_template_name}] "
308
375
  + "parameter found in target "
309
- + f"{target.namespace.cluster.name}/{target.namespace.name} "
310
- + f"should be reused from env {target.namespace.environment.name}"
376
+ + f"{cluster_name}/{namespace_name} "
377
+ + f"should be reused from env {environment_name}"
311
378
  )
312
- for t_key, t_value in target.parameters.items():
379
+ for t_key, t_value in target_parameters.items():
313
380
  if not isinstance(t_value, str):
314
381
  continue
315
382
  # Check for recursivity. Ex: PARAM: "foo.${PARAM}"
316
383
  replace_pattern = "${" + t_key + "}"
317
384
  if replace_pattern in t_value:
318
385
  logging.error(
319
- f"[{saas_file.name}/{resource_template.name}] "
386
+ f"[{saas_file_name}/{resource_template_name}] "
320
387
  f"recursivity in parameter name and value "
321
388
  f'found: {t_key}: "{t_value}" - this will '
322
389
  f"likely not work as expected. Please consider"
323
390
  f" changing the parameter name"
324
391
  )
325
392
  self.valid = False
326
- for (
327
- e_key,
328
- e_value,
329
- ) in target.namespace.environment.parameters.items():
393
+ for e_key, e_value in environment_parameters.items():
330
394
  if not isinstance(e_value, str):
331
395
  continue
332
396
  if "." not in e_value:
@@ -363,16 +427,17 @@ class SaasHerder:
363
427
 
364
428
  def _get_promotion_pubs_and_subs(
365
429
  self,
366
- rt_ref: RtRef,
367
- promotion: SaasResourceTemplateTargetPromotion,
368
- publications: MutableMapping[str, RtRef],
369
- subscriptions: MutableMapping[str, list[RtRef]],
370
- ) -> None:
430
+ rt_ref: tuple,
431
+ promotion: dict[str, Any],
432
+ publications: MutableMapping[str, tuple],
433
+ subscriptions: MutableMapping[str, list[tuple]],
434
+ ):
371
435
  """
372
436
  Function to gather promotion publish and subscribe configurations
373
437
  It validates a publish channel is unique across all publish targets.
374
438
  """
375
- for channel in promotion.publish or []:
439
+ publish = promotion.get("publish") or []
440
+ for channel in publish:
376
441
  if channel in publications:
377
442
  self.valid = False
378
443
  logging.error(
@@ -382,14 +447,15 @@ class SaasHerder:
382
447
  continue
383
448
  publications[channel] = rt_ref
384
449
 
385
- for channel in promotion.subscribe or []:
450
+ subscribe = promotion.get("subscribe") or []
451
+ for channel in subscribe:
386
452
  subscriptions.setdefault(channel, [])
387
453
  subscriptions[channel].append(rt_ref)
388
454
 
389
455
  def _check_promotions_have_same_source(
390
456
  self,
391
- subscriptions: Mapping[str, list[RtRef]],
392
- publications: Mapping[str, RtRef],
457
+ subscriptions: Mapping[str, list[tuple]],
458
+ publications: Mapping[str, tuple],
393
459
  ) -> None:
394
460
  """
395
461
  Function to check that a promotion has the same repository
@@ -432,20 +498,15 @@ class SaasHerder:
432
498
  )
433
499
  )
434
500
 
435
- def _check_saas_file_env_combo_unique(
436
- self,
437
- saas_file_name: str,
438
- env_name: str,
439
- tkn_unique_pipelineruns: Mapping[str, str],
440
- ) -> tuple[str, str]:
501
+ def _check_saas_file_env_combo_unique(self, saas_file_name, env_name):
441
502
  # max tekton pipelinerun name length can be 63.
442
503
  # leaving 12 for the timestamp leaves us with 51
443
504
  # to create a unique pipelinerun name
444
505
  tkn_long_name = f"{saas_file_name}-{env_name}"
445
506
  tkn_name = tkn_long_name[:UNIQUE_SAAS_FILE_ENV_COMBO_LEN]
446
507
  if (
447
- tkn_name in tkn_unique_pipelineruns
448
- and tkn_unique_pipelineruns[tkn_name] != tkn_long_name
508
+ tkn_name in self.tkn_unique_pipelineruns
509
+ and self.tkn_unique_pipelineruns[tkn_name] != tkn_long_name
449
510
  ):
450
511
  logging.error(
451
512
  f"[{saas_file_name}/{env_name}] "
@@ -455,48 +516,60 @@ class SaasHerder:
455
516
  f"from this long name: {tkn_long_name}"
456
517
  )
457
518
  self.valid = False
458
-
459
- return tkn_name, tkn_long_name
519
+ else:
520
+ self.tkn_unique_pipelineruns[tkn_name] = tkn_long_name
460
521
 
461
522
  def _validate_auto_promotion_used_with_commit_sha(
462
523
  self,
463
524
  saas_file_name: str,
464
525
  resource_template_name: str,
465
- target: SaasResourceTemplateTarget,
466
- ) -> None:
467
- if not target.promotion:
526
+ target: dict,
527
+ ):
528
+ target_promotion = target.get("promotion") or {}
529
+ if not target_promotion:
468
530
  return
469
531
 
470
- if not target.promotion.auto:
532
+ target_auto = target_promotion.get("auto")
533
+ if not target_auto:
471
534
  return
472
535
 
473
- if not is_commit_sha(target.ref):
474
- self.valid = False
475
- logging.error(
476
- f"[{saas_file_name}/{resource_template_name}] "
477
- f"auto promotion should be used with commit sha instead of: {target.ref}"
478
- )
536
+ pattern = r"^[0-9a-f]{40}$"
537
+ ref = target["ref"]
538
+ if re.search(pattern, ref):
539
+ return
540
+
541
+ self.valid = False
542
+ logging.error(
543
+ f"[{saas_file_name}/{resource_template_name}] "
544
+ f"auto promotion should be used with commit sha instead of: {ref}"
545
+ )
479
546
 
480
547
  def _validate_upstream_not_used_with_commit_sha(
481
548
  self,
482
549
  saas_file_name: str,
483
550
  resource_template_name: str,
484
- target: SaasResourceTemplateTarget,
485
- ) -> None:
486
- if target.upstream and is_commit_sha(target.ref):
487
- logging.error(
488
- f"[{saas_file_name}/{resource_template_name}] "
489
- f"upstream used with commit sha: {target.ref}"
490
- )
491
- self.valid = False
551
+ target: dict,
552
+ ):
553
+ upstream = target.get("upstream")
554
+ if upstream:
555
+ pattern = r"^[0-9a-f]{40}$"
556
+ ref = target["ref"]
557
+ if re.search(pattern, ref):
558
+ logging.error(
559
+ f"[{saas_file_name}/{resource_template_name}] "
560
+ f"upstream used with commit sha: {ref}"
561
+ )
562
+ self.valid = False
492
563
 
493
564
  def _validate_upstream_not_used_with_image(
494
565
  self,
495
566
  saas_file_name: str,
496
567
  resource_template_name: str,
497
- target: SaasResourceTemplateTarget,
498
- ) -> None:
499
- if target.image and target.upstream:
568
+ target: dict,
569
+ ):
570
+ upstream = target.get("upstream")
571
+ image = target.get("image")
572
+ if image and upstream:
500
573
  logging.error(
501
574
  f"[{saas_file_name}/{resource_template_name}] "
502
575
  f"image used with upstream"
@@ -507,14 +580,18 @@ class SaasHerder:
507
580
  self,
508
581
  saas_file_name: str,
509
582
  resource_template_name: str,
510
- target: SaasResourceTemplateTarget,
511
- ) -> None:
512
- if target.image and is_commit_sha(target.ref):
513
- logging.error(
514
- f"[{saas_file_name}/{resource_template_name}] "
515
- f"image used with commit sha: {target.ref}"
516
- )
517
- self.valid = False
583
+ target: dict,
584
+ ):
585
+ image = target.get("image")
586
+ if image:
587
+ pattern = r"^[0-9a-f]{40}$"
588
+ ref = target["ref"]
589
+ if re.search(pattern, ref):
590
+ logging.error(
591
+ f"[{saas_file_name}/{resource_template_name}] "
592
+ f"image used with commit sha: {ref}"
593
+ )
594
+ self.valid = False
518
595
 
519
596
  def _validate_image_tag_not_equals_ref(
520
597
  self,
@@ -522,7 +599,7 @@ class SaasHerder:
522
599
  resource_template_name: str,
523
600
  ref: str,
524
601
  parameters: dict,
525
- ) -> None:
602
+ ):
526
603
  image_tag = parameters.get("IMAGE_TAG")
527
604
  if image_tag and str(ref).startswith(str(image_tag)):
528
605
  logging.error(
@@ -554,16 +631,26 @@ class SaasHerder:
554
631
  def validate_upstream_jobs(
555
632
  self,
556
633
  jjb: JJB,
557
- ) -> None:
634
+ ):
558
635
  all_jobs = jjb.get_all_jobs(job_types=["build"])
636
+ pattern = r"^[0-9a-f]{40}$"
559
637
  for sf, rt, t in self:
560
- if is_commit_sha(t.ref):
638
+ sf_name = sf["name"]
639
+ rt_name = rt["name"]
640
+ url = rt["url"]
641
+ ref = t["ref"]
642
+ if re.search(pattern, ref):
561
643
  continue
562
-
563
- if t.upstream:
564
- upstream_job = UpstreamJob(t.upstream.instance.name, t.upstream.name)
644
+ upstream = t.get("upstream")
645
+ if upstream:
646
+ if isinstance(upstream, str):
647
+ # skip v1 saas files
648
+ continue
649
+ upstream_job = UpstreamJob(
650
+ upstream["instance"]["name"], upstream["name"]
651
+ )
565
652
  possible_upstream_jobs = self._get_upstream_jobs(
566
- jjb, all_jobs, rt.url, t.ref
653
+ jjb, all_jobs, url, ref
567
654
  )
568
655
  found_jobs = [
569
656
  j
@@ -573,56 +660,53 @@ class SaasHerder:
573
660
  if found_jobs:
574
661
  if upstream_job not in possible_upstream_jobs:
575
662
  logging.error(
576
- f"[{sf.name}/{rt.name}] upstream job "
663
+ f"[{sf_name}/{rt_name}] upstream job "
577
664
  f"incorrect: {upstream_job}. "
578
665
  f"should be one of: {possible_upstream_jobs}"
579
666
  )
580
667
  self.valid = False
581
668
  else:
582
669
  logging.error(
583
- f"[{sf.name}/{rt.name}] upstream job "
670
+ f"[{sf_name}/{rt_name}] upstream job "
584
671
  f"not found: {upstream_job}. "
585
672
  f"should be one of: {possible_upstream_jobs}"
586
673
  )
587
674
  self.valid = False
588
675
 
589
- def _collect_namespaces(self) -> list[Namespace]:
676
+ def _collect_namespaces(self):
590
677
  # namespaces may appear more then once in the result
591
678
  namespaces = []
592
679
  for saas_file in self.saas_files:
593
- for rt in saas_file.resource_templates:
594
- for target in rt.targets:
595
- if target.disable:
680
+ managed_resource_types = saas_file["managedResourceTypes"]
681
+ resource_templates = saas_file["resourceTemplates"]
682
+ for rt in resource_templates:
683
+ targets = rt["targets"]
684
+ for target in targets:
685
+ namespace = target["namespace"]
686
+ if target.get("disable"):
596
687
  logging.debug(
597
- f"[{saas_file.name}/{rt.name}] target "
598
- + f"{target.namespace.cluster.name} /"
599
- + f"{target.namespace.name} is disabled."
688
+ f"[{saas_file['name']}/{rt['name']}] target "
689
+ + f"{namespace['cluster']['name']}/"
690
+ + f"{namespace['name']} is disabled."
600
691
  )
601
692
  continue
602
-
603
- namespaces.append(
604
- Namespace(
605
- name=target.namespace.name,
606
- environment=target.namespace.environment,
607
- app=target.namespace.app,
608
- cluster=target.namespace.cluster,
609
- # managedResourceTypes is defined per saas_file
610
- # add it to each namespace in the current saas_file
611
- managed_resource_types=saas_file.managed_resource_types,
612
- )
613
- )
693
+ # managedResourceTypes is defined per saas_file
694
+ # add it to each namespace in the current saas_file
695
+ namespace["managedResourceTypes"] = managed_resource_types
696
+ namespaces.append(namespace)
614
697
  return namespaces
615
698
 
616
- def _collect_repo_urls(self) -> set[str]:
617
- return set(
618
- rt.url
619
- for saas_file in self.saas_files
620
- for rt in saas_file.resource_templates
621
- )
699
+ def _collect_repo_urls(self):
700
+ repo_urls = set()
701
+ for saas_file in self.saas_files:
702
+ resource_templates = saas_file["resourceTemplates"]
703
+ for rt in resource_templates:
704
+ repo_urls.add(rt["url"])
705
+ return repo_urls
622
706
 
623
707
  @staticmethod
624
- def _collect_parameters(container: HasParameters) -> dict[str, str]:
625
- parameters = container.parameters or {}
708
+ def _collect_parameters(container):
709
+ parameters = container.get("parameters") or {}
626
710
  if isinstance(parameters, str):
627
711
  parameters = json.loads(parameters)
628
712
  # adjust Python's True/False
@@ -635,22 +719,22 @@ class SaasHerder:
635
719
  parameters[k] = json.dumps(v)
636
720
  return parameters
637
721
 
638
- def _collect_secret_parameters(
639
- self, container: HasSecretParameters
640
- ) -> dict[str, str]:
641
- return {
642
- sp.name: self.secret_reader.read_secret(sp.secret)
643
- for sp in container.secret_parameters or []
644
- }
722
+ def _collect_secret_parameters(self, container):
723
+ parameters = {}
724
+ secret_parameters = container.get("secretParameters") or []
725
+ for sp in secret_parameters:
726
+ name = sp["name"]
727
+ secret = sp["secret"]
728
+ value = self.secret_reader.read(secret)
729
+ parameters[name] = value
730
+
731
+ return parameters
645
732
 
646
733
  @staticmethod
647
- def _get_file_contents_github(repo: Repository, path: str, commit_sha: str) -> str:
734
+ def _get_file_contents_github(repo, path, commit_sha):
648
735
  f = repo.get_contents(path, commit_sha)
649
- if isinstance(f, list):
650
- raise Exception(f"Path {path} and sha {commit_sha} is a directory!")
651
-
652
736
  if f.size < 1024**2: # 1 MB
653
- return f.decoded_content.decode("utf8")
737
+ return f.decoded_content
654
738
 
655
739
  tree = repo.get_git_tree(commit_sha, recursive="/" in path).tree
656
740
  for x in tree:
@@ -659,15 +743,15 @@ class SaasHerder:
659
743
  blob = repo.get_git_blob(x.sha)
660
744
  return base64.b64decode(blob.content).decode("utf8")
661
745
 
662
- return ""
663
-
664
746
  @retry(max_attempts=20)
665
- def _get_file_contents(
666
- self, url: str, path: str, ref: str, github: Github, hash_length: int
667
- ) -> tuple[Any, str, str]:
747
+ def _get_file_contents(self, options):
748
+ url = options["url"]
749
+ path = options["path"]
750
+ ref = options["ref"]
751
+ github = options["github"]
668
752
  html_url = f"{url}/blob/{ref}{path}"
669
- commit_sha = self._get_commit_sha(url, ref, github, hash_length)
670
-
753
+ commit_sha = self._get_commit_sha(options)
754
+ content = None
671
755
  if "github" in url:
672
756
  repo_name = url.rstrip("/").replace("https://github.com/", "")
673
757
  repo = github.get_repo(repo_name)
@@ -678,25 +762,22 @@ class SaasHerder:
678
762
  project = self.gitlab.get_project(url)
679
763
  f = project.files.get(file_path=path.lstrip("/"), ref=commit_sha)
680
764
  content = f.decode()
681
- else:
682
- raise Exception(f"Only GitHub and GitLab are supported: {url}")
683
765
 
684
766
  return yaml.safe_load(content), html_url, commit_sha
685
767
 
686
768
  @retry()
687
- def _get_directory_contents(
688
- self, url: str, path: str, ref: str, github: Github, hash_length: int
689
- ) -> tuple[list[Any], str, str]:
769
+ def _get_directory_contents(self, options):
770
+ url = options["url"]
771
+ path = options["path"]
772
+ ref = options["ref"]
773
+ github = options["github"]
690
774
  html_url = f"{url}/tree/{ref}{path}"
691
- commit_sha = self._get_commit_sha(url, ref, github, hash_length)
775
+ commit_sha = self._get_commit_sha(options)
692
776
  resources = []
693
777
  if "github" in url:
694
778
  repo_name = url.rstrip("/").replace("https://github.com/", "")
695
779
  repo = github.get_repo(repo_name)
696
- directory = repo.get_contents(path, commit_sha)
697
- if isinstance(directory, ContentFile):
698
- raise Exception(f"Path {path} and sha {commit_sha} is a file!")
699
- for f in directory:
780
+ for f in repo.get_contents(path, commit_sha):
700
781
  file_path = os.path.join(path, f.name)
701
782
  file_contents_decoded = self._get_file_contents_github(
702
783
  repo, file_path, commit_sha
@@ -707,23 +788,21 @@ class SaasHerder:
707
788
  if not self.gitlab:
708
789
  raise Exception("gitlab is not initialized")
709
790
  project = self.gitlab.get_project(url)
710
- for item in self.gitlab.get_items(
791
+ for f in self.gitlab.get_items(
711
792
  project.repository_tree, path=path.lstrip("/"), ref=commit_sha
712
793
  ):
713
- file_contents = project.files.get(
714
- file_path=item["path"], ref=commit_sha
715
- )
794
+ file_contents = project.files.get(file_path=f["path"], ref=commit_sha)
716
795
  resource = yaml.safe_load(file_contents.decode())
717
796
  resources.append(resource)
718
- else:
719
- raise Exception(f"Only GitHub and GitLab are supported: {url}")
720
797
 
721
798
  return resources, html_url, commit_sha
722
799
 
723
800
  @retry()
724
- def _get_commit_sha(
725
- self, url: str, ref: str, github: Github, hash_length: Optional[int] = None
726
- ) -> str:
801
+ def _get_commit_sha(self, options):
802
+ url = options["url"]
803
+ ref = options["ref"]
804
+ github = options["github"]
805
+ hash_length = options.get("hash_length")
727
806
  commit_sha = ""
728
807
  if "github" in url:
729
808
  repo_name = url.rstrip("/").replace("https://github.com/", "")
@@ -743,7 +822,13 @@ class SaasHerder:
743
822
  return commit_sha
744
823
 
745
824
  @staticmethod
746
- def _additional_resource_process(resources: Resources, html_url: str) -> None:
825
+ def _get_cluster_and_namespace(target):
826
+ cluster = target["namespace"]["cluster"]["name"]
827
+ namespace = target["namespace"]["name"]
828
+ return cluster, namespace
829
+
830
+ @staticmethod
831
+ def _additional_resource_process(resources, html_url):
747
832
  for resource in resources:
748
833
  # add a definition annotation to each PrometheusRule rule
749
834
  if resource["kind"] == "PrometheusRule":
@@ -762,11 +847,7 @@ class SaasHerder:
762
847
  )
763
848
 
764
849
  @staticmethod
765
- def _parameter_value_needed(
766
- parameter_name: str,
767
- consolidated_parameters: Mapping[str, str],
768
- template: Mapping[str, Any],
769
- ) -> bool:
850
+ def _parameter_value_needed(parameter_name, consolidated_parameters, template):
770
851
  """Is a parameter named in the template but unspecified?
771
852
 
772
853
  NOTE: This is currently "parameter *named* and absent" -- i.e. we
@@ -786,27 +867,28 @@ class SaasHerder:
786
867
  return True
787
868
  return False
788
869
 
789
- def _process_template(
790
- self,
791
- saas_file_name: str,
792
- resource_template_name: str,
793
- image_auth: ImageAuth,
794
- url: str,
795
- path: str,
796
- provider: str,
797
- hash_length: int,
798
- target: SaasResourceTemplateTarget,
799
- parameters: dict[str, str],
800
- github: Github,
801
- target_config_hash: str,
802
- ) -> tuple[list[Any], str, Optional[Promotion]]:
870
+ def _process_template(self, options):
871
+ saas_file_name = options["saas_file_name"]
872
+ resource_template_name = options["resource_template_name"]
873
+ image_auth = options["image_auth"]
874
+ url = options["url"]
875
+ path = options["path"]
876
+ provider = options["provider"]
877
+ target = options["target"]
878
+ github = options["github"]
879
+ target_ref = target["ref"]
880
+ target_promotion = target.get("promotion") or {}
881
+
882
+ resources = None
883
+ html_url = None
884
+ commit_sha = None
885
+
803
886
  if provider == "openshift-template":
804
- environment_parameters = self._collect_parameters(
805
- target.namespace.environment
806
- )
807
- environment_secret_parameters = self._collect_secret_parameters(
808
- target.namespace.environment
809
- )
887
+ hash_length = options["hash_length"]
888
+ parameters = options["parameters"]
889
+ environment = target["namespace"]["environment"]
890
+ environment_parameters = self._collect_parameters(environment)
891
+ environment_secret_parameters = self._collect_secret_parameters(environment)
810
892
  target_parameters = self._collect_parameters(target)
811
893
  target_secret_parameters = self._collect_secret_parameters(target)
812
894
 
@@ -829,23 +911,27 @@ class SaasHerder:
829
911
  replace_pattern, replace_value
830
912
  )
831
913
 
914
+ get_file_contents_options = {
915
+ "url": url,
916
+ "path": path,
917
+ "ref": target_ref,
918
+ "github": github,
919
+ }
920
+
832
921
  try:
833
922
  template, html_url, commit_sha = self._get_file_contents(
834
- url=url,
835
- path=path,
836
- ref=target.ref,
837
- github=github,
838
- hash_length=hash_length,
923
+ get_file_contents_options
839
924
  )
840
925
  except Exception as e:
841
926
  logging.error(
842
- f"[{url}/blob/{target.ref}{path}] "
927
+ f"[{url}/blob/{target_ref}{path}] "
843
928
  + f"error fetching template: {str(e)}"
844
929
  )
845
- raise
930
+ return None, None, None
846
931
 
847
932
  # add IMAGE_TAG only if it is unspecified
848
- if not (image_tag := consolidated_parameters.get("IMAGE_TAG", "")):
933
+ image_tag = consolidated_parameters.get("IMAGE_TAG")
934
+ if not image_tag:
849
935
  sha_substring = commit_sha[:hash_length]
850
936
  # IMAGE_TAG takes one of two forms:
851
937
  # - If saas file attribute 'use_channel_in_image_tag' is true,
@@ -860,7 +946,7 @@ class SaasHerder:
860
946
  + f"{html_url}: CHANNEL is required when "
861
947
  + "'use_channel_in_image_tag' is true."
862
948
  )
863
- raise
949
+ return None, None, None
864
950
  image_tag = f"{channel}-{sha_substring}"
865
951
  else:
866
952
  image_tag = sha_substring
@@ -884,15 +970,10 @@ class SaasHerder:
884
970
  + "Is REGISTRY_IMG missing? "
885
971
  + f"{str(e)}"
886
972
  )
887
- raise
973
+ return None, None, None
888
974
  try:
889
975
  image_uri = f"{registry_image}:{image_tag}"
890
- img = Image(
891
- url=image_uri,
892
- username=image_auth.username,
893
- password=image_auth.password,
894
- auth_server=image_auth.auth_server,
895
- )
976
+ img = Image(image_uri, **image_auth)
896
977
  if need_repo_digest:
897
978
  consolidated_parameters["REPO_DIGEST"] = img.url_digest
898
979
  if need_image_digest:
@@ -903,7 +984,7 @@ class SaasHerder:
903
984
  + f"{html_url}: error generating REPO_DIGEST for "
904
985
  + f"{image_uri}: {str(e)}"
905
986
  )
906
- raise
987
+ return None, None, None
907
988
 
908
989
  oc = OCLocal("cluster", None, None, local=True)
909
990
  try:
@@ -915,20 +996,22 @@ class SaasHerder:
915
996
  )
916
997
 
917
998
  elif provider == "directory":
999
+ get_directory_contents_options = {
1000
+ "url": url,
1001
+ "path": path,
1002
+ "ref": target_ref,
1003
+ "github": github,
1004
+ }
918
1005
  try:
919
1006
  resources, html_url, commit_sha = self._get_directory_contents(
920
- url=url,
921
- path=path,
922
- ref=target.ref,
923
- github=github,
924
- hash_length=hash_length,
1007
+ get_directory_contents_options
925
1008
  )
926
1009
  except Exception as e:
927
1010
  logging.error(
928
- f"[{url}/tree/{target.ref}{path}] "
1011
+ f"[{url}/tree/{target_ref}{path}] "
929
1012
  + f"error fetching directory: {str(e)}"
930
1013
  )
931
- raise
1014
+ return None, None, None
932
1015
 
933
1016
  else:
934
1017
  logging.error(
@@ -936,21 +1019,16 @@ class SaasHerder:
936
1019
  + f"unknown provider: {provider}"
937
1020
  )
938
1021
 
939
- target_promotion = None
940
- if target.promotion:
941
- target_promotion = Promotion(
942
- auto=target.promotion.auto,
943
- publish=target.promotion.publish,
944
- subscribe=target.promotion.subscribe,
945
- promotion_data=target.promotion.promotion_data,
946
- commit_sha=commit_sha,
947
- saas_file_name=saas_file_name,
948
- target_config_hash=target_config_hash,
949
- )
1022
+ target_promotion["commit_sha"] = commit_sha
1023
+ # This target_promotion data is used in publish_promotions
1024
+ if target_promotion.get("publish"):
1025
+ target_promotion["saas_file"] = saas_file_name
1026
+ target_promotion[TARGET_CONFIG_HASH] = options[TARGET_CONFIG_HASH]
1027
+
950
1028
  return resources, html_url, target_promotion
951
1029
 
952
1030
  @staticmethod
953
- def _collect_images(resource: Resource) -> set[str]:
1031
+ def _collect_images(resource):
954
1032
  images = set()
955
1033
  # resources with pod templates
956
1034
  with suppress(KeyError):
@@ -984,12 +1062,7 @@ class SaasHerder:
984
1062
  return images
985
1063
 
986
1064
  @staticmethod
987
- def _check_image(
988
- image: str,
989
- image_patterns: Iterable[str],
990
- image_auth: ImageAuth,
991
- error_prefix: str,
992
- ) -> bool:
1065
+ def _check_image(image, image_patterns, image_auth, error_prefix):
993
1066
  error = False
994
1067
  if not image_patterns:
995
1068
  error = True
@@ -1000,12 +1073,7 @@ class SaasHerder:
1000
1073
  error = True
1001
1074
  logging.error(f"{error_prefix} Image is not in imagePatterns: {image}")
1002
1075
  try:
1003
- valid = Image(
1004
- image,
1005
- username=image_auth.username,
1006
- password=image_auth.password,
1007
- auth_server=image_auth.auth_server,
1008
- )
1076
+ valid = Image(image, **image_auth)
1009
1077
  if not valid:
1010
1078
  error = True
1011
1079
  logging.error(f"{error_prefix} Image does not exist: {image}")
@@ -1017,16 +1085,15 @@ class SaasHerder:
1017
1085
 
1018
1086
  return error
1019
1087
 
1020
- def _check_images(
1021
- self,
1022
- saas_file_name: str,
1023
- resource_template_name: str,
1024
- image_auth: ImageAuth,
1025
- image_patterns: list[str],
1026
- html_url: str,
1027
- resources: Resources,
1028
- ) -> bool:
1088
+ def _check_images(self, options):
1089
+ saas_file_name = options["saas_file_name"]
1090
+ resource_template_name = options["resource_template_name"]
1091
+ html_url = options["html_url"]
1092
+ resources = options["resources"]
1093
+ image_auth = options["image_auth"]
1094
+ image_patterns = options["image_patterns"]
1029
1095
  error_prefix = f"[{saas_file_name}/{resource_template_name}] {html_url}:"
1096
+
1030
1097
  images_list = threaded.run(
1031
1098
  self._collect_images, resources, self.available_thread_pool_size
1032
1099
  )
@@ -1043,12 +1110,13 @@ class SaasHerder:
1043
1110
  )
1044
1111
  return any(errors)
1045
1112
 
1046
- def _initiate_github(self, saas_file: SaasFile) -> Github:
1047
- token = (
1048
- self.secret_reader.read_secret(saas_file.authentication.code)
1049
- if saas_file.authentication and saas_file.authentication.code
1050
- else get_default_config()["token"]
1051
- )
1113
+ def _initiate_github(self, saas_file):
1114
+ auth = saas_file.get("authentication") or {}
1115
+ auth_code = auth.get("code") or {}
1116
+ if auth_code:
1117
+ token = self.secret_reader.read(auth_code)
1118
+ else:
1119
+ token = get_default_config()["token"]
1052
1120
 
1053
1121
  base_url = os.environ.get("GITHUB_API", "https://api.github.com")
1054
1122
  # This is a threaded world. Let's define a big
@@ -1058,42 +1126,47 @@ class SaasHerder:
1058
1126
  pool_size = 100
1059
1127
  return Github(token, base_url=base_url, pool_size=pool_size)
1060
1128
 
1061
- def _initiate_image_auth(self, saas_file: SaasFile) -> ImageAuth:
1129
+ def _initiate_image_auth(self, saas_file):
1062
1130
  """
1063
- This function initiates an ImageAuth class required for image authentication.
1064
- This class will be used as parameters for sretoolbox's Image.
1131
+ This function initiates a dict required for image authentication.
1132
+ This dict will be used as kwargs for sretoolbox's Image.
1065
1133
  The image authentication secret specified in the saas file must
1066
1134
  contain the 'user' and 'token' keys, and may optionally contain
1067
1135
  a 'url' key specifying the image registry url to be passed to check
1068
1136
  if an image should be checked using these credentials.
1069
-
1070
1137
  The function returns the keys extracted from the secret in the
1071
1138
  structure expected by sretoolbox's Image:
1072
1139
  'user' --> 'username'
1073
1140
  'token' --> 'password'
1074
1141
  'url' --> 'auth_server' (optional)
1075
1142
  """
1076
- if not saas_file.authentication or not saas_file.authentication.image:
1077
- return ImageAuth()
1143
+ auth = saas_file.get("authentication")
1144
+ if not auth:
1145
+ return {}
1146
+
1147
+ auth_image_secret = auth.get("image")
1148
+ if not auth_image_secret:
1149
+ return {}
1078
1150
 
1079
- creds = self.secret_reader.read_all_secret(saas_file.authentication.image)
1151
+ creds = self.secret_reader.read_all(auth_image_secret)
1080
1152
  required_keys = ["user", "token"]
1081
1153
  ok = all(k in creds.keys() for k in required_keys)
1082
1154
  if not ok:
1083
1155
  logging.warning(
1084
1156
  "the specified image authentication secret "
1085
- + f"found in path {saas_file.authentication.image.path} "
1157
+ + f"found in path {auth_image_secret['path']} "
1086
1158
  + f"does not contain all required keys: {required_keys}"
1087
1159
  )
1088
- return ImageAuth()
1160
+ return {}
1089
1161
 
1090
- return ImageAuth(
1091
- username=creds["user"],
1092
- password=creds["token"],
1093
- auth_server=creds.get("url"),
1094
- )
1162
+ image_auth = {"username": creds["user"], "password": creds["token"]}
1163
+ url = creds.get("url")
1164
+ if url:
1165
+ image_auth["auth_server"] = url
1166
+
1167
+ return image_auth
1095
1168
 
1096
- def populate_desired_state(self, ri: ResourceInventory) -> None:
1169
+ def populate_desired_state(self, ri):
1097
1170
  results = threaded.run(
1098
1171
  self._init_populate_desired_state_specs,
1099
1172
  self.saas_files,
@@ -1106,22 +1179,27 @@ class SaasHerder:
1106
1179
  self.thread_pool_size,
1107
1180
  ri=ri,
1108
1181
  )
1109
- self.promotions: list[Optional[Promotion]] = promotions
1182
+ self.promotions = promotions
1110
1183
 
1111
- def _init_populate_desired_state_specs(
1112
- self, saas_file: SaasFile
1113
- ) -> list[TargetSpec]:
1184
+ def _init_populate_desired_state_specs(self, saas_file):
1114
1185
  specs = []
1186
+ saas_file_name = saas_file["name"]
1115
1187
  github = self._initiate_github(saas_file)
1116
1188
  image_auth = self._initiate_image_auth(saas_file)
1189
+ managed_resource_types = saas_file["managedResourceTypes"]
1190
+ image_patterns = saas_file["imagePatterns"]
1191
+ resource_templates = saas_file["resourceTemplates"]
1117
1192
  saas_file_parameters = self._collect_parameters(saas_file)
1118
1193
  saas_file_secret_parameters = self._collect_secret_parameters(saas_file)
1119
1194
 
1120
1195
  all_trigger_specs = self.get_saas_targets_config_trigger_specs(saas_file)
1121
1196
  # iterate over resource templates (multiple per saas_file)
1122
- for rt in saas_file.resource_templates:
1123
- provider = rt.provider or "openshift-template"
1124
- hash_length = rt.hash_length or self.hash_length
1197
+ for rt in resource_templates:
1198
+ rt_name = rt["name"]
1199
+ url = rt["url"]
1200
+ path = rt["path"]
1201
+ provider = rt.get("provider") or "openshift-template"
1202
+ hash_length = rt.get("hash_length") or self.settings["hashLength"]
1125
1203
  resource_template_parameters = self._collect_parameters(rt)
1126
1204
  resource_template_secret_parameters = self._collect_secret_parameters(rt)
1127
1205
 
@@ -1132,154 +1210,144 @@ class SaasHerder:
1132
1210
  consolidated_parameters.update(resource_template_secret_parameters)
1133
1211
 
1134
1212
  # Iterate over targets (each target is a namespace).
1135
- for target in rt.targets:
1136
- if target.disable:
1213
+ for target in rt["targets"]:
1214
+ if target.get("disable"):
1137
1215
  # Warning is logged during SaasHerder initiation.
1138
1216
  continue
1139
1217
 
1218
+ cluster = target["namespace"]["cluster"]["name"]
1219
+ namespace = target["namespace"]["name"]
1220
+ env_name = target["namespace"]["environment"]["name"]
1221
+
1140
1222
  state_key = TriggerSpecConfig(
1141
- saas_file_name=saas_file.name,
1142
- env_name=target.namespace.environment.name,
1223
+ saas_file_name=saas_file_name,
1224
+ env_name=env_name,
1143
1225
  timeout=None,
1144
- pipelines_provider=saas_file.pipelines_provider,
1145
- resource_template_name=rt.name,
1146
- cluster_name=target.namespace.cluster.name,
1147
- namespace_name=target.namespace.name,
1148
- target_name=target.name,
1226
+ pipelines_provider=None,
1227
+ resource_template_name=rt_name,
1228
+ cluster_name=cluster,
1229
+ namespace_name=namespace,
1230
+ target_name=target.get("name"),
1149
1231
  state_content=None,
1150
1232
  ).state_key
1151
1233
  digest = SaasHerder.get_target_config_hash(
1152
1234
  all_trigger_specs[state_key].state_content
1153
1235
  )
1154
1236
 
1155
- specs.append(
1156
- TargetSpec(
1157
- saas_file_name=saas_file.name,
1158
- cluster=target.namespace.cluster.name,
1159
- namespace=target.namespace.name,
1160
- managed_resource_types=saas_file.managed_resource_types,
1161
- delete=bool(target.delete),
1162
- privileged=bool(saas_file.cluster_admin),
1163
- # process_template options
1164
- resource_template_name=rt.name,
1165
- image_auth=image_auth,
1166
- url=rt.url,
1167
- path=rt.path,
1168
- provider=provider,
1169
- hash_length=hash_length,
1170
- target=target,
1171
- parameters=consolidated_parameters,
1172
- github=github,
1173
- target_config_hash=digest,
1174
- # check_image options
1175
- image_patterns=saas_file.image_patterns,
1176
- )
1177
- )
1237
+ process_template_options = {
1238
+ "saas_file_name": saas_file_name,
1239
+ "resource_template_name": rt_name,
1240
+ "image_auth": image_auth,
1241
+ "url": url,
1242
+ "path": path,
1243
+ "provider": provider,
1244
+ "hash_length": hash_length,
1245
+ "target": target,
1246
+ "parameters": consolidated_parameters,
1247
+ "github": github,
1248
+ TARGET_CONFIG_HASH: digest,
1249
+ }
1250
+ check_images_options_base = {
1251
+ "saas_file_name": saas_file_name,
1252
+ "resource_template_name": rt_name,
1253
+ "image_auth": image_auth,
1254
+ "image_patterns": image_patterns,
1255
+ }
1256
+ spec = {
1257
+ "saas_file_name": saas_file_name,
1258
+ "cluster": cluster,
1259
+ "namespace": namespace,
1260
+ "managed_resource_types": managed_resource_types,
1261
+ "process_template_options": process_template_options,
1262
+ "check_images_options_base": check_images_options_base,
1263
+ "delete": target.get("delete"),
1264
+ "privileged": saas_file.get("clusterAdmin", False) is True,
1265
+ }
1266
+ specs.append(spec)
1178
1267
 
1179
1268
  return specs
1180
1269
 
1181
- def populate_desired_state_saas_file(
1182
- self, spec: TargetSpec, ri: ResourceInventory
1183
- ) -> Optional[Promotion]:
1184
- if spec.delete:
1270
+ def populate_desired_state_saas_file(self, spec, ri: ResourceInventory):
1271
+ if spec["delete"]:
1185
1272
  # to delete resources, we avoid adding them to the desired state
1186
- return None
1273
+ return
1187
1274
 
1188
- try:
1189
- resources, html_url, promotion = self._process_template(
1190
- saas_file_name=spec.saas_file_name,
1191
- resource_template_name=spec.resource_template_name,
1192
- image_auth=spec.image_auth,
1193
- url=spec.url,
1194
- path=spec.path,
1195
- provider=spec.provider,
1196
- hash_length=spec.hash_length,
1197
- target=spec.target,
1198
- parameters=spec.parameters,
1199
- github=spec.github,
1200
- target_config_hash=spec.target_config_hash,
1201
- )
1202
- except Exception:
1203
- # log message send in _process_template
1204
- ri.register_error()
1205
- return None
1275
+ saas_file_name = spec["saas_file_name"]
1276
+ cluster = spec["cluster"]
1277
+ namespace = spec["namespace"]
1278
+ managed_resource_types = set(spec["managed_resource_types"])
1279
+ process_template_options = spec["process_template_options"]
1280
+ check_images_options_base = spec["check_images_options_base"]
1206
1281
 
1282
+ resources, html_url, promotion = self._process_template(
1283
+ process_template_options
1284
+ )
1285
+ if resources is None:
1286
+ ri.register_error()
1287
+ return
1207
1288
  # filter resources
1208
- rs: Resources = []
1289
+ rs = []
1209
1290
  for r in resources:
1210
1291
  if isinstance(r, dict) and "kind" in r and "apiVersion" in r:
1211
- kind: str = r["kind"]
1212
- kind_and_group = fully_qualified_kind(kind, r["apiVersion"])
1292
+ kind = cast(str, r.get("kind"))
1293
+ kind_and_group = fully_qualified_kind(
1294
+ kind, cast(str, r.get("apiVersion"))
1295
+ )
1213
1296
  if (
1214
- kind in spec.managed_resource_types
1215
- or kind_and_group in spec.managed_resource_types
1297
+ kind in managed_resource_types
1298
+ or kind_and_group in managed_resource_types
1216
1299
  ):
1217
1300
  rs.append(r)
1218
1301
  else:
1219
1302
  logging.info(
1220
- f"Skipping resource of kind {kind} on "
1221
- f"{spec.cluster}/{spec.namespace}"
1303
+ f"Skipping resource of kind {kind} on " f"{cluster}/{namespace}"
1222
1304
  )
1223
1305
  else:
1224
1306
  logging.info(
1225
- "Skipping non-dictionary resource on "
1226
- f"{spec.cluster}/{spec.namespace}"
1307
+ "Skipping non-dictionary resource on " f"{cluster}/{namespace}"
1227
1308
  )
1228
1309
  # additional processing of resources
1229
1310
  resources = rs
1230
1311
  self._additional_resource_process(resources, html_url)
1231
1312
  # check images
1232
- image_error = self._check_images(
1233
- saas_file_name=spec.saas_file_name,
1234
- resource_template_name=spec.resource_template_name,
1235
- image_auth=spec.image_auth,
1236
- image_patterns=spec.image_patterns,
1237
- html_url=html_url,
1238
- resources=resources,
1239
- )
1313
+ check_images_options = {"html_url": html_url, "resources": resources}
1314
+ check_images_options.update(check_images_options_base)
1315
+ image_error = self._check_images(check_images_options)
1240
1316
  if image_error:
1241
1317
  ri.register_error()
1242
- return None
1318
+ return
1243
1319
  # add desired resources
1244
1320
  for resource in resources:
1321
+ resource_kind = resource["kind"]
1322
+ resource_name = resource["metadata"]["name"]
1245
1323
  oc_resource = OR(
1246
1324
  resource,
1247
1325
  self.integration,
1248
1326
  self.integration_version,
1249
- caller_name=spec.saas_file_name,
1327
+ caller_name=saas_file_name,
1250
1328
  error_details=html_url,
1251
1329
  )
1252
1330
  try:
1253
1331
  ri.add_desired_resource(
1254
- spec.cluster,
1255
- spec.namespace,
1332
+ cluster,
1333
+ namespace,
1256
1334
  oc_resource,
1257
- privileged=spec.privileged,
1335
+ privileged=spec["privileged"],
1258
1336
  )
1259
1337
  except ResourceKeyExistsError:
1260
1338
  ri.register_error()
1261
1339
  msg = (
1262
- f"[{spec.cluster}/{spec.namespace}] desired item "
1263
- + f"already exists: {resource['kind']}/{resource['metadata']['name']}. "
1264
- + f"saas file name: {spec.saas_file_name}, "
1340
+ f"[{cluster}/{namespace}] desired item "
1341
+ + f"already exists: {resource_kind}/{resource_name}. "
1342
+ + f"saas file name: {saas_file_name}, "
1265
1343
  + "resource template name: "
1266
- + f"{spec.resource_template_name}."
1344
+ + f"{process_template_options['resource_template_name']}."
1267
1345
  )
1268
1346
  logging.error(msg)
1269
1347
 
1270
1348
  return promotion
1271
1349
 
1272
- def get_diff(
1273
- self, trigger_type: TriggerTypes, dry_run: bool
1274
- ) -> tuple[
1275
- Union[
1276
- list[TriggerSpecConfig],
1277
- list[TriggerSpecMovingCommit],
1278
- list[TriggerSpecUpstreamJob],
1279
- list[TriggerSpecContainerImage],
1280
- ],
1281
- bool,
1282
- ]:
1350
+ def get_diff(self, trigger_type, dry_run):
1283
1351
  if trigger_type == TriggerTypes.MOVING_COMMITS:
1284
1352
  # TODO: replace error with actual error handling when needed
1285
1353
  error = False
@@ -1299,10 +1367,7 @@ class SaasHerder:
1299
1367
  f"saasherder get_diff for trigger type: {trigger_type}"
1300
1368
  )
1301
1369
 
1302
- def update_state(self, trigger_spec: TriggerSpecUnion) -> None:
1303
- if not self.state:
1304
- raise Exception("state is not initialized")
1305
-
1370
+ def update_state(self, trigger_spec: TriggerSpecUnion):
1306
1371
  self.state.add(
1307
1372
  trigger_spec.state_key, value=trigger_spec.state_content, force=True
1308
1373
  )
@@ -1317,40 +1382,44 @@ class SaasHerder:
  return list(itertools.chain.from_iterable(results))

  def get_moving_commits_diff_saas_file(
- self, saas_file: SaasFile, dry_run: bool
+ self, saas_file: dict[str, Any], dry_run: bool
  ) -> list[TriggerSpecMovingCommit]:
+ saas_file_name = saas_file["name"]
+ timeout = saas_file.get("timeout") or None
+ pipelines_provider = self._get_pipelines_provider(saas_file)
  github = self._initiate_github(saas_file)
  trigger_specs: list[TriggerSpecMovingCommit] = []
- for rt in saas_file.resource_templates:
- for target in rt.targets:
+ for rt in saas_file["resourceTemplates"]:
+ rt_name = rt["name"]
+ url = rt["url"]
+ for target in rt["targets"]:
  try:
  # don't trigger if there is a linked upstream job or container image
- if target.upstream or target.image:
+ if target.get("upstream") or target.get("image"):
  continue
-
- desired_commit_sha = self._get_commit_sha(
- url=rt.url, ref=target.ref, github=github
- )
+ ref = target["ref"]
+ get_commit_sha_options = {"url": url, "ref": ref, "github": github}
+ desired_commit_sha = self._get_commit_sha(get_commit_sha_options)
  # don't trigger on refs which are commit shas
- if target.ref == desired_commit_sha:
+ if ref == desired_commit_sha:
  continue
-
+ namespace = target["namespace"]
+ cluster_name = namespace["cluster"]["name"]
+ namespace_name = namespace["name"]
+ env_name = namespace["environment"]["name"]
  trigger_spec = TriggerSpecMovingCommit(
- saas_file_name=saas_file.name,
- env_name=target.namespace.environment.name,
- timeout=saas_file.timeout,
- pipelines_provider=saas_file.pipelines_provider,
- resource_template_name=rt.name,
- cluster_name=target.namespace.cluster.name,
- namespace_name=target.namespace.name,
- ref=target.ref,
+ saas_file_name=saas_file_name,
+ env_name=env_name,
+ timeout=timeout,
+ pipelines_provider=pipelines_provider,
+ resource_template_name=rt_name,
+ cluster_name=cluster_name,
+ namespace_name=namespace_name,
+ ref=ref,
  state_content=desired_commit_sha,
  )
  if self.include_trigger_trace:
- trigger_spec.reason = f"{rt.url}/commit/{desired_commit_sha}"
-
- if not self.state:
- raise Exception("state is not initialized")
+ trigger_spec.reason = f"{url}/commit/{desired_commit_sha}"
  current_commit_sha = self.state.get(trigger_spec.state_key, None)
  # skip if there is no change in commit sha
  if current_commit_sha == desired_commit_sha:
@@ -1368,8 +1437,8 @@ class SaasHerder:
  trigger_specs.append(trigger_spec)
  except (GithubException, GitlabError):
  logging.exception(
- f"Skipping target {saas_file.name}:{rt.name}"
- f" - repo: {rt.url} - ref: {target.ref}"
+ f"Skipping target {saas_file_name}:{rt_name}"
+ f" - repo: {url} - ref: {ref}"
  )
  self._register_error()
  return trigger_specs
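The moving-commit loop above boils down to three checks: targets tied to an upstream job or image are skipped, refs that are already commit shas never "move", and a trigger is only produced when the resolved sha differs from the one in state. A condensed, hypothetical version of that decision (the real code also builds a `TriggerSpecMovingCommit` and records a trigger reason):

```python
# Hypothetical condensation of the checks above; function name and values are made up.
from typing import Optional


def needs_moving_commit_trigger(
    ref: str, desired_commit_sha: str, current_commit_sha: Optional[str]
) -> bool:
    if ref == desired_commit_sha:  # ref is already a pinned sha, nothing moves
        return False
    return current_commit_sha != desired_commit_sha


print(needs_moving_commit_trigger("main", "abc123", "def456"))  # True
print(needs_moving_commit_trigger("main", "abc123", "abc123"))  # False
print(needs_moving_commit_trigger("abc123", "abc123", None))    # False
```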
@@ -1390,9 +1459,6 @@ class SaasHerder:
  def _get_upstream_jobs_current_state(self) -> tuple[dict[str, Any], bool]:
  current_state: dict[str, Any] = {}
  error = False
- if not self.jenkins_map:
- raise Exception("jenkins_map is not initialized")
-
  for instance_name, jenkins in self.jenkins_map.items():
  try:
  current_state[instance_name] = jenkins.get_jobs_state()
@@ -1404,44 +1470,50 @@ class SaasHerder:
  return current_state, error

  def get_upstream_jobs_diff_saas_file(
- self, saas_file: SaasFile, dry_run: bool, current_state: dict[str, Any]
+ self, saas_file: dict[str, Any], dry_run: bool, current_state: dict[str, Any]
  ) -> list[TriggerSpecUpstreamJob]:
+ saas_file_name = saas_file["name"]
+ timeout = saas_file.get("timeout") or None
+ pipelines_provider = self._get_pipelines_provider(saas_file)
  trigger_specs = []
- for rt in saas_file.resource_templates:
- for target in rt.targets:
- if not target.upstream:
+ for rt in saas_file["resourceTemplates"]:
+ rt_name = rt["name"]
+ url = rt["url"]
+ for target in rt["targets"]:
+ upstream = target.get("upstream")
+ if not upstream:
  continue
- job_name = target.upstream.name
- job_history = current_state[target.upstream.instance.name].get(
- job_name, []
- )
+ instance_name = upstream["instance"]["name"]
+ job_name = upstream["name"]
+ job_history = current_state[instance_name].get(job_name, [])
  if not job_history:
  continue
  last_build_result = job_history[0]
-
+ namespace = target["namespace"]
+ cluster_name = namespace["cluster"]["name"]
+ namespace_name = namespace["name"]
+ env_name = namespace["environment"]["name"]
  trigger_spec = TriggerSpecUpstreamJob(
- saas_file_name=saas_file.name,
- env_name=target.namespace.environment.name,
- timeout=saas_file.timeout,
- pipelines_provider=saas_file.pipelines_provider,
- resource_template_name=rt.name,
- cluster_name=target.namespace.cluster.name,
- namespace_name=target.namespace.name,
- instance_name=target.upstream.instance.name,
+ saas_file_name=saas_file_name,
+ env_name=env_name,
+ timeout=timeout,
+ pipelines_provider=pipelines_provider,
+ resource_template_name=rt_name,
+ cluster_name=cluster_name,
+ namespace_name=namespace_name,
+ instance_name=instance_name,
  job_name=job_name,
  state_content=last_build_result,
  )
  last_build_result_number = last_build_result["number"]
  if self.include_trigger_trace:
- trigger_spec.reason = f"{target.upstream.instance.server_url}/job/{job_name}/{last_build_result_number}"
+ trigger_spec.reason = f"{upstream['instance']['serverUrl']}/job/{job_name}/{last_build_result_number}"
  last_build_result_commit_sha = last_build_result.get("commit_sha")
  if last_build_result_commit_sha:
  trigger_spec.reason = (
- f"{rt.url}/commit/{last_build_result_commit_sha} via "
+ f"{url}/commit/{last_build_result_commit_sha} via "
  + trigger_spec.reason
  )
- if not self.state:
- raise Exception("state is not initialized")
  state_build_result = self.state.get(trigger_spec.state_key, None)
  # skip if last_build_result is incomplete or
  # there is no change in job state
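Right after this hunk the newest Jenkins build (`job_history[0]`) is compared with the build stored in state. The sketch below is one plausible reading of that comparison; the notion of an "incomplete" build and the exact fields checked are assumptions beyond what the diff shows:

```python
# Assumed comparison: build numbers decide whether a new trigger is needed.
# "result is None" standing for an unfinished build is an illustrative guess.
from typing import Any, Optional


def upstream_job_changed(
    last_build_result: dict[str, Any], state_build_result: Optional[dict[str, Any]]
) -> bool:
    if last_build_result.get("result") is None:
        return False  # build not finished yet, skip
    if state_build_result is None:
        return True
    return last_build_result["number"] != state_build_result["number"]


print(upstream_job_changed({"number": 42, "result": "SUCCESS"}, {"number": 41, "result": "SUCCESS"}))  # True
print(upstream_job_changed({"number": 43, "result": None}, {"number": 42, "result": "SUCCESS"}))       # False
```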
@@ -1494,51 +1566,62 @@ class SaasHerder:
  return list(itertools.chain.from_iterable(results))

  def get_container_images_diff_saas_file(
- self, saas_file: SaasFile, dry_run: bool
+ self, saas_file: dict[str, Any], dry_run: bool
  ) -> list[TriggerSpecContainerImage]:
  """
  Get a list of trigger specs based on the diff between the
  desired state (git commit) and the current state for a single saas file.
  """
+ saas_file_name = saas_file["name"]
+ timeout = saas_file.get("timeout") or None
+ pipelines_provider = self._get_pipelines_provider(saas_file)
  github = self._initiate_github(saas_file)
  trigger_specs: list[TriggerSpecContainerImage] = []
- for rt in saas_file.resource_templates:
- for target in rt.targets:
+ for rt in saas_file["resourceTemplates"]:
+ rt_name = rt["name"]
+ url = rt["url"]
+ for target in rt["targets"]:
  try:
- if not target.image:
+ image = target.get("image")
+ if not image:
  continue
- desired_image_tag = self._get_commit_sha(
- url=rt.url,
- ref=target.ref,
- github=github,
- hash_length=rt.hash_length or self.hash_length,
- )
+ ref = target["ref"]
+ hash_length = rt.get("hash_length") or self.settings["hashLength"]
+ get_commit_sha_options = {
+ "url": url,
+ "ref": ref,
+ "github": github,
+ "hash_length": hash_length,
+ }
+ desired_image_tag = self._get_commit_sha(get_commit_sha_options)
  # don't trigger if image doesn't exist
- image_registry = f"{target.image.org.instance.url}/{target.image.org.name}/{target.image.name}"
+ image_registry = f"{image['org']['instance']['url']}/{image['org']['name']}/{image['name']}"
  image_uri = f"{image_registry}:{desired_image_tag}"
+ image_patterns = saas_file["imagePatterns"]
  image_auth = self._initiate_image_auth(saas_file)
- error_prefix = f"[{saas_file.name}/{rt.name}] {target.ref}:"
+ error_prefix = f"[{saas_file_name}/{rt_name}] {ref}:"
  error = self._check_image(
- image_uri, saas_file.image_patterns, image_auth, error_prefix
+ image_uri, image_patterns, image_auth, error_prefix
  )
  if error:
  continue
-
+ namespace = target["namespace"]
+ cluster_name = namespace["cluster"]["name"]
+ namespace_name = namespace["name"]
+ env_name = namespace["environment"]["name"]
  trigger_spec = TriggerSpecContainerImage(
- saas_file_name=saas_file.name,
- env_name=target.namespace.environment.name,
- timeout=saas_file.timeout,
- pipelines_provider=saas_file.pipelines_provider,
- resource_template_name=rt.name,
- cluster_name=target.namespace.cluster.name,
- namespace_name=target.namespace.name,
+ saas_file_name=saas_file_name,
+ env_name=env_name,
+ timeout=timeout,
+ pipelines_provider=pipelines_provider,
+ resource_template_name=rt_name,
+ cluster_name=cluster_name,
+ namespace_name=namespace_name,
  image=image_registry,
  state_content=desired_image_tag,
  )
  if self.include_trigger_trace:
  trigger_spec.reason = image_uri
- if not self.state:
- raise Exception("state is not initialized")
  current_image_tag = self.state.get(trigger_spec.state_key, None)
  # skip if there is no change in image tag
  if current_image_tag == desired_image_tag:
@@ -1556,8 +1639,8 @@ class SaasHerder:
  trigger_specs.append(trigger_spec)
  except (GithubException, GitlabError):
  logging.exception(
- f"Skipping target {saas_file.name}:{rt.name}"
- f" - repo: {rt.url} - ref: {target.ref}"
+ f"Skipping target {saas_file_name}:{rt_name}"
+ f" - repo: {url} - ref: {ref}"
  )

  return trigger_specs
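The container-image branch derives the image URI from the target's `image` section plus the shortened commit sha, then runs it through the pattern/auth check. A self-contained illustration of the URI construction; the sample org, registry, and pattern values are made up, and the final line is only a crude stand-in for `_check_image`:

```python
# Illustrative reconstruction of the image URI assembled above; nested keys
# follow the dictionaries used in this hunk, values are invented.
from typing import Any


def build_image_uri(image: dict[str, Any], desired_image_tag: str) -> str:
    image_registry = (
        f"{image['org']['instance']['url']}/{image['org']['name']}/{image['name']}"
    )
    return f"{image_registry}:{desired_image_tag}"


image = {"name": "my-service", "org": {"name": "my-org", "instance": {"url": "quay.io"}}}
image_uri = build_image_uri(image, "abc1234")
print(image_uri)  # quay.io/my-org/my-service:abc1234
print(any(image_uri.startswith(p) for p in ["quay.io/my-org/"]))  # True
```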
@@ -1569,7 +1652,7 @@ class SaasHerder:
  return list(itertools.chain.from_iterable(results))

  @staticmethod
- def remove_none_values(d: Optional[dict[Any, Any]]) -> dict[Any, Any]:
+ def remove_none_values(d):
  if d is None:
  return {}
  new = {}
@@ -1581,14 +1664,11 @@ class SaasHerder:
  return new

  def get_configs_diff_saas_file(
- self, saas_file: SaasFile
+ self, saas_file: dict[str, Any]
  ) -> list[TriggerSpecConfig]:
  all_trigger_specs = self.get_saas_targets_config_trigger_specs(saas_file)
  trigger_specs = []

- if not self.state:
- raise Exception("state is not initialized")
-
  for key, trigger_spec in all_trigger_specs.items():
  current_target_config = self.state.get(key, None)
  # Continue if there are no diffs between configs.
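In the loop that continues below, each trigger spec's state key yields the previously applied target configuration, and specs whose desired config shows no diff are dropped. A compressed illustration with made-up configs; a plain equality check stands in for however the loop actually detects a diff:

```python
# Illustration only: two invented target configs, compared to decide whether
# a TriggerSpecConfig should be kept.
current_target_config = {"ref": "main", "parameters": None}
desired_target_config = {"ref": "main", "parameters": '{"REPLICAS": "3"}'}

keep_trigger = current_target_config != desired_target_config
print(keep_trigger)  # True - the parameters changed, so the target is retriggered
```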
@@ -1602,75 +1682,67 @@ class SaasHerder:
  continue

  if self.include_trigger_trace:
- trigger_spec.reason = f"{self.repo_url}/commit/{RunningState().commit}"
+ trigger_spec.reason = (
+ f"{self.settings['repoUrl']}/commit/{RunningState().commit}"
+ )
  trigger_specs.append(trigger_spec)
  return trigger_specs

  @staticmethod
- def get_target_config_hash(target_config: Any) -> str:
+ def get_target_config_hash(target_config):
  m = hashlib.sha256()
  m.update(json.dumps(target_config, sort_keys=True).encode("utf-8"))
  digest = m.hexdigest()[:16]
  return digest

  def get_saas_targets_config_trigger_specs(
- self, saas_file: SaasFile
+ self, saas_file: dict[str, Any]
  ) -> dict[str, TriggerSpecConfig]:
  configs = {}
- for rt in saas_file.resource_templates:
- for target in rt.targets:
+ saas_file_name = saas_file["name"]
+ saas_file_parameters = saas_file.get("parameters")
+ saas_file_managed_resource_types = saas_file["managedResourceTypes"]
+ for rt in saas_file["resourceTemplates"]:
+ rt_name = rt["name"]
+ url = rt["url"]
+ path = rt["path"]
+ rt_parameters = rt.get("parameters")
+ for v in rt["targets"]:
  # ChainMap will store modifications avoiding a deep copy
- desired_target_config = ChainMap(target.dict(by_alias=True))
+ desired_target_config = ChainMap({}, v)
+ namespace = desired_target_config["namespace"]
+
+ cluster_name = namespace["cluster"]["name"]
+ namespace_name = namespace["name"]
+ env_name = namespace["environment"]["name"]
+
  # This will add the namespace key/value to the chainMap, but
  # the target will remain with the original value
  # When the namespace key is looked up, the chainmap will
- # return the modified attribute (set in the first mapping)
- desired_target_config["namespace"] = self.sanitize_namespace(
- target.namespace
- )
+ # return the modified attribute ( set in the first mapping)
+ desired_target_config["namespace"] = self.sanitize_namespace(namespace)
  # add parent parameters to target config
- # before the GQL classes are introduced, the parameters attribute
- # was a json string. Keep it that way to be backwards compatible.
- desired_target_config["saas_file_parameters"] = (
- json.dumps(saas_file.parameters, separators=(",", ":"))
- if saas_file.parameters is not None
- else None
- )
-
- # before the GQL classes are introduced, the parameters attribute
- # was a json string. Keep it that way to be backwards compatible.
- desired_target_config["parameters"] = (
- json.dumps(target.parameters, separators=(",", ":"))
- if target.parameters is not None
- else None
- )
-
+ desired_target_config["saas_file_parameters"] = saas_file_parameters
  # add managed resource types to target config
  desired_target_config[
  "saas_file_managed_resource_types"
- ] = saas_file.managed_resource_types
- desired_target_config["url"] = rt.url
- desired_target_config["path"] = rt.path
- # before the GQL classes are introduced, the parameters attribute
- # was a json string. Keep it that way to be backwards compatible.
- desired_target_config["rt_parameters"] = (
- json.dumps(rt.parameters, separators=(",", ":"))
- if rt.parameters is not None
- else None
- )
+ ] = saas_file_managed_resource_types
+ desired_target_config["url"] = url
+ desired_target_config["path"] = path
+ desired_target_config["rt_parameters"] = rt_parameters
  # Convert to dict, ChainMap is not JSON serializable
  # desired_target_config needs to be serialized to generate
  # its config hash and to be stored in S3
  serializable_target_config = dict(desired_target_config)
  trigger_spec = TriggerSpecConfig(
- saas_file_name=saas_file.name,
- env_name=target.namespace.environment.name,
- timeout=saas_file.timeout,
- pipelines_provider=saas_file.pipelines_provider,
- resource_template_name=rt.name,
- cluster_name=target.namespace.cluster.name,
- namespace_name=target.namespace.name,
- target_name=target.name,
+ saas_file_name=saas_file_name,
+ env_name=env_name,
+ timeout=saas_file.get("timeout") or None,
+ pipelines_provider=self._get_pipelines_provider(saas_file),
+ resource_template_name=rt_name,
+ cluster_name=cluster_name,
+ namespace_name=namespace_name,
+ target_name=desired_target_config.get("name"),
  state_content=serializable_target_config,
  )
  configs[trigger_spec.state_key] = trigger_spec
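Two ideas carry this hunk: `ChainMap({}, v)` lets the code overlay derived keys without mutating the original target dictionary, and the serialized overlay is hashed the way `get_target_config_hash` shows (sha256 of sorted-key JSON, truncated to 16 hex characters). A self-contained sketch with an invented target:

```python
# Sketch of the ChainMap overlay and the config-hash computation shown above.
import hashlib
import json
from collections import ChainMap

target = {"name": "prod", "namespace": {"name": "app", "cluster": {"name": "c1"}}}
desired_target_config = ChainMap({}, target)
desired_target_config["url"] = "https://github.com/org/repo"  # lands in the first mapping only

print("url" in target)  # False - the original target dict is untouched

serializable_target_config = dict(desired_target_config)  # ChainMap is not JSON serializable
digest = hashlib.sha256(
    json.dumps(serializable_target_config, sort_keys=True).encode("utf-8")
).hexdigest()[:16]
print(digest)  # deterministic for the same config, regardless of key order
```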
@@ -1678,48 +1750,57 @@ class SaasHerder:
  return configs

  @staticmethod
- def sanitize_namespace(
- namespace: SaasResourceTemplateTargetNamespace,
- ) -> dict[str, dict[str, str]]:
+ def _get_pipelines_provider(saas_file: Mapping[str, Any]) -> dict[str, Any]:
+ return saas_file["pipelinesProvider"]
+
+ @staticmethod
+ def sanitize_namespace(namespace):
  """Only keep fields that should trigger a new job."""
- return namespace.dict(
- by_alias=True,
- include={
- "name": True,
- "cluster": {"name": True, "server_url": True},
- "app": {"name": True},
- },
- )
+ new_job_fields = {
+ "namespace": ["name", "cluster", "app"],
+ "cluster": ["name", "serverUrl"],
+ "app": ["name"],
+ }
+ namespace = {
+ k: v for k, v in namespace.items() if k in new_job_fields["namespace"]
+ }
+ cluster = namespace["cluster"]
+ namespace["cluster"] = {
+ k: v for k, v in cluster.items() if k in new_job_fields["cluster"]
+ }
+ app = namespace["app"]
+ namespace["app"] = {k: v for k, v in app.items() if k in new_job_fields["app"]}
+ return namespace

- def validate_promotions(self) -> bool:
+ def validate_promotions(self):
  """
  If there were promotion sections in the participating saas files
  validate that the conditions are met."""
- if not self.state:
- raise Exception("state is not initialized")
-
- for promotion in self.promotions:
- if promotion is None:
+ for item in self.promotions:
+ if item is None:
  continue
  # validate that the commit sha being promoted
  # was successfully published to the subscribed channel(s)
- if promotion.subscribe:
- for channel in promotion.subscribe:
- state_key = f"promotions/{channel}/{promotion.commit_sha}"
+ subscribe = item.get("subscribe")
+ if subscribe:
+ commit_sha = item["commit_sha"]
+ for channel in subscribe:
+ state_key = f"promotions/{channel}/{commit_sha}"
  stateobj = self.state.get(state_key, {})
  success = stateobj.get("success")
  if not success:
  logging.error(
- f"Commit {promotion.commit_sha} was not "
+ f"Commit {commit_sha} was not "
  + f"published with success to channel {channel}"
  )
  return False

  state_config_hash = stateobj.get(TARGET_CONFIG_HASH)
+ promotion_data = item.get("promotion_data", None)

  # This code supports current saas targets that does
  # not have promotion_data yet
- if not state_config_hash or not promotion.promotion_data:
+ if not state_config_hash or not promotion_data:
  logging.info(
  "Promotion data is missing; rely on the success "
  "state only"
@@ -1730,10 +1811,13 @@ class SaasHerder:
  # Just validate parent_saas_config hash
  # promotion_data type by now.
  parent_saas_config = None
- for pd in promotion.promotion_data:
- if pd.channel == channel:
- for data in pd.data or []:
- if isinstance(data, SaasParentSaasPromotion):
+ for pd in promotion_data:
+ pd_channel = pd.get("channel")
+ if pd_channel == channel:
+ channel_data = pd.get("data")
+ for data in channel_data:
+ t = data.get("type")
+ if t == "parent_saas_config":
  parent_saas_config = data

  # This section might not exist due to a manual MR.
@@ -1749,7 +1833,11 @@ class SaasHerder:

  # Validate that the state config_hash set by the parent
  # matches with the hash set in promotion_data
- if parent_saas_config.target_config_hash == state_config_hash:
+ promotion_target_config_hash = parent_saas_config.get(
+ TARGET_CONFIG_HASH
+ )
+
+ if promotion_target_config_hash == state_config_hash:
  return True

  logging.error(
@@ -1760,13 +1848,7 @@ class SaasHerder:
  return False
  return True

- def publish_promotions(
- self,
- success: bool,
- all_saas_files: Iterable[SaasFile],
- mr_cli: MRClient,
- auto_promote: bool = False,
- ) -> None:
+ def publish_promotions(self, success, all_saas_files, mr_cli, auto_promote=False):
  """
  If there were promotion sections in the participating saas file
  publish the results for future promotion validations."""
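`publish_promotions` writes one state entry per published channel under `promotions/{channel}/{commit_sha}`, and `validate_promotions` above reads the same key to gate the subscribing deployment. A minimal sketch of that round trip, with an ordinary dict standing in for the real state store and every value invented:

```python
# Sketch of the promotion gate: key layout and the success /
# target_config_hash fields follow the diff; the storage is a plain dict.
state: dict[str, dict] = {}

# publisher side (publish_promotions)
channel, commit_sha = "app-prod", "abc123"
state[f"promotions/{channel}/{commit_sha}"] = {
    "success": True,
    "saas_file": "example-saas",
    "target_config_hash": "0123456789abcdef",
}

# subscriber side (validate_promotions)
stateobj = state.get(f"promotions/{channel}/{commit_sha}", {})
promotion_ok = bool(stateobj.get("success"))
print(promotion_ok)  # True - the commit was published successfully to the channel
```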
@@ -1782,27 +1864,25 @@ class SaasHerder:
  "happen if the current stage does not make any change"
  )

- if not self.state:
- raise Exception("state is not initialized")
-
- for promotion in self.promotions:
- if promotion is None:
+ for item in self.promotions:
+ if item is None:
  continue
-
- if promotion.publish:
+ commit_sha = item["commit_sha"]
+ publish = item.get("publish")
+ if publish:
  value = {
  "success": success,
- "saas_file": promotion.saas_file_name,
- "target_config_hash": promotion.target_config_hash,
+ "saas_file": item["saas_file"],
+ TARGET_CONFIG_HASH: item.get(TARGET_CONFIG_HASH),
  }
  all_subscribed_saas_file_paths = set()
  all_subscribed_target_paths = set()
- for channel in promotion.publish:
+ for channel in publish:
  # publish to state to pass promotion gate
- state_key = f"promotions/{channel}/{promotion.commit_sha}"
+ state_key = f"promotions/{channel}/{commit_sha}"
  self.state.add(state_key, value, force=True)
  logging.info(
- f"Commit {promotion.commit_sha} was published "
+ f"Commit {commit_sha} was published "
  + f"with success {success} to channel {channel}"
  )
  # collect data to trigger promotion
@@ -1819,8 +1899,8 @@ class SaasHerder:
  if subscribed_target_paths:
  all_subscribed_target_paths.update(subscribed_target_paths)

- promotion.saas_file_paths = list(all_subscribed_saas_file_paths)
- promotion.target_paths = list(all_subscribed_target_paths)
+ item["saas_file_paths"] = list(all_subscribed_saas_file_paths)
+ item["target_paths"] = list(all_subscribed_target_paths)

  if auto_promote and (
  all_subscribed_saas_file_paths or all_subscribed_target_paths
@@ -1828,16 +1908,12 @@ class SaasHerder:
  trigger_promotion = True

  if success and trigger_promotion:
- from reconcile.utils.mr.auto_promoter import (
- AutoPromoter, # avoid circular import
- )
-
- mr = AutoPromoter([p for p in self.promotions if p is not None])
+ mr = AutoPromoter(self.promotions)
  mr.submit(cli=mr_cli)

  @staticmethod
  def _get_subscribe_path_map(
- saas_files: Iterable[SaasFile], auto_only: bool = False
+ saas_files: Iterable[Mapping[str, Any]], auto_only: bool = False
  ) -> tuple[dict[str, set[str]], dict[str, set[str]]]:
  """
  Returns dicts with subscribe channels as keys and a
@@ -1846,23 +1922,27 @@ class SaasHerder:
  subscribe_saas_file_path_map: dict[str, set[str]] = {}
  subscribe_target_path_map: dict[str, set[str]] = {}
  for saas_file in saas_files:
- saas_file_path = "data" + saas_file.path
- for rt in saas_file.resource_templates:
- for target in rt.targets:
- if not target.promotion:
+ saas_file_path = "data" + saas_file["path"]
+ for rt in saas_file["resourceTemplates"]:
+ for target in rt["targets"]:
+ target_promotion = target.get("promotion")
+ if not target_promotion:
  continue
- if auto_only and not target.promotion.auto:
+ target_auto = target_promotion.get("auto")
+ if auto_only and not target_auto:
  continue
- if not target.promotion.subscribe:
+ subscribe = target_promotion.get("subscribe")
+ if not subscribe:
  continue
  # targets with a path are referenced and not inlined
- if target.path:
- target.path = "data" + target.path
- for channel in target.promotion.subscribe:
+ target_path = target.get("path")
+ if target_path:
+ target_path = "data" + target_path
+ for channel in subscribe:
  subscribe_saas_file_path_map.setdefault(channel, set())
  subscribe_saas_file_path_map[channel].add(saas_file_path)
- if target.path:
+ if target_path:
  subscribe_target_path_map.setdefault(channel, set())
- subscribe_target_path_map[channel].add(target.path)
+ subscribe_target_path_map[channel].add(target_path)

  return subscribe_saas_file_path_map, subscribe_target_path_map
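For reference, the mapping assembled by `_get_subscribe_path_map` can be reproduced with a trimmed, made-up saas file; the dictionary keys mirror the access in the hunk above, while the file paths and channel name are illustrative:

```python
# Standalone reproduction of the channel -> path maps built above.
saas_files = [
    {
        "path": "/services/app/deploy.yml",
        "resourceTemplates": [
            {
                "targets": [
                    {
                        "promotion": {"auto": True, "subscribe": ["app-prod"]},
                        "path": "/services/app/prod-target.yml",
                    }
                ]
            }
        ],
    }
]

subscribe_saas_file_path_map: dict[str, set[str]] = {}
subscribe_target_path_map: dict[str, set[str]] = {}
for saas_file in saas_files:
    saas_file_path = "data" + saas_file["path"]
    for rt in saas_file["resourceTemplates"]:
        for target in rt["targets"]:
            promotion = target.get("promotion") or {}
            for channel in promotion.get("subscribe") or []:
                subscribe_saas_file_path_map.setdefault(channel, set()).add(saas_file_path)
                if target.get("path"):
                    subscribe_target_path_map.setdefault(channel, set()).add("data" + target["path"])

print(subscribe_saas_file_path_map)  # {'app-prod': {'data/services/app/deploy.yml'}}
print(subscribe_target_path_map)     # {'app-prod': {'data/services/app/prod-target.yml'}}
```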