dbt-platform-helper 13.4.1__py3-none-any.whl → 14.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbt-platform-helper might be problematic.

Files changed (46)
  1. dbt_platform_helper/COMMANDS.md +26 -57
  2. dbt_platform_helper/commands/config.py +9 -0
  3. dbt_platform_helper/commands/environment.py +3 -7
  4. dbt_platform_helper/commands/notify.py +24 -77
  5. dbt_platform_helper/commands/pipeline.py +6 -12
  6. dbt_platform_helper/commands/secrets.py +1 -1
  7. dbt_platform_helper/constants.py +7 -5
  8. dbt_platform_helper/domain/codebase.py +0 -5
  9. dbt_platform_helper/domain/config.py +16 -9
  10. dbt_platform_helper/domain/copilot_environment.py +3 -3
  11. dbt_platform_helper/domain/database_copy.py +1 -1
  12. dbt_platform_helper/domain/maintenance_page.py +3 -3
  13. dbt_platform_helper/domain/notify.py +64 -0
  14. dbt_platform_helper/domain/pipelines.py +20 -16
  15. dbt_platform_helper/domain/terraform_environment.py +18 -11
  16. dbt_platform_helper/domain/versioning.py +18 -78
  17. dbt_platform_helper/providers/aws/exceptions.py +1 -1
  18. dbt_platform_helper/providers/cloudformation.py +1 -1
  19. dbt_platform_helper/providers/config.py +119 -17
  20. dbt_platform_helper/providers/config_validator.py +4 -31
  21. dbt_platform_helper/providers/copilot.py +3 -3
  22. dbt_platform_helper/providers/io.py +1 -1
  23. dbt_platform_helper/providers/load_balancers.py +6 -6
  24. dbt_platform_helper/providers/platform_config_schema.py +24 -29
  25. dbt_platform_helper/providers/schema_migrations/__init__.py +0 -0
  26. dbt_platform_helper/providers/schema_migrations/schema_v0_to_v1_migration.py +43 -0
  27. dbt_platform_helper/providers/schema_migrator.py +77 -0
  28. dbt_platform_helper/providers/secrets.py +5 -5
  29. dbt_platform_helper/providers/semantic_version.py +6 -1
  30. dbt_platform_helper/providers/slack_channel_notifier.py +62 -0
  31. dbt_platform_helper/providers/terraform_manifest.py +8 -10
  32. dbt_platform_helper/providers/version.py +1 -18
  33. dbt_platform_helper/providers/version_status.py +8 -61
  34. dbt_platform_helper/providers/yaml_file.py +23 -1
  35. dbt_platform_helper/templates/environment-pipelines/main.tf +1 -1
  36. dbt_platform_helper/utils/application.py +1 -1
  37. dbt_platform_helper/utils/aws.py +3 -3
  38. dbt_platform_helper/utils/git.py +0 -15
  39. {dbt_platform_helper-13.4.1.dist-info → dbt_platform_helper-14.1.0.dist-info}/METADATA +5 -4
  40. {dbt_platform_helper-13.4.1.dist-info → dbt_platform_helper-14.1.0.dist-info}/RECORD +44 -41
  41. platform_helper.py +0 -2
  42. dbt_platform_helper/commands/version.py +0 -37
  43. dbt_platform_helper/utils/tool_versioning.py +0 -12
  44. {dbt_platform_helper-13.4.1.dist-info → dbt_platform_helper-14.1.0.dist-info}/LICENSE +0 -0
  45. {dbt_platform_helper-13.4.1.dist-info → dbt_platform_helper-14.1.0.dist-info}/WHEEL +0 -0
  46. {dbt_platform_helper-13.4.1.dist-info → dbt_platform_helper-14.1.0.dist-info}/entry_points.txt +0 -0
@@ -48,7 +48,7 @@ class LoadBalancerProvider:
  ResourceTypeFilters=[
  "elasticloadbalancing:targetgroup",
  ],
- ) # TODO should be paginated
+ ) # TODO: DBTP-1942: should be paginated
  for resource in response["ResourceTagMappingList"]:
  tags = {tag["Key"]: tag["Value"] for tag in resource["Tags"]}

@@ -73,7 +73,7 @@ class LoadBalancerProvider:
  listener_arn = self.get_https_listener_for_application(app, env)
  certificates = self.evlb_client.describe_listener_certificates(ListenerArn=listener_arn)[
  "Certificates"
- ] # TODO should be paginated
+ ] # TODO: DBTP-1942: should be paginated

  try:
  certificate_arn = next(c["CertificateArn"] for c in certificates if c["IsDefault"])
@@ -87,7 +87,7 @@ class LoadBalancerProvider:

  listeners = self.evlb_client.describe_listeners(LoadBalancerArn=load_balancer_arn)[
  "Listeners"
- ] # TODO should be paginated
+ ] # TODO: DBTP-1942: should be paginated

  listener_arn = None

@@ -114,7 +114,7 @@ class LoadBalancerProvider:

  for lb in tag_descriptions:
  tags = {t["Key"]: t["Value"] for t in lb["Tags"]}
- # TODO copilot hangover, creates coupling to specific tags could update to check application and environment
+ # TODO: DBTP-1967: copilot hangover, creates coupling to specific tags could update to check application and environment
  if tags.get("copilot-application") == app and tags.get("copilot-environment") == env:
  return lb["ResourceArn"]

@@ -123,7 +123,7 @@ class LoadBalancerProvider:
  def get_host_header_conditions(self, listener_arn: str, target_group_arn: str) -> list:
  rules = self.evlb_client.describe_rules(ListenerArn=listener_arn)[
  "Rules"
- ] # TODO should be paginated
+ ] # TODO: DBTP-1942: should be paginated

  conditions = []

@@ -152,7 +152,7 @@ class LoadBalancerProvider:
  def get_rules_tag_descriptions_by_listener_arn(self, listener_arn: str) -> list:
  rules = self.evlb_client.describe_rules(ListenerArn=listener_arn)[
  "Rules"
- ] # TODO should be paginated
+ ] # TODO: DBTP-1942: should be paginated
  return self.get_rules_tag_descriptions(rules)

  def get_rules_tag_descriptions(self, rules: list) -> list:
@@ -8,15 +8,18 @@ from schema import Regex
  from schema import Schema
  from schema import SchemaError

+ from dbt_platform_helper.constants import PLATFORM_CONFIG_SCHEMA_VERSION
+

  class PlatformConfigSchema:
  @staticmethod
  def schema() -> Schema:
  return Schema(
  {
+ "schema_version": PLATFORM_CONFIG_SCHEMA_VERSION,
  "application": str,
  Optional("deploy_repository"): str,
- Optional("default_versions"): PlatformConfigSchema.__default_versions_schema(),
+ "default_versions": PlatformConfigSchema.__default_versions_schema(),
  Optional("environments"): PlatformConfigSchema.__environments_schema(),
  Optional("codebase_pipelines"): PlatformConfigSchema.__codebase_pipelines_schema(),
  Optional(
@@ -55,7 +58,7 @@ class PlatformConfigSchema:
  "subscription-filter": PlatformConfigSchema.__no_configuration_required_schema(
  "subscription-filter"
  ),
- # Todo: The next three are no longer relevant. Remove them.
+ # TODO: DBTP-1943: The next three are no longer relevant. Remove them.
  "monitoring": Schema(PlatformConfigSchema.__monitoring_schema()),
  "vpc": PlatformConfigSchema.__no_configuration_required_schema("vpc"),
  "xray": PlatformConfigSchema.__no_configuration_required_schema("xray"),
@@ -166,16 +169,11 @@ class PlatformConfigSchema:
  @staticmethod
  def __default_versions_schema() -> dict:
  return {
- Optional("terraform-platform-modules"): str,
- Optional("platform-helper"): str,
+ "platform-helper": str,
  }

  @staticmethod
  def __environments_schema() -> dict:
- _valid_environment_specific_version_overrides = {
- Optional("terraform-platform-modules"): str,
- }
-
  return {
  str: Or(
  None,
@@ -190,9 +188,8 @@ class PlatformConfigSchema:
  "id": str,
  },
  },
- # Todo: requires_approval is no longer relevant since we don't have AWS Copilot manage environment pipelines
+ # TODO: DBTP-1943: requires_approval is no longer relevant since we don't have AWS Copilot manage environment pipelines
  Optional("requires_approval"): bool,
- Optional("versions"): _valid_environment_specific_version_overrides,
  Optional("vpc"): str,
  },
  )
@@ -250,7 +247,7 @@ class PlatformConfigSchema:

  @staticmethod
  def __opensearch_schema() -> dict:
- # Todo: Move to OpenSearch provider?
+ # TODO: DBTP-1943: Move to OpenSearch provider?
  _valid_opensearch_plans = Or(
  "tiny",
  "small",
@@ -288,7 +285,7 @@ class PlatformConfigSchema:

  @staticmethod
  def __postgres_schema() -> dict:
- # Todo: Move to Postgres provider?
+ # TODO: DBTP-1943: Move to Postgres provider?
  _valid_postgres_plans = Or(
  "tiny",
  "small",
@@ -311,14 +308,12 @@ class PlatformConfigSchema:
  "4x-large-high-io",
  )

- # Todo: Move to Postgres provider?
+ # TODO: DBTP-1943: Move to Postgres provider?
  _valid_postgres_storage_types = Or("gp2", "gp3", "io1", "io2")

  _valid_postgres_database_copy = {
  "from": PlatformConfigSchema.__valid_environment_name(),
  "to": PlatformConfigSchema.__valid_environment_name(),
- Optional("from_account"): str,
- Optional("to_account"): str,
  Optional("pipeline"): {Optional("schedule"): str},
  }

@@ -366,7 +361,7 @@ class PlatformConfigSchema:

  @staticmethod
  def __redis_schema() -> dict:
- # Todo move to Redis provider?
+ # TODO: DBTP-1943: move to Redis provider?
  _valid_redis_plans = Or(
  "micro",
  "micro-ha",
@@ -400,7 +395,7 @@ class PlatformConfigSchema:

  @staticmethod
  def valid_s3_bucket_name(name: str):
- # Todo: This is a public method becasue that's what the test expect. Perhaps it belongs in an S3 provider?
+ # TODO: DBTP-1943: This is a public method becasue that's what the test expect. Perhaps it belongs in an S3 provider?
  errors = []
  if not (2 < len(name) < 64):
  errors.append("Length must be between 3 and 63 characters inclusive.")
@@ -429,9 +424,9 @@ class PlatformConfigSchema:
  errors.append(f"Names cannot be suffixed '{suffix}'.")

  if errors:
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(
- "Bucket name '{}' is invalid:\n{}".format(name, "\n".join(f" {e}" for e in errors))
+ f"Bucket name '{name}' is invalid:\n" + "\n".join(f" {e}" for e in errors)
  )

  return True
@@ -557,10 +552,10 @@ class PlatformConfigSchema:

  @staticmethod
  def string_matching_regex(regex_pattern: str) -> Callable:
- # Todo public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
+ # TODO: DBTP-1943: public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
  def validate(string):
  if not re.match(regex_pattern, string):
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(
  f"String '{string}' does not match the required pattern '{regex_pattern}'."
  )
@@ -570,11 +565,11 @@ class PlatformConfigSchema:

  @staticmethod
  def is_integer_between(lower_limit, upper_limit) -> Callable:
- # Todo public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
+ # TODO: DBTP-1943: public for the unit tests, not sure about testing what could be a private method. Perhaps it's covered by other tests anyway?
  def validate(value):
  if isinstance(value, int) and lower_limit <= value <= upper_limit:
  return True
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(f"should be an integer between {lower_limit} and {upper_limit}")

  return validate
@@ -588,7 +583,7 @@ class PlatformConfigSchema:

  @staticmethod
  def __valid_branch_name() -> Callable:
- # Todo: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
+ # TODO: DBTP-1943: Make this actually validate a git branch name properly; https://git-scm.com/docs/git-check-ref-format
  return PlatformConfigSchema.string_matching_regex(r"^((?!\*).)*(\*)?$")

  @staticmethod
@@ -634,10 +629,10 @@ class PlatformConfigSchema:


  class ConditionalOpensSearchSchema(Schema):
- # Todo: Move to OpenSearch provider?
+ # TODO: DBTP-1943: Move to OpenSearch provider?
  _valid_opensearch_min_volume_size: int = 10

- # Todo: Move to OpenSearch provider?
+ # TODO: DBTP-1943: Move to OpenSearch provider?
  _valid_opensearch_max_volume_size: dict = {
  "tiny": 100,
  "small": 200,
@@ -671,11 +666,11 @@ class ConditionalOpensSearchSchema(Schema):

  if volume_size:
  if not plan:
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(f"Missing key: 'plan'")

  if volume_size < self._valid_opensearch_min_volume_size:
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(
  f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer greater than {self._valid_opensearch_min_volume_size}"
  )
@@ -685,7 +680,7 @@ class ConditionalOpensSearchSchema(Schema):
  plan == key
  and not volume_size <= self._valid_opensearch_max_volume_size[key]
  ):
- # Todo: Raise suitable PlatformException?
+ # TODO: DBTP-1943: Raise suitable PlatformException?
  raise SchemaError(
  f"Key 'environments' error: Key '{env}' error: Key 'volume_size' error: should be an integer between {self._valid_opensearch_min_volume_size} and {self._valid_opensearch_max_volume_size[key]} for plan {plan}"
  )
@@ -0,0 +1,43 @@
+ from copy import deepcopy
+
+
+ class SchemaV0ToV1Migration:
+     def from_version(self) -> int:
+         return 0
+
+     def migrate(self, platform_config: dict) -> dict:
+         migrated_config = deepcopy(platform_config)
+
+         self._remove_terraform_platform_modules_default_version(migrated_config)
+         self._remove_versions_from_env_config(migrated_config)
+         self._remove_to_account_and_from_account_from_database_copy(migrated_config)
+         self._remove_pipeline_platform_helper_override(migrated_config)
+
+         return migrated_config
+
+     def _remove_versions_from_env_config(self, migrated_config: dict) -> None:
+         for env_name, env in migrated_config.get("environments", {}).items():
+             if env and "versions" in env:
+                 del env["versions"]
+
+     def _remove_terraform_platform_modules_default_version(self, migrated_config: dict) -> None:
+         if "default_versions" in migrated_config:
+             default_versions = migrated_config["default_versions"]
+             if "terraform-platform-modules" in default_versions:
+                 del default_versions["terraform-platform-modules"]
+
+     def _remove_to_account_and_from_account_from_database_copy(self, migrated_config: dict) -> None:
+         for extension_name, extension in migrated_config.get("extensions", {}).items():
+             if extension.get("type") == "postgres" and "database_copy" in extension:
+                 for database_copy_block in extension["database_copy"]:
+                     if "from_account" in database_copy_block:
+                         del database_copy_block["from_account"]
+                     if "to_account" in database_copy_block:
+                         del database_copy_block["to_account"]
+
+     def _remove_pipeline_platform_helper_override(self, migrated_config: dict) -> None:
+         for pipeline_name, pipeline_config in migrated_config.get(
+             "environment_pipelines", {}
+         ).items():
+             if "versions" in pipeline_config:
+                 del pipeline_config["versions"]
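
For orientation only (not part of the diff): a minimal sketch of what this new v0-to-v1 migration does to a hypothetical pre-14 platform config dict. All values below are invented for illustration; it assumes dbt-platform-helper 14.x is installed.

from dbt_platform_helper.providers.schema_migrations.schema_v0_to_v1_migration import (
    SchemaV0ToV1Migration,
)

# Hypothetical v0-style config containing the deprecated keys this migration strips out.
old_config = {
    "application": "demo",
    "default_versions": {"platform-helper": "13.4.1", "terraform-platform-modules": "5.0.0"},
    "environments": {"dev": {"versions": {"terraform-platform-modules": "5.0.0"}}},
    "extensions": {
        "demo-db": {
            "type": "postgres",
            "database_copy": [
                {"from": "prod", "to": "dev", "from_account": "111111111111", "to_account": "222222222222"}
            ],
        }
    },
}

new_config = SchemaV0ToV1Migration().migrate(old_config)
# new_config no longer contains default_versions["terraform-platform-modules"],
# the per-environment "versions" block, or the database_copy from_account/to_account
# keys; old_config is left untouched because migrate() works on a deepcopy.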
@@ -0,0 +1,77 @@
+ from collections import Counter
+ from collections import OrderedDict
+ from copy import deepcopy
+ from typing import Protocol
+
+ from dbt_platform_helper.platform_exception import PlatformException
+ from dbt_platform_helper.providers.io import ClickIOProvider
+ from dbt_platform_helper.providers.schema_migrations.schema_v0_to_v1_migration import (
+     SchemaV0ToV1Migration,
+ )
+ from dbt_platform_helper.providers.version import InstalledVersionProvider
+
+
+ class InvalidMigrationConfigurationException(PlatformException):
+     pass
+
+
+ class SchemaMigrationProtocol(Protocol):
+     def from_version(self) -> int: ...
+
+     def migrate(self, platform_config: dict) -> dict: ...
+
+
+ # TODO: Possibly get this programmatically?
+ ALL_MIGRATIONS = [SchemaV0ToV1Migration()]
+
+
+ class Migrator:
+     def __init__(
+         self,
+         migrations: list[SchemaMigrationProtocol],
+         installed_version_provider: InstalledVersionProvider = InstalledVersionProvider,
+         io_provider: ClickIOProvider = ClickIOProvider(),
+     ):
+         self.migrations = sorted(migrations, key=lambda m: m.from_version())
+         self.installed_version_provider = installed_version_provider
+         self.io_provider = io_provider
+         from_version_counts = Counter([migration.from_version() for migration in self.migrations])
+         duplicate_from_versions = [count for count in from_version_counts.values() if count > 1]
+
+         if duplicate_from_versions:
+             raise InvalidMigrationConfigurationException(
+                 "`from_version` parameters must be unique amongst migrations"
+             )
+
+     def migrate(self, platform_config: dict) -> dict:
+         out = OrderedDict(deepcopy(platform_config))
+         if "schema_version" not in out:
+             out["schema_version"] = 0
+
+         if "default_versions" in out:
+             out.move_to_end("default_versions", last=False)
+         if "schema_version" in out:
+             out.move_to_end("schema_version", last=False)
+         if "application" in out:
+             out.move_to_end("application", last=False)
+
+         for migration in self.migrations:
+             migration_can_be_applied = migration.from_version() == out["schema_version"]
+             if migration_can_be_applied:
+                 out = migration.migrate(out)
+                 schema_version = out["schema_version"]
+                 self.io_provider.info(
+                     f"Migrating from platform config schema version {schema_version} to version {schema_version + 1}"
+                 )
+                 out["schema_version"] += 1
+
+         if "default_versions" not in out:
+             out["default_versions"] = {}
+
+         out["default_versions"]["platform-helper"] = str(
+             self.installed_version_provider.get_semantic_version("dbt-platform-helper")
+         )
+
+         self.io_provider.info("\nMigration complete")
+
+         return dict(out)
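
Again for orientation only: a sketch of how the new Migrator might be driven, assuming dbt-platform-helper 14.x is installed (the final step stamps the installed version into default_versions).

from dbt_platform_helper.providers.schema_migrator import ALL_MIGRATIONS, Migrator

# A config without a schema_version key is treated as schema version 0.
migrated = Migrator(ALL_MIGRATIONS).migrate({"application": "demo"})

# Applicable migrations run in from_version order, schema_version is bumped after
# each one, and default_versions["platform-helper"] is pinned to the installed version.
assert migrated["schema_version"] == 1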
@@ -41,7 +41,7 @@ class Secrets:

  raise SecretNotFoundException(secret_name)

- # Todo: This probably does not belong in the secrets provider. When it moves, take the Todoed exceptions from below
+ # TODO: DBTP-1946: This probably does not belong in the secrets provider. When it moves, take the Todoed exceptions from below
  def get_addon_type(self, addon_name: str) -> str:
  addon_type = None
  try:
@@ -82,18 +82,18 @@ class Secrets:
  return addon_name.replace("-", "_").upper()


- # Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
+ # TODO: DBTP-1946: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
  class AddonException(PlatformException):
  pass


- # Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
+ # TODO: DBTP-1946: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
  class AddonNotFoundException(AddonException):
  def __init__(self, addon_name: str):
  super().__init__(f"""Addon "{addon_name}" does not exist.""")


- # Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
+ # TODO: DBTP-1946: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
  class AddonTypeMissingFromConfigException(AddonException):
  def __init__(self, addon_name: str):
  super().__init__(
@@ -101,7 +101,7 @@ class AddonTypeMissingFromConfigException(AddonException):
  )


- # Todo: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
+ # TODO: DBTP-1946: This probably does not belong in the secrets provider. Move it when we find a better home for get_addon_type()
  class InvalidAddonTypeException(AddonException):
  def __init__(self, addon_type):
  self.addon_type = addon_type
@@ -19,7 +19,7 @@ class IncompatibleMinorVersionException(ValidationException):


  class SemanticVersion:
- def __init__(self, major, minor, patch):
+ def __init__(self, major: int, minor: int, patch: int):
  self.major = major
  self.minor = minor
  self.patch = patch
@@ -74,3 +74,8 @@ class SemanticVersion:
  major, minor, patch = [self._cast_to_int_with_fallback(s) for s in version_segments]

  return SemanticVersion(major, minor, patch)
+
+ @staticmethod
+ def is_semantic_version(version_string):
+     valid_semantic_string_regex = r"(?i)^v?[0-9]+[.-][0-9]+[.-][0-9]+$"
+     return re.match(valid_semantic_string_regex, version_string)
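
A small note on the new helper, with illustrative calls that are not part of the diff: it returns a re.Match object or None rather than a bool, and the pattern accepts an optional leading "v" with either "." or "-" separators.

from dbt_platform_helper.providers.semantic_version import SemanticVersion

SemanticVersion.is_semantic_version("14.1.0")   # truthy match
SemanticVersion.is_semantic_version("v14-1-0")  # also truthy (case-insensitive, '.' or '-' separators)
SemanticVersion.is_semantic_version("latest")   # None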
@@ -0,0 +1,62 @@
+ from slack_sdk import WebClient
+ from slack_sdk.errors import SlackApiError
+ from slack_sdk.models import blocks
+
+ from dbt_platform_helper.platform_exception import PlatformException
+
+
+ class SlackChannelNotifierException(PlatformException):
+     pass
+
+
+ class SlackChannelNotifier:
+     def __init__(self, slack_token: str, slack_channel_id: str):
+         self.client = WebClient(slack_token)
+         self.slack_channel_id = slack_channel_id
+
+     def post_update(self, message_ref, message, context=None):
+         args = {
+             "channel": self.slack_channel_id,
+             "blocks": self._build_message_blocks(message, context),
+             "text": message,
+             "unfurl_links": False,
+             "unfurl_media": False,
+         }
+
+         try:
+             response = self.client.chat_update(ts=message_ref, **args)
+             return response["ts"]
+         except SlackApiError as e:
+             raise SlackChannelNotifierException(f"Slack notification unsuccessful: {e}")
+
+     def post_new(self, message, context=None, title=None, reply_broadcast=None, thread_ref=None):
+         args = {
+             "channel": self.slack_channel_id,
+             "blocks": self._build_message_blocks(message, context),
+             "text": title if title else message,
+             "reply_broadcast": reply_broadcast,
+             "unfurl_links": False,
+             "unfurl_media": False,
+             "thread_ts": thread_ref,
+         }
+
+         try:
+             response = self.client.chat_postMessage(ts=None, **args)
+             return response["ts"]
+         except SlackApiError as e:
+             raise SlackChannelNotifierException(f"Slack notification unsuccessful: {e}")
+
+     def _build_message_blocks(self, message, context):
+         message_blocks = [
+             blocks.SectionBlock(
+                 text=blocks.TextObject(type="mrkdwn", text=message),
+             ),
+         ]
+
+         if context:
+             message_blocks.append(
+                 blocks.ContextBlock(
+                     elements=[blocks.TextObject(type="mrkdwn", text=element) for element in context]
+                 )
+             )
+         return message_blocks
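
For orientation, a sketch of using the new notifier; the token and channel ID below are placeholders. post_new returns the Slack message timestamp, which post_update then uses as the message reference.

from dbt_platform_helper.providers.slack_channel_notifier import SlackChannelNotifier

notifier = SlackChannelNotifier(slack_token="xoxb-not-a-real-token", slack_channel_id="C0123456789")

ts = notifier.post_new("Deployment started", context=["*environment*: dev"], title="Deployment")
notifier.post_update(ts, "Deployment complete", context=["*environment*: dev"])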
@@ -20,7 +20,7 @@ class TerraformManifestProvider:
  def generate_codebase_pipeline_config(
  self,
  platform_config: dict,
- terraform_platform_modules_version: str,
+ platform_helper_version: str,
  ecr_imports: dict[str, str],
  deploy_repository: str,
  ):
@@ -32,9 +32,7 @@ class TerraformManifestProvider:
  self._add_codebase_pipeline_locals(terraform)
  self._add_provider(terraform, default_account)
  self._add_backend(terraform, platform_config, default_account, state_key_suffix)
- self._add_codebase_pipeline_module(
- terraform, terraform_platform_modules_version, deploy_repository
- )
+ self._add_codebase_pipeline_module(terraform, platform_helper_version, deploy_repository)
  self._add_imports(terraform, ecr_imports)
  self._write_terraform_json(terraform, "terraform/codebase-pipelines")

@@ -42,7 +40,7 @@ class TerraformManifestProvider:
  self,
  platform_config: dict,
  env: str,
- terraform_platform_modules_version: str,
+ platform_helper_version: str,
  ):
  platform_config = ConfigProvider.apply_environment_defaults(platform_config)
  account = self._get_account_for_env(env, platform_config)
@@ -55,7 +53,7 @@ class TerraformManifestProvider:
  self._add_header(terraform)
  self._add_environment_locals(terraform, application_name)
  self._add_backend(terraform, platform_config, account, state_key_suffix)
- self._add_extensions_module(terraform, terraform_platform_modules_version, env)
+ self._add_extensions_module(terraform, platform_helper_version, env)
  self._add_moved(terraform, platform_config)
  self._ensure_no_hcl_manifest_file(env_dir)
  self._write_terraform_json(terraform, env_dir)
@@ -117,9 +115,9 @@ class TerraformManifestProvider:

  @staticmethod
  def _add_codebase_pipeline_module(
- terraform: dict, terraform_platform_modules_version: str, deploy_repository: str
+ terraform: dict, platform_helper_version: str, deploy_repository: str
  ):
- source = f"git::https://github.com/uktrade/terraform-platform-modules.git//codebase-pipelines?depth=1&ref={terraform_platform_modules_version}"
+ source = f"git::https://github.com/uktrade/platform-tools.git//terraform/codebase-pipelines?depth=1&ref={platform_helper_version}"
  terraform["module"] = {
  "codebase-pipelines": {
  "source": source,
@@ -139,8 +137,8 @@ class TerraformManifestProvider:
  }

  @staticmethod
- def _add_extensions_module(terraform: dict, terraform_platform_modules_version: str, env: str):
- source = f"git::https://github.com/uktrade/terraform-platform-modules.git//extensions?depth=1&ref={terraform_platform_modules_version}"
+ def _add_extensions_module(terraform: dict, platform_helper_version: str, env: str):
+ source = f"git::https://github.com/uktrade/platform-tools.git//terraform/extensions?depth=1&ref={platform_helper_version}"
  terraform["module"] = {
  "extensions": {"source": source, "args": "${local.args}", "environment": env}
  }
@@ -4,19 +4,15 @@ from abc import ABC
  from abc import abstractmethod
  from importlib.metadata import PackageNotFoundError
  from importlib.metadata import version
- from pathlib import Path
  from typing import Union

  from requests import Session
  from requests.adapters import HTTPAdapter
  from urllib3.util import Retry

- from dbt_platform_helper.constants import PLATFORM_HELPER_VERSION_FILE
  from dbt_platform_helper.platform_exception import PlatformException
  from dbt_platform_helper.providers.io import ClickIOProvider
  from dbt_platform_helper.providers.semantic_version import SemanticVersion
- from dbt_platform_helper.providers.yaml_file import FileProviderException
- from dbt_platform_helper.providers.yaml_file import YamlFileProvider


  def set_up_retry():
@@ -49,6 +45,7 @@ class InstalledVersionProvider:
  def get_semantic_version(tool_name: str) -> SemanticVersion:
  try:
  return SemanticVersion.from_string(version(tool_name))
+
  except PackageNotFoundError:
  raise InstalledToolNotFoundException(tool_name)

@@ -95,20 +92,6 @@ class PyPiLatestVersionProvider(VersionProvider):
  return semantic_version


- class DeprecatedVersionFileVersionProvider(VersionProvider):
- def __init__(self, file_provider: YamlFileProvider):
- self.file_provider = file_provider or YamlFileProvider
-
- def get_semantic_version(self) -> Union[SemanticVersion, None]:
- deprecated_version_file = Path(PLATFORM_HELPER_VERSION_FILE)
- try:
- loaded_version = self.file_provider.load(deprecated_version_file)
- version_from_file = SemanticVersion.from_string(loaded_version)
- except FileProviderException:
- version_from_file = None
- return version_from_file
-
-
  class AWSCLIInstalledVersionProvider(VersionProvider):
  @staticmethod
  def get_semantic_version() -> Union[SemanticVersion, None]: