BuzzerboyAWSLightsail 0.329.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,666 @@
1
+ """
2
+ AWS Lightsail Base Infrastructure Stack
3
+ ========================================
4
+
5
+ This module provides an abstract base class for AWS Lightsail infrastructure deployment stacks
6
+ using CDKTF (Cloud Development Kit for Terraform) with Python.
7
+
8
+ The abstract base class includes:
9
+ * Common IAM resources for service access
10
+ * AWS Secrets Manager for credential storage
11
+ * Shared configuration and initialization patterns
12
+ * Common utility methods and secret management strategies
13
+ * Template methods for infrastructure creation workflow
14
+
15
+ This class should be extended by specific Lightsail implementations such as:
16
+ * LightsailContainerStack - For container services
17
+ * LightsailDatabaseStack - For database instances
18
+
19
+ :author: Generated with GitHub Copilot
20
+ :version: 1.0.0
21
+ :license: MIT
22
+ """
23
+
24
+ #region specific imports
25
+
26
+ import os
27
+ import json
28
+ from abc import ABC, abstractmethod
29
+ from enum import Enum
30
+ from constructs import Construct
31
+ from cdktf import TerraformOutput
32
+
33
+ # Import from the correct base architecture package
34
+ import sys
35
+ sys.path.append('/Repos/AWSArchitectureBase')
36
+ from AWSArchitectureBase.AWSArchitectureBaseStack.AWSArchitectureBase import AWSArchitectureBase
37
+
38
+ #endregion
39
+
40
+ #region AWS Provider and Resources
41
+ from cdktf_cdktf_provider_aws.provider import AwsProvider
42
+ from cdktf_cdktf_provider_aws import (
43
+ iam_user,
44
+ iam_access_key,
45
+ iam_user_policy,
46
+ )
47
+ #endregion
48
+
49
+ #region Random Provider and Resources
50
+ from cdktf_cdktf_provider_random import password
51
+
52
+ # AWS Secrets Manager
53
+ from cdktf_cdktf_provider_aws.secretsmanager_secret import SecretsmanagerSecret
54
+ from cdktf_cdktf_provider_aws.secretsmanager_secret_version import SecretsmanagerSecretVersion
55
+ from cdktf_cdktf_provider_aws.data_aws_secretsmanager_secret_version import DataAwsSecretsmanagerSecretVersion
56
+
57
+ # Null Provider for local-exec provisioner
58
+ from cdktf_cdktf_provider_null.resource import Resource as NullResource
59
+
60
+ #endregion
61
+
62
+ #region Base ArchitectureFlags
63
+ class BaseLightsailArchitectureFlags(Enum):
64
+ """
65
+ Base architecture configuration flags for optional components.
66
+
67
+ These flags are common to all Lightsail implementations and can be
68
+ extended by specific implementations with additional flags.
69
+
70
+ :cvar SKIP_DEFAULT_POST_APPLY_SCRIPTS: Skip the default post-apply scripts
71
+ :cvar PRESERVE_EXISTING_SECRETS: Do not overwrite existing secret versions (smart detection)
72
+ :cvar IGNORE_SECRET_CHANGES: Ignore all changes to the secret after initial creation
73
+ """
74
+
75
+ SKIP_DEFAULT_POST_APPLY_SCRIPTS = "skip_default_post_apply_scripts"
76
+ PRESERVE_EXISTING_SECRETS = "preserve_existing_secrets"
77
+ IGNORE_SECRET_CHANGES = "ignore_secret_changes"
78
+
79
+ #endregion
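Illustrative usage (not part of the released module): the enum members carry plain string values, so callers typically collect the desired ``.value`` strings and pass them as the ``flags`` keyword of a concrete stack; the stack class named below is hypothetical:

    selected_flags = [
        BaseLightsailArchitectureFlags.PRESERVE_EXISTING_SECRETS.value,
        BaseLightsailArchitectureFlags.SKIP_DEFAULT_POST_APPLY_SCRIPTS.value,
    ]
    # ExampleLightsailStack(app, "demo", project_name="demo", flags=selected_flags)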
80
+
81
+
82
+ class LightsailBase(AWSArchitectureBase, ABC):
83
+ """
84
+ Abstract base class for AWS Lightsail Infrastructure Stacks.
85
+
86
+ This abstract class provides common functionality for Lightsail-based
87
+ infrastructure deployments including:
88
+ * IAM resources for service access
89
+ * AWS Secrets Manager for credential storage
90
+ * Common configuration patterns and initialization
91
+ * Shared utility methods and helper functions
92
+ * Template methods for infrastructure creation workflow
93
+
94
+ Subclasses must implement abstract methods to define their specific
95
+ infrastructure components while leveraging the shared functionality.
96
+
97
+ :param scope: The construct scope
98
+ :param id: The construct ID
99
+ :param kwargs: Configuration parameters
100
+
101
+ **Common Configuration Parameters:**
102
+
103
+ :param region: AWS region (default: "us-east-1")
104
+ :param environment: Environment name (default: "dev")
105
+ :param project_name: Project identifier (required)
106
+ :param flags: List of ArchitectureFlags values (``.value`` strings) that modify behavior
107
+ :param profile: AWS profile to use (default: "default")
108
+ :param postApplyScripts: List of shell commands to execute after deployment
109
+ :param secret_name: Custom secret name (default: "{project_name}/{environment}/credentials")
110
+ :param default_signature_version: AWS signature version (default: "s3v4")
111
+ :param default_extra_secret_env: Environment variable for additional secrets (default: "SECRET_STRING")
112
+
113
+ Example:
114
+ >>> class MyLightsailStack(LightsailBase):
115
+ ... def create_lightsail_resources(self):
116
+ ... # Implement specific Lightsail resources
117
+ ... pass
118
+ ...
119
+ ... @staticmethod
120
+ ... def get_architecture_flags(): return MyArchitectureFlags
121
+ """
122
+
123
+ # Class-level resource registry
124
+ resources = {}
125
+
126
+ # Default post-apply scripts executed after deployment
127
+ default_post_apply_scripts = []
128
+
129
+ @staticmethod
130
+ @abstractmethod
131
+ def get_architecture_flags():
132
+ """
133
+ Get the ArchitectureFlags enum for configuration.
134
+
135
+ This method must be implemented by subclasses to return their
136
+ specific ArchitectureFlags enum class.
137
+
138
+ :returns: ArchitectureFlags enum class
139
+ :rtype: type[Enum]
140
+ """
141
+ pass
142
+
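A minimal sketch, assuming a container-oriented subclass that mirrors the base flag values and adds one of its own; all names here are hypothetical and not taken from the released subclasses:

    class ExampleContainerFlags(Enum):
        SKIP_DEFAULT_POST_APPLY_SCRIPTS = "skip_default_post_apply_scripts"
        PRESERVE_EXISTING_SECRETS = "preserve_existing_secrets"
        IGNORE_SECRET_CHANGES = "ignore_secret_changes"
        ENABLE_CUSTOM_DOMAIN = "enable_custom_domain"  # subclass-specific flag

    class ExampleContainerStack(LightsailBase):
        @staticmethod
        def get_architecture_flags():
            return ExampleContainerFlags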
143
+ def __init__(self, scope, id, **kwargs):
144
+ """
145
+ Initialize the AWS Lightsail Base Infrastructure Stack.
146
+
147
+ :param scope: The construct scope
148
+ :param id: Unique identifier for this stack
149
+ :param kwargs: Configuration parameters
150
+ """
151
+ # Initialize configuration before parent class to ensure proper state bucket setup
152
+ self.region = kwargs.get("region", "us-east-1")
153
+ self.environment = kwargs.get("environment", "dev")
154
+ self.project_name = kwargs.get("project_name")
155
+ self.profile = kwargs.get("profile", "default")
156
+
157
+ if not self.project_name:
158
+ raise ValueError("project_name is required and cannot be empty")
159
+
160
+ # Ensure we pass all kwargs to parent class
161
+ super().__init__(scope, id, **kwargs)
162
+
163
+ # ===== Stack Configuration =====
164
+ self.flags = kwargs.get("flags", [])
165
+ self.post_apply_scripts = kwargs.get("postApplyScripts", []) or []
166
+
167
+ # ===== Security Configuration =====
168
+ default_secret_name = f"{self.project_name}/{self.environment}/credentials"
169
+ self.secret_name = kwargs.get("secret_name", default_secret_name)
170
+ self.default_signature_version = kwargs.get("default_signature_version", "s3v4")
171
+ self.default_extra_secret_env = kwargs.get("default_extra_secret_env", "SECRET_STRING")
172
+
173
+ # ===== Storage Configuration =====
174
+ default_bucket_name = self.properize_s3_bucketname(f"{self.region}-{self.project_name}-tfstate")
175
+ self.state_bucket_name = kwargs.get("state_bucket_name", default_bucket_name)
176
+
177
+ # ===== Internal State =====
178
+ self.secrets = {}
179
+ self.post_terraform_messages = []
180
+ self._post_plan_guidance: list[str] = []
181
+
182
+ # ===== Infrastructure Setup =====
183
+ # Base infrastructure is already set up by parent class
184
+ # Initialize our specific components using template method pattern
185
+ self._set_default_post_apply_scripts()
186
+ self._create_infrastructure_components()
187
+
188
+ def _initialize_providers(self):
189
+ """
190
+ Initialize all required Terraform providers.
191
+
192
+ Calls the parent class to initialize base providers and can be
193
+ extended by subclasses to add additional provider configurations.
194
+ """
195
+ # Call parent class to initialize base providers (AWS, Random, Null)
196
+ super()._initialize_providers()
197
+
198
+ def _set_default_post_apply_scripts(self):
199
+ """
200
+ Set default post-apply scripts and merge with user-provided scripts.
201
+
202
+ This method configures the default post-apply scripts that provide
203
+ deployment status information and basic verification. These scripts
204
+ are automatically added to the post_apply_scripts list unless the
205
+ SKIP_DEFAULT_POST_APPLY_SCRIPTS flag is set.
206
+
207
+ Subclasses can override this method to provide their own default scripts
208
+ while optionally calling the parent method.
209
+
210
+ **Default Scripts Include:**
211
+
212
+ * Deployment completion notification
213
+ * Infrastructure summary information
214
+ * Environment and project details
215
+ * Basic system information
216
+
217
+ **Script Merging:**
218
+
219
+ * Default scripts are prepended to user-provided scripts
220
+ * User scripts execute after default scripts
221
+ * Duplicates are not automatically removed
222
+
223
+ .. note::
224
+ Default scripts can be skipped by including the SKIP_DEFAULT_POST_APPLY_SCRIPTS
225
+ flag in the flags parameter during stack initialization.
226
+
227
+ .. warning::
228
+ Default scripts use environment variables and command substitution.
229
+ Ensure the execution environment supports bash-style commands.
230
+ """
231
+ # Define base default post-apply scripts
232
+ self.default_post_apply_scripts = [
233
+ "echo '============================================='",
234
+ "echo '🎉 AWS Lightsail Infrastructure Deployment Complete!'",
235
+ "echo '============================================='",
236
+ f"echo '📦 Project: {self.project_name}'",
237
+ f"echo '🌍 Environment: {self.environment}'",
238
+ f"echo '📍 Region: {self.region}'",
239
+ "echo '⏰ Deployment Time: '$(date)",
240
+ "echo '============================================='",
241
+ "echo '💻 System Information:'",
242
+ "echo ' - OS: '$(uname -s)",
243
+ "echo ' - Architecture: '$(uname -m)",
244
+ "echo ' - User: '$(whoami)",
245
+ "echo ' - Working Directory: '$(pwd)",
246
+ "echo '============================================='",
247
+ "echo '✅ Post-deployment scripts execution started'",
248
+ ]
249
+
250
+ # Skip default scripts if flag is set
251
+ if BaseLightsailArchitectureFlags.SKIP_DEFAULT_POST_APPLY_SCRIPTS.value in self.flags:
252
+ return
253
+
254
+ # Merge default scripts with user-provided scripts
255
+ # Default scripts execute first, then user scripts
256
+ self.post_apply_scripts = self.default_post_apply_scripts + self.post_apply_scripts
257
+
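As a sketch of this extension point (subclass name hypothetical), a subclass can keep the merged base/user scripts and append its own by calling super() first:

    class ExampleDatabaseStack(LightsailBase):
        def _set_default_post_apply_scripts(self):
            super()._set_default_post_apply_scripts()
            # Subclass-specific default runs after the base defaults and user scripts.
            self.post_apply_scripts.append(
                f"echo 'Credentials are stored in secret: {self.secret_name}'"
            )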
258
+ def _create_infrastructure_components(self):
259
+ """
260
+ Template method for creating all infrastructure components in the correct order.
261
+
262
+ This method defines the overall workflow for infrastructure creation
263
+ and calls abstract methods that must be implemented by subclasses.
264
+ The order of operations is:
265
+
266
+ 1. Create IAM resources (concrete implementation provided)
267
+ 2. Create Lightsail-specific resources (abstract - implemented by subclasses)
268
+ 3. Create security resources (concrete implementation provided)
269
+ 4. Execute post-apply scripts (concrete implementation provided)
270
+ 5. Create outputs (abstract - implemented by subclasses)
271
+ """
272
+ # Core infrastructure - provided by base class
273
+ self.create_iam_resources()
274
+
275
+ # Lightsail-specific resources - implemented by subclasses
276
+ self.create_lightsail_resources()
277
+
278
+ # Security and storage - provided by base class
279
+ self.create_security_resources()
280
+
281
+ # Post-apply scripts - provided by base class
282
+ self.execute_post_apply_scripts()
283
+
284
+ # Output generation - implemented by subclasses
285
+ self.create_outputs()
286
+
287
+ # ==================== ABSTRACT METHODS ====================
288
+
289
+ @abstractmethod
290
+ def create_lightsail_resources(self):
291
+ """
292
+ Create Lightsail-specific resources.
293
+
294
+ This method must be implemented by subclasses to create their
295
+ specific Lightsail resources such as:
296
+ * Container services
297
+ * Database instances
298
+ * Storage volumes
299
+ * Networking components
300
+
301
+ The method should populate the self.secrets dictionary with
302
+ any credentials or connection information that should be stored
303
+ in AWS Secrets Manager.
304
+ """
305
+ pass
306
+
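For instance, a subclass might generate a database password with the random provider (imported above as ``password``) and record it in ``self.secrets`` so that ``create_security_resources()`` later writes it to Secrets Manager; a hedged sketch of such an implementation, shown as it would appear inside a subclass:

    def create_lightsail_resources(self):
        # ...create the Lightsail container service or database here...
        db_password = password.Password(self, "db_password", length=32, special=True)
        self.secrets["database_user"] = "app_user"
        self.secrets["database_password"] = db_password.result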
307
+ @abstractmethod
308
+ def create_outputs(self):
309
+ """
310
+ Create Terraform outputs for important resource information.
311
+
312
+ This method must be implemented by subclasses to create
313
+ appropriate Terraform outputs for their specific resources.
314
+
315
+ Common patterns include:
316
+ * Resource endpoints and URLs
317
+ * Connection information
318
+ * Sensitive credentials (marked as sensitive=True)
319
+ * Resource identifiers and names
320
+ """
321
+ pass
322
+
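Inside a concrete subclass, an implementation might look roughly like the following, combining subclass-specific outputs with the shared ``create_iam_outputs()`` helper defined later in this class; the output names are illustrative:

    def create_outputs(self):
        TerraformOutput(
            self,
            "database_password",
            value=self.secrets.get("database_password", ""),
            sensitive=True,
            description="Generated database password (sensitive)",
        )
        self.create_iam_outputs()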
323
+ # ==================== CONCRETE SHARED METHODS ====================
324
+
325
+ def create_iam_resources(self):
326
+ """
327
+ Create IAM resources for service access.
328
+
329
+ Creates:
330
+ * IAM user for programmatic access to AWS services
331
+ * Access key pair for the IAM user
332
+ * IAM policy loaded from external JSON file (if exists)
333
+
334
+ The IAM user follows the naming pattern: {project_name}-service-user
335
+ """
336
+ # Create IAM User and Access Key
337
+ user_name = f"{self.project_name}-service-user"
338
+ self.service_user = iam_user.IamUser(
339
+ self, "service_user", name=user_name
340
+ )
341
+
342
+ # Create IAM Access Key
343
+ self.service_key = iam_access_key.IamAccessKey(
344
+ self, "service_key", user=self.service_user.name
345
+ )
346
+
347
+ # IAM Policy from external file (optional)
348
+ try:
349
+ self.service_policy = self.create_iam_policy_from_file()
350
+ self.resources["iam_policy"] = self.service_policy
351
+ except FileNotFoundError:
352
+ # Policy file doesn't exist, skip policy creation
353
+ pass
354
+
355
+ def create_iam_policy_from_file(self, file_path="iam_policy.json"):
356
+ """
357
+ Create IAM policy from JSON file.
358
+
359
+ :param file_path: Path to IAM policy JSON file relative to this module
360
+ :type file_path: str
361
+ :returns: IAM user policy resource
362
+ :rtype: IamUserPolicy
363
+ :raises FileNotFoundError: If policy file doesn't exist
364
+
365
+ .. note::
366
+ The policy file should be located in the same directory as this module; an illustrative policy document is sketched after this method.
367
+ """
368
+ file_to_open = os.path.join(os.path.dirname(__file__), file_path)
369
+
370
+ with open(file_to_open, "r") as f:
371
+ policy = f.read()
372
+
373
+ return iam_user_policy.IamUserPolicy(
374
+ self,
375
+ f"{self.project_name}-{self.environment}-service-policy",
376
+ name=f"{self.project_name}-{self.environment}-service-policy",
377
+ user=self.service_user.name,
378
+ policy=policy,
379
+ )
380
+
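The file is expected to contain a standard IAM policy document; a minimal illustrative ``iam_policy.json`` (permissions chosen purely as an example) might look like:

    {
      "Version": "2012-10-17",
      "Statement": [
        {
          "Effect": "Allow",
          "Action": ["lightsail:Get*", "secretsmanager:GetSecretValue"],
          "Resource": "*"
        }
      ]
    }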
381
+ def get_extra_secret_env(self, env_var_name=None):
382
+ """
383
+ Load additional secrets from environment variable.
384
+
385
+ Attempts to load and parse a JSON string from the environment variable
386
+ specified in default_extra_secret_env. Any valid JSON key-value pairs
387
+ are added to the secrets dictionary if they don't already exist.
388
+
389
+ :param env_var_name: Environment variable name to load secrets from (defaults to ``default_extra_secret_env``)
390
+ :raises: Nothing; malformed JSON is silently ignored and the method returns normally
391
+ """
392
+ if env_var_name is None:
393
+ env_var_name = self.default_extra_secret_env
394
+
395
+ extra_secret_env = os.environ.get(env_var_name, None)
396
+
397
+ if extra_secret_env:
398
+ try:
399
+ extra_secret_json = json.loads(extra_secret_env)
400
+ for key, value in extra_secret_json.items():
401
+ if key not in self.secrets:
402
+ self.secrets[key] = value
403
+ except json.JSONDecodeError:
404
+ # Silently continue if JSON parsing fails
405
+ pass
406
+
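For example, exporting a JSON object under the variable named by ``default_extra_secret_env`` (``SECRET_STRING`` by default) before the stack is constructed merges those keys into the stored secret without overwriting existing entries; a sketch with made-up values:

    import json
    import os

    os.environ["SECRET_STRING"] = json.dumps({
        "smtp_host": "smtp.example.com",
        "smtp_password": "change-me",
    })
    # Instantiate the stack afterwards; get_extra_secret_env() picks these up.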
407
+ def create_security_resources(self):
408
+ """
409
+ Create AWS Secrets Manager resources for credential storage.
410
+
411
+ Creates:
412
+ * Secrets Manager secret for storing application credentials
413
+ * Secret version with JSON-formatted credential data (conditionally)
414
+
415
+ **Secret Management Strategy:**
416
+
417
+ If PRESERVE_EXISTING_SECRETS flag is set:
418
+ - Checks if secret already exists with content
419
+ - Only creates new version if secret is empty or doesn't exist
420
+ - Preserves manual secret updates and rotations
421
+
422
+ **Stored Credentials:**
423
+
424
+ * IAM access keys for service authentication
425
+ * AWS region and signature version configuration
426
+ * Any additional secrets from environment variables
427
+ * Subclass-specific credentials (added by create_lightsail_resources)
428
+
429
+ .. note::
430
+ All secrets are stored as a single JSON document in Secrets Manager
431
+ for easy retrieval by applications; a retrieval sketch follows this method.
432
+ """
433
+ # Create Secrets Manager secret
434
+ self.secrets_manager_secret = SecretsmanagerSecret(self, self.secret_name, name=self.secret_name)
435
+ self.resources["secretsmanager_secret"] = self.secrets_manager_secret
436
+
437
+ # Populate IAM and AWS configuration secrets
438
+ self.secrets.update({
439
+ "service_user_access_key": self.service_key.id,
440
+ "service_user_secret_key": self.service_key.secret,
441
+ "access_key": self.service_key.id,
442
+ "secret_access_key": self.service_key.secret,
443
+ "region_name": self.region,
444
+ "signature_version": self.default_signature_version
445
+ })
446
+
447
+ # Load additional secrets from environment
448
+ self.get_extra_secret_env()
449
+
450
+ # Conditional secret version creation
451
+ if self.has_flag(BaseLightsailArchitectureFlags.PRESERVE_EXISTING_SECRETS.value):
452
+ self._create_secret_version_conditionally()
453
+ elif self.has_flag(BaseLightsailArchitectureFlags.IGNORE_SECRET_CHANGES.value):
454
+ self._create_secret_version_with_lifecycle_ignore()
455
+ else:
456
+ # Create secret version with all credentials (original behavior)
457
+ SecretsmanagerSecretVersion(
458
+ self,
459
+ self.secret_name + "_version",
460
+ secret_id=self.secrets_manager_secret.id,
461
+ secret_string=(json.dumps(self.secrets, indent=2, sort_keys=True) if self.secrets else None),
462
+ )
463
+
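Because all values end up in one JSON document, a consuming application can read them back with a single call; a sketch using boto3 (an assumption about the consumer, not a dependency of this module), with the default secret name pattern filled in with example values:

    import json
    import boto3

    client = boto3.client("secretsmanager", region_name="us-east-1")
    response = client.get_secret_value(SecretId="my-project/dev/credentials")
    credentials = json.loads(response["SecretString"])
    print(credentials["access_key"], credentials["region_name"])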
464
+ def _create_secret_version_conditionally(self):
465
+ """
466
+ Create secret version only if one doesn't already exist or is empty.
467
+
468
+ This method implements smart secret management:
469
+ 1. Attempts to read existing secret using data source
470
+ 2. Only creates new version if secret is empty or doesn't exist
471
+ 3. Preserves manual updates and rotations made outside Terraform
472
+
473
+ **Use Cases:**
474
+ - Initial deployment when no secret exists
475
+ - Secret exists but has no content (empty)
476
+ - Avoid overwriting manually rotated credentials
477
+ - Preserve additional keys added through AWS console/CLI
478
+ """
479
+ try:
480
+ # Try to read existing secret version to check if it has content
481
+ existing_secret = DataAwsSecretsmanagerSecretVersion(
482
+ self,
483
+ self.secret_name + "_existing_check",
484
+ secret_id=self.secrets_manager_secret.id,
485
+ version_stage="AWSCURRENT"
486
+ )
487
+
488
+ # Create a conditional secret version
489
+ conditional_secret = SecretsmanagerSecretVersion(
490
+ self,
491
+ self.secret_name + "_version_conditional",
492
+ secret_id=self.secrets_manager_secret.id,
493
+ secret_string=json.dumps(self.secrets, indent=2, sort_keys=True) if self.secrets else None,
494
+ lifecycle={
495
+ "ignore_changes": ["secret_string"],
496
+ "create_before_destroy": False
497
+ }
498
+ )
499
+
500
+ # Add dependency to ensure secret exists before checking
501
+ conditional_secret.add_override("count",
502
+ "${length(try(jsondecode(data.aws_secretsmanager_secret_version." +
503
+ self.secret_name.replace("/", "_").replace("-", "_") + "_existing_check.secret_string), {})) == 0 ? 1 : 0}"
504
+ )
505
+
506
+ except Exception:
507
+ # If data source fails (secret doesn't exist), create the version
508
+ SecretsmanagerSecretVersion(
509
+ self,
510
+ self.secret_name + "_version_fallback",
511
+ secret_id=self.secrets_manager_secret.id,
512
+ secret_string=json.dumps(self.secrets, indent=2, sort_keys=True) if self.secrets else None,
513
+ )
514
+
515
+ def _create_secret_version_with_lifecycle_ignore(self):
516
+ """
517
+ Create secret version with lifecycle rule to ignore future changes.
518
+
519
+ This is a simpler approach that:
520
+ 1. Creates the secret version with initial values on first deployment
521
+ 2. Ignores all future changes to the secret_string
522
+ 3. Allows manual updates in AWS console/CLI to persist
523
+
524
+ **Pros:**
525
+ - Simple implementation
526
+ - Reliable behavior
527
+ - Preserves manual changes after initial creation
528
+
529
+ **Cons:**
530
+ - Cannot update secrets through Terraform after initial deployment
531
+ - Requires manual secret management for infrastructure changes
532
+ """
533
+ secret_version = SecretsmanagerSecretVersion(
534
+ self,
535
+ self.secret_name + "_version_ignored",
536
+ secret_id=self.secrets_manager_secret.id,
537
+ secret_string=json.dumps(self.secrets, indent=2, sort_keys=True) if self.secrets else None,
538
+ )
539
+
540
+ # Add lifecycle rule to ignore changes to secret_string
541
+ secret_version.add_override("lifecycle", {
542
+ "ignore_changes": ["secret_string"]
543
+ })
544
+
545
+ def execute_post_apply_scripts(self):
546
+ """
547
+ Execute post-apply scripts using local-exec provisioners.
548
+
549
+ Creates a null resource with local-exec provisioner for each script
550
+ in the post_apply_scripts list. Scripts are executed sequentially
551
+ after all other infrastructure resources are created.
552
+
553
+ **Script Execution:**
554
+
555
+ * Each script runs as a separate null resource
556
+ * Scripts execute in the order they appear in the list
557
+ * Failures in scripts don't prevent deployment completion
558
+ * All scripts depend on core infrastructure being ready
559
+
560
+ **Error Handling:**
561
+
562
+ * Scripts use "on_failure: continue" to prevent deployment failures
563
+ * Failed scripts are logged but don't halt the deployment process
564
+ * Manual intervention may be required if critical scripts fail
565
+
566
+ .. note::
567
+ Post-apply scripts can be provided via the postApplyScripts parameter
568
+ during stack initialization. If no scripts are provided, this method
569
+ returns without creating any resources.
570
+
571
+ .. warning::
572
+ Scripts have access to the local environment where Terraform runs.
573
+ Ensure scripts are safe and don't expose sensitive information.
574
+ """
575
+ if not self.post_apply_scripts:
576
+ return
577
+
578
+ # Create a null resource for each post-apply script
579
+ for i, script in enumerate(self.post_apply_scripts):
580
+ script_resource = NullResource(
581
+ self,
582
+ f"post_apply_script_{i}",
583
+ )
584
+
585
+ # Attach the local-exec provisioner via an escape hatch override;
586
+ # this maps to the Terraform JSON form: provisioner = [{"local-exec": {...}}]
587
+ script_resource.add_override("provisioner", [{"local-exec": {
588
+ "command": script, "on_failure": "continue"
589
+ }}])
590
+
591
+ # Ensure scripts depend on core infrastructure
592
+ if hasattr(self, 'secrets_manager_secret'):
593
+ script_resource.add_override("depends_on", [self.secrets_manager_secret.fqn])
594
+
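Illustrative only (stack class hypothetical): user scripts reach this method through the ``postApplyScripts`` keyword at construction time and run after the default scripts:

    # ExampleLightsailStack(
    #     app,
    #     "demo",
    #     project_name="demo",
    #     postApplyScripts=[
    #         "aws lightsail get-container-services --output table",
    #         "echo 'custom post-apply step finished'",
    #     ],
    # )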
595
+ # ==================== UTILITY METHODS ====================
596
+
597
+ def has_flag(self, flag_value):
598
+ """
599
+ Check if a specific flag is set in the configuration.
600
+
601
+ :param flag_value: The flag value to check for
602
+ :type flag_value: str
603
+ :returns: True if the flag is set, False otherwise
604
+ :rtype: bool
605
+ """
606
+ return flag_value in self.flags
607
+
608
+ def clean_hyphens(self, text):
609
+ """
610
+ Replace hyphens with underscores for database/resource naming.
611
+
612
+ :param text: Text to clean
613
+ :type text: str
614
+ :returns: Text with hyphens replaced by underscores
615
+ :rtype: str
616
+ """
617
+ return text.replace("-", "_")
618
+
619
+ def properize_s3_bucketname(self, bucket_name):
620
+ """
621
+ Ensure S3 bucket name follows AWS naming conventions.
622
+
623
+ :param bucket_name: Proposed bucket name
624
+ :type bucket_name: str
625
+ :returns: Properly formatted bucket name
626
+ :rtype: str
627
+ """
628
+ # Convert to lowercase and replace invalid characters
629
+ clean_name = bucket_name.lower().replace("_", "-")
630
+ # Ensure it starts and ends with alphanumeric characters
631
+ clean_name = clean_name.strip("-.")
632
+ return clean_name
633
+
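A quick illustration with a made-up project name:

    # properize_s3_bucketname("us-east-1-My_Project-tfstate")
    # -> "us-east-1-my-project-tfstate"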
634
+ # ==================== SHARED OUTPUT HELPERS ====================
635
+
636
+ def create_iam_outputs(self):
637
+ """
638
+ Create standard IAM-related Terraform outputs.
639
+
640
+ This helper method can be called by subclasses to create
641
+ consistent IAM outputs across all Lightsail implementations.
642
+ """
643
+ # IAM credentials (sensitive)
644
+ TerraformOutput(
645
+ self,
646
+ "iam_user_access_key",
647
+ value=self.service_key.id,
648
+ sensitive=True,
649
+ description="IAM user access key ID (sensitive)",
650
+ )
651
+
652
+ TerraformOutput(
653
+ self,
654
+ "iam_user_secret_key",
655
+ value=self.service_key.secret,
656
+ sensitive=True,
657
+ description="IAM user secret access key (sensitive)",
658
+ )
659
+
660
+ # Secret name for reference
661
+ TerraformOutput(
662
+ self,
663
+ "secrets_manager_secret_name",
664
+ value=self.secret_name,
665
+ description="AWS Secrets Manager secret name containing all credentials",
666
+ )