runbooks 0.1.7__py3-none-any.whl → 0.1.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. runbooks/__init__.py +1 -1
  2. runbooks/aws/__init__.py +58 -0
  3. runbooks/aws/dynamodb_operations.py +231 -0
  4. runbooks/aws/ec2_copy_image_cross-region.py +195 -0
  5. runbooks/aws/ec2_describe_instances.py +202 -0
  6. runbooks/aws/ec2_ebs_snapshots_delete.py +186 -0
  7. runbooks/aws/ec2_run_instances.py +207 -0
  8. runbooks/aws/ec2_start_stop_instances.py +199 -0
  9. runbooks/aws/ec2_terminate_instances.py +143 -0
  10. runbooks/aws/ec2_unused_eips.py +196 -0
  11. runbooks/aws/ec2_unused_volumes.py +184 -0
  12. runbooks/aws/s3_create_bucket.py +140 -0
  13. runbooks/aws/s3_list_buckets.py +152 -0
  14. runbooks/aws/s3_list_objects.py +151 -0
  15. runbooks/aws/s3_object_operations.py +183 -0
  16. runbooks/aws/tagging_lambda_handler.py +172 -0
  17. runbooks/python101/calculator.py +34 -0
  18. runbooks/python101/config.py +1 -0
  19. runbooks/python101/exceptions.py +16 -0
  20. runbooks/python101/file_manager.py +218 -0
  21. runbooks/python101/toolkit.py +153 -0
  22. runbooks/security_baseline/__init__.py +0 -0
  23. runbooks/security_baseline/checklist/__init__.py +17 -0
  24. runbooks/security_baseline/checklist/account_level_bucket_public_access.py +86 -0
  25. runbooks/security_baseline/checklist/alternate_contacts.py +65 -0
  26. runbooks/security_baseline/checklist/bucket_public_access.py +82 -0
  27. runbooks/security_baseline/checklist/cloudwatch_alarm_configuration.py +66 -0
  28. runbooks/security_baseline/checklist/direct_attached_policy.py +69 -0
  29. runbooks/security_baseline/checklist/guardduty_enabled.py +71 -0
  30. runbooks/security_baseline/checklist/iam_password_policy.py +43 -0
  31. runbooks/security_baseline/checklist/iam_user_mfa.py +39 -0
  32. runbooks/security_baseline/checklist/multi_region_instance_usage.py +55 -0
  33. runbooks/security_baseline/checklist/multi_region_trail.py +64 -0
  34. runbooks/security_baseline/checklist/root_access_key.py +72 -0
  35. runbooks/security_baseline/checklist/root_mfa.py +39 -0
  36. runbooks/security_baseline/checklist/root_usage.py +128 -0
  37. runbooks/security_baseline/checklist/trail_enabled.py +68 -0
  38. runbooks/security_baseline/checklist/trusted_advisor.py +24 -0
  39. runbooks/security_baseline/report_generator.py +149 -0
  40. runbooks/security_baseline/run_script.py +76 -0
  41. runbooks/security_baseline/security_baseline_tester.py +179 -0
  42. runbooks/security_baseline/utils/__init__.py +1 -0
  43. runbooks/security_baseline/utils/common.py +109 -0
  44. runbooks/security_baseline/utils/enums.py +44 -0
  45. runbooks/security_baseline/utils/language.py +762 -0
  46. runbooks/security_baseline/utils/level_const.py +5 -0
  47. runbooks/security_baseline/utils/permission_list.py +26 -0
  48. runbooks/utils/__init__.py +0 -0
  49. runbooks/utils/logger.py +36 -0
  50. {runbooks-0.1.7.dist-info → runbooks-0.1.8.dist-info}/METADATA +2 -2
  51. runbooks-0.1.8.dist-info/RECORD +54 -0
  52. runbooks-0.1.7.dist-info/RECORD +0 -6
  53. {runbooks-0.1.7.dist-info → runbooks-0.1.8.dist-info}/WHEEL +0 -0
  54. {runbooks-0.1.7.dist-info → runbooks-0.1.8.dist-info}/entry_points.txt +0 -0
  55. {runbooks-0.1.7.dist-info → runbooks-0.1.8.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,152 @@
1
+ #!/usr/bin/env python3
2
+
3
+ """
4
+ AWS S3 Bucket Listing Utility with logging and error handling.
5
+
6
+ Author: nnthanh101@gmail.com
7
+ Date: 2025-01-05
8
+ Version: 1.0.0
9
+
10
+ Description:
11
+ This script lists all S3 buckets in the AWS account using the Boto3 library.
12
+ It implements robust error handling, logging, and modularization for high standards.
13
+
14
+ Usage:
15
+ python list_s3_buckets.py
16
+ """
17
+
18
import json
import os
from typing import Dict, List, Optional

import boto3
from botocore.exceptions import ClientError, NoCredentialsError, PartialCredentialsError
from tabulate import tabulate

from runbooks.utils.logger import configure_logger
27
+
28
+ ## Initialize Logger
29
+ logger = configure_logger("list_s3_buckets")
30
+
31
+
32
def get_s3_client(region: Optional[str] = None):
    """
    Initialize and return a Boto3 S3 client, optionally pinned to a region.

    Args:
        region (Optional[str]): AWS region name. Defaults to None, in which
            case the standard resolution chain (environment, AWS config)
            decides the region.

    Returns:
        An S3 client created from a boto3 Session.

    Raises:
        NoCredentialsError: If AWS credentials are missing.
        PartialCredentialsError: If AWS credentials are incomplete.
    """
    try:
        ## Allow an explicit region override if specified; otherwise fall
        ## back to the default session resolution.
        session = boto3.Session(region_name=region) if region else boto3.Session()
        client = session.client("s3")
        logger.info("S3 client initialized successfully.")
        return client
    except (NoCredentialsError, PartialCredentialsError) as e:
        ## Credentials problems are logged and re-raised so the caller
        ## decides how to terminate; configure via AWS CLI or env vars.
        logger.error(f"AWS Credentials Error: {str(e)}")
        raise
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}")
        raise
59
+
60
+
61
def list_s3_buckets(client) -> List[Dict[str, str]]:
    """
    List all S3 buckets in the AWS account.

    Args:
        client: Pre-configured S3 client (see get_s3_client).

    Returns:
        List[Dict[str, str]]: One entry per bucket with its name, formatted
        creation date, and owner display name.

    Raises:
        ClientError: For API errors returned by AWS.
    """
    try:
        ## Call AWS API to list buckets.
        response = client.list_buckets()

        ## ROBUSTNESS FIX: Owner is account-wide, not per-bucket, so it is
        ## hoisted out of the loop — and fetched defensively instead of
        ## response["Owner"], which raised KeyError when absent.
        owner = response.get("Owner", {})
        owner_display = owner.get("DisplayName", "N/A")

        bucket_list = [
            {
                "Name": bucket["Name"],
                "CreationDate": bucket["CreationDate"].strftime("%Y-%m-%d %H:%M:%S"),
                "Owner": {"DisplayName": owner_display},
            }
            for bucket in response.get("Buckets", [])
        ]

        ## Log the number of buckets found.
        if not bucket_list:
            logger.warning("No buckets found.")
        else:
            logger.info(f"Found {len(bucket_list)} S3 bucket(s).")

        return bucket_list
    except ClientError as e:
        logger.error(f"Failed to list buckets: {e.response['Error']['Message']}")
        raise
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}")
        raise
105
+
106
+
107
def display_buckets(buckets: List[Dict[str, str]]) -> None:
    """
    Render bucket details as a GitHub-flavored Markdown table.

    Args:
        buckets (List[Dict[str, str]]): Bucket details from list_s3_buckets().
    """
    ## BUG FIX: the header row previously declared a fourth column
    ## ("Owner ID") while each data row only carried three values; the
    ## headers now match the row shape exactly.
    headers = ["Name", "Creation Date", "Owner Display Name"]
    rows = [
        [bucket["Name"], bucket["CreationDate"], bucket["Owner"]["DisplayName"]]
        for bucket in buckets
    ]

    ## Render Markdown Table.
    print("### AWS S3 Buckets\n")
    table = tabulate(rows, headers=headers, tablefmt="github", missingval="N/A")
    print(table)
129
+
130
+
131
+ ## ==============================
132
+ ## MAIN FUNCTION
133
+ ## ==============================
134
def main() -> None:
    """
    Entry point: resolve the region, build a client, then list and render
    all S3 buckets. Failures are logged rather than propagated.
    """
    try:
        ## Region comes from the environment, with a conventional fallback.
        aws_region = os.getenv("AWS_REGION", "us-east-1")
        s3 = get_s3_client(aws_region)
        display_buckets(list_s3_buckets(s3))
    except Exception as e:
        logger.error(f"Program terminated with error: {str(e)}")


if __name__ == "__main__":
    main()
@@ -0,0 +1,151 @@
1
+ #!/usr/bin/env python3
2
+
3
+ """
4
+ List all objects inside a specified S3 bucket with logging and error handling.
5
+
6
+ Author: nnthanh101@gmail.com
7
+ Date: 2025-01-06
8
+ Version: 1.0.0
9
+
10
+ Usage:
11
+ python list_s3_objects.py <bucket_name>
12
+ """
13
+
14
+ import argparse
15
+ import sys
16
+ from typing import Dict, List, Optional
17
+
18
+ import boto3
19
+ from botocore.exceptions import BotoCoreError, ClientError, NoCredentialsError
20
+ from tabulate import tabulate
21
+
22
+ from runbooks.utils.logger import configure_logger
23
+
24
+ ## ✅ Configure Logger
25
+ logger = configure_logger(__name__)
26
+
27
+
28
+ ## ==============================
29
+ ## AWS S3 UTILITIES
30
+ ## ==============================
31
def s3_list_objects(
    bucket_name: str,
    prefix: Optional[str] = None,
    max_keys: int = 1000,
) -> List[Dict[str, str]]:
    """
    List objects in the specified S3 bucket via the list_objects_v2 paginator.

    Args:
        bucket_name (str): The name of the S3 bucket.
        prefix (Optional[str]): Filter objects by key prefix (default: None).
        max_keys (int): Maximum number of keys per request page (default: 1000).

    Returns:
        List[Dict[str, str]]: Object details: key, size in KB, last-modified date.

    Raises:
        NoCredentialsError: If AWS credentials are missing.
        ClientError: If there is an issue accessing the bucket.
    """
    try:
        logger.info(f"Initializing S3 client for bucket: {bucket_name}")
        client = boto3.client("s3")

        ## Build the request parameters; Prefix is only sent when given.
        params = {"Bucket": bucket_name, "MaxKeys": max_keys}
        if prefix:
            params["Prefix"] = prefix

        ## list_objects_v2 caps each response at MaxKeys entries, so a
        ## paginator walks every page of results.
        collected: List[Dict[str, str]] = []
        for page in client.get_paginator("list_objects_v2").paginate(**params):
            for entry in page.get("Contents", []):
                collected.append(
                    {
                        "Key": entry["Key"],
                        "Size (KB)": f"{entry['Size'] / 1024:.2f}",  # bytes -> KB
                        "LastModified": entry["LastModified"].strftime("%Y-%m-%d %H:%M:%S"),
                    }
                )

        logger.info(f"Found {len(collected)} object(s) in bucket '{bucket_name}'.")
        return collected

    except NoCredentialsError:
        logger.error("AWS credentials not found. Ensure ~/.aws/credentials is configured.")
        raise

    except ClientError as e:
        logger.error(f"AWS Client Error: {e}")
        raise

    except BotoCoreError as e:
        logger.error(f"BotoCore Error: {e}")
        raise

    except Exception as e:
        logger.error(f"Unexpected error: {e}")
        raise
95
+
96
+
97
+ ## ==============================
98
+ ## DISPLAY UTILITIES
99
+ ## ==============================
100
def display_objects(objects: List[Dict[str, str]], bucket_name: str) -> None:
    """
    Print S3 object details as a GitHub-flavored Markdown table.

    Args:
        objects (List[Dict[str, str]]): Object details from s3_list_objects().
        bucket_name (str): Name of the S3 bucket (used in the heading).
    """
    ## Guard clause: nothing to tabulate.
    if not objects:
        print(f"No objects found in bucket: {bucket_name}")
        return

    headers = ["Key", "Size (KB)", "Last Modified"]
    table_rows = [
        [entry["Key"], entry["Size (KB)"], entry["LastModified"]]
        for entry in objects
    ]

    print(f"### S3 Objects in Bucket: `{bucket_name}`\n")
    print(tabulate(table_rows, headers=headers, tablefmt="github"))
119
+
120
+
121
+ ## ==============================
122
+ ## MAIN FUNCTION
123
+ ## ==============================
124
def main():
    """
    CLI entry point: parse arguments, fetch the object listing, render it.
    Exits with status 1 on any failure.
    """
    parser = argparse.ArgumentParser(description="List objects in an AWS S3 bucket.")
    parser.add_argument("--bucket", required=True, help="The name of the S3 bucket.")
    parser.add_argument("--prefix", default=None, help="Filter objects by prefix.")
    parser.add_argument("--max-keys", type=int, default=1000, help="Max number of keys to fetch (default: 1000).")
    args = parser.parse_args()

    try:
        listing = s3_list_objects(
            bucket_name=args.bucket,
            prefix=args.prefix,
            max_keys=args.max_keys,
        )
        display_objects(listing, args.bucket)

    except Exception as e:
        logger.error(f"Program terminated with error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -0,0 +1,183 @@
1
+ #!/usr/bin/env python3
2
+
3
+ """
4
+ S3 Object Operations: Upload and Delete Objects in Amazon S3.
5
+
6
+ This script provides functionality to:
7
+ 1. Upload a file to an S3 bucket.
8
+ 2. Delete a file from an S3 bucket.
9
+
10
+ Designed for usage in Python (pip), Docker, and AWS Lambda environments.
11
+
12
+ Author: nnthanh101@gmail.com
13
+ Date: 2025-01-08
14
+ Version: 1.0.0
15
+ """
16
+
17
+ import logging
18
+ import os
19
+ from typing import Optional
20
+
21
+ import boto3
22
+ from botocore.exceptions import BotoCoreError, ClientError
23
+
24
# ==============================
# CONFIGURATION VARIABLES
# ==============================
# All settings are environment-driven so the same script works from pip,
# Docker, and Lambda; the hard-coded fallbacks are placeholders.
AWS_REGION = os.getenv("AWS_REGION", "us-east-1")
S3_BUCKET = os.getenv("S3_BUCKET", "my-default-bucket")
S3_KEY = os.getenv("S3_KEY", "default-key.txt")
LOCAL_FILE_PATH = os.getenv("LOCAL_FILE_PATH", "default.txt")
ACL = os.getenv("ACL", "private")  ## Options: 'private', 'public-read', etc.

# ==============================
# LOGGING CONFIGURATION
# ==============================
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO)
logger = logging.getLogger(__name__)

# ==============================
# AWS CLIENT INITIALIZATION
# ==============================
# NOTE(review): the client is created at import time, so importing this
# module without a working AWS configuration raises immediately.
try:
    s3_client = boto3.client("s3", region_name=AWS_REGION)
    logger.info("✅ S3 client initialized successfully.")
except Exception as e:
    logger.error(f"❌ Failed to initialize S3 client: {e}")
    raise
48
+
49
+
50
+ # ==============================
51
+ # UPLOAD FUNCTION
52
+ # ==============================
53
def put_object(bucket: str, key: str, file_path: str, acl: str = "private") -> None:
    """
    Uploads a file to an S3 bucket.

    The file is opened in binary mode and sent via the module-level
    `s3_client` using put_object.

    Args:
        bucket (str): The name of the S3 bucket.
        key (str): The object key in S3.
        file_path (str): Local file path to be uploaded.
        acl (str): Canned ACL applied to the object (default: 'private').

    Raises:
        FileNotFoundError: If `file_path` does not exist locally.
        ClientError: For AWS API errors.
        BotoCoreError: For lower-level botocore failures.
        Exception: Any other upload failure.
    """
    try:
        # ✅ Validate the local file before any network call.
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"File '{file_path}' not found.")

        logger.info(f"🚀 Uploading '{file_path}' to bucket '{bucket}' with key '{key}'...")
        with open(file_path, "rb") as file_reader:
            s3_client.put_object(ACL=acl, Body=file_reader, Bucket=bucket, Key=key)
        logger.info(f"✅ File '{file_path}' uploaded successfully to '{bucket}/{key}'.")

    except FileNotFoundError as e:
        logger.error(f"❌ File Not Found: {e}")
        raise

    except ClientError as e:
        logger.error(f"❌ AWS Client Error: {e}")
        raise

    except BotoCoreError as e:
        logger.error(f"❌ BotoCore Error: {e}")
        raise

    except Exception as e:
        logger.error(f"❌ Unexpected Error: {e}")
        raise
91
+
92
+
93
+ # ==============================
94
+ # DELETE FUNCTION
95
+ # ==============================
96
def delete_object(bucket: str, key: str) -> None:
    """
    Deletes an object from an S3 bucket.

    Uses the module-level `s3_client`; success is logged, and any failure
    is logged and re-raised to the caller.

    Args:
        bucket (str): The name of the S3 bucket.
        key (str): The object key to delete.

    Raises:
        ClientError: For AWS API errors.
        BotoCoreError: For lower-level botocore failures.
        Exception: Any other deletion failure.
    """
    try:
        logger.info(f"🗑️ Deleting object '{key}' from bucket '{bucket}'...")
        s3_client.delete_object(Bucket=bucket, Key=key)
        logger.info(f"✅ Object '{key}' deleted successfully from '{bucket}'.")

    except ClientError as e:
        logger.error(f"❌ AWS Client Error: {e}")
        raise

    except BotoCoreError as e:
        logger.error(f"❌ BotoCore Error: {e}")
        raise

    except Exception as e:
        logger.error(f"❌ Unexpected Error: {e}")
        raise
123
+
124
+
125
+ # ==============================
126
+ # MAIN FUNCTION (CLI/DOCKER)
127
+ # ==============================
128
def main():
    """
    CLI/Docker entry point: upload the configured file to S3.

    The delete step is intentionally left disabled; uncomment to enable it.
    """
    try:
        put_object(S3_BUCKET, S3_KEY, LOCAL_FILE_PATH, ACL)
        # delete_object(S3_BUCKET, S3_KEY)

    except Exception as e:
        logger.error(f"❌ Error in main: {e}")
        raise
142
+
143
+
144
+ # ==============================
145
+ # AWS LAMBDA HANDLER
146
+ # ==============================
147
def lambda_handler(event, context):
    """
    AWS Lambda handler for S3 object operations.

    Args:
        event (dict): Payload with 'action' ('upload' | 'delete') plus
            optional 'bucket', 'key', 'file_path', and 'acl' overrides;
            unset fields fall back to the module-level defaults.
        context: AWS Lambda context object (unused).

    Returns:
        dict: Status code and message; 500 with the error text on failure.
    """
    try:
        action = event.get("action")  # 'upload' or 'delete'
        bucket = event.get("bucket", S3_BUCKET)
        key = event.get("key", S3_KEY)
        file_path = event.get("file_path", LOCAL_FILE_PATH)
        acl = event.get("acl", ACL)

        ## Dispatch on the requested action; anything else is rejected.
        if action == "upload":
            put_object(bucket, key, file_path, acl)
            return {"statusCode": 200, "body": f"File '{key}' uploaded to '{bucket}'."}
        if action == "delete":
            delete_object(bucket, key)
            return {"statusCode": 200, "body": f"File '{key}' deleted from '{bucket}'."}
        raise ValueError("Invalid action. Supported actions: 'upload', 'delete'.")

    except Exception as e:
        logger.error(f"❌ Lambda Error: {e}")
        return {"statusCode": 500, "body": str(e)}
177
+
178
+
179
+ # ==============================
180
+ # SCRIPT ENTRY POINT
181
+ # ==============================
182
# Execute the CLI entry point only when run as a script
# (not when imported, e.g. as a Lambda module).
if __name__ == "__main__":
    main()
@@ -0,0 +1,172 @@
1
+ #!/usr/bin/env python3
2
+
3
+ """
4
+ AWS Lambda Function for Auto-Tagging EC2 Instances Based on S3 Configuration.
5
+
6
+ Author: nnthanh101@gmail.com
7
+ Date: 2025-01-07
8
+ Version: 1.0.0
9
+
10
+ Description:
11
+ - Fetches tagging configuration from an S3 bucket.
12
+ - Applies tags dynamically to an EC2 instance when triggered by AWS CloudTrail events.
13
+
14
+ Requirements:
15
+ - IAM Role Permissions:
16
+ * s3:GetObject
17
+ * ec2:CreateTags
18
+ * sts:GetCallerIdentity
19
+ - Environment Variables:
20
+ * S3_BUCKET: Name of the S3 bucket storing tags.json
21
+ * S3_OBJECT_KEY: Key for the tags.json file
22
+ """
23
+
24
+ import json
25
+ import os
26
+ import re
27
+ from typing import Dict, List
28
+
29
+ import boto3
30
+ from botocore.exceptions import BotoCoreError, ClientError
31
+
32
+ from runbooks.utils.logger import configure_logger
33
+
34
## ✅ Configure Logger
logger = configure_logger(__name__)

## ✅ Initialize AWS Clients (module-level, created once at import time)
s3 = boto3.client("s3")
ec2 = boto3.client("ec2")

# ==============================
# CONFIGURATIONS: default S3 bucket and object key
# ==============================
BUCKET_NAME = os.getenv("S3_BUCKET", "os-auto-tagging")  ## Default S3 bucket name
OBJECT_KEY = os.getenv("S3_OBJECT_KEY", "tags.json")  ## Default S3 object key
LOCAL_FILE_PATH = "/tmp/tags.json"  ## Local temp file path for the downloaded tags.json

# ==============================
# VALIDATION CONFIGURATIONS
# ==============================
# Tag keys that every tags.json payload must contain.
REQUIRED_TAGS = ["Account Name", "Functional Area", "WBS Code", "Business Unit", "Managed by", "CostGroup", "TechOwner"]

# Allowed characters for tag values (letters, digits, whitespace, -, _, @).
TAG_VALUE_REGEX = r"^[a-zA-Z0-9\s\-_@]+$"
54
+
55
+
56
def validate_tags(tags: List[Dict[str, str]]) -> None:
    """
    Validate that every required tag key is present and each value is legal.

    Args:
        tags (List[Dict[str, str]]): Tag dicts with 'Key' and 'Value' entries.

    Raises:
        ValueError: If a required tag is missing, or a value contains
            characters outside TAG_VALUE_REGEX.
    """
    ## Collect the keys actually present, then diff against the required set.
    present_keys = {tag["Key"] for tag in tags}
    missing_tags = [required for required in REQUIRED_TAGS if required not in present_keys]

    if missing_tags:
        raise ValueError(f"Missing required tags: {', '.join(missing_tags)}")

    ## Every value must match the allow-list pattern.
    for tag in tags:
        if not re.match(TAG_VALUE_REGEX, tag["Value"]):
            raise ValueError(f"Invalid value '{tag['Value']}' for tag '{tag['Key']}'.")

    logger.info("All required tags are validated and meet constraints.")
78
+
79
+
80
+ # ==============================
81
+ # S3 UTILITIES
82
+ # ==============================
83
def download_tags_from_s3(bucket: str, key: str, local_path: str) -> List[Dict[str, str]]:
    """
    Fetch the tagging configuration file from S3, then parse and validate it.

    Args:
        bucket (str): The S3 bucket name.
        key (str): The object key in the bucket.
        local_path (str): Local path where the downloaded file is stored.

    Returns:
        List[Dict[str, str]]: The validated list of tags.
    """
    try:
        logger.info(f"Downloading '{key}' from bucket '{bucket}'...")
        s3.download_file(bucket, key, local_path)
        logger.info(f"File downloaded successfully to {local_path}.")

        ## tags.json is expected to hold a JSON array of tag dictionaries.
        with open(local_path, "r") as file:
            tags = json.load(file)

        validate_tags(tags)  ## Raises ValueError on missing/invalid tags.
        return tags

    except FileNotFoundError:
        logger.error("Local file not found after download.")
        raise

    except ClientError as e:
        logger.error(f"S3 Client Error: {e}")
        raise

    except Exception as e:
        logger.error(f"Unexpected error while downloading tags: {e}")
        raise
120
+
121
+
122
+ # ==============================
123
+ # EC2 UTILITIES
124
+ # ==============================
125
def apply_tags_to_instance(instance_id: str, tags: List[Dict[str, str]]) -> None:
    """
    Applies tags to the specified EC2 instance.

    Args:
        instance_id (str): The ID of the EC2 instance.
        tags (List[Dict[str, str]]): Tags to apply, as
            {"Key": ..., "Value": ...} dicts.

    Raises:
        ClientError: If the EC2 CreateTags API call fails. Any other
            exception propagates unhandled.
    """
    try:
        logger.info(f"Applying tags to EC2 instance: {instance_id}")
        ec2.create_tags(Resources=[instance_id], Tags=tags)
        logger.info(f"Tags successfully applied to instance {instance_id}: {tags}")

    except ClientError as e:
        # Only AWS API errors are handled here; log and re-raise.
        logger.error(f"EC2 Client Error: {e}")
        raise
144
+
145
+
146
+ # ==============================
147
+ # MAIN HANDLER
148
+ # ==============================
149
def lambda_handler(event, context):
    """
    AWS Lambda handler: tag EC2 instance(s) from a CloudTrail event.

    Args:
        event (dict): CloudTrail event; instance IDs are read from
            detail.responseElements.instancesSet.items.
        context: AWS Lambda context (unused).

    Returns:
        dict: 200 with a success message, or 500 with the error text.
    """
    try:
        ## BUG FIX: a single RunInstances call may launch several instances;
        ## previously only items[0] was tagged. Tag every instance instead.
        items = event["detail"]["responseElements"]["instancesSet"]["items"]
        instance_ids = [item["instanceId"] for item in items]
        if not instance_ids:
            raise ValueError("No instance IDs found in event payload.")
        logger.info(f"Processing instance IDs: {instance_ids}")

        ## Download and parse the tag configuration once for all instances.
        tags = download_tags_from_s3(BUCKET_NAME, OBJECT_KEY, LOCAL_FILE_PATH)

        for instance_id in instance_ids:
            apply_tags_to_instance(instance_id, tags)

        ## For a single instance this message is identical to the old one.
        return {"statusCode": 200, "body": json.dumps(f"Tags successfully applied to instance {', '.join(instance_ids)}")}
    except Exception as e:
        logger.error(f"Error during tagging process: {e}")
        return {"statusCode": 500, "body": json.dumps(f"Error: {str(e)}")}
@@ -0,0 +1,34 @@
1
+ from typing import Union
2
+
3
+ from runbooks.python101.config import DEFAULT_CONFIG
4
+ from runbooks.python101.exceptions import CalculatorError
5
+
6
+
7
class Calculator:
    """Arithmetic calculator whose results are rounded to a configurable precision."""

    def __init__(self, precision: int = DEFAULT_CONFIG["precision"]):
        """Create a calculator that rounds every result to *precision* digits."""
        self.precision = precision

    def _format_result(self, result: Union[int, float]) -> Union[int, float]:
        """Round *result* to the configured number of digits."""
        rounded = round(result, self.precision)
        return rounded

    def add(self, x: Union[int, float], y: Union[int, float]) -> Union[int, float]:
        """Return the rounded sum of x and y."""
        return self._format_result(x + y)

    def subtract(self, x: Union[int, float], y: Union[int, float]) -> Union[int, float]:
        """Return the rounded difference x - y."""
        return self._format_result(x - y)

    def multiply(self, x: Union[int, float], y: Union[int, float]) -> Union[int, float]:
        """Return the rounded product of x and y."""
        return self._format_result(x * y)

    def divide(self, x: Union[int, float], y: Union[int, float]) -> Union[int, float]:
        """Return the rounded quotient x / y.

        Raises:
            CalculatorError: If y is zero.
        """
        if y == 0:
            raise CalculatorError("Division by zero is not allowed.")
        return self._format_result(x / y)
@@ -0,0 +1 @@
1
## Default settings for the python101 Calculator and related helpers.
DEFAULT_CONFIG = {
    "precision": 2,  # decimal places used when rounding Calculator results
    "allow_negative": True,  # presumably gates negative operands — confirm at call sites
    "debug": False,  # presumably a verbose/debug flag — confirm at call sites
    "log_file": "calculator.log",  # presumably the log destination — confirm at call sites
}