runbooks 0.7.0__py3-none-any.whl → 0.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132) hide show
  1. runbooks/__init__.py +87 -37
  2. runbooks/cfat/README.md +300 -49
  3. runbooks/cfat/__init__.py +2 -2
  4. runbooks/finops/__init__.py +1 -1
  5. runbooks/finops/cli.py +1 -1
  6. runbooks/inventory/collectors/__init__.py +8 -0
  7. runbooks/inventory/collectors/aws_management.py +791 -0
  8. runbooks/inventory/collectors/aws_networking.py +3 -3
  9. runbooks/main.py +3389 -782
  10. runbooks/operate/__init__.py +207 -0
  11. runbooks/operate/base.py +311 -0
  12. runbooks/operate/cloudformation_operations.py +619 -0
  13. runbooks/operate/cloudwatch_operations.py +496 -0
  14. runbooks/operate/dynamodb_operations.py +812 -0
  15. runbooks/operate/ec2_operations.py +926 -0
  16. runbooks/operate/iam_operations.py +569 -0
  17. runbooks/operate/s3_operations.py +1211 -0
  18. runbooks/operate/tagging_operations.py +655 -0
  19. runbooks/remediation/CLAUDE.md +100 -0
  20. runbooks/remediation/DOME9.md +218 -0
  21. runbooks/remediation/README.md +26 -0
  22. runbooks/remediation/Tests/__init__.py +0 -0
  23. runbooks/remediation/Tests/update_policy.py +74 -0
  24. runbooks/remediation/__init__.py +95 -0
  25. runbooks/remediation/acm_cert_expired_unused.py +98 -0
  26. runbooks/remediation/acm_remediation.py +875 -0
  27. runbooks/remediation/api_gateway_list.py +167 -0
  28. runbooks/remediation/base.py +643 -0
  29. runbooks/remediation/cloudtrail_remediation.py +908 -0
  30. runbooks/remediation/cloudtrail_s3_modifications.py +296 -0
  31. runbooks/remediation/cognito_active_users.py +78 -0
  32. runbooks/remediation/cognito_remediation.py +856 -0
  33. runbooks/remediation/cognito_user_password_reset.py +163 -0
  34. runbooks/remediation/commons.py +455 -0
  35. runbooks/remediation/dynamodb_optimize.py +155 -0
  36. runbooks/remediation/dynamodb_remediation.py +744 -0
  37. runbooks/remediation/dynamodb_server_side_encryption.py +108 -0
  38. runbooks/remediation/ec2_public_ips.py +134 -0
  39. runbooks/remediation/ec2_remediation.py +892 -0
  40. runbooks/remediation/ec2_subnet_disable_auto_ip_assignment.py +72 -0
  41. runbooks/remediation/ec2_unattached_ebs_volumes.py +448 -0
  42. runbooks/remediation/ec2_unused_security_groups.py +202 -0
  43. runbooks/remediation/kms_enable_key_rotation.py +651 -0
  44. runbooks/remediation/kms_remediation.py +717 -0
  45. runbooks/remediation/lambda_list.py +243 -0
  46. runbooks/remediation/lambda_remediation.py +971 -0
  47. runbooks/remediation/multi_account.py +569 -0
  48. runbooks/remediation/rds_instance_list.py +199 -0
  49. runbooks/remediation/rds_remediation.py +873 -0
  50. runbooks/remediation/rds_snapshot_list.py +192 -0
  51. runbooks/remediation/requirements.txt +118 -0
  52. runbooks/remediation/s3_block_public_access.py +159 -0
  53. runbooks/remediation/s3_bucket_public_access.py +143 -0
  54. runbooks/remediation/s3_disable_static_website_hosting.py +74 -0
  55. runbooks/remediation/s3_downloader.py +215 -0
  56. runbooks/remediation/s3_enable_access_logging.py +562 -0
  57. runbooks/remediation/s3_encryption.py +526 -0
  58. runbooks/remediation/s3_force_ssl_secure_policy.py +143 -0
  59. runbooks/remediation/s3_list.py +141 -0
  60. runbooks/remediation/s3_object_search.py +201 -0
  61. runbooks/remediation/s3_remediation.py +816 -0
  62. runbooks/remediation/scan_for_phrase.py +425 -0
  63. runbooks/remediation/workspaces_list.py +220 -0
  64. runbooks/security/__init__.py +9 -10
  65. runbooks/security/security_baseline_tester.py +4 -2
  66. runbooks-0.7.6.dist-info/METADATA +608 -0
  67. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/RECORD +84 -76
  68. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/entry_points.txt +0 -1
  69. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/top_level.txt +0 -1
  70. jupyter-agent/.env +0 -2
  71. jupyter-agent/.env.template +0 -2
  72. jupyter-agent/.gitattributes +0 -35
  73. jupyter-agent/.gradio/certificate.pem +0 -31
  74. jupyter-agent/README.md +0 -16
  75. jupyter-agent/__main__.log +0 -8
  76. jupyter-agent/app.py +0 -256
  77. jupyter-agent/cloudops-agent.png +0 -0
  78. jupyter-agent/ds-system-prompt.txt +0 -154
  79. jupyter-agent/jupyter-agent.png +0 -0
  80. jupyter-agent/llama3_template.jinja +0 -123
  81. jupyter-agent/requirements.txt +0 -9
  82. jupyter-agent/tmp/4ojbs8a02ir/jupyter-agent.ipynb +0 -68
  83. jupyter-agent/tmp/cm5iasgpm3p/jupyter-agent.ipynb +0 -91
  84. jupyter-agent/tmp/crqbsseag5/jupyter-agent.ipynb +0 -91
  85. jupyter-agent/tmp/hohanq1u097/jupyter-agent.ipynb +0 -57
  86. jupyter-agent/tmp/jns1sam29wm/jupyter-agent.ipynb +0 -53
  87. jupyter-agent/tmp/jupyter-agent.ipynb +0 -27
  88. jupyter-agent/utils.py +0 -409
  89. runbooks/aws/__init__.py +0 -58
  90. runbooks/aws/dynamodb_operations.py +0 -231
  91. runbooks/aws/ec2_copy_image_cross-region.py +0 -195
  92. runbooks/aws/ec2_describe_instances.py +0 -202
  93. runbooks/aws/ec2_ebs_snapshots_delete.py +0 -186
  94. runbooks/aws/ec2_run_instances.py +0 -213
  95. runbooks/aws/ec2_start_stop_instances.py +0 -212
  96. runbooks/aws/ec2_terminate_instances.py +0 -143
  97. runbooks/aws/ec2_unused_eips.py +0 -196
  98. runbooks/aws/ec2_unused_volumes.py +0 -188
  99. runbooks/aws/s3_create_bucket.py +0 -142
  100. runbooks/aws/s3_list_buckets.py +0 -152
  101. runbooks/aws/s3_list_objects.py +0 -156
  102. runbooks/aws/s3_object_operations.py +0 -183
  103. runbooks/aws/tagging_lambda_handler.py +0 -183
  104. runbooks/inventory/FAILED_SCRIPTS_TROUBLESHOOTING.md +0 -619
  105. runbooks/inventory/PASSED_SCRIPTS_GUIDE.md +0 -738
  106. runbooks/inventory/aws_organization.png +0 -0
  107. runbooks/inventory/cfn_move_stack_instances.py +0 -1526
  108. runbooks/inventory/delete_s3_buckets_objects.py +0 -169
  109. runbooks/inventory/lockdown_cfn_stackset_role.py +0 -224
  110. runbooks/inventory/update_aws_actions.py +0 -173
  111. runbooks/inventory/update_cfn_stacksets.py +0 -1215
  112. runbooks/inventory/update_cloudwatch_logs_retention_policy.py +0 -294
  113. runbooks/inventory/update_iam_roles_cross_accounts.py +0 -478
  114. runbooks/inventory/update_s3_public_access_block.py +0 -539
  115. runbooks/organizations/__init__.py +0 -12
  116. runbooks/organizations/manager.py +0 -374
  117. runbooks-0.7.0.dist-info/METADATA +0 -375
  118. /runbooks/inventory/{tests → Tests}/common_test_data.py +0 -0
  119. /runbooks/inventory/{tests → Tests}/common_test_functions.py +0 -0
  120. /runbooks/inventory/{tests → Tests}/script_test_data.py +0 -0
  121. /runbooks/inventory/{tests → Tests}/setup.py +0 -0
  122. /runbooks/inventory/{tests → Tests}/src.py +0 -0
  123. /runbooks/inventory/{tests/test_inventory_modules.py → Tests/test_Inventory_Modules.py} +0 -0
  124. /runbooks/inventory/{tests → Tests}/test_cfn_describe_stacks.py +0 -0
  125. /runbooks/inventory/{tests → Tests}/test_ec2_describe_instances.py +0 -0
  126. /runbooks/inventory/{tests → Tests}/test_lambda_list_functions.py +0 -0
  127. /runbooks/inventory/{tests → Tests}/test_moto_integration_example.py +0 -0
  128. /runbooks/inventory/{tests → Tests}/test_org_list_accounts.py +0 -0
  129. /runbooks/inventory/{Inventory_Modules.py → inventory_modules.py} +0 -0
  130. /runbooks/{aws → operate}/tags.json +0 -0
  131. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/WHEEL +0 -0
  132. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,425 @@
1
+ """
2
+ Multi-Service Phrase Scanner - Search for sensitive data across AWS services.
3
+ """
4
+
5
+ import json
6
+ import logging
7
+
8
+ import click
9
+ from botocore.exceptions import ClientError
10
+
11
+ from .commons import display_aws_account_info, get_client, write_to_csv
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
def scan_lambda_functions(phrase, case_sensitive=False):
    """Search Lambda function environment variables for *phrase*.

    Args:
        phrase: Text to look for inside environment-variable values.
        case_sensitive: When False (default), matching is case-insensitive.

    Returns:
        List of match dicts with keys ``service``, ``resource_type``,
        ``resource_name``, ``resource_arn``, ``location`` and
        ``match_context``. Returns [] if the Lambda API call fails.
    """
    try:
        lambda_client = get_client("lambda")
        paginator = lambda_client.get_paginator("list_functions")

        # The needle is loop-invariant; normalize it once instead of per variable.
        search_phrase = phrase if case_sensitive else phrase.lower()

        results = []
        function_count = 0

        for page in paginator.paginate():
            for function in page["Functions"]:
                function_count += 1
                function_name = function["FunctionName"]

                # Functions without environment variables have no "Environment" key.
                if "Environment" in function:
                    env_vars = function["Environment"].get("Variables", {})
                    for key, value in env_vars.items():
                        search_value = value if case_sensitive else value.lower()

                        if search_phrase in search_value:
                            results.append(
                                {
                                    "service": "Lambda",
                                    "resource_type": "Function",
                                    "resource_name": function_name,
                                    "resource_arn": function["FunctionArn"],
                                    "location": f"Environment Variable: {key}",
                                    # Truncate long values so report rows stay readable.
                                    "match_context": f"{key}={value[:100]}..."
                                    if len(value) > 100
                                    else f"{key}={value}",
                                }
                            )

        logger.info(f" Scanned {function_count} Lambda functions")
        return results

    except ClientError as e:
        logger.error(f"Failed to scan Lambda functions: {e}")
        return []
56
+
57
+
58
def scan_ecs_tasks(phrase, case_sensitive=False):
    """Search ECS task-definition container environment variables for the phrase.

    Every registered task-definition revision is described and its
    containers' environment entries are matched; returns a list of match
    dicts, or [] if listing task definitions fails.
    """
    try:
        ecs = get_client("ecs")

        matches = []
        scanned = 0

        for page in ecs.get_paginator("list_task_definitions").paginate():
            for arn in page["taskDefinitionArns"]:
                scanned += 1

                try:
                    definition = ecs.describe_task_definition(taskDefinition=arn)["taskDefinition"]
                except ClientError as err:
                    # Some revisions may be inaccessible; skip and keep scanning.
                    logger.debug(f"Could not describe task definition {arn}: {err}")
                    continue

                for container in definition["containerDefinitions"]:
                    for env_var in container.get("environment", []):
                        var_name = env_var.get("name", "")
                        var_value = env_var.get("value", "")

                        haystack = var_value if case_sensitive else var_value.lower()
                        needle = phrase if case_sensitive else phrase.lower()

                        if needle not in haystack:
                            continue

                        # Truncate long values so report rows stay readable.
                        context = (
                            f"{var_name}={var_value[:100]}..."
                            if len(var_value) > 100
                            else f"{var_name}={var_value}"
                        )
                        matches.append(
                            {
                                "service": "ECS",
                                "resource_type": "Task Definition",
                                "resource_name": arn.split("/")[-1],
                                "resource_arn": arn,
                                "location": f"Container: {container['name']}, Env Var: {var_name}",
                                "match_context": context,
                            }
                        )

        logger.info(f" Scanned {scanned} ECS task definitions")
        return matches

    except ClientError as e:
        logger.error(f"Failed to scan ECS task definitions: {e}")
        return []
109
+
110
+
111
def scan_ssm_parameters(phrase, case_sensitive=False):
    """Search SSM Parameter Store values (decrypted) for the phrase.

    Parameters that cannot be read (missing, or access denied — e.g. a
    SecureString whose KMS key we cannot use) are counted and skipped;
    other per-parameter errors are logged as warnings.

    Returns:
        List of match dicts, or [] if listing parameters itself fails.
    """
    try:
        ssm_client = get_client("ssm")
        paginator = ssm_client.get_paginator("describe_parameters")

        # The needle is loop-invariant; normalize it once.
        search_phrase = phrase if case_sensitive else phrase.lower()

        results = []
        param_count = 0
        access_denied_count = 0

        for page in paginator.paginate():
            for param in page["Parameters"]:
                param_count += 1
                param_name = param["Name"]

                try:
                    # WithDecryption so SecureString values are searchable too.
                    parameter = ssm_client.get_parameter(Name=param_name, WithDecryption=True)
                    param_value = parameter["Parameter"]["Value"]

                    search_value = param_value if case_sensitive else param_value.lower()

                    if search_phrase in search_value:
                        results.append(
                            {
                                "service": "SSM Parameter Store",
                                "resource_type": "Parameter",
                                "resource_name": param_name,
                                "resource_arn": param.get("ARN", "N/A"),
                                "location": "Parameter Value",
                                "match_context": param_value[:100] + "..." if len(param_value) > 100 else param_value,
                            }
                        )

                except ClientError as e:
                    error_code = e.response.get("Error", {}).get("Code", "Unknown")
                    # SSM reports denial as "AccessDeniedException"; the bare
                    # "AccessDenied" spelling is kept for backward compatibility.
                    if error_code in ["ParameterNotFound", "AccessDenied", "AccessDeniedException"]:
                        access_denied_count += 1
                        logger.debug(f"Cannot access parameter {param_name}: {error_code}")
                    else:
                        logger.warning(f"Error accessing parameter {param_name}: {e}")

        logger.info(f" Scanned {param_count} SSM parameters ({access_denied_count} access denied)")
        return results

    except ClientError as e:
        logger.error(f"Failed to scan SSM parameters: {e}")
        return []
159
+
160
+
161
def scan_secrets_manager(phrase, case_sensitive=False):
    """Search AWS Secrets Manager secret values for the phrase.

    Secret strings that parse as JSON objects are searched one value at a
    time so the report can name the matching JSON key; any other secret
    string (non-JSON, or JSON that is not an object) is searched as plain
    text. Secrets that cannot be read are counted and skipped.

    Returns:
        List of match dicts, or [] if listing secrets itself fails.
    """
    try:
        secrets_client = get_client("secretsmanager")
        paginator = secrets_client.get_paginator("list_secrets")

        # The needle is loop-invariant; normalize it once.
        search_phrase = phrase if case_sensitive else phrase.lower()

        results = []
        secret_count = 0
        access_denied_count = 0

        def _record_plain_match(secret_name, secret_arn, secret_string):
            """Append a match when the phrase occurs in the raw secret string."""
            search_value = secret_string if case_sensitive else secret_string.lower()
            if search_phrase in search_value:
                results.append(
                    {
                        "service": "Secrets Manager",
                        "resource_type": "Secret",
                        "resource_name": secret_name,
                        "resource_arn": secret_arn,
                        "location": "Secret Value",
                        "match_context": secret_string[:100] + "..."
                        if len(secret_string) > 100
                        else secret_string,
                    }
                )

        for page in paginator.paginate():
            for secret in page["SecretList"]:
                secret_count += 1
                secret_name = secret["Name"]
                secret_arn = secret["ARN"]

                try:
                    secret_value = secrets_client.get_secret_value(SecretId=secret_arn)
                    # Binary-only secrets carry no SecretString; default to "".
                    secret_string = secret_value.get("SecretString", "")

                    if isinstance(secret_string, str):
                        try:
                            secret_data = json.loads(secret_string)
                        except json.JSONDecodeError:
                            # Not valid JSON, search as plain string
                            _record_plain_match(secret_name, secret_arn, secret_string)
                        else:
                            if isinstance(secret_data, dict):
                                # JSON object: search each value (the key only
                                # labels the match; keys themselves are not searched).
                                for key, value in secret_data.items():
                                    search_value = str(value) if case_sensitive else str(value).lower()

                                    if search_phrase in search_value:
                                        results.append(
                                            {
                                                "service": "Secrets Manager",
                                                "resource_type": "Secret",
                                                "resource_name": secret_name,
                                                "resource_arn": secret_arn,
                                                "location": f"JSON Key: {key}",
                                                "match_context": f"{key}={str(value)[:100]}..."
                                                if len(str(value)) > 100
                                                else f"{key}={value}",
                                            }
                                        )
                            else:
                                # Valid JSON but not an object: search the raw string.
                                _record_plain_match(secret_name, secret_arn, secret_string)

                except ClientError as e:
                    error_code = e.response.get("Error", {}).get("Code", "Unknown")
                    if error_code in ["ResourceNotFoundException", "AccessDeniedException"]:
                        access_denied_count += 1
                        logger.debug(f"Cannot access secret {secret_name}: {error_code}")
                    else:
                        logger.warning(f"Error accessing secret {secret_name}: {e}")

        logger.info(f" Scanned {secret_count} secrets ({access_denied_count} access denied)")
        return results

    except ClientError as e:
        logger.error(f"Failed to scan Secrets Manager: {e}")
        return []
255
+
256
+
257
def scan_route53_records(phrase, case_sensitive=False):
    """Search Route 53 DNS record names and values for the phrase.

    Record names are matched for every record type; record values are
    matched only for A and CNAME records that carry "ResourceRecords"
    (records without that key are name-matched only).

    Returns:
        List of match dicts, or [] if the Route 53 API calls fail.
    """
    try:
        route53_client = get_client("route53")

        # The needle is loop-invariant; normalize it once.
        search_phrase = phrase if case_sensitive else phrase.lower()

        results = []
        zone_count = 0
        record_count = 0

        # List all hosted zones.
        # NOTE(review): single unpaginated call — accounts with >100 zones
        # would need the list_hosted_zones paginator; confirm fleet size.
        hosted_zones = route53_client.list_hosted_zones()["HostedZones"]

        for zone in hosted_zones:
            zone_count += 1
            zone_id = zone["Id"]
            zone_name = zone["Name"]

            # List records in the hosted zone
            paginator = route53_client.get_paginator("list_resource_record_sets")
            for page in paginator.paginate(HostedZoneId=zone_id):
                for record in page["ResourceRecordSets"]:
                    record_count += 1
                    record_name = record["Name"]
                    record_type = record["Type"]

                    search_name = record_name if case_sensitive else record_name.lower()

                    # Check if the phrase is in the record name
                    if search_phrase in search_name:
                        results.append(
                            {
                                "service": "Route 53",
                                "resource_type": "DNS Record",
                                "resource_name": record_name,
                                # zone_id is "/hostedzone/Zxxxx"; keep only the bare id.
                                "resource_arn": f"arn:aws:route53:::hostedzone/{zone_id.split('/')[-1]}",
                                "location": f"Record Name in Zone: {zone_name}",
                                "match_context": f"Type: {record_type}, Name: {record_name}",
                            }
                        )

                    # Check record values for A and CNAME records
                    if record_type in ["A", "CNAME"] and "ResourceRecords" in record:
                        for value_record in record["ResourceRecords"]:
                            record_value = value_record["Value"]
                            search_value = record_value if case_sensitive else record_value.lower()

                            if search_phrase in search_value:
                                results.append(
                                    {
                                        "service": "Route 53",
                                        "resource_type": "DNS Record",
                                        "resource_name": record_name,
                                        "resource_arn": f"arn:aws:route53:::hostedzone/{zone_id.split('/')[-1]}",
                                        "location": f"Record Value in Zone: {zone_name}",
                                        "match_context": f"Type: {record_type}, Value: {record_value}",
                                    }
                                )

        logger.info(f" Scanned {zone_count} hosted zones, {record_count} DNS records")
        return results

    except ClientError as e:
        logger.error(f"Failed to scan Route 53 records: {e}")
        return []
322
+
323
+
324
@click.command()
@click.option("--phrase", required=True, help="Phrase to search for in AWS resources")
@click.option("--case-sensitive", is_flag=True, help="Perform case-sensitive search")
@click.option("--services", default="lambda,ecs,ssm,secrets,route53", help="Comma-separated list of services to scan")
@click.option("--output-file", default="phrase_search_results.csv", help="Output CSV file path")
def search_aws_resources(phrase, case_sensitive, services, output_file):
    """Search for a phrase across multiple AWS services (Lambda, ECS, SSM, Secrets Manager, Route 53).

    Each requested service token is dispatched to its scan_* helper; all
    matches are pooled, summarized per service, and exported to CSV via
    write_to_csv. Unknown service names abort before any scanning. A
    failure inside one scanner is logged and does not stop the others.
    """
    logger.info(f"Multi-service phrase search in {display_aws_account_info()}")
    logger.info(f"Searching for: '{phrase}' (case-sensitive: {case_sensitive})")

    # Parse service list (tokens are trimmed and lowercased so
    # " Lambda, ECS " works the same as "lambda,ecs").
    service_list = [s.strip().lower() for s in services.split(",")]
    # Dispatch table: CLI token -> scanner function.
    service_scanners = {
        "lambda": scan_lambda_functions,
        "ecs": scan_ecs_tasks,
        "ssm": scan_ssm_parameters,
        "secrets": scan_secrets_manager,
        "route53": scan_route53_records,
    }

    # Validate services before doing any work.
    invalid_services = [s for s in service_list if s not in service_scanners]
    if invalid_services:
        logger.error(f"Invalid services: {invalid_services}")
        logger.info(f"Valid services: {list(service_scanners.keys())}")
        return

    logger.info(f"Scanning services: {', '.join(service_list)}")

    try:
        all_results = []

        # Scan each requested service
        for service in service_list:
            logger.info(f"🔍 Scanning {service.upper()}...")
            scanner = service_scanners[service]

            try:
                results = scanner(phrase, case_sensitive)
                all_results.extend(results)

                if results:
                    logger.info(f" ✓ Found {len(results)} matches in {service}")
                else:
                    logger.info(f" ○ No matches found in {service}")

            except Exception as e:
                # Best-effort: one failing scanner must not abort the rest.
                logger.error(f" ✗ Error scanning {service}: {e}")

        # Process and export results
        if all_results:
            logger.info(f"\n🎯 SEARCH RESULTS: Found {len(all_results)} total matches")

            # Group by service for summary
            service_counts = {}
            for result in all_results:
                service = result["service"]
                service_counts[service] = service_counts.get(service, 0) + 1

            logger.info("Matches per service:")
            for service, count in sorted(service_counts.items()):
                logger.info(f" {service}: {count} matches")

            # Export to CSV (match dict keys are renamed to CSV column headers)
            csv_data = []
            for result in all_results:
                csv_data.append(
                    {
                        "Service": result["service"],
                        "ResourceType": result["resource_type"],
                        "ResourceName": result["resource_name"],
                        "ResourceARN": result["resource_arn"],
                        "Location": result["location"],
                        "MatchContext": result["match_context"],
                    }
                )

            write_to_csv(csv_data, output_file)
            logger.info(f"Search results exported to: {output_file}")

            # Show sample results
            logger.info("\nSample matches (first 5):")
            for i, result in enumerate(all_results[:5]):
                logger.info(f" {i + 1}. {result['service']}: {result['resource_name']}")
                logger.info(f" Location: {result['location']}")

        else:
            logger.info(f"❌ No matches found for phrase: '{phrase}'")

        # Summary
        logger.info("\n=== SEARCH SUMMARY ===")
        logger.info(f"Services scanned: {len(service_list)}")
        logger.info(f"Total matches found: {len(all_results)}")
        logger.info(f"Search phrase: '{phrase}' (case-sensitive: {case_sensitive})")

    except Exception as e:
        logger.error(f"Failed to search AWS resources: {e}")
        raise


if __name__ == "__main__":
    search_aws_resources()
@@ -0,0 +1,220 @@
1
+ """
2
+ 🚨 HIGH-RISK: WorkSpaces Management - Analyze and manage WorkSpaces with deletion capabilities.
3
+ """
4
+
5
+ import logging
6
+ from datetime import datetime, timedelta, timezone
7
+
8
+ import click
9
+ from botocore.exceptions import ClientError
10
+
11
+ from .commons import display_aws_account_info, get_client, write_to_csv
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
def get_workspace_usage_by_hours(workspace_id, start_time, end_time):
    """Return the WorkSpace's connected hours between *start_time* and *end_time*.

    Sums the hourly "UserConnected" CloudWatch datapoints for the given
    WorkSpace and rounds to two decimals; returns 0.0 if the metrics
    query fails.
    """
    try:
        metrics = get_client("cloudwatch")

        stats = metrics.get_metric_statistics(
            Namespace="AWS/WorkSpaces",
            MetricName="UserConnected",
            Dimensions=[{"Name": "WorkspaceId", "Value": workspace_id}],
            StartTime=start_time,
            EndTime=end_time,
            Period=3600,  # 1 hour intervals
            Statistics=["Sum"],
        )

        # Accumulate the hourly sums; no datapoints means zero usage.
        total = 0
        for point in stats.get("Datapoints", []):
            total += point["Sum"]

        logger.debug(f"Workspace {workspace_id}: {total} usage hours")

        return round(total, 2)

    except ClientError as e:
        logger.warning(f"Could not get usage metrics for {workspace_id}: {e}")
        return 0.0
+ return 0.0
39
+
40
+
41
@click.command()
@click.option("--output-file", default="/tmp/workspaces.csv", help="Output CSV file path")
@click.option("--days", default=30, help="Number of days to analyze for usage metrics")
@click.option("--delete-unused", is_flag=True, help="🚨 HIGH-RISK: Delete unused WorkSpaces")
@click.option("--unused-days", default=90, help="Days threshold for considering WorkSpace unused")
@click.option("--confirm", is_flag=True, help="Skip confirmation prompts (dangerous!)")
def get_workspaces(
    output_file: str = "/tmp/workspaces.csv",
    days: int = 30,
    delete_unused: bool = False,
    unused_days: int = 90,
    confirm: bool = False,
):
    """🚨 HIGH-RISK: Analyze WorkSpaces usage and optionally delete unused ones.

    Inventories every WorkSpace with its last-connection timestamp and
    CloudWatch usage hours over the last *days* days, exports the
    analysis to CSV, and — only when --delete-unused is set — terminates
    WorkSpaces whose last connection is older than *unused-days*
    (interactive confirmation unless --confirm is given).
    """

    # HIGH-RISK OPERATION WARNING: require an explicit yes up front
    # unless the caller passed --confirm.
    if delete_unused and not confirm:
        logger.warning("🚨 HIGH-RISK OPERATION: WorkSpace deletion")
        logger.warning("This operation will permanently delete WorkSpaces and all user data")
        if not click.confirm("Do you want to continue?"):
            logger.info("Operation cancelled by user")
            return

    logger.info(f"Analyzing WorkSpaces in {display_aws_account_info()}")

    try:
        ws_client = get_client("workspaces")

        # Get all WorkSpaces
        logger.info("Collecting WorkSpaces data...")
        paginator = ws_client.get_paginator("describe_workspaces")
        data = []

        # Calculate time range for usage analysis (timezone-aware UTC,
        # comparable with AWS timestamps)
        end_time = datetime.now(tz=timezone.utc)
        start_time = end_time - timedelta(days=days)
        unused_threshold = end_time - timedelta(days=unused_days)

        logger.info(f"Analyzing usage from {start_time.strftime('%Y-%m-%d')} to {end_time.strftime('%Y-%m-%d')}")

        total_workspaces = 0
        for page in paginator.paginate():
            workspaces = page.get("Workspaces", [])
            total_workspaces += len(workspaces)

            for workspace in workspaces:
                workspace_id = workspace["WorkspaceId"]
                username = workspace["UserName"]
                state = workspace["State"]

                logger.info(f"Analyzing WorkSpace: {workspace_id} ({username})")

                # Get connection status (per-WorkSpace call; failures degrade
                # to "ERROR" rather than aborting the scan)
                try:
                    connection_response = ws_client.describe_workspaces_connection_status(WorkspaceIds=[workspace_id])

                    connection_status_list = connection_response.get("WorkspacesConnectionStatus", [])
                    if connection_status_list:
                        last_connection = connection_status_list[0].get("LastKnownUserConnectionTimestamp")
                        connection_state = connection_status_list[0].get("ConnectionState", "UNKNOWN")
                    else:
                        last_connection = None
                        connection_state = "UNKNOWN"

                except ClientError as e:
                    logger.warning(f"Could not get connection status for {workspace_id}: {e}")
                    last_connection = None
                    connection_state = "ERROR"

                # Format last connection
                if last_connection:
                    last_connection_str = last_connection.strftime("%Y-%m-%d %H:%M:%S")
                    days_since_connection = (end_time - last_connection).days
                else:
                    last_connection_str = "Never logged in"
                    days_since_connection = 999  # High number for never connected

                # Get usage metrics (0.0 when CloudWatch query fails)
                usage_hours = get_workspace_usage_by_hours(workspace_id, start_time, end_time)

                # Determine if workspace is unused: never connected, or last
                # connection older than the unused threshold
                is_unused = last_connection is None or last_connection < unused_threshold

                workspace_data = {
                    "WorkspaceId": workspace_id,
                    "UserName": username,
                    "State": state,
                    # NOTE(review): direct key access assumes every response
                    # includes WorkspaceProperties with RunningMode and
                    # OperatingSystemName — a missing key raises KeyError
                    # here; confirm against the WorkSpaces API in use.
                    "RunningMode": workspace["WorkspaceProperties"]["RunningMode"],
                    "OperatingSystem": workspace["WorkspaceProperties"]["OperatingSystemName"],
                    "BundleId": workspace["BundleId"],
                    "LastConnection": last_connection_str,
                    "DaysSinceConnection": days_since_connection,
                    "ConnectionState": connection_state,
                    f"UsageHours_{days}days": usage_hours,
                    "IsUnused": is_unused,
                    "UnusedThreshold": f"{unused_days} days",
                }

                data.append(workspace_data)

                # Log status
                if is_unused:
                    logger.warning(f" ⚠ UNUSED: Last connection {days_since_connection} days ago")
                else:
                    logger.info(f" ✓ Active: {usage_hours}h usage in {days} days")

        # Export data (CSV is written before any deletion happens, so the
        # full analysis survives even if deletion fails)
        write_to_csv(data, output_file)
        logger.info(f"WorkSpaces analysis exported to: {output_file}")

        # Analyze unused WorkSpaces
        unused_workspaces = [ws for ws in data if ws["IsUnused"]]

        logger.info("\n=== ANALYSIS SUMMARY ===")
        logger.info(f"Total WorkSpaces: {len(data)}")
        logger.info(f"Unused WorkSpaces (>{unused_days} days): {len(unused_workspaces)}")

        if unused_workspaces:
            logger.warning(f"⚠ Found {len(unused_workspaces)} unused WorkSpaces:")
            for ws in unused_workspaces:
                logger.warning(
                    f" - {ws['WorkspaceId']} ({ws['UserName']}) - {ws['DaysSinceConnection']} days since connection"
                )

        # Handle deletion of unused WorkSpaces
        if delete_unused and unused_workspaces:
            logger.warning(f"\n🚨 DELETION PHASE: {len(unused_workspaces)} WorkSpaces to delete")

            deletion_candidates = []
            for ws in unused_workspaces:
                # Additional safety check - only delete if really unused
                # and in a steady state (AVAILABLE or STOPPED)
                if ws["State"] in ["AVAILABLE", "STOPPED"] and ws["DaysSinceConnection"] >= unused_days:
                    deletion_candidates.append(ws)

            if deletion_candidates:
                logger.warning(f"Confirmed deletion candidates: {len(deletion_candidates)}")

                # Final confirmation (second prompt before the destructive calls)
                if not confirm:
                    logger.warning("\n🚨 FINAL CONFIRMATION:")
                    logger.warning(f"About to delete {len(deletion_candidates)} WorkSpaces permanently")
                    if not click.confirm("Proceed with WorkSpace deletion?"):
                        logger.info("Deletion cancelled")
                        return

                # Perform deletions one at a time so a failure on one
                # WorkSpace does not stop the rest
                deleted_count = 0
                failed_count = 0

                for ws in deletion_candidates:
                    workspace_id = ws["WorkspaceId"]
                    username = ws["UserName"]

                    logger.warning(f"🗑 Deleting WorkSpace: {workspace_id} ({username})")

                    try:
                        ws_client.terminate_workspaces(TerminateWorkspaceRequests=[{"WorkspaceId": workspace_id}])
                        deleted_count += 1
                        logger.warning(f" ✓ Successfully deleted {workspace_id}")

                        # Log for audit
                        logger.info(f"🔍 Audit: WorkSpace deletion completed")
                        logger.info(f" WorkSpace ID: {workspace_id}")
                        logger.info(f" Username: {username}")
                        logger.info(f" Days since connection: {ws['DaysSinceConnection']}")

                    except ClientError as e:
                        failed_count += 1
                        logger.error(f" ✗ Failed to delete {workspace_id}: {e}")

                logger.warning(f"\n🔄 Deletion complete: {deleted_count} deleted, {failed_count} failed")
            else:
                logger.info("No WorkSpaces meet the deletion criteria")

        elif delete_unused and not unused_workspaces:
            logger.info("✓ No unused WorkSpaces found for deletion")

    except Exception as e:
        logger.error(f"Failed to analyze WorkSpaces: {e}")
        raise