aws-cost-calculator-cli 1.2.0__py3-none-any.whl → 1.11.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release: this version of aws-cost-calculator-cli might be problematic.

@@ -0,0 +1,251 @@
+ """
+ Executor that routes to either API or local execution.
+ """
+ import boto3
+ import click
+ from pathlib import Path
+ from cost_calculator.api_client import is_api_configured, call_lambda_api
+
+
+ def get_credentials_dict(config):
+     """
+     Extract credentials from config in format needed for API.
+
+     Returns:
+         dict with access_key, secret_key, session_token, or None if profile is 'dummy'
+     """
+     if 'aws_profile' in config:
+         # Skip credential loading for dummy profile (API-only mode)
+         if config['aws_profile'] == 'dummy':
+             return None
+
+         # Get temporary credentials from SSO session
+         try:
+             session = boto3.Session(profile_name=config['aws_profile'])
+             credentials = session.get_credentials()
+
+             if credentials is None:
+                 raise Exception(
+                     f"Could not get credentials for profile '{config['aws_profile']}'.\n"
+                     f"Run: aws sso login --profile {config['aws_profile']}"
+                 )
+
+             frozen_creds = credentials.get_frozen_credentials()
+
+             return {
+                 'access_key': frozen_creds.access_key,
+                 'secret_key': frozen_creds.secret_key,
+                 'session_token': frozen_creds.token
+             }
+         except Exception as e:
+             # Show the actual error instead of silently returning None
+             error_msg = str(e)
+
+             # If it's an SSO token error, provide better guidance
+             if 'SSO Token' in error_msg or 'sso' in error_msg.lower():
+                 # Try to detect if using sso_session format
+                 import subprocess
+                 try:
+                     result = subprocess.run(
+                         ['grep', '-A', '3', f'profile {config["aws_profile"]}',
+                          str(Path.home() / '.aws' / 'config')],
+                         capture_output=True,
+                         text=True,
+                         timeout=2
+                     )
+                     if 'sso_session' in result.stdout:
+                         # Extract session name
+                         for line in result.stdout.split('\n'):
+                             if 'sso_session' in line:
+                                 session_name = line.split('=')[1].strip()
+                                 raise Exception(
+                                     f"Failed to get AWS credentials for profile '{config['aws_profile']}'.\n"
+                                     f"Error: {error_msg}\n\n"
+                                     f"Your profile uses SSO session '{session_name}'.\n"
+                                     f"Try: aws sso login --sso-session {session_name}\n"
+                                     f"(Requires AWS CLI v2.9.0+)\n\n"
+                                     f"Or: aws sso login --profile {config['aws_profile']}\n"
+                                     f"(If using older AWS CLI)"
+                                 )
+                 except (OSError, subprocess.SubprocessError):
+                     # Detection is best-effort: only swallow subprocess failures
+                     # (previously a bare except also swallowed the guidance
+                     # exception raised above) and fall through to the generic
+                     # message below.
+                     pass
+
+             raise Exception(
+                 f"Failed to get AWS credentials for profile '{config['aws_profile']}'.\n"
+                 f"Error: {error_msg}\n"
+                 f"Try: aws sso login --profile {config['aws_profile']}"
+             )
+     else:
+         # Use static credentials
+         creds = config.get('credentials', {})
+         if not creds:
+             return None
+
+         result = {
+             'access_key': creds['aws_access_key_id'],
+             'secret_key': creds['aws_secret_access_key']
+         }
+         if 'aws_session_token' in creds:
+             result['session_token'] = creds['aws_session_token']
+         return result
+
+
+ def execute_trends(config, weeks):
+     """
+     Execute trends analysis via API or locally.
+
+     Returns:
+         dict: trends data
+     """
+     accounts = config['accounts']
+
+     if not is_api_configured():
+         raise Exception(
+             "API not configured. Set COST_API_SECRET environment variable.\n"
+             "Local execution is disabled. Use the Lambda API."
+         )
+
+     # Use API only
+     click.echo("Using Lambda API...")
+     credentials = get_credentials_dict(config)
+     if not credentials:
+         raise Exception("Failed to get AWS credentials. Check your AWS SSO session.")
+     return call_lambda_api('trends', credentials, accounts, weeks=weeks)
+
+
+ def execute_monthly(config, months):
+     """
+     Execute monthly analysis via API or locally.
+
+     Returns:
+         dict: monthly data
+     """
+     accounts = config['accounts']
+
+     if not is_api_configured():
+         raise Exception(
+             "API not configured. Set COST_API_SECRET environment variable.\n"
+             "Local execution is disabled. Use the Lambda API."
+         )
+
+     # Use API only
+     click.echo("Using Lambda API...")
+     credentials = get_credentials_dict(config)
+     if not credentials:
+         raise Exception("Failed to get AWS credentials. Check your AWS SSO session.")
+     return call_lambda_api('monthly', credentials, accounts, months=months)
+
+
+ def execute_drill(config, weeks, service_filter=None, account_filter=None, usage_type_filter=None, resources=False):
+     """
+     Execute drill-down analysis via API.
+
+     Args:
+         config: Profile configuration
+         weeks: Number of weeks to analyze
+         service_filter: Optional service name filter
+         account_filter: Optional account ID filter
+         usage_type_filter: Optional usage type filter
+         resources: If True, query CUR for resource-level details
+
+     Returns:
+         dict: drill data or resource data
+     """
+     accounts = config['accounts']
+
+     if not is_api_configured():
+         raise Exception(
+             "API not configured. Set COST_API_SECRET environment variable.\n"
+             "Local execution is disabled. Use the Lambda API."
+         )
+
+     # Use API only
+     click.echo("Using Lambda API...")
+     credentials = get_credentials_dict(config)
+     if not credentials:
+         raise Exception("Failed to get AWS credentials. Check your AWS SSO session.")
+
+     kwargs = {'weeks': weeks}
+     if service_filter:
+         kwargs['service'] = service_filter
+     if account_filter:
+         kwargs['account'] = account_filter
+     if usage_type_filter:
+         kwargs['usage_type'] = usage_type_filter
+     if resources:
+         if not service_filter:
+             raise click.ClickException("--service is required when using --resources flag")
+         kwargs['resources'] = True
+
+     return call_lambda_api('drill', credentials, accounts, **kwargs)
+
+
+ def execute_analyze(config, weeks, analysis_type, pattern=None, min_cost=None):
+     """
+     Execute pandas-based analysis via API.
+     Note: This only works via API (requires pandas layer).
+
+     Returns:
+         dict: analysis results
+     """
+     accounts = config['accounts']
+
+     if not is_api_configured():
+         raise click.ClickException(
+             "Analyze command requires API configuration.\n"
+             "Set COST_API_SECRET environment variable."
+         )
+
+     credentials = get_credentials_dict(config)
+     kwargs = {'weeks': weeks, 'type': analysis_type}
+
+     if pattern:
+         kwargs['pattern'] = pattern
+     if min_cost:
+         kwargs['min_cost'] = min_cost
+
+     return call_lambda_api('analyze', credentials, accounts, **kwargs)
+
+
+ def execute_profile_operation(operation, profile_name=None, accounts=None, description=None):
+     """
+     Execute profile CRUD operations via API.
+
+     Returns:
+         dict: operation result
+     """
+     if not is_api_configured():
+         raise click.ClickException(
+             "Profile commands require API configuration.\n"
+             "Set COST_API_SECRET environment variable."
+         )
+
+     # Profile operations don't need AWS credentials, just API secret
+     import os
+     import requests
+     import json
+
+     api_secret = os.environ.get('COST_API_SECRET', '')
+
+     # Use profiles endpoint (hardcoded URL)
+     url = 'https://64g7jq7sjygec2zmll5lsghrpi0txrzo.lambda-url.us-east-1.on.aws/'
+
+     payload = {'operation': operation}
+     if profile_name:
+         payload['profile_name'] = profile_name
+     if accounts:
+         payload['accounts'] = accounts
+     if description:
+         payload['description'] = description
+
+     headers = {
+         'X-API-Secret': api_secret,
+         'Content-Type': 'application/json'
+     }
+
+     response = requests.post(url, headers=headers, json=payload, timeout=60)
+
+     if response.status_code != 200:
+         raise Exception(f"API call failed: {response.status_code} - {response.text}")
+
+     return response.json()
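
For context, a minimal sketch of how the executor functions above might be driven from a script. The module path cost_calculator.executor, the profile name, the account IDs, and the secret value are placeholders/assumptions, not values taken from the package; only the function names, the config keys ('aws_profile', 'accounts'), and the COST_API_SECRET requirement come from the code above.

# Hypothetical usage sketch -- illustrative only, not shipped with the package.
import os

from cost_calculator.executor import execute_trends, execute_drill  # assumed module path

os.environ["COST_API_SECRET"] = "<api-secret>"  # required by is_api_configured()

config = {
    "aws_profile": "my-sso-profile",            # placeholder; must be a logged-in SSO profile
    "accounts": ["111111111111", "222222222222"],  # placeholder account IDs
}

trends = execute_trends(config, weeks=6)
drill = execute_drill(config, weeks=6, service_filter="AmazonEC2")  # filter value is a placeholder
print(trends)
print(drill)
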
@@ -0,0 +1,323 @@
+ """
+ Cost forensics module - Resource inventory and CloudTrail analysis
+ """
+ import boto3
+ from datetime import datetime, timedelta
+ from collections import defaultdict
+ import json
+
+
+ def inventory_resources(account_id, profile, region='us-west-2'):
+     """
+     Inventory AWS resources in an account
+
+     Args:
+         account_id: AWS account ID
+         profile: AWS profile name (SSO)
+         region: AWS region
+
+     Returns:
+         dict with resource inventory
+     """
+     session = boto3.Session(profile_name=profile)
+     inventory = {
+         'account_id': account_id,
+         'profile': profile,
+         'region': region,
+         'timestamp': datetime.utcnow().isoformat(),
+         'ec2_instances': [],
+         'efs_file_systems': [],
+         'load_balancers': [],
+         'dynamodb_tables': []
+     }
+
+     try:
+         # EC2 Instances
+         ec2_client = session.client('ec2', region_name=region)
+         instances_response = ec2_client.describe_instances()
+
+         for reservation in instances_response['Reservations']:
+             for instance in reservation['Instances']:
+                 if instance['State']['Name'] == 'running':
+                     name = 'N/A'
+                     for tag in instance.get('Tags', []):
+                         if tag['Key'] == 'Name':
+                             name = tag['Value']
+                             break
+
+                     inventory['ec2_instances'].append({
+                         'instance_id': instance['InstanceId'],
+                         'instance_type': instance['InstanceType'],
+                         'name': name,
+                         'state': instance['State']['Name'],
+                         'launch_time': instance['LaunchTime'].isoformat(),
+                         'availability_zone': instance['Placement']['AvailabilityZone']
+                     })
+
+         # EFS File Systems
+         efs_client = session.client('efs', region_name=region)
+         efs_response = efs_client.describe_file_systems()
+
+         total_efs_size = 0
+         for fs in efs_response['FileSystems']:
+             size_bytes = fs['SizeInBytes']['Value']
+             size_gb = size_bytes / (1024**3)
+             total_efs_size += size_gb
+
+             inventory['efs_file_systems'].append({
+                 'file_system_id': fs['FileSystemId'],
+                 'name': fs.get('Name', 'N/A'),
+                 'size_gb': round(size_gb, 2),
+                 'creation_time': fs['CreationTime'].isoformat(),
+                 'number_of_mount_targets': fs['NumberOfMountTargets']
+             })
+
+         inventory['total_efs_size_gb'] = round(total_efs_size, 2)
+
+         # Load Balancers
+         elbv2_client = session.client('elbv2', region_name=region)
+         elb_response = elbv2_client.describe_load_balancers()
+
+         for lb in elb_response['LoadBalancers']:
+             inventory['load_balancers'].append({
+                 'name': lb['LoadBalancerName'],
+                 'type': lb['Type'],
+                 'dns_name': lb['DNSName'],
+                 'scheme': lb['Scheme'],
+                 'created_time': lb['CreatedTime'].isoformat(),
+                 'availability_zones': [az['ZoneName'] for az in lb['AvailabilityZones']]
+             })
+
+         # DynamoDB Tables (only if region supports it)
+         try:
+             ddb_client = session.client('dynamodb', region_name=region)
+             tables_response = ddb_client.list_tables()
+
+             for table_name in tables_response['TableNames'][:20]:  # Limit to 20 tables
+                 table_desc = ddb_client.describe_table(TableName=table_name)
+                 table_info = table_desc['Table']
+
+                 # Get backup settings
+                 try:
+                     backup_desc = ddb_client.describe_continuous_backups(TableName=table_name)
+                     pitr_status = backup_desc['ContinuousBackupsDescription']['PointInTimeRecoveryDescription']['PointInTimeRecoveryStatus']
+                 except Exception:
+                     # PITR status may be unavailable (e.g. missing permissions)
+                     pitr_status = 'UNKNOWN'
+
+                 size_gb = table_info.get('TableSizeBytes', 0) / (1024**3)
+
+                 inventory['dynamodb_tables'].append({
+                     'table_name': table_name,
+                     'size_gb': round(size_gb, 2),
+                     'item_count': table_info.get('ItemCount', 0),
+                     'pitr_status': pitr_status,
+                     'created_time': table_info['CreationDateTime'].isoformat()
+                 })
+         except Exception as e:
+             # DynamoDB might not be available in all regions
+             pass
+
+     except Exception as e:
+         inventory['error'] = str(e)
+
+     return inventory
+
+
+ def analyze_cloudtrail(account_id, profile, start_date, end_date, region='us-west-2'):
+     """
+     Analyze CloudTrail events for an account
+
+     Args:
+         account_id: AWS account ID
+         profile: AWS profile name (SSO)
+         start_date: Start datetime
+         end_date: End datetime
+         region: AWS region
+
+     Returns:
+         dict with CloudTrail event summary
+     """
+     session = boto3.Session(profile_name=profile)
+     ct_client = session.client('cloudtrail', region_name=region)
+
+     analysis = {
+         'account_id': account_id,
+         'profile': profile,
+         'region': region,
+         'start_date': start_date.isoformat(),
+         'end_date': end_date.isoformat(),
+         'event_summary': {},
+         'write_events': [],
+         'error': None
+     }
+
+     # Events that indicate resource creation/modification
+     write_event_names = [
+         'RunInstances', 'CreateVolume', 'AttachVolume',
+         'CreateFileSystem', 'ModifyFileSystem',
+         'CreateLoadBalancer', 'ModifyLoadBalancerAttributes',
+         'CreateTable', 'UpdateTable', 'UpdateContinuousBackups',
+         'CreateBackupVault', 'StartBackupJob'
+     ]
+
+     try:
+         event_counts = defaultdict(int)
+
+         # Query CloudTrail
+         paginator = ct_client.get_paginator('lookup_events')
+
+         for page in paginator.paginate(
+             StartTime=start_date,
+             EndTime=end_date,
+             MaxResults=50,
+             PaginationConfig={'MaxItems': 200}
+         ):
+             for event in page.get('Events', []):
+                 event_name = event.get('EventName', '')
+                 event_counts[event_name] += 1
+
+                 # Capture write events
+                 if event_name in write_event_names:
+                     event_detail = json.loads(event['CloudTrailEvent'])
+
+                     analysis['write_events'].append({
+                         'time': event.get('EventTime').isoformat(),
+                         'event_name': event_name,
+                         'username': event.get('Username', 'N/A'),
+                         'resources': [
+                             {
+                                 'type': r.get('ResourceType', 'N/A'),
+                                 'name': r.get('ResourceName', 'N/A')
+                             }
+                             for r in event.get('Resources', [])[:3]
+                         ]
+                     })
+
+         # Convert to regular dict and sort
+         analysis['event_summary'] = dict(sorted(
+             event_counts.items(),
+             key=lambda x: x[1],
+             reverse=True
+         ))
+
+     except Exception as e:
+         analysis['error'] = str(e)
+
+     return analysis
+
+
+ def format_investigation_report(cost_data, inventories, cloudtrail_data=None):
+     """
+     Format investigation data into markdown report
+
+     Args:
+         cost_data: Cost analysis results from trends/drill
+         inventories: List of resource inventories
+         cloudtrail_data: List of CloudTrail analyses (optional)
+
+     Returns:
+         str: Markdown formatted report
+     """
+     report = []
+     report.append("# Cost Investigation Report")
+     report.append(f"**Generated:** {datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')}")
+     report.append("")
+
+     # Cost Analysis Section
+     if cost_data:
+         report.append("## Cost Analysis")
+         report.append("")
+         # Add cost data formatting here
+         # This will be populated from trends/drill results
+
+     # Resource Inventory Section
+     if inventories:
+         report.append("## Resource Inventory")
+         report.append("")
+
+         for inv in inventories:
+             profile_name = inv.get('profile', inv['account_id'])
+             report.append(f"### Account {inv['account_id']} ({profile_name})")
+             report.append(f"**Region:** {inv['region']}")
+             report.append("")
+
+             # EC2 Instances
+             if inv['ec2_instances']:
+                 report.append(f"**EC2 Instances:** {len(inv['ec2_instances'])} running")
+                 for instance in inv['ec2_instances'][:10]:  # Show first 10
+                     report.append(f"- `{instance['instance_id']}`: {instance['instance_type']} ({instance['name']})")
+                     report.append(f" - Launched: {instance['launch_time'][:10]}, AZ: {instance['availability_zone']}")
+                 if len(inv['ec2_instances']) > 10:
+                     report.append(f" ... and {len(inv['ec2_instances']) - 10} more")
+                 report.append("")
+
+             # EFS File Systems
+             if inv['efs_file_systems']:
+                 total_size = inv.get('total_efs_size_gb', 0)
+                 report.append(f"**EFS File Systems:** {len(inv['efs_file_systems'])} total, {total_size:,.0f} GB")
+                 for fs in inv['efs_file_systems']:
+                     report.append(f"- `{fs['file_system_id']}` ({fs['name']}): {fs['size_gb']:,.2f} GB")
+                     report.append(f" - Created: {fs['creation_time'][:10]}")
+                 report.append("")
+
+             # Load Balancers
+             if inv['load_balancers']:
+                 report.append(f"**Load Balancers:** {len(inv['load_balancers'])}")
+                 for lb in inv['load_balancers'][:10]:  # Show first 10
+                     report.append(f"- `{lb['name']}`: {lb['type']}")
+                     report.append(f" - Created: {lb['created_time'][:10]}, Scheme: {lb['scheme']}")
+                 if len(inv['load_balancers']) > 10:
+                     report.append(f" ... and {len(inv['load_balancers']) - 10} more")
+                 report.append("")
+
+             # DynamoDB Tables
+             if inv['dynamodb_tables']:
+                 report.append(f"**DynamoDB Tables:** {len(inv['dynamodb_tables'])}")
+                 for table in inv['dynamodb_tables'][:10]:
+                     report.append(f"- `{table['table_name']}`: {table['size_gb']:.2f} GB, {table['item_count']:,} items")
+                     report.append(f" - PITR: {table['pitr_status']}, Created: {table['created_time'][:10]}")
+                 if len(inv['dynamodb_tables']) > 10:
+                     report.append(f" ... and {len(inv['dynamodb_tables']) - 10} more")
+                 report.append("")
+
+             report.append("---")
+             report.append("")
+
+     # CloudTrail Section
+     if cloudtrail_data:
+         report.append("## CloudTrail Events")
+         report.append("")
+
+         for ct in cloudtrail_data:
+             profile_name = ct.get('profile', ct['account_id'])
+             report.append(f"### Account {ct['account_id']} ({profile_name})")
+             report.append(f"**Period:** {ct['start_date'][:10]} to {ct['end_date'][:10]}")
+             report.append("")
+
+             if ct.get('error'):
+                 report.append(f"⚠️ Error: {ct['error']}")
+                 report.append("")
+                 continue
+
+             # Write events (resource changes)
+             if ct['write_events']:
+                 report.append(f"**Resource Changes:** {len(ct['write_events'])} events")
+                 for evt in ct['write_events'][:10]:
+                     report.append(f"- `{evt['time'][:19]}` - **{evt['event_name']}**")
+                     report.append(f" - User: {evt['username']}")
+                     if evt['resources']:
+                         for res in evt['resources']:
+                             report.append(f" - Resource: {res['type']} - {res['name']}")
+                 report.append("")
+
+             # Event summary
+             if ct['event_summary']:
+                 report.append("**Top Events:**")
+                 for event_name, count in list(ct['event_summary'].items())[:15]:
+                     report.append(f"- {event_name}: {count}")
+                 report.append("")
+
+             report.append("---")
+             report.append("")
+
+     return "\n".join(report)
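
A similarly hedged sketch of how the forensics helpers above could be combined into a report. The module path cost_calculator.forensics, the profile name, and the account ID are placeholders/assumptions; the function signatures match the definitions above.

# Hypothetical usage sketch -- illustrative only, not shipped with the package.
from datetime import datetime, timedelta

from cost_calculator.forensics import (  # assumed module path
    analyze_cloudtrail,
    format_investigation_report,
    inventory_resources,
)

account_id = "111111111111"   # placeholder account ID
profile = "my-sso-profile"    # placeholder SSO profile

inventory = inventory_resources(account_id, profile, region="us-west-2")

end = datetime.utcnow()
start = end - timedelta(days=14)
trail = analyze_cloudtrail(account_id, profile, start, end, region="us-west-2")

report = format_investigation_report(None, [inventory], cloudtrail_data=[trail])
print(report)
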