aws-inventory-manager 0.17.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aws_inventory_manager-0.17.12.dist-info/LICENSE +21 -0
- aws_inventory_manager-0.17.12.dist-info/METADATA +1292 -0
- aws_inventory_manager-0.17.12.dist-info/RECORD +152 -0
- aws_inventory_manager-0.17.12.dist-info/WHEEL +5 -0
- aws_inventory_manager-0.17.12.dist-info/entry_points.txt +2 -0
- aws_inventory_manager-0.17.12.dist-info/top_level.txt +1 -0
- src/__init__.py +3 -0
- src/aws/__init__.py +11 -0
- src/aws/client.py +128 -0
- src/aws/credentials.py +191 -0
- src/aws/rate_limiter.py +177 -0
- src/cli/__init__.py +12 -0
- src/cli/config.py +130 -0
- src/cli/main.py +4046 -0
- src/cloudtrail/__init__.py +5 -0
- src/cloudtrail/query.py +642 -0
- src/config_service/__init__.py +21 -0
- src/config_service/collector.py +346 -0
- src/config_service/detector.py +256 -0
- src/config_service/resource_type_mapping.py +328 -0
- src/cost/__init__.py +5 -0
- src/cost/analyzer.py +226 -0
- src/cost/explorer.py +209 -0
- src/cost/reporter.py +237 -0
- src/delta/__init__.py +5 -0
- src/delta/calculator.py +206 -0
- src/delta/differ.py +185 -0
- src/delta/formatters.py +272 -0
- src/delta/models.py +154 -0
- src/delta/reporter.py +234 -0
- src/matching/__init__.py +6 -0
- src/matching/config.py +52 -0
- src/matching/normalizer.py +450 -0
- src/matching/prompts.py +33 -0
- src/models/__init__.py +21 -0
- src/models/config_diff.py +135 -0
- src/models/cost_report.py +87 -0
- src/models/deletion_operation.py +104 -0
- src/models/deletion_record.py +97 -0
- src/models/delta_report.py +122 -0
- src/models/efs_resource.py +80 -0
- src/models/elasticache_resource.py +90 -0
- src/models/group.py +318 -0
- src/models/inventory.py +133 -0
- src/models/protection_rule.py +123 -0
- src/models/report.py +288 -0
- src/models/resource.py +111 -0
- src/models/security_finding.py +102 -0
- src/models/snapshot.py +122 -0
- src/restore/__init__.py +20 -0
- src/restore/audit.py +175 -0
- src/restore/cleaner.py +461 -0
- src/restore/config.py +209 -0
- src/restore/deleter.py +976 -0
- src/restore/dependency.py +254 -0
- src/restore/safety.py +115 -0
- src/security/__init__.py +0 -0
- src/security/checks/__init__.py +0 -0
- src/security/checks/base.py +56 -0
- src/security/checks/ec2_checks.py +88 -0
- src/security/checks/elasticache_checks.py +149 -0
- src/security/checks/iam_checks.py +102 -0
- src/security/checks/rds_checks.py +140 -0
- src/security/checks/s3_checks.py +95 -0
- src/security/checks/secrets_checks.py +96 -0
- src/security/checks/sg_checks.py +142 -0
- src/security/cis_mapper.py +97 -0
- src/security/models.py +53 -0
- src/security/reporter.py +174 -0
- src/security/scanner.py +87 -0
- src/snapshot/__init__.py +6 -0
- src/snapshot/capturer.py +453 -0
- src/snapshot/filter.py +259 -0
- src/snapshot/inventory_storage.py +236 -0
- src/snapshot/report_formatter.py +250 -0
- src/snapshot/reporter.py +189 -0
- src/snapshot/resource_collectors/__init__.py +5 -0
- src/snapshot/resource_collectors/apigateway.py +140 -0
- src/snapshot/resource_collectors/backup.py +136 -0
- src/snapshot/resource_collectors/base.py +81 -0
- src/snapshot/resource_collectors/cloudformation.py +55 -0
- src/snapshot/resource_collectors/cloudwatch.py +109 -0
- src/snapshot/resource_collectors/codebuild.py +69 -0
- src/snapshot/resource_collectors/codepipeline.py +82 -0
- src/snapshot/resource_collectors/dynamodb.py +65 -0
- src/snapshot/resource_collectors/ec2.py +240 -0
- src/snapshot/resource_collectors/ecs.py +215 -0
- src/snapshot/resource_collectors/efs_collector.py +102 -0
- src/snapshot/resource_collectors/eks.py +200 -0
- src/snapshot/resource_collectors/elasticache_collector.py +79 -0
- src/snapshot/resource_collectors/elb.py +126 -0
- src/snapshot/resource_collectors/eventbridge.py +156 -0
- src/snapshot/resource_collectors/glue.py +199 -0
- src/snapshot/resource_collectors/iam.py +188 -0
- src/snapshot/resource_collectors/kms.py +111 -0
- src/snapshot/resource_collectors/lambda_func.py +139 -0
- src/snapshot/resource_collectors/rds.py +109 -0
- src/snapshot/resource_collectors/route53.py +86 -0
- src/snapshot/resource_collectors/s3.py +105 -0
- src/snapshot/resource_collectors/secretsmanager.py +70 -0
- src/snapshot/resource_collectors/sns.py +68 -0
- src/snapshot/resource_collectors/sqs.py +82 -0
- src/snapshot/resource_collectors/ssm.py +160 -0
- src/snapshot/resource_collectors/stepfunctions.py +74 -0
- src/snapshot/resource_collectors/vpcendpoints.py +79 -0
- src/snapshot/resource_collectors/waf.py +159 -0
- src/snapshot/storage.py +351 -0
- src/storage/__init__.py +21 -0
- src/storage/audit_store.py +419 -0
- src/storage/database.py +294 -0
- src/storage/group_store.py +763 -0
- src/storage/inventory_store.py +320 -0
- src/storage/resource_store.py +416 -0
- src/storage/schema.py +339 -0
- src/storage/snapshot_store.py +363 -0
- src/utils/__init__.py +12 -0
- src/utils/export.py +305 -0
- src/utils/hash.py +60 -0
- src/utils/logging.py +63 -0
- src/utils/pagination.py +41 -0
- src/utils/paths.py +51 -0
- src/utils/progress.py +41 -0
- src/utils/unsupported_resources.py +306 -0
- src/web/__init__.py +5 -0
- src/web/app.py +97 -0
- src/web/dependencies.py +69 -0
- src/web/routes/__init__.py +1 -0
- src/web/routes/api/__init__.py +18 -0
- src/web/routes/api/charts.py +156 -0
- src/web/routes/api/cleanup.py +186 -0
- src/web/routes/api/filters.py +253 -0
- src/web/routes/api/groups.py +305 -0
- src/web/routes/api/inventories.py +80 -0
- src/web/routes/api/queries.py +202 -0
- src/web/routes/api/resources.py +393 -0
- src/web/routes/api/snapshots.py +314 -0
- src/web/routes/api/views.py +260 -0
- src/web/routes/pages.py +198 -0
- src/web/services/__init__.py +1 -0
- src/web/templates/base.html +955 -0
- src/web/templates/components/navbar.html +31 -0
- src/web/templates/components/sidebar.html +104 -0
- src/web/templates/pages/audit_logs.html +86 -0
- src/web/templates/pages/cleanup.html +279 -0
- src/web/templates/pages/dashboard.html +227 -0
- src/web/templates/pages/diff.html +175 -0
- src/web/templates/pages/error.html +30 -0
- src/web/templates/pages/groups.html +721 -0
- src/web/templates/pages/queries.html +246 -0
- src/web/templates/pages/resources.html +2429 -0
- src/web/templates/pages/snapshot_detail.html +271 -0
- src/web/templates/pages/snapshots.html +429 -0
|
@@ -0,0 +1,419 @@
|
|
|
1
|
+
"""Audit storage operations for SQLite backend."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Any, Dict, List, Optional
|
|
6
|
+
|
|
7
|
+
from ..models.deletion_operation import DeletionOperation, OperationMode, OperationStatus
|
|
8
|
+
from ..models.deletion_record import DeletionRecord, DeletionStatus
|
|
9
|
+
from .database import Database, json_deserialize, json_serialize
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class AuditStore:
    """CRUD operations for audit logs in SQLite database.

    Persists high-level deletion operations (``audit_operations`` table) and
    their per-resource deletion records (``audit_records`` table) through the
    shared :class:`Database` connection manager.
    """

    # Shared INSERT used by both single and batch record writes; the column
    # order must match the tuple built by _record_params().
    _RECORD_INSERT_SQL = """
        INSERT INTO audit_records (
            operation_id, resource_arn, resource_id, resource_type,
            region, status, error_code, error_message, protection_reason,
            deletion_tier, tags, estimated_monthly_cost
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """

    def __init__(self, db: Database):
        """Initialize audit store.

        Args:
            db: Database connection manager
        """
        self.db = db

    @staticmethod
    def _record_params(record: DeletionRecord) -> tuple:
        """Build the parameter tuple matching _RECORD_INSERT_SQL's columns."""
        return (
            record.operation_id,
            record.resource_arn,
            record.resource_id,
            record.resource_type,
            record.region,
            record.status.value,
            record.error_code,
            record.error_message,
            record.protection_reason,
            record.deletion_tier,
            json_serialize(record.tags),
            record.estimated_monthly_cost,
        )

    def save_operation(self, operation: DeletionOperation) -> str:
        """Save or update deletion operation.

        Performs an upsert: if the operation already exists only its mutable
        progress fields (status, counters, duration) are updated; otherwise
        the full row is inserted.

        Args:
            operation: DeletionOperation to save

        Returns:
            Operation ID
        """
        with self.db.transaction() as cursor:
            # Check if operation exists
            existing = self.db.fetchone(
                "SELECT operation_id FROM audit_operations WHERE operation_id = ?",
                (operation.operation_id,),
            )

            if existing:
                # Update existing: identity fields never change after insert.
                cursor.execute(
                    """
                    UPDATE audit_operations SET
                        status = ?,
                        succeeded_count = ?,
                        failed_count = ?,
                        skipped_count = ?,
                        duration_seconds = ?
                    WHERE operation_id = ?
                    """,
                    (
                        operation.status.value,
                        operation.succeeded_count,
                        operation.failed_count,
                        operation.skipped_count,
                        operation.duration_seconds,
                        operation.operation_id,
                    ),
                )
                logger.debug("Updated audit operation '%s'", operation.operation_id)
            else:
                # Insert new
                cursor.execute(
                    """
                    INSERT INTO audit_operations (
                        operation_id, baseline_snapshot, timestamp, aws_profile,
                        account_id, mode, status, total_resources, succeeded_count,
                        failed_count, skipped_count, duration_seconds, filters
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        operation.operation_id,
                        operation.baseline_snapshot,
                        operation.timestamp.isoformat(),
                        operation.aws_profile,
                        operation.account_id,
                        operation.mode.value,
                        operation.status.value,
                        operation.total_resources,
                        operation.succeeded_count,
                        operation.failed_count,
                        operation.skipped_count,
                        operation.duration_seconds,
                        json_serialize(operation.filters),
                    ),
                )
                logger.debug("Saved audit operation '%s'", operation.operation_id)

        return operation.operation_id

    def save_record(self, record: DeletionRecord) -> str:
        """Save deletion record.

        Args:
            record: DeletionRecord to save

        Returns:
            Record ID
        """
        with self.db.transaction() as cursor:
            cursor.execute(self._RECORD_INSERT_SQL, self._record_params(record))

        logger.debug("Saved audit record for '%s'", record.resource_arn)
        return record.record_id

    def save_records_batch(self, records: List[DeletionRecord]) -> int:
        """Save multiple deletion records efficiently.

        Args:
            records: List of DeletionRecord objects

        Returns:
            Number of records saved
        """
        if not records:
            return 0

        with self.db.transaction() as cursor:
            cursor.executemany(
                self._RECORD_INSERT_SQL,
                [self._record_params(r) for r in records],
            )

        logger.debug("Saved %d audit records", len(records))
        return len(records)

    def load_operation(self, operation_id: str) -> Optional[DeletionOperation]:
        """Load deletion operation by ID.

        Args:
            operation_id: Operation ID

        Returns:
            DeletionOperation or None if not found
        """
        row = self.db.fetchone(
            "SELECT * FROM audit_operations WHERE operation_id = ?",
            (operation_id,),
        )
        if not row:
            return None

        return self._row_to_operation(row)

    def _row_to_operation(self, row: Dict[str, Any]) -> DeletionOperation:
        """Convert database row to DeletionOperation.

        Args:
            row: Database row dict

        Returns:
            DeletionOperation object
        """
        timestamp = datetime.fromisoformat(row["timestamp"])
        # Legacy rows may carry naive timestamps; normalize to UTC-aware.
        if timestamp.tzinfo is None:
            timestamp = timestamp.replace(tzinfo=timezone.utc)

        return DeletionOperation(
            operation_id=row["operation_id"],
            baseline_snapshot=row["baseline_snapshot"],
            timestamp=timestamp,
            aws_profile=row["aws_profile"],
            account_id=row["account_id"],
            mode=OperationMode(row["mode"]),
            status=OperationStatus(row["status"]),
            total_resources=row["total_resources"],
            succeeded_count=row["succeeded_count"] or 0,
            failed_count=row["failed_count"] or 0,
            skipped_count=row["skipped_count"] or 0,
            duration_seconds=row["duration_seconds"],
            filters=json_deserialize(row["filters"]),
        )

    def load_records(self, operation_id: str) -> List[DeletionRecord]:
        """Load all records for an operation.

        Args:
            operation_id: Operation ID

        Returns:
            List of DeletionRecord objects (insertion order)
        """
        rows = self.db.fetchall(
            "SELECT * FROM audit_records WHERE operation_id = ? ORDER BY id",
            (operation_id,),
        )
        return [self._row_to_record(row) for row in rows]

    def _row_to_record(self, row: Dict[str, Any]) -> DeletionRecord:
        """Convert database row to DeletionRecord.

        Args:
            row: Database row dict

        Returns:
            DeletionRecord object
        """
        return DeletionRecord(
            record_id=str(row["id"]),
            operation_id=row["operation_id"],
            resource_arn=row["resource_arn"],
            resource_id=row["resource_id"] or "",
            resource_type=row["resource_type"],
            region=row["region"],
            timestamp=datetime.now(timezone.utc),  # Not stored in DB, use current time
            status=DeletionStatus(row["status"]),
            error_code=row["error_code"],
            error_message=row["error_message"],
            protection_reason=row["protection_reason"],
            deletion_tier=row["deletion_tier"],
            tags=json_deserialize(row["tags"]),
            estimated_monthly_cost=row["estimated_monthly_cost"],
        )

    def list_operations(
        self,
        account_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[DeletionOperation]:
        """List audit operations, newest first.

        Args:
            account_id: Filter by account ID (optional)
            limit: Maximum results

        Returns:
            List of DeletionOperation objects
        """
        if account_id:
            rows = self.db.fetchall(
                "SELECT * FROM audit_operations WHERE account_id = ? ORDER BY timestamp DESC LIMIT ?",
                (account_id, limit),
            )
        else:
            rows = self.db.fetchall(
                "SELECT * FROM audit_operations ORDER BY timestamp DESC LIMIT ?",
                (limit,),
            )

        return [self._row_to_operation(row) for row in rows]

    def delete_operation(self, operation_id: str) -> bool:
        """Delete operation and all its records.

        Args:
            operation_id: Operation ID

        Returns:
            True if deleted, False if not found
        """
        with self.db.transaction() as cursor:
            # Records are deleted by CASCADE
            cursor.execute(
                "DELETE FROM audit_operations WHERE operation_id = ?",
                (operation_id,),
            )
            deleted = cursor.rowcount > 0

        if deleted:
            logger.debug("Deleted audit operation '%s'", operation_id)
        return deleted

    def get_operation_stats(self, operation_id: str) -> Dict[str, Any]:
        """Get statistics for an operation.

        Args:
            operation_id: Operation ID

        Returns:
            Dictionary with statistics (empty dict if operation not found)
        """
        operation = self.load_operation(operation_id)
        if not operation:
            return {}

        # Get record counts by status
        status_counts = self.db.fetchall(
            """
            SELECT status, COUNT(*) as count
            FROM audit_records
            WHERE operation_id = ?
            GROUP BY status
            """,
            (operation_id,),
        )

        # Get counts by resource type
        type_counts = self.db.fetchall(
            """
            SELECT resource_type, COUNT(*) as count
            FROM audit_records
            WHERE operation_id = ?
            GROUP BY resource_type
            ORDER BY count DESC
            """,
            (operation_id,),
        )

        # Get total estimated cost
        cost_row = self.db.fetchone(
            """
            SELECT SUM(estimated_monthly_cost) as total_cost
            FROM audit_records
            WHERE operation_id = ?
            """,
            (operation_id,),
        )
        # SQLite SUM() yields NULL when there are no rows (or all costs are
        # NULL), which would surface here as None; coerce to 0.
        total_cost = (cost_row["total_cost"] if cost_row else None) or 0

        return {
            "operation_id": operation_id,
            "baseline_snapshot": operation.baseline_snapshot,
            "mode": operation.mode.value,
            "status": operation.status.value,
            "total_resources": operation.total_resources,
            "succeeded_count": operation.succeeded_count,
            "failed_count": operation.failed_count,
            "skipped_count": operation.skipped_count,
            "duration_seconds": operation.duration_seconds,
            "status_breakdown": {row["status"]: row["count"] for row in status_counts},
            "type_breakdown": {row["resource_type"]: row["count"] for row in type_counts},
            "total_estimated_monthly_cost": total_cost,
        }

    def get_recent_deletions(
        self,
        resource_arn: Optional[str] = None,
        resource_type: Optional[str] = None,
        region: Optional[str] = None,
        limit: int = 100,
    ) -> List[Dict[str, Any]]:
        """Get recent deletion records with filters.

        Args:
            resource_arn: Filter by ARN pattern (optional, substring match)
            resource_type: Filter by resource type (optional, substring match)
            region: Filter by region (optional, exact match)
            limit: Maximum results

        Returns:
            List of deletion records with operation info
        """
        conditions = []
        params: List[Any] = []

        if resource_arn:
            conditions.append("r.resource_arn LIKE ?")
            params.append(f"%{resource_arn}%")

        if resource_type:
            conditions.append("r.resource_type LIKE ?")
            params.append(f"%{resource_type}%")

        if region:
            conditions.append("r.region = ?")
            params.append(region)

        # Conditions are fixed strings; only the ?-bound values are user input.
        where_clause = " AND ".join(conditions) if conditions else "1=1"
        params.append(limit)

        rows = self.db.fetchall(
            f"""
            SELECT r.*, o.timestamp as operation_timestamp, o.mode, o.baseline_snapshot
            FROM audit_records r
            JOIN audit_operations o ON r.operation_id = o.operation_id
            WHERE {where_clause}
            ORDER BY o.timestamp DESC
            LIMIT ?
            """,
            tuple(params),
        )

        return [dict(row) for row in rows]
|
src/storage/database.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
"""SQLite database connection and management for AWS Inventory Manager."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import sqlite3
|
|
6
|
+
from contextlib import contextmanager
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Dict, Generator, List, Optional
|
|
9
|
+
|
|
10
|
+
from ..utils.paths import get_snapshot_storage_path
|
|
11
|
+
from .schema import INDEXES_SQL, SCHEMA_SQL, SCHEMA_VERSION, get_migrations
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Database:
|
|
17
|
+
"""SQLite database connection manager.
|
|
18
|
+
|
|
19
|
+
Handles connection pooling, schema setup, and auto-migration from YAML.
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
def __init__(self, db_path: Optional[Path] = None, storage_path: Optional[Path] = None):
|
|
23
|
+
"""Initialize database manager.
|
|
24
|
+
|
|
25
|
+
Args:
|
|
26
|
+
db_path: Direct path to database file (overrides storage_path)
|
|
27
|
+
storage_path: Storage directory (database will be inventory.db inside)
|
|
28
|
+
"""
|
|
29
|
+
if db_path:
|
|
30
|
+
self.db_path = Path(db_path)
|
|
31
|
+
elif storage_path:
|
|
32
|
+
self.db_path = Path(storage_path) / "inventory.db"
|
|
33
|
+
else:
|
|
34
|
+
self.db_path = get_snapshot_storage_path() / "inventory.db"
|
|
35
|
+
|
|
36
|
+
self.storage_path = self.db_path.parent
|
|
37
|
+
self._connection: Optional[sqlite3.Connection] = None
|
|
38
|
+
self._initialized = False
|
|
39
|
+
|
|
40
|
+
def connect(self) -> sqlite3.Connection:
|
|
41
|
+
"""Get or create database connection with optimal settings.
|
|
42
|
+
|
|
43
|
+
Returns:
|
|
44
|
+
SQLite connection configured for optimal performance
|
|
45
|
+
"""
|
|
46
|
+
if self._connection is None:
|
|
47
|
+
# Ensure directory exists
|
|
48
|
+
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
49
|
+
|
|
50
|
+
self._connection = sqlite3.connect(str(self.db_path))
|
|
51
|
+
# Use Row factory for dict-like access
|
|
52
|
+
self._connection.row_factory = sqlite3.Row
|
|
53
|
+
|
|
54
|
+
# Enable foreign keys
|
|
55
|
+
self._connection.execute("PRAGMA foreign_keys = ON")
|
|
56
|
+
|
|
57
|
+
# Use WAL mode for better concurrent performance
|
|
58
|
+
self._connection.execute("PRAGMA journal_mode = WAL")
|
|
59
|
+
|
|
60
|
+
# Optimize for performance
|
|
61
|
+
self._connection.execute("PRAGMA synchronous = NORMAL")
|
|
62
|
+
self._connection.execute("PRAGMA cache_size = -64000") # 64MB cache
|
|
63
|
+
self._connection.execute("PRAGMA temp_store = MEMORY") # Temp tables in memory
|
|
64
|
+
self._connection.execute("PRAGMA mmap_size = 268435456") # 256MB memory-mapped I/O
|
|
65
|
+
|
|
66
|
+
logger.debug(f"Connected to database: {self.db_path}")
|
|
67
|
+
|
|
68
|
+
return self._connection
|
|
69
|
+
|
|
70
|
+
def ensure_schema(self) -> None:
|
|
71
|
+
"""Create database schema if not exists and run migrations."""
|
|
72
|
+
if self._initialized:
|
|
73
|
+
return
|
|
74
|
+
|
|
75
|
+
conn = self.connect()
|
|
76
|
+
cursor = conn.cursor()
|
|
77
|
+
|
|
78
|
+
try:
|
|
79
|
+
# Get current schema version before creating tables
|
|
80
|
+
current_version = self._get_raw_schema_version(cursor)
|
|
81
|
+
|
|
82
|
+
# Create tables
|
|
83
|
+
cursor.executescript(SCHEMA_SQL)
|
|
84
|
+
|
|
85
|
+
# Run migrations BEFORE creating indexes (migrations may add columns that indexes depend on)
|
|
86
|
+
if current_version and current_version != SCHEMA_VERSION:
|
|
87
|
+
self._run_migrations(cursor, current_version)
|
|
88
|
+
|
|
89
|
+
# Create indexes (after migrations so new columns exist)
|
|
90
|
+
cursor.executescript(INDEXES_SQL)
|
|
91
|
+
|
|
92
|
+
# Set schema version
|
|
93
|
+
cursor.execute(
|
|
94
|
+
"INSERT OR REPLACE INTO schema_info (key, value) VALUES (?, ?)",
|
|
95
|
+
("schema_version", SCHEMA_VERSION),
|
|
96
|
+
)
|
|
97
|
+
|
|
98
|
+
conn.commit()
|
|
99
|
+
self._initialized = True
|
|
100
|
+
logger.debug(f"Database schema initialized (version {SCHEMA_VERSION})")
|
|
101
|
+
|
|
102
|
+
except sqlite3.Error as e:
|
|
103
|
+
conn.rollback()
|
|
104
|
+
logger.error(f"Failed to initialize schema: {e}")
|
|
105
|
+
raise
|
|
106
|
+
|
|
107
|
+
def _get_raw_schema_version(self, cursor: sqlite3.Cursor) -> Optional[str]:
|
|
108
|
+
"""Get schema version without ensuring schema exists.
|
|
109
|
+
|
|
110
|
+
Args:
|
|
111
|
+
cursor: Database cursor
|
|
112
|
+
|
|
113
|
+
Returns:
|
|
114
|
+
Schema version string or None if not set
|
|
115
|
+
"""
|
|
116
|
+
try:
|
|
117
|
+
cursor.execute("SELECT value FROM schema_info WHERE key = ?", ("schema_version",))
|
|
118
|
+
row = cursor.fetchone()
|
|
119
|
+
return row["value"] if row else None
|
|
120
|
+
except sqlite3.OperationalError:
|
|
121
|
+
return None
|
|
122
|
+
|
|
123
|
+
def _run_migrations(self, cursor: sqlite3.Cursor, from_version: str) -> None:
|
|
124
|
+
"""Run schema migrations from a given version.
|
|
125
|
+
|
|
126
|
+
Args:
|
|
127
|
+
cursor: Database cursor
|
|
128
|
+
from_version: Version to migrate from
|
|
129
|
+
"""
|
|
130
|
+
migrations = get_migrations()
|
|
131
|
+
|
|
132
|
+
# Simple version comparison - assumes semantic versioning
|
|
133
|
+
for version, statements in sorted(migrations.items()):
|
|
134
|
+
if version > from_version:
|
|
135
|
+
logger.info(f"Running migration to version {version}")
|
|
136
|
+
for sql in statements:
|
|
137
|
+
try:
|
|
138
|
+
cursor.execute(sql)
|
|
139
|
+
logger.debug(f"Migration SQL executed: {sql[:50]}...")
|
|
140
|
+
except sqlite3.OperationalError as e:
|
|
141
|
+
# Column may already exist if migration was partially applied
|
|
142
|
+
if "duplicate column name" in str(e).lower():
|
|
143
|
+
logger.debug(f"Column already exists, skipping: {e}")
|
|
144
|
+
else:
|
|
145
|
+
raise
|
|
146
|
+
|
|
147
|
+
def get_schema_version(self) -> Optional[str]:
|
|
148
|
+
"""Get current schema version from database.
|
|
149
|
+
|
|
150
|
+
Returns:
|
|
151
|
+
Schema version string or None if not set
|
|
152
|
+
"""
|
|
153
|
+
conn = self.connect()
|
|
154
|
+
try:
|
|
155
|
+
cursor = conn.execute("SELECT value FROM schema_info WHERE key = ?", ("schema_version",))
|
|
156
|
+
row = cursor.fetchone()
|
|
157
|
+
return row["value"] if row else None
|
|
158
|
+
except sqlite3.OperationalError:
|
|
159
|
+
return None
|
|
160
|
+
|
|
161
|
+
def is_empty(self) -> bool:
|
|
162
|
+
"""Check if database has no snapshots.
|
|
163
|
+
|
|
164
|
+
Returns:
|
|
165
|
+
True if no snapshots exist
|
|
166
|
+
"""
|
|
167
|
+
conn = self.connect()
|
|
168
|
+
self.ensure_schema()
|
|
169
|
+
cursor = conn.execute("SELECT COUNT(*) as count FROM snapshots")
|
|
170
|
+
row = cursor.fetchone()
|
|
171
|
+
return row["count"] == 0
|
|
172
|
+
|
|
173
|
+
@contextmanager
|
|
174
|
+
def transaction(self) -> Generator[sqlite3.Cursor, None, None]:
|
|
175
|
+
"""Context manager for database transactions.
|
|
176
|
+
|
|
177
|
+
Yields:
|
|
178
|
+
Database cursor
|
|
179
|
+
|
|
180
|
+
Example:
|
|
181
|
+
with db.transaction() as cursor:
|
|
182
|
+
cursor.execute("INSERT INTO ...")
|
|
183
|
+
"""
|
|
184
|
+
conn = self.connect()
|
|
185
|
+
self.ensure_schema()
|
|
186
|
+
cursor = conn.cursor()
|
|
187
|
+
try:
|
|
188
|
+
yield cursor
|
|
189
|
+
conn.commit()
|
|
190
|
+
except Exception:
|
|
191
|
+
conn.rollback()
|
|
192
|
+
raise
|
|
193
|
+
|
|
194
|
+
def execute(self, sql: str, params: tuple = ()) -> sqlite3.Cursor:
|
|
195
|
+
"""Execute a single SQL statement.
|
|
196
|
+
|
|
197
|
+
Args:
|
|
198
|
+
sql: SQL statement
|
|
199
|
+
params: Query parameters
|
|
200
|
+
|
|
201
|
+
Returns:
|
|
202
|
+
Cursor with results
|
|
203
|
+
"""
|
|
204
|
+
conn = self.connect()
|
|
205
|
+
self.ensure_schema()
|
|
206
|
+
return conn.execute(sql, params)
|
|
207
|
+
|
|
208
|
+
def executemany(self, sql: str, params_list: List[tuple]) -> sqlite3.Cursor:
|
|
209
|
+
"""Execute SQL statement with multiple parameter sets.
|
|
210
|
+
|
|
211
|
+
Args:
|
|
212
|
+
sql: SQL statement with placeholders
|
|
213
|
+
params_list: List of parameter tuples
|
|
214
|
+
|
|
215
|
+
Returns:
|
|
216
|
+
Cursor
|
|
217
|
+
"""
|
|
218
|
+
conn = self.connect()
|
|
219
|
+
self.ensure_schema()
|
|
220
|
+
return conn.executemany(sql, params_list)
|
|
221
|
+
|
|
222
|
+
def fetchall(self, sql: str, params: tuple = ()) -> List[Dict[str, Any]]:
|
|
223
|
+
"""Execute query and fetch all results as dicts.
|
|
224
|
+
|
|
225
|
+
Args:
|
|
226
|
+
sql: SQL query
|
|
227
|
+
params: Query parameters
|
|
228
|
+
|
|
229
|
+
Returns:
|
|
230
|
+
List of result dictionaries
|
|
231
|
+
"""
|
|
232
|
+
cursor = self.execute(sql, params)
|
|
233
|
+
return [dict(row) for row in cursor.fetchall()]
|
|
234
|
+
|
|
235
|
+
def fetchone(self, sql: str, params: tuple = ()) -> Optional[Dict[str, Any]]:
|
|
236
|
+
"""Execute query and fetch single result as dict.
|
|
237
|
+
|
|
238
|
+
Args:
|
|
239
|
+
sql: SQL query
|
|
240
|
+
params: Query parameters
|
|
241
|
+
|
|
242
|
+
Returns:
|
|
243
|
+
Result dictionary or None
|
|
244
|
+
"""
|
|
245
|
+
cursor = self.execute(sql, params)
|
|
246
|
+
row = cursor.fetchone()
|
|
247
|
+
return dict(row) if row else None
|
|
248
|
+
|
|
249
|
+
def close(self) -> None:
|
|
250
|
+
"""Close database connection."""
|
|
251
|
+
if self._connection:
|
|
252
|
+
self._connection.close()
|
|
253
|
+
self._connection = None
|
|
254
|
+
self._initialized = False
|
|
255
|
+
logger.debug("Database connection closed")
|
|
256
|
+
|
|
257
|
+
def __enter__(self) -> "Database":
|
|
258
|
+
"""Context manager entry."""
|
|
259
|
+
self.connect()
|
|
260
|
+
self.ensure_schema()
|
|
261
|
+
return self
|
|
262
|
+
|
|
263
|
+
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
|
|
264
|
+
"""Context manager exit."""
|
|
265
|
+
self.close()
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
# JSON serialization helpers for SQLite
|
|
269
|
+
def json_serialize(obj: Any) -> Optional[str]:
    """Serialize object to JSON string for SQLite storage.

    Non-JSON-native values (e.g. datetimes) are stringified via ``default=str``.

    Args:
        obj: Object to serialize

    Returns:
        JSON string or None if obj is None
    """
    return None if obj is None else json.dumps(obj, default=str)
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def json_deserialize(json_str: Optional[str]) -> Any:
    """Deserialize JSON string from SQLite.

    Args:
        json_str: JSON string

    Returns:
        Deserialized object or None
    """
    return None if json_str is None else json.loads(json_str)
|