aws-inventory-manager 0.13.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aws-inventory-manager might be problematic. Click here for more details.
- aws_inventory_manager-0.13.2.dist-info/LICENSE +21 -0
- aws_inventory_manager-0.13.2.dist-info/METADATA +1226 -0
- aws_inventory_manager-0.13.2.dist-info/RECORD +145 -0
- aws_inventory_manager-0.13.2.dist-info/WHEEL +5 -0
- aws_inventory_manager-0.13.2.dist-info/entry_points.txt +2 -0
- aws_inventory_manager-0.13.2.dist-info/top_level.txt +1 -0
- src/__init__.py +3 -0
- src/aws/__init__.py +11 -0
- src/aws/client.py +128 -0
- src/aws/credentials.py +191 -0
- src/aws/rate_limiter.py +177 -0
- src/cli/__init__.py +12 -0
- src/cli/config.py +130 -0
- src/cli/main.py +3626 -0
- src/config_service/__init__.py +21 -0
- src/config_service/collector.py +346 -0
- src/config_service/detector.py +256 -0
- src/config_service/resource_type_mapping.py +328 -0
- src/cost/__init__.py +5 -0
- src/cost/analyzer.py +226 -0
- src/cost/explorer.py +209 -0
- src/cost/reporter.py +237 -0
- src/delta/__init__.py +5 -0
- src/delta/calculator.py +206 -0
- src/delta/differ.py +185 -0
- src/delta/formatters.py +272 -0
- src/delta/models.py +154 -0
- src/delta/reporter.py +234 -0
- src/models/__init__.py +21 -0
- src/models/config_diff.py +135 -0
- src/models/cost_report.py +87 -0
- src/models/deletion_operation.py +104 -0
- src/models/deletion_record.py +97 -0
- src/models/delta_report.py +122 -0
- src/models/efs_resource.py +80 -0
- src/models/elasticache_resource.py +90 -0
- src/models/group.py +318 -0
- src/models/inventory.py +133 -0
- src/models/protection_rule.py +123 -0
- src/models/report.py +288 -0
- src/models/resource.py +111 -0
- src/models/security_finding.py +102 -0
- src/models/snapshot.py +122 -0
- src/restore/__init__.py +20 -0
- src/restore/audit.py +175 -0
- src/restore/cleaner.py +461 -0
- src/restore/config.py +209 -0
- src/restore/deleter.py +976 -0
- src/restore/dependency.py +254 -0
- src/restore/safety.py +115 -0
- src/security/__init__.py +0 -0
- src/security/checks/__init__.py +0 -0
- src/security/checks/base.py +56 -0
- src/security/checks/ec2_checks.py +88 -0
- src/security/checks/elasticache_checks.py +149 -0
- src/security/checks/iam_checks.py +102 -0
- src/security/checks/rds_checks.py +140 -0
- src/security/checks/s3_checks.py +95 -0
- src/security/checks/secrets_checks.py +96 -0
- src/security/checks/sg_checks.py +142 -0
- src/security/cis_mapper.py +97 -0
- src/security/models.py +53 -0
- src/security/reporter.py +174 -0
- src/security/scanner.py +87 -0
- src/snapshot/__init__.py +6 -0
- src/snapshot/capturer.py +451 -0
- src/snapshot/filter.py +259 -0
- src/snapshot/inventory_storage.py +236 -0
- src/snapshot/report_formatter.py +250 -0
- src/snapshot/reporter.py +189 -0
- src/snapshot/resource_collectors/__init__.py +5 -0
- src/snapshot/resource_collectors/apigateway.py +140 -0
- src/snapshot/resource_collectors/backup.py +136 -0
- src/snapshot/resource_collectors/base.py +81 -0
- src/snapshot/resource_collectors/cloudformation.py +55 -0
- src/snapshot/resource_collectors/cloudwatch.py +109 -0
- src/snapshot/resource_collectors/codebuild.py +69 -0
- src/snapshot/resource_collectors/codepipeline.py +82 -0
- src/snapshot/resource_collectors/dynamodb.py +65 -0
- src/snapshot/resource_collectors/ec2.py +240 -0
- src/snapshot/resource_collectors/ecs.py +215 -0
- src/snapshot/resource_collectors/efs_collector.py +102 -0
- src/snapshot/resource_collectors/eks.py +200 -0
- src/snapshot/resource_collectors/elasticache_collector.py +79 -0
- src/snapshot/resource_collectors/elb.py +126 -0
- src/snapshot/resource_collectors/eventbridge.py +156 -0
- src/snapshot/resource_collectors/iam.py +188 -0
- src/snapshot/resource_collectors/kms.py +111 -0
- src/snapshot/resource_collectors/lambda_func.py +139 -0
- src/snapshot/resource_collectors/rds.py +109 -0
- src/snapshot/resource_collectors/route53.py +86 -0
- src/snapshot/resource_collectors/s3.py +105 -0
- src/snapshot/resource_collectors/secretsmanager.py +70 -0
- src/snapshot/resource_collectors/sns.py +68 -0
- src/snapshot/resource_collectors/sqs.py +82 -0
- src/snapshot/resource_collectors/ssm.py +160 -0
- src/snapshot/resource_collectors/stepfunctions.py +74 -0
- src/snapshot/resource_collectors/vpcendpoints.py +79 -0
- src/snapshot/resource_collectors/waf.py +159 -0
- src/snapshot/storage.py +351 -0
- src/storage/__init__.py +21 -0
- src/storage/audit_store.py +419 -0
- src/storage/database.py +294 -0
- src/storage/group_store.py +749 -0
- src/storage/inventory_store.py +320 -0
- src/storage/resource_store.py +413 -0
- src/storage/schema.py +288 -0
- src/storage/snapshot_store.py +346 -0
- src/utils/__init__.py +12 -0
- src/utils/export.py +305 -0
- src/utils/hash.py +60 -0
- src/utils/logging.py +63 -0
- src/utils/pagination.py +41 -0
- src/utils/paths.py +51 -0
- src/utils/progress.py +41 -0
- src/utils/unsupported_resources.py +306 -0
- src/web/__init__.py +5 -0
- src/web/app.py +97 -0
- src/web/dependencies.py +69 -0
- src/web/routes/__init__.py +1 -0
- src/web/routes/api/__init__.py +18 -0
- src/web/routes/api/charts.py +156 -0
- src/web/routes/api/cleanup.py +186 -0
- src/web/routes/api/filters.py +253 -0
- src/web/routes/api/groups.py +305 -0
- src/web/routes/api/inventories.py +80 -0
- src/web/routes/api/queries.py +202 -0
- src/web/routes/api/resources.py +379 -0
- src/web/routes/api/snapshots.py +314 -0
- src/web/routes/api/views.py +260 -0
- src/web/routes/pages.py +198 -0
- src/web/services/__init__.py +1 -0
- src/web/templates/base.html +949 -0
- src/web/templates/components/navbar.html +31 -0
- src/web/templates/components/sidebar.html +104 -0
- src/web/templates/pages/audit_logs.html +86 -0
- src/web/templates/pages/cleanup.html +279 -0
- src/web/templates/pages/dashboard.html +227 -0
- src/web/templates/pages/diff.html +175 -0
- src/web/templates/pages/error.html +30 -0
- src/web/templates/pages/groups.html +721 -0
- src/web/templates/pages/queries.html +246 -0
- src/web/templates/pages/resources.html +2251 -0
- src/web/templates/pages/snapshot_detail.html +271 -0
- src/web/templates/pages/snapshots.html +429 -0
src/web/dependencies.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""Dependency injection for FastAPI routes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from ..storage import AuditStore, Database, GroupStore, InventoryStore, ResourceStore, SnapshotStore
|
|
9
|
+
|
|
10
|
+
# Global instances (initialized at startup)
|
|
11
|
+
_db: Optional[Database] = None
|
|
12
|
+
_storage_path: Optional[str] = None
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def init_database(storage_path: Optional[str] = None) -> None:
    """Initialize the module-level database connection and its schema.

    Args:
        storage_path: Optional path to the storage directory. When omitted
            (or empty), the Database default location is used.
    """
    global _db, _storage_path
    _storage_path = storage_path

    # An empty/None path falls back to the Database's built-in default.
    if not storage_path:
        _db = Database()
    else:
        _db = Database(db_path=Path(storage_path) / "inventory.db")

    _db.ensure_schema()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_database() -> Database:
    """Return the shared Database instance, lazily initializing it.

    Falls back to init_database() with the last configured storage path
    when no instance exists yet (e.g. the first request after startup).

    Raises:
        RuntimeError: if initialization did not produce a database
            (should be unreachable; init_database always assigns _db).
    """
    # No `global` needed: this function only reads the module globals.
    if _db is None:
        init_database(_storage_path)
    if _db is None:  # explicit narrowing instead of `# type: ignore`
        raise RuntimeError("Database initialization failed")
    return _db
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_snapshot_store() -> SnapshotStore:
    """Return a SnapshotStore bound to the shared database connection."""
    db = get_database()
    return SnapshotStore(db)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def get_resource_store() -> ResourceStore:
    """Return a ResourceStore bound to the shared database connection."""
    db = get_database()
    return ResourceStore(db)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def get_inventory_store() -> InventoryStore:
    """Return an InventoryStore bound to the shared database connection."""
    db = get_database()
    return InventoryStore(db)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def get_audit_store() -> AuditStore:
    """Return an AuditStore bound to the shared database connection."""
    db = get_database()
    return AuditStore(db)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def get_group_store() -> GroupStore:
    """Return a GroupStore bound to the shared database connection."""
    db = get_database()
    return GroupStore(db)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def get_storage_path() -> Optional[str]:
    """Return the storage path configured at startup, if any."""
    return _storage_path
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Web routes package."""
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""API routes package."""
|
|
2
|
+
|
|
3
|
+
from fastapi import APIRouter
|
|
4
|
+
|
|
5
|
+
from . import charts, cleanup, filters, groups, inventories, queries, resources, snapshots, views
|
|
6
|
+
|
|
7
|
+
router = APIRouter()

# Mount each sub-router under a matching OpenAPI tag. The tuple order is
# the original include order and determines route precedence and the
# ordering of sections in the generated docs.
for _module, _tag in (
    (snapshots, "snapshots"),
    (resources, "resources"),
    (queries, "queries"),
    (charts, "charts"),
    (cleanup, "cleanup"),
    (filters, "filters"),
    (views, "views"),
    (groups, "groups"),
    (inventories, "inventories"),
):
    router.include_router(_module.router, tags=[_tag])

__all__ = ["router"]
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
"""Chart data API endpoints."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, Query
|
|
8
|
+
|
|
9
|
+
from ...dependencies import get_resource_store, get_snapshot_store
|
|
10
|
+
|
|
11
|
+
router = APIRouter(prefix="/charts")
|
|
12
|
+
|
|
13
|
+
# Chart.js color palette used as segment/background colors by the chart
# endpoints below. Endpoints slice this list to the number of data points
# they return, so series longer than len(CHART_COLORS) fall back to
# Chart.js defaults for the extra segments.
CHART_COLORS = [
    "#3B82F6",  # Blue
    "#10B981",  # Green
    "#F59E0B",  # Amber
    "#EF4444",  # Red
    "#8B5CF6",  # Purple
    "#EC4899",  # Pink
    "#06B6D4",  # Cyan
    "#84CC16",  # Lime
    "#F97316",  # Orange
    "#6366F1",  # Indigo
]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@router.get("/resources-by-type")
async def chart_resources_by_type(
    snapshot: Optional[str] = Query(None, description="Limit to specific snapshot"),
    limit: int = Query(10, le=20),
):
    """Return Chart.js-ready data for the resources-by-type breakdown."""
    rows = get_resource_store().get_stats(snapshot_name=snapshot, group_by="type")[:limit]

    dataset = {
        "data": [row["count"] for row in rows],
        # One palette color per slice, in order.
        "backgroundColor": CHART_COLORS[: len(rows)],
    }
    return {
        "labels": [row["group_key"] for row in rows],
        "datasets": [dataset],
    }
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@router.get("/resources-by-region")
async def chart_resources_by_region(
    snapshot: Optional[str] = Query(None, description="Limit to specific snapshot"),
):
    """Return Chart.js bar-chart data for resource counts per region."""
    rows = get_resource_store().get_stats(snapshot_name=snapshot, group_by="region")

    dataset = {
        "label": "Resources",
        "data": [row["count"] for row in rows],
        "backgroundColor": "#3B82F6",
    }
    return {
        "labels": [row["group_key"] for row in rows],
        "datasets": [dataset],
    }
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@router.get("/resources-by-service")
async def chart_resources_by_service(
    snapshot: Optional[str] = Query(None, description="Limit to specific snapshot"),
    limit: int = Query(10, le=20),
):
    """Return Chart.js-ready data for the resources-by-service breakdown."""
    rows = get_resource_store().get_stats(snapshot_name=snapshot, group_by="service")[:limit]

    dataset = {
        "data": [row["count"] for row in rows],
        "backgroundColor": CHART_COLORS[: len(rows)],
    }
    return {
        "labels": [row["group_key"] for row in rows],
        "datasets": [dataset],
    }
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@router.get("/snapshot-trend")
async def chart_snapshot_trend():
    """Return Chart.js line-chart data: resource count per snapshot over time.

    Only the 10 most recent snapshots (ordered by created_at) are included;
    labels are truncated to 20 characters to keep the axis readable.
    """
    store = get_snapshot_store()
    snapshots = store.list_all()

    # Sort chronologically. A [-10:] slice already handles lists shorter
    # than 10 entries, so the original length check was redundant.
    recent = sorted(snapshots, key=lambda s: s.get("created_at", ""))[-10:]

    labels = [s["name"][:20] for s in recent]  # truncate long names
    data = [s.get("resource_count", 0) for s in recent]

    return {
        "labels": labels,
        "datasets": [
            {
                "label": "Resource Count",
                "data": data,
                "borderColor": "#3B82F6",
                "backgroundColor": "rgba(59, 130, 246, 0.1)",
                "fill": True,
                "tension": 0.3,
            }
        ],
    }
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
@router.get("/tag-coverage")
async def chart_tag_coverage(
    snapshot: Optional[str] = Query(None, description="Limit to specific snapshot"),
):
    """Return Chart.js doughnut data for tagged vs. untagged resources.

    NOTE(review): results are capped at 10,000 resources per query, so the
    split is approximate for very large inventories — confirm against
    ResourceStore.search paging behavior.
    """
    resource_store = get_resource_store()
    resources = resource_store.search(snapshot_name=snapshot, limit=10000)

    tagged = 0
    untagged = 0
    for r in resources:
        # A non-empty tags mapping counts as tagged; plain truthiness
        # covers both a missing/None value and {} — the extra
        # `len(tags) > 0` check was redundant.
        if r.get("tags", {}):
            tagged += 1
        else:
            untagged += 1

    return {
        "labels": ["Tagged", "Untagged"],
        "datasets": [
            {
                "data": [tagged, untagged],
                "backgroundColor": ["#10B981", "#EF4444"],
            }
        ],
    }
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""Cleanup operations API endpoints."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import List, Optional
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, HTTPException, Query
|
|
8
|
+
from pydantic import BaseModel
|
|
9
|
+
|
|
10
|
+
from ...dependencies import get_audit_store, get_snapshot_store
|
|
11
|
+
|
|
12
|
+
router = APIRouter(prefix="/cleanup")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class CleanupPreviewRequest(BaseModel):
    """Request for cleanup preview."""

    # Name of the snapshot to diff the current AWS state against.
    baseline_snapshot: str
    # Optional allow-lists restricting the preview scope.
    resource_types: Optional[List[str]] = None
    regions: Optional[List[str]] = None
    protect_tags: Optional[List[str]] = None  # Format: "key=value"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class CleanupExecuteRequest(BaseModel):
    """Request for cleanup execution."""

    # Name of the snapshot to diff the current AWS state against.
    baseline_snapshot: str
    confirmation_token: str  # Must match expected token
    # Optional allow-lists restricting the execution scope.
    resource_types: Optional[List[str]] = None
    regions: Optional[List[str]] = None
    protect_tags: Optional[List[str]] = None
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@router.post("/preview")
async def preview_cleanup(request: CleanupPreviewRequest):
    """Preview cleanup operation (dry-run).

    Note: This endpoint is a placeholder. Full implementation requires
    integrating with the ResourceCleaner and collecting current AWS resources.
    """
    snapshot_store = get_snapshot_store()
    if not snapshot_store.exists(request.baseline_snapshot):
        raise HTTPException(status_code=404, detail=f"Snapshot '{request.baseline_snapshot}' not found")

    # TODO: a full preview would (1) load the baseline snapshot,
    # (2) collect live AWS resources (requires credentials), (3) compute
    # the diff, and (4) apply protection rules before reporting.
    applied_filters = {
        "resource_types": request.resource_types,
        "regions": request.regions,
        "protect_tags": request.protect_tags,
    }
    return {
        "status": "preview",
        "message": "Preview endpoint - full implementation pending",
        "baseline_snapshot": request.baseline_snapshot,
        "filters": applied_filters,
        "note": "This preview requires AWS credentials to collect current resources. "
        "Use the CLI 'awsinv cleanup preview' for full functionality.",
    }
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@router.post("/execute")
async def execute_cleanup(request: CleanupExecuteRequest):
    """Execute cleanup operation.

    Destructive execution is deliberately unavailable from the web UI;
    after validating the snapshot this endpoint always returns 501 and
    points the caller at the CLI.
    """
    store = get_snapshot_store()
    if not store.exists(request.baseline_snapshot):
        raise HTTPException(status_code=404, detail=f"Snapshot '{request.baseline_snapshot}' not found")

    # Destructive operations are CLI-only by design.
    raise HTTPException(
        status_code=501,
        detail="Cleanup execution is not available in the web UI for safety. "
        "Please use the CLI: awsinv cleanup execute <snapshot> --confirm",
    )
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
@router.get("/operations")
async def list_operations(
    account_id: Optional[str] = Query(None, description="Filter by account"),
    limit: int = Query(50, le=200),
):
    """List cleanup operations from audit log."""
    ops = get_audit_store().list_operations(account_id=account_id, limit=limit)

    def _summarize(op):
        # Audit rows may carry enums or plain values; normalize defensively.
        ts = op.timestamp
        return {
            "operation_id": op.operation_id,
            "baseline_snapshot": op.baseline_snapshot,
            "timestamp": ts.isoformat() if hasattr(ts, "isoformat") else str(ts),
            "account_id": op.account_id,
            "mode": op.mode.value if hasattr(op.mode, "value") else str(op.mode),
            "status": op.status.value if hasattr(op.status, "value") else str(op.status),
            "total_resources": op.total_resources,
            "succeeded_count": op.succeeded_count,
            "failed_count": op.failed_count,
            "skipped_count": op.skipped_count,
            "duration_seconds": op.duration_seconds,
        }

    return {
        "count": len(ops),
        "operations": [_summarize(op) for op in ops],
    }
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
@router.get("/operations/{operation_id}")
async def get_operation(operation_id: str):
    """Get details of a cleanup operation."""
    op = get_audit_store().load_operation(operation_id)
    if not op:
        raise HTTPException(status_code=404, detail="Operation not found")

    # Audit rows may carry enums or plain values; normalize defensively.
    ts = op.timestamp
    return {
        "operation_id": op.operation_id,
        "baseline_snapshot": op.baseline_snapshot,
        "timestamp": ts.isoformat() if hasattr(ts, "isoformat") else str(ts),
        "account_id": op.account_id,
        "mode": op.mode.value if hasattr(op.mode, "value") else str(op.mode),
        "status": op.status.value if hasattr(op.status, "value") else str(op.status),
        "total_resources": op.total_resources,
        "succeeded_count": op.succeeded_count,
        "failed_count": op.failed_count,
        "skipped_count": op.skipped_count,
        "duration_seconds": op.duration_seconds,
        "filters": op.filters,
    }
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
@router.get("/operations/{operation_id}/records")
async def get_operation_records(
    operation_id: str,
    status: Optional[str] = Query(None, description="Filter by status"),
    limit: int = Query(100, le=500),
):
    """Get deletion records for a cleanup operation."""
    audit_store = get_audit_store()

    # 404 early if the parent operation is unknown.
    if not audit_store.load_operation(operation_id):
        raise HTTPException(status_code=404, detail="Operation not found")

    records = audit_store.load_records(operation_id)

    # Optional status filter; record status may be an enum or a string.
    if status:
        records = [
            r for r in records
            if str(r.status.value if hasattr(r.status, "value") else r.status) == status
        ]

    records = records[:limit]

    def _record_dict(r):
        return {
            "record_id": r.record_id,
            "resource_arn": r.resource_arn,
            "resource_id": r.resource_id,
            "resource_type": r.resource_type,
            "region": r.region,
            "status": r.status.value if hasattr(r.status, "value") else str(r.status),
            "error_code": r.error_code,
            "error_message": r.error_message,
            "protection_reason": r.protection_reason,
            "deletion_tier": r.deletion_tier,
        }

    return {
        "operation_id": operation_id,
        "count": len(records),
        "records": [_record_dict(r) for r in records],
    }
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""Saved filters API endpoints."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import Any, List, Optional
|
|
8
|
+
|
|
9
|
+
from fastapi import APIRouter, HTTPException
|
|
10
|
+
from pydantic import BaseModel
|
|
11
|
+
|
|
12
|
+
from ...dependencies import get_database
|
|
13
|
+
|
|
14
|
+
router = APIRouter(prefix="/filters")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Available filter operators, exposed verbatim by the /schema endpoint so
# the front-end filter builder can populate its operator dropdown.
# `value` is the machine identifier; `label` is the human-readable text.
FILTER_OPERATORS = [
    {"value": "equals", "label": "equals"},
    {"value": "not_equals", "label": "does not equal"},
    {"value": "contains", "label": "contains"},
    {"value": "not_contains", "label": "does not contain"},
    {"value": "starts_with", "label": "starts with"},
    {"value": "not_starts_with", "label": "does not start with"},
    {"value": "ends_with", "label": "ends with"},
    {"value": "not_ends_with", "label": "does not end with"},
    {"value": "is_empty", "label": "is empty"},
    {"value": "is_not_empty", "label": "is not empty"},
]
|
|
30
|
+
|
|
31
|
+
# Available filter fields, exposed verbatim by the /schema endpoint for the
# front-end field dropdown. `value` is the queryable field identifier;
# `label` is the human-readable text.
FILTER_FIELDS = [
    {"value": "name", "label": "Name"},
    {"value": "arn", "label": "ARN"},
    {"value": "resource_type", "label": "Type"},
    {"value": "region", "label": "Region"},
    {"value": "snapshot_name", "label": "Snapshot"},
    {"value": "config_hash", "label": "Config Hash"},
    {"value": "tag_key", "label": "Tag Key"},
    {"value": "tag_value", "label": "Tag Value"},
]
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class FilterCondition(BaseModel):
    """A single filter condition."""

    # Field identifier — expected to be one of FILTER_FIELDS' values.
    field: str
    # Operator identifier — expected to be one of FILTER_OPERATORS' values.
    operator: str
    # Comparison value; None for value-less operators such as is_empty.
    value: Optional[str] = None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class FilterConfig(BaseModel):
    """Filter configuration model - supports both simple and advanced modes."""

    # Simple mode fields (legacy support)
    resource_type: Optional[str] = None
    region: Optional[str] = None
    snapshot: Optional[str] = None
    search: Optional[str] = None
    tags: Optional[dict] = None

    # Advanced mode fields: conditions are combined with the given logic.
    logic: Optional[str] = "AND"  # "AND" or "OR"
    conditions: Optional[List[FilterCondition]] = None
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class SavedFilter(BaseModel):
    """Saved filter model (request payload for create/update)."""

    # Database row id; None when creating a new filter.
    id: Optional[int] = None
    name: str
    description: Optional[str] = None
    # Accept both FilterConfig and raw dict — serialized to JSON on save.
    filter_config: Any
    is_favorite: bool = False
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class SavedFilterResponse(BaseModel):
    """Saved filter response model (row shape returned to clients)."""

    id: int
    name: str
    description: Optional[str]
    # Decoded JSON configuration, not the raw stored string.
    filter_config: dict
    is_favorite: bool
    # ISO-8601 timestamps as stored in the database.
    created_at: str
    last_used_at: Optional[str]
    use_count: int
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@router.get("/schema")
async def get_filter_schema():
    """Describe the filter-builder vocabulary: fields, operators, logic."""
    schema = {
        "fields": FILTER_FIELDS,
        "operators": FILTER_OPERATORS,
    }
    schema["logic_options"] = ["AND", "OR"]
    return schema
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
@router.get("")
async def list_saved_filters(favorites_only: bool = False):
    """List saved filters, favorites first, then by most recent use.

    Args:
        favorites_only: When True, return only filters flagged as favorites.
    """
    db = get_database()

    sql = "SELECT * FROM saved_filters WHERE 1=1"
    if favorites_only:
        sql += " AND is_favorite = 1"
    # NULLS LAST keeps never-used filters at the bottom of the list.
    sql += " ORDER BY is_favorite DESC, last_used_at DESC NULLS LAST, name"

    # The query binds no parameters; the original built (and never
    # populated) a params list — pass an empty tuple directly.
    rows = db.fetchall(sql, ())

    filters = []
    for row in rows:
        row_dict = dict(row)
        # filter_config is stored as a JSON string; decode it for the response.
        if row_dict.get("filter_config"):
            row_dict["filter_config"] = json.loads(row_dict["filter_config"])
        filters.append(row_dict)

    return {"filters": filters}
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
@router.post("")
async def create_saved_filter(filter_data: SavedFilter):
    """Save a new filter.

    Raises:
        HTTPException: 400 when a filter with the same name already exists,
            500 for any other database error.
    """
    from datetime import timezone

    db = get_database()

    try:
        # filter_config may arrive as a Pydantic model or a raw dict.
        config = filter_data.filter_config
        if hasattr(config, "model_dump"):
            config = config.model_dump()
        config_json = json.dumps(config)

        # datetime.utcnow() is deprecated (Python 3.12+); produce the same
        # naive UTC ISO string via an aware timestamp stripped of tzinfo.
        created_at = datetime.now(timezone.utc).replace(tzinfo=None).isoformat()

        cursor = db.execute(
            """
            INSERT INTO saved_filters (name, description, filter_config, is_favorite, created_at)
            VALUES (?, ?, ?, ?, ?)
            """,
            (
                filter_data.name,
                filter_data.description,
                config_json,
                filter_data.is_favorite,
                created_at,
            ),
        )
        db._conn.commit()  # type: ignore
        return {"id": cursor.lastrowid, "message": "Filter saved"}
    except Exception as e:
        # Map the name-uniqueness violation to a client error; everything
        # else surfaces as a 500.
        if "UNIQUE constraint" in str(e):
            raise HTTPException(
                status_code=400, detail=f"Filter with name '{filter_data.name}' already exists"
            )
        raise HTTPException(status_code=500, detail=str(e))
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
@router.get("/{filter_id}")
async def get_saved_filter(filter_id: int):
    """Get a saved filter by ID."""
    db = get_database()

    row = db.fetchone("SELECT * FROM saved_filters WHERE id = ?", (filter_id,))
    if not row:
        raise HTTPException(status_code=404, detail="Filter not found")

    result = dict(row)
    # filter_config is stored as a JSON string; decode it for the response.
    raw_config = result.get("filter_config")
    if raw_config:
        result["filter_config"] = json.loads(raw_config)
    return result
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
@router.put("/{filter_id}")
async def update_saved_filter(filter_id: int, filter_data: SavedFilter):
    """Update a saved filter."""
    db = get_database()

    if not db.fetchone("SELECT id FROM saved_filters WHERE id = ?", (filter_id,)):
        raise HTTPException(status_code=404, detail="Filter not found")

    # filter_config may arrive as a Pydantic model or a raw dict.
    config = filter_data.filter_config
    if hasattr(config, "model_dump"):
        config = config.model_dump()
    config_json = json.dumps(config)

    db.execute(
        """
        UPDATE saved_filters
        SET name = ?, description = ?, filter_config = ?, is_favorite = ?
        WHERE id = ?
        """,
        (filter_data.name, filter_data.description, config_json, filter_data.is_favorite, filter_id),
    )
    db._conn.commit()  # type: ignore
    return {"message": "Filter updated"}
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
@router.delete("/{filter_id}")
async def delete_saved_filter(filter_id: int):
    """Delete a saved filter."""
    db = get_database()

    # Verify the row exists so missing ids yield a 404 rather than a no-op.
    if not db.fetchone("SELECT id FROM saved_filters WHERE id = ?", (filter_id,)):
        raise HTTPException(status_code=404, detail="Filter not found")

    db.execute("DELETE FROM saved_filters WHERE id = ?", (filter_id,))
    db._conn.commit()  # type: ignore
    return {"message": "Filter deleted"}
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@router.post("/{filter_id}/use")
async def mark_filter_used(filter_id: int):
    """Mark a filter as used (updates last_used_at and use_count)."""
    from datetime import timezone

    db = get_database()

    existing = db.fetchone("SELECT id FROM saved_filters WHERE id = ?", (filter_id,))
    if not existing:
        raise HTTPException(status_code=404, detail="Filter not found")

    # datetime.utcnow() is deprecated (Python 3.12+); produce the same
    # naive UTC ISO string via an aware timestamp stripped of tzinfo.
    now_iso = datetime.now(timezone.utc).replace(tzinfo=None).isoformat()

    db.execute(
        """
        UPDATE saved_filters
        SET last_used_at = ?, use_count = use_count + 1
        WHERE id = ?
        """,
        (now_iso, filter_id),
    )
    db._conn.commit()  # type: ignore
    return {"message": "Filter marked as used"}
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
@router.post("/{filter_id}/favorite")
async def toggle_filter_favorite(filter_id: int):
    """Flip a filter's favorite flag and return the new value."""
    db = get_database()

    row = db.fetchone("SELECT id, is_favorite FROM saved_filters WHERE id = ?", (filter_id,))
    if not row:
        raise HTTPException(status_code=404, detail="Filter not found")

    # Negate the stored flag (SQLite stores 0/1; `not` yields a bool,
    # which SQLite writes back as 0/1).
    new_favorite = not row["is_favorite"]
    db.execute(
        "UPDATE saved_filters SET is_favorite = ? WHERE id = ?",
        (new_favorite, filter_id),
    )
    db._conn.commit()  # type: ignore
    return {"message": "Favorite toggled", "is_favorite": new_favorite}
|