aws-inventory-manager 0.13.2 (aws_inventory_manager-0.13.2-py3-none-any.whl)
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between those published versions.
Potentially problematic release.
This version of aws-inventory-manager might be problematic.
- aws_inventory_manager-0.13.2.dist-info/LICENSE +21 -0
- aws_inventory_manager-0.13.2.dist-info/METADATA +1226 -0
- aws_inventory_manager-0.13.2.dist-info/RECORD +145 -0
- aws_inventory_manager-0.13.2.dist-info/WHEEL +5 -0
- aws_inventory_manager-0.13.2.dist-info/entry_points.txt +2 -0
- aws_inventory_manager-0.13.2.dist-info/top_level.txt +1 -0
- src/__init__.py +3 -0
- src/aws/__init__.py +11 -0
- src/aws/client.py +128 -0
- src/aws/credentials.py +191 -0
- src/aws/rate_limiter.py +177 -0
- src/cli/__init__.py +12 -0
- src/cli/config.py +130 -0
- src/cli/main.py +3626 -0
- src/config_service/__init__.py +21 -0
- src/config_service/collector.py +346 -0
- src/config_service/detector.py +256 -0
- src/config_service/resource_type_mapping.py +328 -0
- src/cost/__init__.py +5 -0
- src/cost/analyzer.py +226 -0
- src/cost/explorer.py +209 -0
- src/cost/reporter.py +237 -0
- src/delta/__init__.py +5 -0
- src/delta/calculator.py +206 -0
- src/delta/differ.py +185 -0
- src/delta/formatters.py +272 -0
- src/delta/models.py +154 -0
- src/delta/reporter.py +234 -0
- src/models/__init__.py +21 -0
- src/models/config_diff.py +135 -0
- src/models/cost_report.py +87 -0
- src/models/deletion_operation.py +104 -0
- src/models/deletion_record.py +97 -0
- src/models/delta_report.py +122 -0
- src/models/efs_resource.py +80 -0
- src/models/elasticache_resource.py +90 -0
- src/models/group.py +318 -0
- src/models/inventory.py +133 -0
- src/models/protection_rule.py +123 -0
- src/models/report.py +288 -0
- src/models/resource.py +111 -0
- src/models/security_finding.py +102 -0
- src/models/snapshot.py +122 -0
- src/restore/__init__.py +20 -0
- src/restore/audit.py +175 -0
- src/restore/cleaner.py +461 -0
- src/restore/config.py +209 -0
- src/restore/deleter.py +976 -0
- src/restore/dependency.py +254 -0
- src/restore/safety.py +115 -0
- src/security/__init__.py +0 -0
- src/security/checks/__init__.py +0 -0
- src/security/checks/base.py +56 -0
- src/security/checks/ec2_checks.py +88 -0
- src/security/checks/elasticache_checks.py +149 -0
- src/security/checks/iam_checks.py +102 -0
- src/security/checks/rds_checks.py +140 -0
- src/security/checks/s3_checks.py +95 -0
- src/security/checks/secrets_checks.py +96 -0
- src/security/checks/sg_checks.py +142 -0
- src/security/cis_mapper.py +97 -0
- src/security/models.py +53 -0
- src/security/reporter.py +174 -0
- src/security/scanner.py +87 -0
- src/snapshot/__init__.py +6 -0
- src/snapshot/capturer.py +451 -0
- src/snapshot/filter.py +259 -0
- src/snapshot/inventory_storage.py +236 -0
- src/snapshot/report_formatter.py +250 -0
- src/snapshot/reporter.py +189 -0
- src/snapshot/resource_collectors/__init__.py +5 -0
- src/snapshot/resource_collectors/apigateway.py +140 -0
- src/snapshot/resource_collectors/backup.py +136 -0
- src/snapshot/resource_collectors/base.py +81 -0
- src/snapshot/resource_collectors/cloudformation.py +55 -0
- src/snapshot/resource_collectors/cloudwatch.py +109 -0
- src/snapshot/resource_collectors/codebuild.py +69 -0
- src/snapshot/resource_collectors/codepipeline.py +82 -0
- src/snapshot/resource_collectors/dynamodb.py +65 -0
- src/snapshot/resource_collectors/ec2.py +240 -0
- src/snapshot/resource_collectors/ecs.py +215 -0
- src/snapshot/resource_collectors/efs_collector.py +102 -0
- src/snapshot/resource_collectors/eks.py +200 -0
- src/snapshot/resource_collectors/elasticache_collector.py +79 -0
- src/snapshot/resource_collectors/elb.py +126 -0
- src/snapshot/resource_collectors/eventbridge.py +156 -0
- src/snapshot/resource_collectors/iam.py +188 -0
- src/snapshot/resource_collectors/kms.py +111 -0
- src/snapshot/resource_collectors/lambda_func.py +139 -0
- src/snapshot/resource_collectors/rds.py +109 -0
- src/snapshot/resource_collectors/route53.py +86 -0
- src/snapshot/resource_collectors/s3.py +105 -0
- src/snapshot/resource_collectors/secretsmanager.py +70 -0
- src/snapshot/resource_collectors/sns.py +68 -0
- src/snapshot/resource_collectors/sqs.py +82 -0
- src/snapshot/resource_collectors/ssm.py +160 -0
- src/snapshot/resource_collectors/stepfunctions.py +74 -0
- src/snapshot/resource_collectors/vpcendpoints.py +79 -0
- src/snapshot/resource_collectors/waf.py +159 -0
- src/snapshot/storage.py +351 -0
- src/storage/__init__.py +21 -0
- src/storage/audit_store.py +419 -0
- src/storage/database.py +294 -0
- src/storage/group_store.py +749 -0
- src/storage/inventory_store.py +320 -0
- src/storage/resource_store.py +413 -0
- src/storage/schema.py +288 -0
- src/storage/snapshot_store.py +346 -0
- src/utils/__init__.py +12 -0
- src/utils/export.py +305 -0
- src/utils/hash.py +60 -0
- src/utils/logging.py +63 -0
- src/utils/pagination.py +41 -0
- src/utils/paths.py +51 -0
- src/utils/progress.py +41 -0
- src/utils/unsupported_resources.py +306 -0
- src/web/__init__.py +5 -0
- src/web/app.py +97 -0
- src/web/dependencies.py +69 -0
- src/web/routes/__init__.py +1 -0
- src/web/routes/api/__init__.py +18 -0
- src/web/routes/api/charts.py +156 -0
- src/web/routes/api/cleanup.py +186 -0
- src/web/routes/api/filters.py +253 -0
- src/web/routes/api/groups.py +305 -0
- src/web/routes/api/inventories.py +80 -0
- src/web/routes/api/queries.py +202 -0
- src/web/routes/api/resources.py +379 -0
- src/web/routes/api/snapshots.py +314 -0
- src/web/routes/api/views.py +260 -0
- src/web/routes/pages.py +198 -0
- src/web/services/__init__.py +1 -0
- src/web/templates/base.html +949 -0
- src/web/templates/components/navbar.html +31 -0
- src/web/templates/components/sidebar.html +104 -0
- src/web/templates/pages/audit_logs.html +86 -0
- src/web/templates/pages/cleanup.html +279 -0
- src/web/templates/pages/dashboard.html +227 -0
- src/web/templates/pages/diff.html +175 -0
- src/web/templates/pages/error.html +30 -0
- src/web/templates/pages/groups.html +721 -0
- src/web/templates/pages/queries.html +246 -0
- src/web/templates/pages/resources.html +2251 -0
- src/web/templates/pages/snapshot_detail.html +271 -0
- src/web/templates/pages/snapshots.html +429 -0
src/snapshot/filter.py
ADDED
@@ -0,0 +1,259 @@
+"""Resource filtering for creating historical and tagged baselines."""
+
+import logging
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from ..models.resource import Resource
+
+logger = logging.getLogger(__name__)
+
+
+class ResourceFilter:
+    """Filter resources by creation date and tags."""
+
+    def __init__(
+        self,
+        before_date: Optional[datetime] = None,
+        after_date: Optional[datetime] = None,
+        required_tags: Optional[Dict[str, str]] = None,
+        include_tags: Optional[Dict[str, str]] = None,
+        exclude_tags: Optional[Dict[str, str]] = None,
+    ):
+        """Initialize resource filter.
+
+        Args:
+            before_date: Include only resources created before this date (exclusive)
+            after_date: Include only resources created on or after this date (inclusive)
+            required_tags: DEPRECATED - use include_tags instead (kept for backward compatibility)
+            include_tags: Resources must have ALL these tags (AND logic)
+            exclude_tags: Resources must NOT have ANY of these tags (OR logic)
+        """
+        self.before_date = before_date
+        self.after_date = after_date
+        # Support both required_tags (deprecated) and include_tags (new)
+        self.include_tags = include_tags or required_tags or {}
+        self.exclude_tags = exclude_tags or {}
+        # Keep required_tags for backward compatibility
+        self.required_tags = self.include_tags
+
+        # Statistics
+        self.stats = {
+            "total_collected": 0,
+            "date_matched": 0,
+            "tag_matched": 0,
+            "final_count": 0,
+            "filtered_out_by_date": 0,
+            "filtered_out_by_tags": 0,
+            "filtered_out_by_exclude_tags": 0,
+            "missing_creation_date": 0,
+        }
+
+    def apply(self, resources: List[Resource]) -> List[Resource]:
+        """Apply filters to a list of resources.
+
+        Args:
+            resources: List of resources to filter
+
+        Returns:
+            Filtered list of resources
+        """
+        self.stats["total_collected"] = len(resources)
+        filtered = []
+
+        for resource in resources:
+            if self._matches_filters(resource):
+                filtered.append(resource)
+
+        self.stats["final_count"] = len(filtered)
+
+        logger.debug(
+            f"Filtering complete: {self.stats['total_collected']} collected, "
+            f"{self.stats['final_count']} matched filters"
+        )
+
+        return filtered
+
+    def _matches_filters(self, resource: Resource) -> bool:
+        """Check if a resource matches all filters.
+
+        Args:
+            resource: Resource to check
+
+        Returns:
+            True if resource matches all filters
+        """
+        # Check date filters
+        if not self._matches_date_filter(resource):
+            self.stats["filtered_out_by_date"] += 1
+            return False
+
+        self.stats["date_matched"] += 1
+
+        # Check exclude tags first (if resource has any excluded tags, reject immediately)
+        if not self._matches_exclude_filter(resource):
+            self.stats["filtered_out_by_exclude_tags"] += 1
+            return False
+
+        # Check include tag filters
+        if not self._matches_tag_filter(resource):
+            self.stats["filtered_out_by_tags"] += 1
+            return False
+
+        self.stats["tag_matched"] += 1
+
+        return True
+
+    def _matches_date_filter(self, resource: Resource) -> bool:
+        """Check if resource matches date filters.
+
+        Args:
+            resource: Resource to check
+
+        Returns:
+            True if resource matches date filters (or no date filters specified)
+        """
+        # If no date filters, everything matches
+        if not self.before_date and not self.after_date:
+            return True
+
+        # If resource has no creation date or invalid type, we can't filter by date
+        # Many AWS resources don't expose creation timestamps (VPCs, Security Groups, etc.)
+        if not resource.created_at:
+            self.stats["missing_creation_date"] += 1
+            logger.debug(f"Resource {resource.arn} has no creation date - including by default")
+            return True
+
+        # Handle string dates (convert to datetime)
+        resource_date = resource.created_at
+        if isinstance(resource_date, str):
+            try:
+                resource_date = datetime.fromisoformat(resource_date.replace("Z", "+00:00"))
+            except ValueError:
+                self.stats["missing_creation_date"] += 1
+                logger.debug(f"Resource {resource.arn} has invalid date format - including by default")
+                return True
+
+        # Ensure we have a datetime object
+        if not isinstance(resource_date, datetime):
+            self.stats["missing_creation_date"] += 1
+            logger.debug(f"Resource {resource.arn} has no valid creation date - including by default")
+            return True
+
+        # Make sure resource.created_at is timezone-aware for comparison
+        if resource_date.tzinfo is None:
+            # Assume UTC if no timezone
+            from datetime import timezone as tz
+
+            resource_date = resource_date.replace(tzinfo=tz.utc)
+
+        # Check before_date filter (exclusive)
+        if self.before_date:
+            # Make sure before_date is timezone-aware
+            before_date_aware = self.before_date
+            if before_date_aware.tzinfo is None:
+                from datetime import timezone as tz
+
+                before_date_aware = before_date_aware.replace(tzinfo=tz.utc)
+
+            if resource_date >= before_date_aware:
+                logger.debug(f"Resource {resource.name} created {resource_date} " f"is not before {before_date_aware}")
+                return False
+
+        # Check after_date filter (inclusive)
+        if self.after_date:
+            # Make sure after_date is timezone-aware
+            after_date_aware = self.after_date
+            if after_date_aware.tzinfo is None:
+                from datetime import timezone as tz
+
+                after_date_aware = after_date_aware.replace(tzinfo=tz.utc)
+
+            if resource_date < after_date_aware:
+                logger.debug(f"Resource {resource.name} created {resource_date} " f"is before {after_date_aware}")
+                return False
+
+        return True
+
+    def _matches_tag_filter(self, resource: Resource) -> bool:
+        """Check if resource has all include tags (AND logic).
+
+        Args:
+            resource: Resource to check
+
+        Returns:
+            True if resource has all include tags (or no tag filters specified)
+        """
+        # If no include tag filters, everything matches
+        if not self.include_tags:
+            return True
+
+        # Check if resource has ALL include tags with matching values (AND logic)
+        for key, value in self.include_tags.items():
+            if key not in resource.tags:
+                logger.debug(f"Resource {resource.name} missing include tag: {key}")
+                return False
+
+            if resource.tags[key] != value:
+                logger.debug(
+                    f"Resource {resource.name} tag {key}={resource.tags[key]} " f"does not match required value {value}"
+                )
+                return False
+
+        return True
+
+    def _matches_exclude_filter(self, resource: Resource) -> bool:
+        """Check if resource has any exclude tags (OR logic).
+
+        Args:
+            resource: Resource to check
+
+        Returns:
+            True if resource does NOT have any exclude tags (or no exclude filters specified)
+        """
+        # If no exclude tag filters, everything matches
+        if not self.exclude_tags:
+            return True
+
+        # Check if resource has ANY of the exclude tags (OR logic)
+        for key, value in self.exclude_tags.items():
+            if key in resource.tags and resource.tags[key] == value:
+                logger.debug(f"Resource {resource.name} has exclude tag {key}={value}")
+                return False
+
+        return True
+
+    def get_filter_summary(self) -> str:
+        """Get a human-readable summary of applied filters.
+
+        Returns:
+            Formatted string describing the filters
+        """
+        parts = []
+
+        if self.before_date:
+            parts.append(f"created before {self.before_date.strftime('%Y-%m-%d')}")
+
+        if self.after_date:
+            parts.append(f"created on/after {self.after_date.strftime('%Y-%m-%d')}")
+
+        if self.include_tags:
+            tag_strs = [f"{k}={v}" for k, v in self.include_tags.items()]
+            parts.append(f"include tags: {', '.join(tag_strs)}")
+
+        if self.exclude_tags:
+            tag_strs = [f"{k}={v}" for k, v in self.exclude_tags.items()]
+            parts.append(f"exclude tags: {', '.join(tag_strs)}")
+
+        if not parts:
+            return "No filters applied"
+
+        return "Filters: " + " AND ".join(parts)
+
+    def get_statistics_summary(self) -> Dict[str, int]:
+        """Get filtering statistics.
+
+        Returns:
+            Dictionary of filtering statistics
+        """
+        return self.stats.copy()
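For reference, a minimal usage sketch of the ResourceFilter shown above (not part of the package diff). The Resource constructor arguments and the "src" import paths are assumptions based on the attributes the filter reads (arn, name, tags, created_at) and the wheel's top-level package, and may not match the real model exactly.

from datetime import datetime, timezone

from src.models.resource import Resource
from src.snapshot.filter import ResourceFilter

# Hypothetical resources; field names mirror the attributes used by the filter.
resources = [
    Resource(arn="arn:aws:ec2:us-east-1:123456789012:instance/i-0aaa", name="web-1",
             tags={"env": "prod"}, created_at=datetime(2024, 1, 15, tzinfo=timezone.utc)),
    Resource(arn="arn:aws:ec2:us-east-1:123456789012:instance/i-0bbb", name="scratch-1",
             tags={"env": "dev", "temporary": "true"}, created_at=datetime(2024, 6, 1, tzinfo=timezone.utc)),
]

# Keep prod-tagged resources created before 2024-03-01, dropping anything tagged temporary=true.
rf = ResourceFilter(
    before_date=datetime(2024, 3, 1),
    include_tags={"env": "prod"},
    exclude_tags={"temporary": "true"},
)
baseline = rf.apply(resources)              # only web-1 survives; scratch-1 fails the date filter
print(rf.get_filter_summary())              # "Filters: created before 2024-03-01 AND include tags: env=prod AND exclude tags: temporary=true"
print(rf.get_statistics_summary())          # per-stage counts, e.g. filtered_out_by_date, filtered_out_by_exclude_tags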
src/snapshot/inventory_storage.py
ADDED
@@ -0,0 +1,236 @@
+"""Storage service for inventory management.
+
+This module provides the main interface for inventory persistence.
+It uses SQLite as the primary storage backend, with automatic migration
+from legacy YAML files on first use.
+"""
+
+import logging
+import os
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import List, Optional, Union
+
+import yaml
+
+from ..models.inventory import Inventory
+from ..storage import Database, InventoryStore
+from ..utils.paths import get_snapshot_storage_path
+
+logger = logging.getLogger(__name__)
+
+
+class InventoryNotFoundError(Exception):
+    """Raised when an inventory cannot be found."""
+
+    pass
+
+
+class InventoryStorage:
+    """Manage inventory storage and retrieval using SQLite backend.
+
+    Handles CRUD operations for inventories with automatic migration
+    from legacy YAML files on first use.
+    """
+
+    def __init__(self, storage_dir: Optional[Union[str, Path]] = None):
+        """Initialize inventory storage.
+
+        Args:
+            storage_dir: Directory for storage (default: ~/.snapshots via get_snapshot_storage_path())
+        """
+        self.storage_dir = get_snapshot_storage_path(storage_dir)
+        self.inventory_file = self.storage_dir / "inventories.yaml"
+
+        # Ensure storage directory exists
+        self.storage_dir.mkdir(parents=True, exist_ok=True)
+
+        # Initialize SQLite database
+        self.db = Database(storage_path=self.storage_dir)
+        self.db.ensure_schema()
+
+        # Initialize inventory store
+        self._store = InventoryStore(self.db)
+
+        # Auto-migrate YAML inventories on first use
+        self._migrate_yaml_if_needed()
+
+    def _migrate_yaml_if_needed(self) -> None:
+        """Migrate inventories from YAML to SQLite if needed."""
+        if not self.inventory_file.exists():
+            return
+
+        # Check if we have inventories in SQLite already
+        existing = self._store.list_all()
+        if existing:
+            return  # Already migrated
+
+        try:
+            with open(self.inventory_file, "r") as f:
+                data = yaml.safe_load(f)
+
+            if not data or "inventories" not in data:
+                return
+
+            for inv_data in data["inventories"]:
+                inventory = Inventory.from_dict(inv_data)
+                self._store.save(inventory)
+                logger.debug(f"Migrated inventory '{inventory.name}' to SQLite")
+
+            logger.info(f"Migrated {len(data['inventories'])} inventories from YAML to SQLite")
+
+        except Exception as e:
+            logger.warning(f"Failed to migrate YAML inventories: {e}")
+
+    def load_all(self) -> List[Inventory]:
+        """Load all inventories.
+
+        Returns:
+            List of all inventories (empty list if none exist)
+        """
+        inventories = self._store.list_all()
+        logger.debug(f"Loaded {len(inventories)} inventories from storage")
+        return inventories
+
+    def load_by_account(self, account_id: str) -> List[Inventory]:
+        """Load inventories for specific account.
+
+        Args:
+            account_id: AWS account ID (12 digits)
+
+        Returns:
+            List of inventories for the account
+        """
+        inventories = self._store.list_by_account(account_id)
+        logger.debug(f"Found {len(inventories)} inventories for account {account_id}")
+        return inventories
+
+    def get_by_name(self, name: str, account_id: str) -> Inventory:
+        """Get specific inventory by name and account.
+
+        Args:
+            name: Inventory name
+            account_id: AWS account ID
+
+        Returns:
+            Inventory instance
+
+        Raises:
+            InventoryNotFoundError: If inventory not found
+        """
+        inventory = self._store.load(name, account_id)
+        if inventory:
+            logger.debug(f"Found inventory '{name}' for account {account_id}")
+            return inventory
+
+        raise InventoryNotFoundError(f"Inventory '{name}' not found for account {account_id}")
+
+    def get_or_create_default(self, account_id: str) -> Inventory:
+        """Get default inventory, creating if it doesn't exist.
+
+        Args:
+            account_id: AWS account ID
+
+        Returns:
+            Default inventory instance
+        """
+        try:
+            return self.get_by_name("default", account_id)
+        except InventoryNotFoundError:
+            # Auto-create default inventory
+            default = Inventory(
+                name="default",
+                account_id=account_id,
+                description="Auto-created default inventory",
+                include_tags={},
+                exclude_tags={},
+                snapshots=[],
+                active_snapshot=None,
+                created_at=datetime.now(timezone.utc),
+                last_updated=datetime.now(timezone.utc),
+            )
+            self.save(default)
+            logger.info(f"Created default inventory for account {account_id}")
+            return default
+
+    def save(self, inventory: Inventory) -> None:
+        """Save/update inventory.
+
+        Args:
+            inventory: Inventory to save
+
+        Raises:
+            ValueError: If inventory validation fails
+        """
+        # Validate inventory before saving
+        errors = inventory.validate()
+        if errors:
+            raise ValueError(f"Invalid inventory: {', '.join(errors)}")
+
+        self._store.save(inventory)
+        logger.debug(f"Saved inventory '{inventory.name}' for account {inventory.account_id}")
+
+    def delete(self, name: str, account_id: str, delete_snapshots: bool = False) -> int:
+        """Delete inventory, optionally deleting its snapshot files.
+
+        Args:
+            name: Inventory name
+            account_id: AWS account ID
+            delete_snapshots: Whether to delete snapshot files
+
+        Returns:
+            Number of snapshot files deleted (0 if delete_snapshots=False)
+
+        Raises:
+            InventoryNotFoundError: If inventory not found
+        """
+        # Load inventory to get snapshot list
+        inventory = self.get_by_name(name, account_id)
+
+        # Delete snapshot files if requested
+        deleted_count = 0
+        if delete_snapshots:
+            from ..storage import SnapshotStore
+
+            snapshot_store = SnapshotStore(self.db)
+            for snapshot_name in inventory.snapshots:
+                # Remove file extensions if present
+                snap_name = snapshot_name.replace(".yaml.gz", "").replace(".yaml", "")
+                if snapshot_store.delete(snap_name):
+                    deleted_count += 1
+                    logger.debug(f"Deleted snapshot: {snap_name}")
+
+        # Delete inventory
+        self._store.delete(name, account_id)
+        logger.info(f"Deleted inventory '{name}' for account {account_id}")
+
+        return deleted_count
+
+    def exists(self, name: str, account_id: str) -> bool:
+        """Check if inventory exists.
+
+        Args:
+            name: Inventory name
+            account_id: AWS account ID
+
+        Returns:
+            True if inventory exists, False otherwise
+        """
+        return self._store.exists(name, account_id)
+
+    def validate_unique(self, name: str, account_id: str) -> bool:
+        """Validate that (name, account_id) combination is unique.
+
+        Args:
+            name: Inventory name
+            account_id: AWS account ID
+
+        Returns:
+            True if unique, False if already exists
+        """
+        return not self.exists(name, account_id)
+
+    # Legacy methods for backward compatibility
+    def _atomic_write(self, inventories: List[Inventory]) -> None:
+        """Write inventories using atomic rename pattern (legacy, no-op for SQLite)."""
+        pass  # SQLite handles atomicity
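A similar minimal sketch for the InventoryStorage service above (again not part of the diff). The calls mirror the methods shown in the code; the account ID is a placeholder and the import path assumes the wheel's top-level src package.

from src.snapshot.inventory_storage import InventoryNotFoundError, InventoryStorage

storage = InventoryStorage()  # default storage dir via get_snapshot_storage_path()

# First call auto-creates the "default" inventory for the account if it is missing.
inv = storage.get_or_create_default("123456789012")

# Lookups are keyed by (name, account_id) and raise InventoryNotFoundError when missing.
try:
    prod = storage.get_by_name("production", "123456789012")
except InventoryNotFoundError:
    prod = None

print([i.name for i in storage.load_by_account("123456789012")])

# Deletion can optionally remove the inventory's snapshots and reports how many were removed.
removed = storage.delete("default", "123456789012", delete_snapshots=True)
print(f"Deleted {removed} snapshots")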