aws-inventory-manager 0.13.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of aws-inventory-manager might be problematic.

Files changed (145)
  1. aws_inventory_manager-0.13.2.dist-info/LICENSE +21 -0
  2. aws_inventory_manager-0.13.2.dist-info/METADATA +1226 -0
  3. aws_inventory_manager-0.13.2.dist-info/RECORD +145 -0
  4. aws_inventory_manager-0.13.2.dist-info/WHEEL +5 -0
  5. aws_inventory_manager-0.13.2.dist-info/entry_points.txt +2 -0
  6. aws_inventory_manager-0.13.2.dist-info/top_level.txt +1 -0
  7. src/__init__.py +3 -0
  8. src/aws/__init__.py +11 -0
  9. src/aws/client.py +128 -0
  10. src/aws/credentials.py +191 -0
  11. src/aws/rate_limiter.py +177 -0
  12. src/cli/__init__.py +12 -0
  13. src/cli/config.py +130 -0
  14. src/cli/main.py +3626 -0
  15. src/config_service/__init__.py +21 -0
  16. src/config_service/collector.py +346 -0
  17. src/config_service/detector.py +256 -0
  18. src/config_service/resource_type_mapping.py +328 -0
  19. src/cost/__init__.py +5 -0
  20. src/cost/analyzer.py +226 -0
  21. src/cost/explorer.py +209 -0
  22. src/cost/reporter.py +237 -0
  23. src/delta/__init__.py +5 -0
  24. src/delta/calculator.py +206 -0
  25. src/delta/differ.py +185 -0
  26. src/delta/formatters.py +272 -0
  27. src/delta/models.py +154 -0
  28. src/delta/reporter.py +234 -0
  29. src/models/__init__.py +21 -0
  30. src/models/config_diff.py +135 -0
  31. src/models/cost_report.py +87 -0
  32. src/models/deletion_operation.py +104 -0
  33. src/models/deletion_record.py +97 -0
  34. src/models/delta_report.py +122 -0
  35. src/models/efs_resource.py +80 -0
  36. src/models/elasticache_resource.py +90 -0
  37. src/models/group.py +318 -0
  38. src/models/inventory.py +133 -0
  39. src/models/protection_rule.py +123 -0
  40. src/models/report.py +288 -0
  41. src/models/resource.py +111 -0
  42. src/models/security_finding.py +102 -0
  43. src/models/snapshot.py +122 -0
  44. src/restore/__init__.py +20 -0
  45. src/restore/audit.py +175 -0
  46. src/restore/cleaner.py +461 -0
  47. src/restore/config.py +209 -0
  48. src/restore/deleter.py +976 -0
  49. src/restore/dependency.py +254 -0
  50. src/restore/safety.py +115 -0
  51. src/security/__init__.py +0 -0
  52. src/security/checks/__init__.py +0 -0
  53. src/security/checks/base.py +56 -0
  54. src/security/checks/ec2_checks.py +88 -0
  55. src/security/checks/elasticache_checks.py +149 -0
  56. src/security/checks/iam_checks.py +102 -0
  57. src/security/checks/rds_checks.py +140 -0
  58. src/security/checks/s3_checks.py +95 -0
  59. src/security/checks/secrets_checks.py +96 -0
  60. src/security/checks/sg_checks.py +142 -0
  61. src/security/cis_mapper.py +97 -0
  62. src/security/models.py +53 -0
  63. src/security/reporter.py +174 -0
  64. src/security/scanner.py +87 -0
  65. src/snapshot/__init__.py +6 -0
  66. src/snapshot/capturer.py +451 -0
  67. src/snapshot/filter.py +259 -0
  68. src/snapshot/inventory_storage.py +236 -0
  69. src/snapshot/report_formatter.py +250 -0
  70. src/snapshot/reporter.py +189 -0
  71. src/snapshot/resource_collectors/__init__.py +5 -0
  72. src/snapshot/resource_collectors/apigateway.py +140 -0
  73. src/snapshot/resource_collectors/backup.py +136 -0
  74. src/snapshot/resource_collectors/base.py +81 -0
  75. src/snapshot/resource_collectors/cloudformation.py +55 -0
  76. src/snapshot/resource_collectors/cloudwatch.py +109 -0
  77. src/snapshot/resource_collectors/codebuild.py +69 -0
  78. src/snapshot/resource_collectors/codepipeline.py +82 -0
  79. src/snapshot/resource_collectors/dynamodb.py +65 -0
  80. src/snapshot/resource_collectors/ec2.py +240 -0
  81. src/snapshot/resource_collectors/ecs.py +215 -0
  82. src/snapshot/resource_collectors/efs_collector.py +102 -0
  83. src/snapshot/resource_collectors/eks.py +200 -0
  84. src/snapshot/resource_collectors/elasticache_collector.py +79 -0
  85. src/snapshot/resource_collectors/elb.py +126 -0
  86. src/snapshot/resource_collectors/eventbridge.py +156 -0
  87. src/snapshot/resource_collectors/iam.py +188 -0
  88. src/snapshot/resource_collectors/kms.py +111 -0
  89. src/snapshot/resource_collectors/lambda_func.py +139 -0
  90. src/snapshot/resource_collectors/rds.py +109 -0
  91. src/snapshot/resource_collectors/route53.py +86 -0
  92. src/snapshot/resource_collectors/s3.py +105 -0
  93. src/snapshot/resource_collectors/secretsmanager.py +70 -0
  94. src/snapshot/resource_collectors/sns.py +68 -0
  95. src/snapshot/resource_collectors/sqs.py +82 -0
  96. src/snapshot/resource_collectors/ssm.py +160 -0
  97. src/snapshot/resource_collectors/stepfunctions.py +74 -0
  98. src/snapshot/resource_collectors/vpcendpoints.py +79 -0
  99. src/snapshot/resource_collectors/waf.py +159 -0
  100. src/snapshot/storage.py +351 -0
  101. src/storage/__init__.py +21 -0
  102. src/storage/audit_store.py +419 -0
  103. src/storage/database.py +294 -0
  104. src/storage/group_store.py +749 -0
  105. src/storage/inventory_store.py +320 -0
  106. src/storage/resource_store.py +413 -0
  107. src/storage/schema.py +288 -0
  108. src/storage/snapshot_store.py +346 -0
  109. src/utils/__init__.py +12 -0
  110. src/utils/export.py +305 -0
  111. src/utils/hash.py +60 -0
  112. src/utils/logging.py +63 -0
  113. src/utils/pagination.py +41 -0
  114. src/utils/paths.py +51 -0
  115. src/utils/progress.py +41 -0
  116. src/utils/unsupported_resources.py +306 -0
  117. src/web/__init__.py +5 -0
  118. src/web/app.py +97 -0
  119. src/web/dependencies.py +69 -0
  120. src/web/routes/__init__.py +1 -0
  121. src/web/routes/api/__init__.py +18 -0
  122. src/web/routes/api/charts.py +156 -0
  123. src/web/routes/api/cleanup.py +186 -0
  124. src/web/routes/api/filters.py +253 -0
  125. src/web/routes/api/groups.py +305 -0
  126. src/web/routes/api/inventories.py +80 -0
  127. src/web/routes/api/queries.py +202 -0
  128. src/web/routes/api/resources.py +379 -0
  129. src/web/routes/api/snapshots.py +314 -0
  130. src/web/routes/api/views.py +260 -0
  131. src/web/routes/pages.py +198 -0
  132. src/web/services/__init__.py +1 -0
  133. src/web/templates/base.html +949 -0
  134. src/web/templates/components/navbar.html +31 -0
  135. src/web/templates/components/sidebar.html +104 -0
  136. src/web/templates/pages/audit_logs.html +86 -0
  137. src/web/templates/pages/cleanup.html +279 -0
  138. src/web/templates/pages/dashboard.html +227 -0
  139. src/web/templates/pages/diff.html +175 -0
  140. src/web/templates/pages/error.html +30 -0
  141. src/web/templates/pages/groups.html +721 -0
  142. src/web/templates/pages/queries.html +246 -0
  143. src/web/templates/pages/resources.html +2251 -0
  144. src/web/templates/pages/snapshot_detail.html +271 -0
  145. src/web/templates/pages/snapshots.html +429 -0
src/storage/snapshot_store.py ADDED
@@ -0,0 +1,346 @@
+"""Snapshot storage operations for SQLite backend."""
+
+import json
+import logging
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from ..models.resource import Resource
+from ..models.snapshot import Snapshot
+from .database import Database, json_deserialize, json_serialize
+
+logger = logging.getLogger(__name__)
+
+
+def compute_canonical_name(name: str, tags: Optional[Dict[str, str]], arn: str) -> str:
+    """Compute canonical name for a resource.
+
+    Priority order:
+    1. aws:cloudformation:logical-id tag (stable across recreations)
+    2. Resource name
+    3. ARN as fallback
+
+    Args:
+        name: Resource physical name
+        tags: Resource tags
+        arn: Resource ARN
+
+    Returns:
+        Canonical name for matching
+    """
+    if tags and "aws:cloudformation:logical-id" in tags:
+        return tags["aws:cloudformation:logical-id"]
+    return name or arn
+
+
+class SnapshotStore:
+    """CRUD operations for snapshots in SQLite database."""
+
+    def __init__(self, db: Database):
+        """Initialize snapshot store.
+
+        Args:
+            db: Database connection manager
+        """
+        self.db = db
+
+    def save(self, snapshot: Snapshot) -> int:
+        """Save snapshot and all its resources to database.
+
+        Args:
+            snapshot: Snapshot to save
+
+        Returns:
+            Database ID of saved snapshot
+        """
+        with self.db.transaction() as cursor:
+            # Insert snapshot
+            cursor.execute(
+                """
+                INSERT INTO snapshots (
+                    name, created_at, account_id, regions, resource_count,
+                    total_resources_before_filter, service_counts, metadata,
+                    filters_applied, schema_version, inventory_name, is_active
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    snapshot.name,
+                    snapshot.created_at.isoformat(),
+                    snapshot.account_id,
+                    json_serialize(snapshot.regions),
+                    snapshot.resource_count,
+                    snapshot.total_resources_before_filter,
+                    json_serialize(snapshot.service_counts),
+                    json_serialize(snapshot.metadata),
+                    json_serialize(snapshot.filters_applied),
+                    snapshot.schema_version,
+                    snapshot.inventory_name,
+                    snapshot.is_active,
+                ),
+            )
+            snapshot_id = cursor.lastrowid
+
+            # Insert resources
+            for resource in snapshot.resources:
+                canonical = compute_canonical_name(resource.name, resource.tags, resource.arn)
+                cursor.execute(
+                    """
+                    INSERT INTO resources (
+                        snapshot_id, arn, resource_type, name, region,
+                        config_hash, raw_config, created_at, source, canonical_name
+                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """,
+                    (
+                        snapshot_id,
+                        resource.arn,
+                        resource.resource_type,
+                        resource.name,
+                        resource.region,
+                        resource.config_hash,
+                        json_serialize(resource.raw_config),
+                        resource.created_at.isoformat() if resource.created_at else None,
+                        resource.source,
+                        canonical,
+                    ),
+                )
+                resource_id = cursor.lastrowid
+
+                # Insert tags
+                if resource.tags:
+                    tag_data = [(resource_id, k, v) for k, v in resource.tags.items()]
+                    cursor.executemany(
+                        "INSERT INTO resource_tags (resource_id, key, value) VALUES (?, ?, ?)",
+                        tag_data,
+                    )
+
+        logger.debug(f"Saved snapshot '{snapshot.name}' with {len(snapshot.resources)} resources (id={snapshot_id})")
+        return snapshot_id
+
+    def load(self, name: str) -> Optional[Snapshot]:
+        """Load snapshot by name with all resources.
+
+        Args:
+            name: Snapshot name
+
+        Returns:
+            Snapshot object or None if not found
+        """
+        # Get snapshot
+        snapshot_row = self.db.fetchone("SELECT * FROM snapshots WHERE name = ?", (name,))
+        if not snapshot_row:
+            return None
+
+        snapshot_id = snapshot_row["id"]
+
+        # Get resources
+        resource_rows = self.db.fetchall(
+            "SELECT * FROM resources WHERE snapshot_id = ?",
+            (snapshot_id,),
+        )
+
+        # Get tags for all resources in one query
+        resource_ids = [r["id"] for r in resource_rows]
+        tags_by_resource: Dict[int, Dict[str, str]] = {}
+
+        if resource_ids:
+            placeholders = ",".join("?" * len(resource_ids))
+            tag_rows = self.db.fetchall(
+                f"SELECT resource_id, key, value FROM resource_tags WHERE resource_id IN ({placeholders})",
+                tuple(resource_ids),
+            )
+            for tag_row in tag_rows:
+                rid = tag_row["resource_id"]
+                if rid not in tags_by_resource:
+                    tags_by_resource[rid] = {}
+                tags_by_resource[rid][tag_row["key"]] = tag_row["value"]
+
+        # Build Resource objects
+        resources = []
+        for row in resource_rows:
+            created_at = None
+            if row["created_at"]:
+                try:
+                    created_at = datetime.fromisoformat(row["created_at"])
+                except ValueError:
+                    pass
+
+            resource = Resource(
+                arn=row["arn"],
+                resource_type=row["resource_type"],
+                name=row["name"],
+                region=row["region"],
+                config_hash=row["config_hash"],
+                raw_config=json_deserialize(row["raw_config"]),
+                tags=tags_by_resource.get(row["id"], {}),
+                created_at=created_at,
+                source=row["source"] or "direct_api",
+            )
+            resources.append(resource)
+
+        # Build Snapshot
+        created_at = datetime.fromisoformat(snapshot_row["created_at"])
+        if created_at.tzinfo is None:
+            created_at = created_at.replace(tzinfo=timezone.utc)
+
+        snapshot = Snapshot(
+            name=snapshot_row["name"],
+            created_at=created_at,
+            account_id=snapshot_row["account_id"],
+            regions=json_deserialize(snapshot_row["regions"]) or [],
+            resources=resources,
+            is_active=bool(snapshot_row["is_active"]),
+            resource_count=snapshot_row["resource_count"],
+            total_resources_before_filter=snapshot_row.get("total_resources_before_filter"),
+            service_counts=json_deserialize(snapshot_row["service_counts"]) or {},
+            metadata=json_deserialize(snapshot_row["metadata"]) or {},
+            filters_applied=json_deserialize(snapshot_row["filters_applied"]),
+            inventory_name=snapshot_row["inventory_name"] or "default",
+            schema_version=snapshot_row["schema_version"] or "1.1",
+        )
+
+        logger.debug(f"Loaded snapshot '{name}' with {len(resources)} resources")
+        return snapshot
+
+    def list_all(self) -> List[Dict[str, Any]]:
+        """List all snapshots with metadata (no resources).
+
+        Returns:
+            List of snapshot metadata dictionaries
+        """
+        rows = self.db.fetchall(
+            """
+            SELECT name, created_at, account_id, regions, resource_count,
+                   service_counts, is_active, inventory_name
+            FROM snapshots
+            ORDER BY created_at DESC
+            """
+        )
+
+        results = []
+        for row in rows:
+            created_at = datetime.fromisoformat(row["created_at"])
+            results.append(
+                {
+                    "name": row["name"],
+                    "created_at": created_at,
+                    "account_id": row["account_id"],
+                    "regions": json_deserialize(row["regions"]) or [],
+                    "resource_count": row["resource_count"],
+                    "service_counts": json_deserialize(row["service_counts"]) or {},
+                    "is_active": bool(row["is_active"]),
+                    "inventory_name": row["inventory_name"],
+                }
+            )
+
+        return results
+
+    def delete(self, name: str) -> bool:
+        """Delete snapshot and cascade to resources.
+
+        Args:
+            name: Snapshot name to delete
+
+        Returns:
+            True if deleted, False if not found
+        """
+        with self.db.transaction() as cursor:
+            cursor.execute("DELETE FROM snapshots WHERE name = ?", (name,))
+            deleted = cursor.rowcount > 0
+
+        if deleted:
+            logger.debug(f"Deleted snapshot '{name}'")
+        return deleted
+
+    def exists(self, name: str) -> bool:
+        """Check if snapshot exists.
+
+        Args:
+            name: Snapshot name
+
+        Returns:
+            True if exists
+        """
+        row = self.db.fetchone("SELECT 1 FROM snapshots WHERE name = ?", (name,))
+        return row is not None
+
+    def rename(self, old_name: str, new_name: str) -> bool:
+        """Rename a snapshot.
+
+        Args:
+            old_name: Current snapshot name
+            new_name: New snapshot name
+
+        Returns:
+            True if renamed, False if old_name not found
+
+        Raises:
+            ValueError: If new_name already exists
+        """
+        if not self.exists(old_name):
+            return False
+
+        if self.exists(new_name):
+            raise ValueError(f"Snapshot '{new_name}' already exists")
+
+        with self.db.transaction() as cursor:
+            cursor.execute(
+                "UPDATE snapshots SET name = ? WHERE name = ?",
+                (new_name, old_name),
+            )
+
+        logger.debug(f"Renamed snapshot '{old_name}' to '{new_name}'")
+        return True
+
+    def get_active(self) -> Optional[str]:
+        """Get name of active snapshot.
+
+        Returns:
+            Active snapshot name or None
+        """
+        row = self.db.fetchone("SELECT name FROM snapshots WHERE is_active = 1")
+        return row["name"] if row else None
+
+    def set_active(self, name: str) -> None:
+        """Set snapshot as active baseline.
+
+        Args:
+            name: Snapshot name to set as active
+        """
+        with self.db.transaction() as cursor:
+            # Clear previous active
+            cursor.execute("UPDATE snapshots SET is_active = 0 WHERE is_active = 1")
+            # Set new active
+            cursor.execute("UPDATE snapshots SET is_active = 1 WHERE name = ?", (name,))
+
+        logger.debug(f"Set active snapshot: '{name}'")
+
+    def get_id(self, name: str) -> Optional[int]:
+        """Get database ID for snapshot.
+
+        Args:
+            name: Snapshot name
+
+        Returns:
+            Database ID or None
+        """
+        row = self.db.fetchone("SELECT id FROM snapshots WHERE name = ?", (name,))
+        return row["id"] if row else None
+
+    def get_resource_count(self) -> int:
+        """Get total resource count across all snapshots.
+
+        Returns:
+            Total resource count
+        """
+        row = self.db.fetchone("SELECT COUNT(*) as count FROM resources")
+        return row["count"] if row else 0
+
+    def get_snapshot_count(self) -> int:
+        """Get total snapshot count.
+
+        Returns:
+            Snapshot count
+        """
+        row = self.db.fetchone("SELECT COUNT(*) as count FROM snapshots")
+        return row["count"] if row else 0
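
For orientation, a minimal usage sketch of SnapshotStore follows. It is not part of the package: the Database("inventory.db") constructor argument and the Snapshot field defaults are assumptions based only on the imports and the save() code above.

    from datetime import datetime, timezone

    from src.models.snapshot import Snapshot
    from src.storage.database import Database
    from src.storage.snapshot_store import SnapshotStore

    db = Database("inventory.db")  # hypothetical path; constructor not shown in this diff
    store = SnapshotStore(db)

    # Round-trip: save a snapshot, mark it as the active baseline, reload it.
    snap = Snapshot(
        name="baseline-2024",
        created_at=datetime.now(timezone.utc),
        account_id="123456789012",
        regions=["us-east-1"],
        resources=[],
    )
    store.save(snap)
    store.set_active("baseline-2024")
    loaded = store.load("baseline-2024")  # returns None if the name is unknown
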
src/utils/__init__.py ADDED
@@ -0,0 +1,12 @@
+"""Utility modules for AWS Baseline Snapshot tool."""
+
+from .export import export_to_csv, export_to_json
+from .hash import compute_config_hash
+from .logging import setup_logging
+
+__all__ = [
+    "setup_logging",
+    "compute_config_hash",
+    "export_to_json",
+    "export_to_csv",
+]
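
Because __all__ re-exports these helpers, callers can import them from the package root rather than the individual submodules (a trivial sketch; the src top level is presumed from the wheel's top_level.txt):

    # Equivalent to importing from src.utils.export, src.utils.hash and src.utils.logging.
    from src.utils import compute_config_hash, export_to_csv, export_to_json, setup_logging
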
src/utils/export.py ADDED
@@ -0,0 +1,305 @@
+"""Export utilities for JSON and CSV formats."""
+
+import csv
+import json
+import logging
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Dict, List
+
+if TYPE_CHECKING:
+    from src.models.report import DetailedResource, ResourceSummary, SnapshotMetadata
+
+logger = logging.getLogger(__name__)
+
+
+def export_to_json(data: Any, filepath: str) -> Path:
+    """Export data to JSON file.
+
+    Args:
+        data: Data to export (must be JSON-serializable)
+        filepath: Destination file path
+
+    Returns:
+        Path to exported file
+    """
+    path = Path(filepath)
+
+    with open(path, "w", encoding="utf-8") as f:
+        json.dump(data, f, indent=2, default=str)
+
+    logger.info(f"Exported data to JSON: {path}")
+    return path
+
+
+def export_to_csv(data: List[Dict[str, Any]], filepath: str) -> Path:
+    """Export list of dictionaries to CSV file.
+
+    Args:
+        data: List of dictionaries to export
+        filepath: Destination file path
+
+    Returns:
+        Path to exported file
+
+    Raises:
+        ValueError: If data is empty or not a list of dicts
+    """
+    if not data:
+        raise ValueError("Cannot export empty data to CSV")
+
+    if not isinstance(data, list) or not isinstance(data[0], dict):
+        raise ValueError("Data must be a list of dictionaries for CSV export")
+
+    path = Path(filepath)
+
+    # Get fieldnames from first item
+    fieldnames = list(data[0].keys())
+
+    with open(path, "w", newline="", encoding="utf-8") as f:
+        writer = csv.DictWriter(f, fieldnames=fieldnames)
+        writer.writeheader()
+        writer.writerows(data)
+
+    logger.info(f"Exported {len(data)} rows to CSV: {path}")
+    return path
+
+
+def flatten_dict(d: Dict[str, Any], parent_key: str = "", sep: str = "_") -> Dict[str, Any]:
+    """Flatten a nested dictionary for CSV export.
+
+    Args:
+        d: Dictionary to flatten
+        parent_key: Parent key for nested items
+        sep: Separator for concatenating keys
+
+    Returns:
+        Flattened dictionary
+    """
+    from typing import Any, List, Tuple
+
+    items: List[Tuple[str, Any]] = []
+    for k, v in d.items():
+        new_key = f"{parent_key}{sep}{k}" if parent_key else k
+        if isinstance(v, dict):
+            items.extend(flatten_dict(v, new_key, sep=sep).items())
+        elif isinstance(v, list):
+            # Convert lists to comma-separated strings
+            items.append((new_key, ", ".join(str(x) for x in v)))
+        else:
+            items.append((new_key, v))
+    return dict(items)
+
+
+def detect_format(filepath: str) -> str:
+    """
+    Detect export format from file extension.
+
+    Args:
+        filepath: Path to file
+
+    Returns:
+        Format string: 'json', 'csv', or 'txt'
+
+    Raises:
+        ValueError: If format is not supported
+    """
+    path = Path(filepath)
+    extension = path.suffix.lower()
+
+    if extension == ".json":
+        return "json"
+    elif extension == ".csv":
+        return "csv"
+    elif extension == ".txt":
+        return "txt"
+    else:
+        raise ValueError(f"Unsupported export format '{extension}'. " f"Supported formats: .json, .csv, .txt")
+
+
+def export_report_json(
+    filepath: str,
+    metadata: "SnapshotMetadata",
+    summary: "ResourceSummary",
+    resources: List["DetailedResource"],
+) -> Path:
+    """
+    Export snapshot report to JSON format.
+
+    Args:
+        filepath: Destination file path
+        metadata: Snapshot metadata
+        summary: Resource summary
+        resources: List of detailed resources
+
+    Returns:
+        Path to exported file
+
+    Raises:
+        FileExistsError: If file already exists
+        FileNotFoundError: If parent directory doesn't exist
+    """
+    path = Path(filepath)
+
+    # Check if file already exists
+    if path.exists():
+        raise FileExistsError(f"Export file '{filepath}' already exists")
+
+    # Check if parent directory exists
+    if not path.parent.exists():
+        raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+    # Build report data structure
+    report_data = {
+        "snapshot_metadata": {
+            "name": metadata.name,
+            "created_at": metadata.created_at.isoformat(),
+            "account_id": metadata.account_id,
+            "regions": metadata.regions,
+            "inventory_name": metadata.inventory_name,
+            "total_resource_count": metadata.total_resource_count,
+        },
+        "summary": {
+            "total_count": summary.total_count,
+            "by_service": dict(summary.by_service),
+            "by_region": dict(summary.by_region),
+            "by_type": dict(summary.by_type),
+        },
+        "resources": [
+            {
+                "arn": r.arn,
+                "resource_type": r.resource_type,
+                "name": r.name,
+                "region": r.region,
+                "tags": r.tags,
+                "created_at": r.created_at.isoformat() if r.created_at else None,
+                "config_hash": r.config_hash,
+            }
+            for r in resources
+        ],
+    }
+
+    # Write to file
+    with open(path, "w", encoding="utf-8") as f:
+        json.dump(report_data, f, indent=2)
+
+    logger.info(f"Exported report to JSON: {path}")
+    return path
+
+
+def export_report_csv(filepath: str, resources: List["DetailedResource"]) -> Path:
+    """
+    Export resources to CSV format.
+
+    Args:
+        filepath: Destination file path
+        resources: List of detailed resources
+
+    Returns:
+        Path to exported file
+
+    Raises:
+        FileExistsError: If file already exists
+        FileNotFoundError: If parent directory doesn't exist
+    """
+    path = Path(filepath)
+
+    # Check if file already exists
+    if path.exists():
+        raise FileExistsError(f"Export file '{filepath}' already exists")
+
+    # Check if parent directory exists
+    if not path.parent.exists():
+        raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+    # Write CSV
+    with open(path, "w", newline="", encoding="utf-8") as f:
+        writer = csv.writer(f)
+
+        # Write header
+        writer.writerow(["ARN", "ResourceType", "Name", "Region", "CreatedAt", "Tags"])
+
+        # Write resources
+        for resource in resources:
+            writer.writerow(
+                [
+                    resource.arn,
+                    resource.resource_type,
+                    resource.name,
+                    resource.region,
+                    resource.created_at.isoformat() if resource.created_at else "",
+                    json.dumps(resource.tags) if resource.tags else "{}",
+                ]
+            )
+
+    logger.info(f"Exported {len(resources)} resources to CSV: {path}")
+    return path
+
+
+def export_report_txt(
+    filepath: str,
+    metadata: "SnapshotMetadata",
+    summary: "ResourceSummary",
+) -> Path:
+    """
+    Export report summary to plain text format.
+
+    Args:
+        filepath: Destination file path
+        metadata: Snapshot metadata
+        summary: Resource summary
+
+    Returns:
+        Path to exported file
+
+    Raises:
+        FileExistsError: If file already exists
+        FileNotFoundError: If parent directory doesn't exist
+    """
+    path = Path(filepath)
+
+    # Check if file already exists
+    if path.exists():
+        raise FileExistsError(f"Export file '{filepath}' already exists")
+
+    # Check if parent directory exists
+    if not path.parent.exists():
+        raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+    # Build text content
+    lines = []
+    lines.append("=" * 65)
+    lines.append(f"Snapshot Report: {metadata.name}")
+    lines.append("=" * 65)
+    lines.append("")
+    lines.append(f"Inventory: {metadata.inventory_name}")
+    lines.append(f"Account ID: {metadata.account_id}")
+    lines.append(f"Created: {metadata.created_at.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+    lines.append(f"Regions: {metadata.region_summary}")
+    lines.append("")
+    lines.append("─" * 65)
+    lines.append("")
+    lines.append("Resource Summary")
+    lines.append("")
+    lines.append(f"Total Resources: {summary.total_count:,}")
+    lines.append("")
+
+    if summary.by_service:
+        lines.append("By Service:")
+        for service, count in summary.top_services(limit=10):
+            percentage = (count / summary.total_count) * 100 if summary.total_count > 0 else 0
+            lines.append(f"  {service:20} {count:5} ({percentage:.1f}%)")
+        lines.append("")
+
+    if summary.by_region:
+        lines.append("By Region:")
+        for region, count in summary.top_regions(limit=10):
+            percentage = (count / summary.total_count) * 100 if summary.total_count > 0 else 0
+            lines.append(f"  {region:20} {count:5} ({percentage:.1f}%)")
+        lines.append("")
+
+    # Write to file
+    with open(path, "w", encoding="utf-8") as f:
+        f.write("\n".join(lines))
+
+    logger.info(f"Exported report to TXT: {path}")
+    return path
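
A short sketch tying flatten_dict to export_to_csv, since DictWriter takes its header from the first row; the input record and output path are hypothetical, invented for illustration:

    from src.utils.export import detect_format, export_to_csv, flatten_dict

    resources = [
        {
            "arn": "arn:aws:s3:::demo-bucket",
            "region": "us-east-1",
            "tags": {"env": "prod"},
            "regions_scanned": ["us-east-1", "eu-west-1"],
        },
    ]

    # Nested dicts become underscore-joined keys ("tags_env"); lists become
    # comma-separated strings, so every value is CSV-safe.
    rows = [flatten_dict(r) for r in resources]

    out = "resources.csv"
    if detect_format(out) == "csv":  # dispatch on file extension
        export_to_csv(rows, out)     # raises ValueError for empty input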