aws_inventory_manager-0.17.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. aws_inventory_manager-0.17.12.dist-info/LICENSE +21 -0
  2. aws_inventory_manager-0.17.12.dist-info/METADATA +1292 -0
  3. aws_inventory_manager-0.17.12.dist-info/RECORD +152 -0
  4. aws_inventory_manager-0.17.12.dist-info/WHEEL +5 -0
  5. aws_inventory_manager-0.17.12.dist-info/entry_points.txt +2 -0
  6. aws_inventory_manager-0.17.12.dist-info/top_level.txt +1 -0
  7. src/__init__.py +3 -0
  8. src/aws/__init__.py +11 -0
  9. src/aws/client.py +128 -0
  10. src/aws/credentials.py +191 -0
  11. src/aws/rate_limiter.py +177 -0
  12. src/cli/__init__.py +12 -0
  13. src/cli/config.py +130 -0
  14. src/cli/main.py +4046 -0
  15. src/cloudtrail/__init__.py +5 -0
  16. src/cloudtrail/query.py +642 -0
  17. src/config_service/__init__.py +21 -0
  18. src/config_service/collector.py +346 -0
  19. src/config_service/detector.py +256 -0
  20. src/config_service/resource_type_mapping.py +328 -0
  21. src/cost/__init__.py +5 -0
  22. src/cost/analyzer.py +226 -0
  23. src/cost/explorer.py +209 -0
  24. src/cost/reporter.py +237 -0
  25. src/delta/__init__.py +5 -0
  26. src/delta/calculator.py +206 -0
  27. src/delta/differ.py +185 -0
  28. src/delta/formatters.py +272 -0
  29. src/delta/models.py +154 -0
  30. src/delta/reporter.py +234 -0
  31. src/matching/__init__.py +6 -0
  32. src/matching/config.py +52 -0
  33. src/matching/normalizer.py +450 -0
  34. src/matching/prompts.py +33 -0
  35. src/models/__init__.py +21 -0
  36. src/models/config_diff.py +135 -0
  37. src/models/cost_report.py +87 -0
  38. src/models/deletion_operation.py +104 -0
  39. src/models/deletion_record.py +97 -0
  40. src/models/delta_report.py +122 -0
  41. src/models/efs_resource.py +80 -0
  42. src/models/elasticache_resource.py +90 -0
  43. src/models/group.py +318 -0
  44. src/models/inventory.py +133 -0
  45. src/models/protection_rule.py +123 -0
  46. src/models/report.py +288 -0
  47. src/models/resource.py +111 -0
  48. src/models/security_finding.py +102 -0
  49. src/models/snapshot.py +122 -0
  50. src/restore/__init__.py +20 -0
  51. src/restore/audit.py +175 -0
  52. src/restore/cleaner.py +461 -0
  53. src/restore/config.py +209 -0
  54. src/restore/deleter.py +976 -0
  55. src/restore/dependency.py +254 -0
  56. src/restore/safety.py +115 -0
  57. src/security/__init__.py +0 -0
  58. src/security/checks/__init__.py +0 -0
  59. src/security/checks/base.py +56 -0
  60. src/security/checks/ec2_checks.py +88 -0
  61. src/security/checks/elasticache_checks.py +149 -0
  62. src/security/checks/iam_checks.py +102 -0
  63. src/security/checks/rds_checks.py +140 -0
  64. src/security/checks/s3_checks.py +95 -0
  65. src/security/checks/secrets_checks.py +96 -0
  66. src/security/checks/sg_checks.py +142 -0
  67. src/security/cis_mapper.py +97 -0
  68. src/security/models.py +53 -0
  69. src/security/reporter.py +174 -0
  70. src/security/scanner.py +87 -0
  71. src/snapshot/__init__.py +6 -0
  72. src/snapshot/capturer.py +453 -0
  73. src/snapshot/filter.py +259 -0
  74. src/snapshot/inventory_storage.py +236 -0
  75. src/snapshot/report_formatter.py +250 -0
  76. src/snapshot/reporter.py +189 -0
  77. src/snapshot/resource_collectors/__init__.py +5 -0
  78. src/snapshot/resource_collectors/apigateway.py +140 -0
  79. src/snapshot/resource_collectors/backup.py +136 -0
  80. src/snapshot/resource_collectors/base.py +81 -0
  81. src/snapshot/resource_collectors/cloudformation.py +55 -0
  82. src/snapshot/resource_collectors/cloudwatch.py +109 -0
  83. src/snapshot/resource_collectors/codebuild.py +69 -0
  84. src/snapshot/resource_collectors/codepipeline.py +82 -0
  85. src/snapshot/resource_collectors/dynamodb.py +65 -0
  86. src/snapshot/resource_collectors/ec2.py +240 -0
  87. src/snapshot/resource_collectors/ecs.py +215 -0
  88. src/snapshot/resource_collectors/efs_collector.py +102 -0
  89. src/snapshot/resource_collectors/eks.py +200 -0
  90. src/snapshot/resource_collectors/elasticache_collector.py +79 -0
  91. src/snapshot/resource_collectors/elb.py +126 -0
  92. src/snapshot/resource_collectors/eventbridge.py +156 -0
  93. src/snapshot/resource_collectors/glue.py +199 -0
  94. src/snapshot/resource_collectors/iam.py +188 -0
  95. src/snapshot/resource_collectors/kms.py +111 -0
  96. src/snapshot/resource_collectors/lambda_func.py +139 -0
  97. src/snapshot/resource_collectors/rds.py +109 -0
  98. src/snapshot/resource_collectors/route53.py +86 -0
  99. src/snapshot/resource_collectors/s3.py +105 -0
  100. src/snapshot/resource_collectors/secretsmanager.py +70 -0
  101. src/snapshot/resource_collectors/sns.py +68 -0
  102. src/snapshot/resource_collectors/sqs.py +82 -0
  103. src/snapshot/resource_collectors/ssm.py +160 -0
  104. src/snapshot/resource_collectors/stepfunctions.py +74 -0
  105. src/snapshot/resource_collectors/vpcendpoints.py +79 -0
  106. src/snapshot/resource_collectors/waf.py +159 -0
  107. src/snapshot/storage.py +351 -0
  108. src/storage/__init__.py +21 -0
  109. src/storage/audit_store.py +419 -0
  110. src/storage/database.py +294 -0
  111. src/storage/group_store.py +763 -0
  112. src/storage/inventory_store.py +320 -0
  113. src/storage/resource_store.py +416 -0
  114. src/storage/schema.py +339 -0
  115. src/storage/snapshot_store.py +363 -0
  116. src/utils/__init__.py +12 -0
  117. src/utils/export.py +305 -0
  118. src/utils/hash.py +60 -0
  119. src/utils/logging.py +63 -0
  120. src/utils/pagination.py +41 -0
  121. src/utils/paths.py +51 -0
  122. src/utils/progress.py +41 -0
  123. src/utils/unsupported_resources.py +306 -0
  124. src/web/__init__.py +5 -0
  125. src/web/app.py +97 -0
  126. src/web/dependencies.py +69 -0
  127. src/web/routes/__init__.py +1 -0
  128. src/web/routes/api/__init__.py +18 -0
  129. src/web/routes/api/charts.py +156 -0
  130. src/web/routes/api/cleanup.py +186 -0
  131. src/web/routes/api/filters.py +253 -0
  132. src/web/routes/api/groups.py +305 -0
  133. src/web/routes/api/inventories.py +80 -0
  134. src/web/routes/api/queries.py +202 -0
  135. src/web/routes/api/resources.py +393 -0
  136. src/web/routes/api/snapshots.py +314 -0
  137. src/web/routes/api/views.py +260 -0
  138. src/web/routes/pages.py +198 -0
  139. src/web/services/__init__.py +1 -0
  140. src/web/templates/base.html +955 -0
  141. src/web/templates/components/navbar.html +31 -0
  142. src/web/templates/components/sidebar.html +104 -0
  143. src/web/templates/pages/audit_logs.html +86 -0
  144. src/web/templates/pages/cleanup.html +279 -0
  145. src/web/templates/pages/dashboard.html +227 -0
  146. src/web/templates/pages/diff.html +175 -0
  147. src/web/templates/pages/error.html +30 -0
  148. src/web/templates/pages/groups.html +721 -0
  149. src/web/templates/pages/queries.html +246 -0
  150. src/web/templates/pages/resources.html +2429 -0
  151. src/web/templates/pages/snapshot_detail.html +271 -0
  152. src/web/templates/pages/snapshots.html +429 -0
src/utils/__init__.py ADDED
@@ -0,0 +1,12 @@
+ """Utility modules for AWS Baseline Snapshot tool."""
+
+ from .export import export_to_csv, export_to_json
+ from .hash import compute_config_hash
+ from .logging import setup_logging
+
+ __all__ = [
+     "setup_logging",
+     "compute_config_hash",
+     "export_to_json",
+     "export_to_csv",
+ ]
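For orientation, a minimal usage sketch of the helpers re-exported above, assuming the wheel is installed and the import root is `src` (which matches the `from src.models.report import ...` imports used by the modules below); the sample values are illustrative, not from the package:

# Illustrative only: exercise the package-level re-exports.
from src.utils import setup_logging, compute_config_hash

setup_logging(level="DEBUG", verbose=True)
digest = compute_config_hash({"InstanceId": "i-0123456789abcdef0", "State": "running"})
print(digest)  # 64-character SHA256 hex string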
src/utils/export.py ADDED
@@ -0,0 +1,305 @@
+ """Export utilities for JSON and CSV formats."""
+
+ import csv
+ import json
+ import logging
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, Dict, List
+
+ if TYPE_CHECKING:
+     from src.models.report import DetailedResource, ResourceSummary, SnapshotMetadata
+
+ logger = logging.getLogger(__name__)
+
+
+ def export_to_json(data: Any, filepath: str) -> Path:
+     """Export data to JSON file.
+
+     Args:
+         data: Data to export (must be JSON-serializable)
+         filepath: Destination file path
+
+     Returns:
+         Path to exported file
+     """
+     path = Path(filepath)
+
+     with open(path, "w", encoding="utf-8") as f:
+         json.dump(data, f, indent=2, default=str)
+
+     logger.info(f"Exported data to JSON: {path}")
+     return path
+
+
+ def export_to_csv(data: List[Dict[str, Any]], filepath: str) -> Path:
+     """Export list of dictionaries to CSV file.
+
+     Args:
+         data: List of dictionaries to export
+         filepath: Destination file path
+
+     Returns:
+         Path to exported file
+
+     Raises:
+         ValueError: If data is empty or not a list of dicts
+     """
+     if not data:
+         raise ValueError("Cannot export empty data to CSV")
+
+     if not isinstance(data, list) or not isinstance(data[0], dict):
+         raise ValueError("Data must be a list of dictionaries for CSV export")
+
+     path = Path(filepath)
+
+     # Get fieldnames from first item
+     fieldnames = list(data[0].keys())
+
+     with open(path, "w", newline="", encoding="utf-8") as f:
+         writer = csv.DictWriter(f, fieldnames=fieldnames)
+         writer.writeheader()
+         writer.writerows(data)
+
+     logger.info(f"Exported {len(data)} rows to CSV: {path}")
+     return path
+
+
+ def flatten_dict(d: Dict[str, Any], parent_key: str = "", sep: str = "_") -> Dict[str, Any]:
+     """Flatten a nested dictionary for CSV export.
+
+     Args:
+         d: Dictionary to flatten
+         parent_key: Parent key for nested items
+         sep: Separator for concatenating keys
+
+     Returns:
+         Flattened dictionary
+     """
+     from typing import Any, List, Tuple
+
+     items: List[Tuple[str, Any]] = []
+     for k, v in d.items():
+         new_key = f"{parent_key}{sep}{k}" if parent_key else k
+         if isinstance(v, dict):
+             items.extend(flatten_dict(v, new_key, sep=sep).items())
+         elif isinstance(v, list):
+             # Convert lists to comma-separated strings
+             items.append((new_key, ", ".join(str(x) for x in v)))
+         else:
+             items.append((new_key, v))
+     return dict(items)
+
+
+ def detect_format(filepath: str) -> str:
+     """
+     Detect export format from file extension.
+
+     Args:
+         filepath: Path to file
+
+     Returns:
+         Format string: 'json', 'csv', or 'txt'
+
+     Raises:
+         ValueError: If format is not supported
+     """
+     path = Path(filepath)
+     extension = path.suffix.lower()
+
+     if extension == ".json":
+         return "json"
+     elif extension == ".csv":
+         return "csv"
+     elif extension == ".txt":
+         return "txt"
+     else:
+         raise ValueError(f"Unsupported export format '{extension}'. " f"Supported formats: .json, .csv, .txt")
+
+
+ def export_report_json(
+     filepath: str,
+     metadata: "SnapshotMetadata",
+     summary: "ResourceSummary",
+     resources: List["DetailedResource"],
+ ) -> Path:
+     """
+     Export snapshot report to JSON format.
+
+     Args:
+         filepath: Destination file path
+         metadata: Snapshot metadata
+         summary: Resource summary
+         resources: List of detailed resources
+
+     Returns:
+         Path to exported file
+
+     Raises:
+         FileExistsError: If file already exists
+         FileNotFoundError: If parent directory doesn't exist
+     """
+     path = Path(filepath)
+
+     # Check if file already exists
+     if path.exists():
+         raise FileExistsError(f"Export file '{filepath}' already exists")
+
+     # Check if parent directory exists
+     if not path.parent.exists():
+         raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+     # Build report data structure
+     report_data = {
+         "snapshot_metadata": {
+             "name": metadata.name,
+             "created_at": metadata.created_at.isoformat(),
+             "account_id": metadata.account_id,
+             "regions": metadata.regions,
+             "inventory_name": metadata.inventory_name,
+             "total_resource_count": metadata.total_resource_count,
+         },
+         "summary": {
+             "total_count": summary.total_count,
+             "by_service": dict(summary.by_service),
+             "by_region": dict(summary.by_region),
+             "by_type": dict(summary.by_type),
+         },
+         "resources": [
+             {
+                 "arn": r.arn,
+                 "resource_type": r.resource_type,
+                 "name": r.name,
+                 "region": r.region,
+                 "tags": r.tags,
+                 "created_at": r.created_at.isoformat() if r.created_at else None,
+                 "config_hash": r.config_hash,
+             }
+             for r in resources
+         ],
+     }
+
+     # Write to file
+     with open(path, "w", encoding="utf-8") as f:
+         json.dump(report_data, f, indent=2)
+
+     logger.info(f"Exported report to JSON: {path}")
+     return path
+
+
+ def export_report_csv(filepath: str, resources: List["DetailedResource"]) -> Path:
+     """
+     Export resources to CSV format.
+
+     Args:
+         filepath: Destination file path
+         resources: List of detailed resources
+
+     Returns:
+         Path to exported file
+
+     Raises:
+         FileExistsError: If file already exists
+         FileNotFoundError: If parent directory doesn't exist
+     """
+     path = Path(filepath)
+
+     # Check if file already exists
+     if path.exists():
+         raise FileExistsError(f"Export file '{filepath}' already exists")
+
+     # Check if parent directory exists
+     if not path.parent.exists():
+         raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+     # Write CSV
+     with open(path, "w", newline="", encoding="utf-8") as f:
+         writer = csv.writer(f)
+
+         # Write header
+         writer.writerow(["ARN", "ResourceType", "Name", "Region", "CreatedAt", "Tags"])
+
+         # Write resources
+         for resource in resources:
+             writer.writerow(
+                 [
+                     resource.arn,
+                     resource.resource_type,
+                     resource.name,
+                     resource.region,
+                     resource.created_at.isoformat() if resource.created_at else "",
+                     json.dumps(resource.tags) if resource.tags else "{}",
+                 ]
+             )
+
+     logger.info(f"Exported {len(resources)} resources to CSV: {path}")
+     return path
+
+
+ def export_report_txt(
+     filepath: str,
+     metadata: "SnapshotMetadata",
+     summary: "ResourceSummary",
+ ) -> Path:
+     """
+     Export report summary to plain text format.
+
+     Args:
+         filepath: Destination file path
+         metadata: Snapshot metadata
+         summary: Resource summary
+
+     Returns:
+         Path to exported file
+
+     Raises:
+         FileExistsError: If file already exists
+         FileNotFoundError: If parent directory doesn't exist
+     """
+     path = Path(filepath)
+
+     # Check if file already exists
+     if path.exists():
+         raise FileExistsError(f"Export file '{filepath}' already exists")
+
+     # Check if parent directory exists
+     if not path.parent.exists():
+         raise FileNotFoundError(f"Parent directory '{path.parent}' does not exist")
+
+     # Build text content
+     lines = []
+     lines.append("=" * 65)
+     lines.append(f"Snapshot Report: {metadata.name}")
+     lines.append("=" * 65)
+     lines.append("")
+     lines.append(f"Inventory: {metadata.inventory_name}")
+     lines.append(f"Account ID: {metadata.account_id}")
+     lines.append(f"Created: {metadata.created_at.strftime('%Y-%m-%d %H:%M:%S UTC')}")
+     lines.append(f"Regions: {metadata.region_summary}")
+     lines.append("")
+     lines.append("─" * 65)
+     lines.append("")
+     lines.append("Resource Summary")
+     lines.append("")
+     lines.append(f"Total Resources: {summary.total_count:,}")
+     lines.append("")
+
+     if summary.by_service:
+         lines.append("By Service:")
+         for service, count in summary.top_services(limit=10):
+             percentage = (count / summary.total_count) * 100 if summary.total_count > 0 else 0
+             lines.append(f"  {service:20} {count:5} ({percentage:.1f}%)")
+         lines.append("")
+
+     if summary.by_region:
+         lines.append("By Region:")
+         for region, count in summary.top_regions(limit=10):
+             percentage = (count / summary.total_count) * 100 if summary.total_count > 0 else 0
+             lines.append(f"  {region:20} {count:5} ({percentage:.1f}%)")
+         lines.append("")
+
+     # Write to file
+     with open(path, "w", encoding="utf-8") as f:
+         f.write("\n".join(lines))
+
+     logger.info(f"Exported report to TXT: {path}")
+     return path
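A minimal usage sketch for the generic helpers above; the resource data and file names are illustrative, not taken from the package. Note that, unlike these generic helpers, the report-level exporters (export_report_json, export_report_csv, export_report_txt) refuse to overwrite an existing file.

# Illustrative only: flatten nested resource dicts, then export both formats.
from src.utils.export import export_to_csv, export_to_json, flatten_dict

resources = [
    {"arn": "arn:aws:s3:::example-bucket", "tags": {"env": "dev"}, "regions": ["us-east-1"]},
]
flat = [flatten_dict(r) for r in resources]   # nested dicts become "tags_env"; lists become joined strings
export_to_json(resources, "resources.json")   # pretty-printed JSON, non-serializable values fall back to str()
export_to_csv(flat, "resources.csv")          # CSV header is taken from the first row's keys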
src/utils/hash.py ADDED
@@ -0,0 +1,60 @@
+ """Configuration hashing utility for change detection."""
+
+ import hashlib
+ import json
+ from typing import Any, Dict, Set
+
+ # Attributes to exclude from hashing (volatile data)
+ EXCLUDE_ATTRIBUTES: Set[str] = {
+     "ResponseMetadata",
+     "LastModifiedDate",
+     "CreatedDate",
+     "CreateDate",
+     "State",
+     "Status",
+     "RequestId",
+     "VersionId",
+     "LastUpdateTime",
+     "LastUpdatedTime",
+     "ModifiedTime",
+ }
+
+
+ def compute_config_hash(resource_data: Dict[str, Any]) -> str:
+     """Compute stable SHA256 hash of resource configuration.
+
+     This hash is used for change detection. Volatile attributes
+     (timestamps, states, etc.) are excluded to prevent false positives.
+
+     Args:
+         resource_data: Resource configuration dictionary
+
+     Returns:
+         64-character SHA256 hex string
+     """
+     # Deep copy and remove excluded attributes
+     clean_data = _remove_volatile_attributes(resource_data, EXCLUDE_ATTRIBUTES)
+
+     # Normalize: sort keys for deterministic JSON
+     normalized = json.dumps(clean_data, sort_keys=True, default=str)
+
+     # Hash
+     return hashlib.sha256(normalized.encode()).hexdigest()
+
+
+ def _remove_volatile_attributes(data: Any, exclude_set: Set[str]) -> Any:
+     """Recursively remove excluded attributes from nested dict/list.
+
+     Args:
+         data: Data structure to clean (dict, list, or primitive)
+         exclude_set: Set of attribute names to exclude
+
+     Returns:
+         Cleaned data structure
+     """
+     if isinstance(data, dict):
+         return {k: _remove_volatile_attributes(v, exclude_set) for k, v in data.items() if k not in exclude_set}
+     elif isinstance(data, list):
+         return [_remove_volatile_attributes(item, exclude_set) for item in data]
+     else:
+         return data
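A short sketch of the change-detection behaviour, assuming two describe-call payloads that differ only in volatile fields; the payload shapes are illustrative:

# Illustrative only: volatile keys such as "Status" and "LastModifiedDate" are stripped before hashing.
from src.utils.hash import compute_config_hash

before = {"FunctionName": "demo", "MemorySize": 128, "Status": "Pending", "LastModifiedDate": "2024-01-01"}
after = {"FunctionName": "demo", "MemorySize": 128, "Status": "Active", "LastModifiedDate": "2024-06-01"}
assert compute_config_hash(before) == compute_config_hash(after)    # no configuration change detected

changed = dict(before, MemorySize=256)
assert compute_config_hash(before) != compute_config_hash(changed)  # a real change is detected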
src/utils/logging.py ADDED
@@ -0,0 +1,63 @@
+ """Logging configuration for AWS Baseline Snapshot tool."""
+
+ import logging
+ import sys
+ from typing import Optional
+
+
+ def setup_logging(level: str = "INFO", log_file: Optional[str] = None, verbose: bool = False) -> None:
+     """Configure logging for the application.
+
+     Args:
+         level: Log level (DEBUG, INFO, WARN, ERROR)
+         log_file: Optional log file path
+         verbose: If True, show detailed logs; if False, suppress all but critical
+     """
+     # Convert string level to logging constant
+     numeric_level = getattr(logging, level.upper(), logging.INFO)
+
+     # In non-verbose mode, suppress all logs except CRITICAL
+     # User will only see styled Rich console output
+     if not verbose:
+         numeric_level = logging.CRITICAL
+
+     # Create formatter
+     formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+
+     # Configure root logger
+     root_logger = logging.getLogger()
+     root_logger.setLevel(numeric_level)
+
+     # Remove existing handlers
+     for handler in root_logger.handlers[:]:
+         root_logger.removeHandler(handler)
+
+     # Console handler (only add if verbose or log_file specified)
+     if verbose or log_file:
+         console_handler = logging.StreamHandler(sys.stderr)
+         console_handler.setLevel(numeric_level)
+         console_handler.setFormatter(formatter)
+         root_logger.addHandler(console_handler)
+
+     # File handler (if specified)
+     if log_file:
+         file_handler = logging.FileHandler(log_file)
+         file_handler.setLevel(logging.DEBUG)  # Always log everything to file
+         file_handler.setFormatter(formatter)
+         root_logger.addHandler(file_handler)
+
+     # Suppress noisy third-party loggers
+     logging.getLogger("boto3").setLevel(logging.CRITICAL)
+     logging.getLogger("botocore").setLevel(logging.CRITICAL)
+     logging.getLogger("urllib3").setLevel(logging.CRITICAL)
+     logging.getLogger("s3transfer").setLevel(logging.CRITICAL)
+
+     # Suppress internal module logs unless verbose
+     if not verbose:
+         logging.getLogger("src").setLevel(logging.CRITICAL)
+         logging.getLogger("src.snapshot").setLevel(logging.CRITICAL)
+         logging.getLogger("src.snapshot.resource_collectors").setLevel(logging.CRITICAL)
+         logging.getLogger("src.snapshot.capturer").setLevel(logging.CRITICAL)
+         logging.getLogger("src.snapshot.storage").setLevel(logging.CRITICAL)
+         logging.getLogger("src.aws").setLevel(logging.CRITICAL)
+         logging.getLogger("src.aws.credentials").setLevel(logging.CRITICAL)
src/utils/pagination.py ADDED
@@ -0,0 +1,41 @@
+ """
+ Terminal pagination utilities for large resource lists.
+
+ This module provides pagination functionality for displaying large datasets
+ in the terminal with user-friendly navigation controls.
+ """
+
+ from __future__ import annotations
+
+ from typing import Generator, List, TypeVar
+
+ T = TypeVar("T")
+
+
+ def paginate_resources(items: List[T], page_size: int = 100) -> Generator[List[T], None, None]:
+     """
+     Paginate a list of items into pages of specified size.
+
+     This is a memory-efficient generator that yields pages of items
+     without loading everything into memory at once.
+
+     Args:
+         items: List of items to paginate
+         page_size: Number of items per page (default: 100)
+
+     Yields:
+         Lists of items, each containing up to page_size items
+
+     Example:
+         >>> resources = list(range(250))
+         >>> for page in paginate_resources(resources, page_size=100):
+         ...     print(f"Page has {len(page)} items")
+         Page has 100 items
+         Page has 100 items
+         Page has 50 items
+     """
+     if not items:
+         return
+
+     for i in range(0, len(items), page_size):
+         yield items[i : i + page_size]
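Beyond the doctest in the module, a small sketch of driving a page-at-a-time terminal loop; the prompt handling is illustrative and not part of this module:

# Illustrative only: number the pages and stop when the user declines to continue.
from src.utils.pagination import paginate_resources

resources = [f"resource-{i}" for i in range(250)]
for page_number, page in enumerate(paginate_resources(resources, page_size=100), start=1):
    print(f"--- page {page_number}: {len(page)} items ---")
    if input("Show next page? [y/N] ").strip().lower() != "y":
        break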
src/utils/paths.py ADDED
@@ -0,0 +1,51 @@
+ """Path resolution utilities for snapshot storage."""
+
+ import os
+ from pathlib import Path
+ from typing import Optional, Union
+
+
+ def get_snapshot_storage_path(custom_path: Optional[Union[str, Path]] = None) -> Path:
+     """Resolve snapshot storage path with precedence: parameter > env var > default.
+
+     Precedence order:
+     1. custom_path parameter (if provided)
+     2. AWS_INVENTORY_STORAGE_PATH environment variable (if set)
+     3. ~/.snapshots (default)
+
+     Args:
+         custom_path: Optional custom path override
+
+     Returns:
+         Resolved Path object for snapshot storage
+
+     Examples:
+         # Use default
+         >>> get_snapshot_storage_path()
+         Path.home() / '.snapshots'
+
+         # Use environment variable
+         >>> os.environ['AWS_INVENTORY_STORAGE_PATH'] = '/data/snapshots'
+         >>> get_snapshot_storage_path()
+         Path('/data/snapshots')
+
+         # Use parameter (highest priority)
+         >>> get_snapshot_storage_path('/custom/path')
+         Path('/custom/path')
+     """
+     # Priority 1: Custom path parameter (but not empty string)
+     if custom_path:
+         # Handle both str and Path types
+         if isinstance(custom_path, str):
+             if custom_path.strip():
+                 return Path(custom_path).expanduser().resolve()
+         else:  # Path object
+             return custom_path.expanduser().resolve()
+
+     # Priority 2: Environment variable
+     env_path = os.getenv("AWS_INVENTORY_STORAGE_PATH")
+     if env_path:
+         return Path(env_path).expanduser().resolve()
+
+     # Priority 3: Default to ~/.snapshots
+     return Path.home() / ".snapshots"
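A runnable version of the precedence illustrated in the docstring; the paths are illustrative:

# Illustrative only: parameter beats environment variable beats the ~/.snapshots default.
import os
from src.utils.paths import get_snapshot_storage_path

os.environ["AWS_INVENTORY_STORAGE_PATH"] = "/tmp/inventory-snapshots"
print(get_snapshot_storage_path())                  # /tmp/inventory-snapshots (env var, resolved)
print(get_snapshot_storage_path("~/custom-snaps"))  # parameter wins; expanded and resolved
del os.environ["AWS_INVENTORY_STORAGE_PATH"]
print(get_snapshot_storage_path())                  # ~/.snapshots (default)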
src/utils/progress.py ADDED
@@ -0,0 +1,41 @@
+ """Progress indicator utilities using Rich library."""
+
+ from contextlib import contextmanager
+
+ from rich.progress import (
+     BarColumn,
+     Progress,
+     SpinnerColumn,
+     TaskProgressColumn,
+     TextColumn,
+     TimeRemainingColumn,
+ )
+
+
+ @contextmanager
+ def create_progress():
+     """Create a Rich progress context for tracking operations.
+
+     Yields:
+         Progress instance configured for multi-task tracking
+     """
+     with Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         TaskProgressColumn(),
+         TimeRemainingColumn(),
+     ) as progress:
+         yield progress
+
+
+ def create_spinner_progress():
+     """Create a simple spinner progress for indeterminate operations.
+
+     Returns:
+         Progress instance with spinner
+     """
+     return Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+     )
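A minimal sketch of consuming these helpers with the standard Rich task API; the task description and region list are illustrative:

# Illustrative only: one task advanced as each region finishes.
import time
from src.utils.progress import create_progress

with create_progress() as progress:
    task = progress.add_task("Collecting resources", total=3)
    for region in ("us-east-1", "eu-west-1", "ap-southeast-2"):
        time.sleep(0.1)          # stand-in for a collector call
        progress.advance(task)   # bar, percentage, and time remaining update automatically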