aws-inventory-manager 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aws-inventory-manager might be problematic. Click here for more details.

Files changed (65)
  1. aws_inventory_manager-0.2.0.dist-info/METADATA +508 -0
  2. aws_inventory_manager-0.2.0.dist-info/RECORD +65 -0
  3. aws_inventory_manager-0.2.0.dist-info/WHEEL +5 -0
  4. aws_inventory_manager-0.2.0.dist-info/entry_points.txt +2 -0
  5. aws_inventory_manager-0.2.0.dist-info/licenses/LICENSE +21 -0
  6. aws_inventory_manager-0.2.0.dist-info/top_level.txt +1 -0
  7. src/__init__.py +3 -0
  8. src/aws/__init__.py +11 -0
  9. src/aws/client.py +128 -0
  10. src/aws/credentials.py +191 -0
  11. src/aws/rate_limiter.py +177 -0
  12. src/cli/__init__.py +5 -0
  13. src/cli/config.py +130 -0
  14. src/cli/main.py +1450 -0
  15. src/cost/__init__.py +5 -0
  16. src/cost/analyzer.py +226 -0
  17. src/cost/explorer.py +209 -0
  18. src/cost/reporter.py +237 -0
  19. src/delta/__init__.py +5 -0
  20. src/delta/calculator.py +180 -0
  21. src/delta/reporter.py +225 -0
  22. src/models/__init__.py +17 -0
  23. src/models/cost_report.py +87 -0
  24. src/models/delta_report.py +111 -0
  25. src/models/inventory.py +124 -0
  26. src/models/resource.py +99 -0
  27. src/models/snapshot.py +108 -0
  28. src/snapshot/__init__.py +6 -0
  29. src/snapshot/capturer.py +347 -0
  30. src/snapshot/filter.py +245 -0
  31. src/snapshot/inventory_storage.py +264 -0
  32. src/snapshot/resource_collectors/__init__.py +5 -0
  33. src/snapshot/resource_collectors/apigateway.py +140 -0
  34. src/snapshot/resource_collectors/backup.py +136 -0
  35. src/snapshot/resource_collectors/base.py +81 -0
  36. src/snapshot/resource_collectors/cloudformation.py +55 -0
  37. src/snapshot/resource_collectors/cloudwatch.py +109 -0
  38. src/snapshot/resource_collectors/codebuild.py +69 -0
  39. src/snapshot/resource_collectors/codepipeline.py +82 -0
  40. src/snapshot/resource_collectors/dynamodb.py +65 -0
  41. src/snapshot/resource_collectors/ec2.py +240 -0
  42. src/snapshot/resource_collectors/ecs.py +215 -0
  43. src/snapshot/resource_collectors/eks.py +200 -0
  44. src/snapshot/resource_collectors/elb.py +126 -0
  45. src/snapshot/resource_collectors/eventbridge.py +156 -0
  46. src/snapshot/resource_collectors/iam.py +188 -0
  47. src/snapshot/resource_collectors/kms.py +111 -0
  48. src/snapshot/resource_collectors/lambda_func.py +112 -0
  49. src/snapshot/resource_collectors/rds.py +109 -0
  50. src/snapshot/resource_collectors/route53.py +86 -0
  51. src/snapshot/resource_collectors/s3.py +105 -0
  52. src/snapshot/resource_collectors/secretsmanager.py +70 -0
  53. src/snapshot/resource_collectors/sns.py +68 -0
  54. src/snapshot/resource_collectors/sqs.py +72 -0
  55. src/snapshot/resource_collectors/ssm.py +160 -0
  56. src/snapshot/resource_collectors/stepfunctions.py +74 -0
  57. src/snapshot/resource_collectors/vpcendpoints.py +79 -0
  58. src/snapshot/resource_collectors/waf.py +159 -0
  59. src/snapshot/storage.py +259 -0
  60. src/utils/__init__.py +12 -0
  61. src/utils/export.py +87 -0
  62. src/utils/hash.py +60 -0
  63. src/utils/logging.py +63 -0
  64. src/utils/paths.py +51 -0
  65. src/utils/progress.py +41 -0
src/snapshot/filter.py ADDED
@@ -0,0 +1,245 @@
1
+ """Resource filtering for creating historical and tagged baselines."""
2
+
3
+ import logging
4
+ from datetime import datetime
5
+ from typing import Dict, List, Optional
6
+
7
+ from ..models.resource import Resource
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
class ResourceFilter:
    """Filter resources by creation date and tags."""

    def __init__(
        self,
        before_date: Optional[datetime] = None,
        after_date: Optional[datetime] = None,
        required_tags: Optional[Dict[str, str]] = None,
        include_tags: Optional[Dict[str, str]] = None,
        exclude_tags: Optional[Dict[str, str]] = None,
    ):
        """Initialize resource filter.

        Args:
            before_date: Include only resources created before this date (exclusive)
            after_date: Include only resources created on or after this date (inclusive)
            required_tags: DEPRECATED - use include_tags instead (kept for backward compatibility)
            include_tags: Resources must have ALL these tags (AND logic)
            exclude_tags: Resources must NOT have ANY of these tags (OR logic)
        """
        self.before_date = before_date
        self.after_date = after_date
        # Support both required_tags (deprecated) and include_tags (new)
        self.include_tags = include_tags or required_tags or {}
        self.exclude_tags = exclude_tags or {}
        # Keep required_tags for backward compatibility
        self.required_tags = self.include_tags

        # Statistics gathered during the most recent apply() call.
        self.stats = {
            "total_collected": 0,
            "date_matched": 0,
            "tag_matched": 0,
            "final_count": 0,
            "filtered_out_by_date": 0,
            "filtered_out_by_tags": 0,
            "filtered_out_by_exclude_tags": 0,
            "missing_creation_date": 0,
        }

    def apply(self, resources: List[Resource]) -> List[Resource]:
        """Apply filters to a list of resources.

        Args:
            resources: List of resources to filter

        Returns:
            Filtered list of resources
        """
        self.stats["total_collected"] = len(resources)

        filtered = [resource for resource in resources if self._matches_filters(resource)]

        self.stats["final_count"] = len(filtered)

        logger.debug(
            f"Filtering complete: {self.stats['total_collected']} collected, "
            f"{self.stats['final_count']} matched filters"
        )

        return filtered

    def _matches_filters(self, resource: Resource) -> bool:
        """Check if a resource matches all filters.

        Args:
            resource: Resource to check

        Returns:
            True if resource matches all filters
        """
        # Check date filters
        if not self._matches_date_filter(resource):
            self.stats["filtered_out_by_date"] += 1
            return False

        self.stats["date_matched"] += 1

        # Check exclude tags first (if resource has any excluded tags, reject immediately)
        if not self._matches_exclude_filter(resource):
            self.stats["filtered_out_by_exclude_tags"] += 1
            return False

        # Check include tag filters
        if not self._matches_tag_filter(resource):
            self.stats["filtered_out_by_tags"] += 1
            return False

        self.stats["tag_matched"] += 1

        return True

    @staticmethod
    def _as_utc(value: datetime) -> datetime:
        """Return a timezone-aware copy of *value*, assuming UTC when naive.

        Naive and aware datetimes cannot be compared directly, so every
        datetime involved in a comparison is normalized through this helper.

        Args:
            value: Datetime that may or may not carry tzinfo

        Returns:
            The same datetime, guaranteed timezone-aware (UTC assumed if naive)
        """
        if value.tzinfo is None:
            from datetime import timezone as tz

            return value.replace(tzinfo=tz.utc)
        return value

    def _matches_date_filter(self, resource: Resource) -> bool:
        """Check if resource matches date filters.

        Args:
            resource: Resource to check

        Returns:
            True if resource matches date filters (or no date filters specified)
        """
        # If no date filters, everything matches
        if not self.before_date and not self.after_date:
            return True

        # If resource has no creation date, we can't filter by date
        if not resource.created_at:
            self.stats["missing_creation_date"] += 1
            # For resources without creation dates, include them if we're being permissive
            # This is a design choice - could also exclude them
            logger.debug(f"Resource {resource.arn} has no creation date - including by default")
            return True

        # Normalize all datetimes to timezone-aware (UTC assumed) before comparing
        resource_date = self._as_utc(resource.created_at)

        # Check before_date filter (exclusive)
        if self.before_date:
            before_date_aware = self._as_utc(self.before_date)
            if resource_date >= before_date_aware:
                logger.debug(f"Resource {resource.name} created {resource_date} is not before {before_date_aware}")
                return False

        # Check after_date filter (inclusive)
        if self.after_date:
            after_date_aware = self._as_utc(self.after_date)
            if resource_date < after_date_aware:
                logger.debug(f"Resource {resource.name} created {resource_date} is before {after_date_aware}")
                return False

        return True

    def _matches_tag_filter(self, resource: Resource) -> bool:
        """Check if resource has all include tags (AND logic).

        Args:
            resource: Resource to check

        Returns:
            True if resource has all include tags (or no tag filters specified)
        """
        # If no include tag filters, everything matches
        if not self.include_tags:
            return True

        # Check if resource has ALL include tags with matching values (AND logic)
        for key, value in self.include_tags.items():
            if key not in resource.tags:
                logger.debug(f"Resource {resource.name} missing include tag: {key}")
                return False

            if resource.tags[key] != value:
                logger.debug(
                    f"Resource {resource.name} tag {key}={resource.tags[key]} does not match required value {value}"
                )
                return False

        return True

    def _matches_exclude_filter(self, resource: Resource) -> bool:
        """Check if resource has any exclude tags (OR logic).

        Args:
            resource: Resource to check

        Returns:
            True if resource does NOT have any exclude tags (or no exclude filters specified)
        """
        # If no exclude tag filters, everything matches
        if not self.exclude_tags:
            return True

        # Check if resource has ANY of the exclude tags (OR logic)
        for key, value in self.exclude_tags.items():
            if key in resource.tags and resource.tags[key] == value:
                logger.debug(f"Resource {resource.name} has exclude tag {key}={value}")
                return False

        return True

    def get_filter_summary(self) -> str:
        """Get a human-readable summary of applied filters.

        Returns:
            Formatted string describing the filters
        """
        parts = []

        if self.before_date:
            parts.append(f"created before {self.before_date.strftime('%Y-%m-%d')}")

        if self.after_date:
            parts.append(f"created on/after {self.after_date.strftime('%Y-%m-%d')}")

        if self.include_tags:
            tag_strs = [f"{k}={v}" for k, v in self.include_tags.items()]
            parts.append(f"include tags: {', '.join(tag_strs)}")

        if self.exclude_tags:
            tag_strs = [f"{k}={v}" for k, v in self.exclude_tags.items()]
            parts.append(f"exclude tags: {', '.join(tag_strs)}")

        if not parts:
            return "No filters applied"

        return "Filters: " + " AND ".join(parts)

    def get_statistics_summary(self) -> Dict[str, int]:
        """Get filtering statistics.

        Returns:
            Dictionary of filtering statistics
        """
        return self.stats.copy()
@@ -0,0 +1,264 @@
1
+ """Storage service for inventory management."""
2
+
3
+ import logging
4
+ import os
5
+ from pathlib import Path
6
+ from typing import List, Optional, Union
7
+
8
+ import yaml
9
+
10
+ from ..models.inventory import Inventory
11
+ from ..utils.paths import get_snapshot_storage_path
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class InventoryNotFoundError(Exception):
    """Raised when a requested inventory does not exist in storage."""
20
+
21
+
22
class InventoryStorage:
    """Manage inventory storage and retrieval.

    Handles CRUD operations for inventories stored in inventories.yaml file.
    Uses atomic writes (temp file + rename) for crash safety.
    """

    def __init__(self, storage_dir: Optional[Union[str, Path]] = None):
        """Initialize inventory storage.

        Args:
            storage_dir: Directory containing inventories.yaml (default: ~/.snapshots via get_snapshot_storage_path())
        """
        self.storage_dir = get_snapshot_storage_path(storage_dir)
        self.inventory_file = self.storage_dir / "inventories.yaml"

        # Ensure storage directory exists
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def load_all(self) -> List[Inventory]:
        """Load all inventories from inventories.yaml.

        Returns:
            List of all inventories (empty list if file doesn't exist)

        Raises:
            ValueError: If the inventories file exists but cannot be parsed as YAML.
        """
        if not self.inventory_file.exists():
            logger.debug("No inventories.yaml file found, returning empty list")
            return []

        try:
            with open(self.inventory_file, "r") as f:
                data = yaml.safe_load(f)

            if not data or "inventories" not in data:
                logger.debug("Empty or invalid inventories.yaml, returning empty list")
                return []

            inventories = [Inventory.from_dict(inv_data) for inv_data in data["inventories"]]
            logger.debug(f"Loaded {len(inventories)} inventories from storage")
            return inventories

        except yaml.YAMLError as e:
            logger.error(f"Failed to parse inventories.yaml: {e}")
            # Chain the original parser error so the root cause stays visible.
            raise ValueError(f"Corrupted inventories file: {e}") from e
        except Exception as e:
            logger.error(f"Failed to load inventories: {e}")
            raise

    def load_by_account(self, account_id: str) -> List[Inventory]:
        """Load inventories for specific account.

        Args:
            account_id: AWS account ID (12 digits)

        Returns:
            List of inventories for the account
        """
        all_inventories = self.load_all()
        account_inventories = [inv for inv in all_inventories if inv.account_id == account_id]
        logger.debug(f"Found {len(account_inventories)} inventories for account {account_id}")
        return account_inventories

    def get_by_name(self, name: str, account_id: str) -> Inventory:
        """Get specific inventory by name and account.

        Args:
            name: Inventory name
            account_id: AWS account ID

        Returns:
            Inventory instance

        Raises:
            InventoryNotFoundError: If inventory not found
        """
        account_inventories = self.load_by_account(account_id)

        for inventory in account_inventories:
            if inventory.name == name:
                logger.debug(f"Found inventory '{name}' for account {account_id}")
                return inventory

        raise InventoryNotFoundError(f"Inventory '{name}' not found for account {account_id}")

    def get_or_create_default(self, account_id: str) -> Inventory:
        """Get default inventory, creating if it doesn't exist.

        Args:
            account_id: AWS account ID

        Returns:
            Default inventory instance
        """
        try:
            return self.get_by_name("default", account_id)
        except InventoryNotFoundError:
            # Auto-create default inventory
            from datetime import datetime, timezone

            # Single timestamp so created_at and last_updated are identical
            # for a freshly auto-created inventory.
            now = datetime.now(timezone.utc)
            default = Inventory(
                name="default",
                account_id=account_id,
                description="Auto-created default inventory",
                include_tags={},
                exclude_tags={},
                snapshots=[],
                active_snapshot=None,
                created_at=now,
                last_updated=now,
            )
            self.save(default)
            logger.info(f"Created default inventory for account {account_id}")
            return default

    def save(self, inventory: Inventory) -> None:
        """Save/update single inventory using atomic write.

        Args:
            inventory: Inventory to save

        Raises:
            ValueError: If inventory validation fails
        """
        # Validate inventory before saving
        errors = inventory.validate()
        if errors:
            raise ValueError(f"Invalid inventory: {', '.join(errors)}")

        # Load all inventories
        all_inventories = self.load_all()

        # Find and update existing, or append new
        updated = False
        for i, existing in enumerate(all_inventories):
            if existing.name == inventory.name and existing.account_id == inventory.account_id:
                all_inventories[i] = inventory
                updated = True
                logger.debug(f"Updated inventory '{inventory.name}' for account {inventory.account_id}")
                break

        if not updated:
            all_inventories.append(inventory)
            logger.debug(f"Added new inventory '{inventory.name}' for account {inventory.account_id}")

        # Write atomically
        self._atomic_write(all_inventories)

    def delete(self, name: str, account_id: str, delete_snapshots: bool = False) -> int:
        """Delete inventory, optionally deleting its snapshot files.

        Args:
            name: Inventory name
            account_id: AWS account ID
            delete_snapshots: Whether to delete snapshot files

        Returns:
            Number of snapshot files deleted (0 if delete_snapshots=False)

        Raises:
            InventoryNotFoundError: If inventory not found
        """
        # Load inventory to get snapshot list (raises if it doesn't exist)
        inventory = self.get_by_name(name, account_id)

        # Delete snapshot files if requested (best effort - a failed unlink
        # is logged but does not abort deleting the inventory record)
        deleted_count = 0
        if delete_snapshots:
            snapshots_dir = self.storage_dir / "snapshots"
            for snapshot_file in inventory.snapshots:
                snapshot_path = snapshots_dir / snapshot_file
                try:
                    if snapshot_path.exists():
                        snapshot_path.unlink()
                        deleted_count += 1
                        logger.debug(f"Deleted snapshot file: {snapshot_file}")
                except Exception as e:
                    logger.warning(f"Failed to delete snapshot file {snapshot_file}: {e}")

        # Remove inventory from list
        all_inventories = self.load_all()
        all_inventories = [inv for inv in all_inventories if not (inv.name == name and inv.account_id == account_id)]

        # Write atomically
        self._atomic_write(all_inventories)
        logger.info(f"Deleted inventory '{name}' for account {account_id}")

        return deleted_count

    def exists(self, name: str, account_id: str) -> bool:
        """Check if inventory exists.

        Args:
            name: Inventory name
            account_id: AWS account ID

        Returns:
            True if inventory exists, False otherwise
        """
        try:
            self.get_by_name(name, account_id)
            return True
        except InventoryNotFoundError:
            return False

    def validate_unique(self, name: str, account_id: str) -> bool:
        """Validate that (name, account_id) combination is unique.

        Args:
            name: Inventory name
            account_id: AWS account ID

        Returns:
            True if unique, False if already exists
        """
        return not self.exists(name, account_id)

    def _atomic_write(self, inventories: List[Inventory]) -> None:
        """Write inventories using atomic rename pattern.

        This ensures crash safety - either the full write succeeds or it doesn't.
        Uses temp file + os.replace() which is atomic on all platforms.

        Args:
            inventories: List of all inventories to write
        """
        # Prepare data structure
        data = {"inventories": [inv.to_dict() for inv in inventories]}

        # Write to temp file
        temp_path = self.inventory_file.with_suffix(".tmp")
        try:
            with open(temp_path, "w") as f:
                yaml.safe_dump(data, f, default_flow_style=False, sort_keys=False)

            # Atomic rename (replaces existing file)
            os.replace(temp_path, self.inventory_file)
            logger.debug(f"Wrote {len(inventories)} inventories to storage")

        except Exception:
            # Clean up temp file on error
            if temp_path.exists():
                temp_path.unlink()
            raise
@@ -0,0 +1,5 @@
1
+ """Resource collectors for AWS services."""
2
+
3
+ from typing import List
4
+
5
+ __all__: List[str] = []
@@ -0,0 +1,140 @@
1
+ """API Gateway resource collector."""
2
+
3
+ from typing import List
4
+
5
+ from ...models.resource import Resource
6
+ from ...utils.hash import compute_config_hash
7
+ from .base import BaseResourceCollector
8
+
9
+
10
class APIGatewayCollector(BaseResourceCollector):
    """Collector for AWS API Gateway resources (REST, HTTP, WebSocket APIs)."""

    @property
    def service_name(self) -> str:
        """Boto3 service name used by the default client factory."""
        return "apigateway"

    def collect(self) -> List[Resource]:
        """Collect API Gateway resources.

        Collects:
        - REST APIs (v1)
        - HTTP APIs (v2)
        - WebSocket APIs (v2)

        Returns:
            List of API Gateway APIs
        """
        resources = []

        # Collect REST APIs (v1)
        resources.extend(self._collect_rest_apis())

        # Collect HTTP and WebSocket APIs (v2)
        resources.extend(self._collect_v2_apis())

        self.logger.debug(f"Collected {len(resources)} API Gateway APIs in {self.region}")
        return resources

    def _collect_rest_apis(self) -> List[Resource]:
        """Collect REST APIs (API Gateway v1).

        Returns:
            List of REST API resources
        """
        resources = []
        client = self._create_client()

        try:
            paginator = client.get_paginator("get_rest_apis")
            for page in paginator.paginate():
                for api in page.get("items", []):
                    api_id = api["id"]
                    api_name = api["name"]

                    # Build the ARN once; it is needed both for the tag
                    # lookup and for the resource record itself.
                    arn = f"arn:aws:apigateway:{self.region}::/restapis/{api_id}"

                    # Get tags (best effort - missing tags should not fail collection)
                    tags = {}
                    try:
                        tag_response = client.get_tags(resourceArn=arn)
                        tags = tag_response.get("tags", {})
                    except Exception as e:
                        self.logger.debug(f"Could not get tags for REST API {api_id}: {e}")

                    # Extract creation date
                    created_at = api.get("createdDate")

                    # Create resource
                    resource = Resource(
                        arn=arn,
                        resource_type="AWS::ApiGateway::RestApi",
                        name=api_name,
                        region=self.region,
                        tags=tags,
                        config_hash=compute_config_hash(api),
                        created_at=created_at,
                        raw_config=api,
                    )
                    resources.append(resource)

        except Exception as e:
            self.logger.error(f"Error collecting REST APIs in {self.region}: {e}")

        return resources

    def _collect_v2_apis(self) -> List[Resource]:
        """Collect HTTP and WebSocket APIs (API Gateway v2).

        Returns:
            List of v2 API resources
        """
        resources = []

        try:
            client = self._create_client("apigatewayv2")

            paginator = client.get_paginator("get_apis")
            for page in paginator.paginate():
                for api in page.get("Items", []):
                    api_id = api["ApiId"]
                    api_name = api["Name"]
                    protocol_type = api["ProtocolType"]  # HTTP or WEBSOCKET

                    # v2 responses carry tags inline - no extra API call needed
                    tags = api.get("Tags", {})

                    # Build ARN
                    arn = f"arn:aws:apigateway:{self.region}::/apis/{api_id}"

                    # Extract creation date
                    created_at = api.get("CreatedDate")

                    # Determine resource type based on protocol
                    if protocol_type == "HTTP":
                        resource_type = "AWS::ApiGatewayV2::Api::HTTP"
                    elif protocol_type == "WEBSOCKET":
                        resource_type = "AWS::ApiGatewayV2::Api::WebSocket"
                    else:
                        resource_type = f"AWS::ApiGatewayV2::Api::{protocol_type}"

                    # Create resource
                    resource = Resource(
                        arn=arn,
                        resource_type=resource_type,
                        name=api_name,
                        region=self.region,
                        tags=tags,
                        config_hash=compute_config_hash(api),
                        created_at=created_at,
                        raw_config=api,
                    )
                    resources.append(resource)

        except Exception as e:
            self.logger.error(f"Error collecting API Gateway v2 APIs in {self.region}: {e}")

        return resources