iam-policy-validator 1.0.4__py3-none-any.whl → 1.1.1__py3-none-any.whl
This diff reflects the changes between publicly released versions of this package as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of iam-policy-validator might be problematic.
- {iam_policy_validator-1.0.4.dist-info → iam_policy_validator-1.1.1.dist-info}/METADATA +88 -10
- iam_policy_validator-1.1.1.dist-info/RECORD +53 -0
- iam_validator/__version__.py +1 -1
- iam_validator/checks/__init__.py +2 -0
- iam_validator/checks/action_condition_enforcement.py +112 -28
- iam_validator/checks/action_resource_constraint.py +151 -0
- iam_validator/checks/action_validation.py +18 -138
- iam_validator/checks/security_best_practices.py +241 -400
- iam_validator/checks/utils/__init__.py +1 -0
- iam_validator/checks/utils/policy_level_checks.py +143 -0
- iam_validator/checks/utils/sensitive_action_matcher.py +252 -0
- iam_validator/checks/utils/wildcard_expansion.py +89 -0
- iam_validator/commands/__init__.py +3 -1
- iam_validator/commands/cache.py +402 -0
- iam_validator/commands/validate.py +7 -5
- iam_validator/core/access_analyzer_report.py +2 -1
- iam_validator/core/aws_fetcher.py +79 -19
- iam_validator/core/check_registry.py +3 -0
- iam_validator/core/cli.py +1 -1
- iam_validator/core/config_loader.py +40 -3
- iam_validator/core/defaults.py +334 -0
- iam_validator/core/formatters/__init__.py +2 -0
- iam_validator/core/formatters/console.py +44 -7
- iam_validator/core/formatters/csv.py +7 -2
- iam_validator/core/formatters/enhanced.py +433 -0
- iam_validator/core/formatters/html.py +127 -37
- iam_validator/core/formatters/markdown.py +10 -2
- iam_validator/core/models.py +30 -6
- iam_validator/core/policy_checks.py +21 -2
- iam_validator/core/report.py +112 -26
- iam_policy_validator-1.0.4.dist-info/RECORD +0 -45
- {iam_policy_validator-1.0.4.dist-info → iam_policy_validator-1.1.1.dist-info}/WHEEL +0 -0
- {iam_policy_validator-1.0.4.dist-info → iam_policy_validator-1.1.1.dist-info}/entry_points.txt +0 -0
- {iam_policy_validator-1.0.4.dist-info → iam_policy_validator-1.1.1.dist-info}/licenses/LICENSE +0 -0
iam_validator/commands/cache.py (new file)

@@ -0,0 +1,402 @@
+"""Cache management command for IAM Policy Validator."""
+
+import argparse
+import logging
+from pathlib import Path
+
+from rich.console import Console
+from rich.table import Table
+
+from iam_validator.commands.base import Command
+from iam_validator.core.aws_fetcher import AWSServiceFetcher
+from iam_validator.core.config_loader import ConfigLoader
+
+logger = logging.getLogger(__name__)
+console = Console()
+
+
+class CacheCommand(Command):
+    """Manage AWS service definition cache."""
+
+    @property
+    def name(self) -> str:
+        return "cache"
+
+    @property
+    def help(self) -> str:
+        return "Manage AWS service definition cache"
+
+    @property
+    def epilog(self) -> str:
+        return """
+Examples:
+  # Show cache information
+  iam-validator cache info
+
+  # List all cached services
+  iam-validator cache list
+
+  # Clear all cached AWS service definitions
+  iam-validator cache clear
+
+  # Refresh cache (clear and pre-fetch common services)
+  iam-validator cache refresh
+
+  # Pre-fetch common AWS services
+  iam-validator cache prefetch
+
+  # Show cache location
+  iam-validator cache location
+"""
+
+    def add_arguments(self, parser: argparse.ArgumentParser) -> None:
+        """Add cache command arguments."""
+        subparsers = parser.add_subparsers(dest="cache_action", help="Cache action to perform")
+
+        # Info subcommand
+        info_parser = subparsers.add_parser("info", help="Show cache information and statistics")
+        info_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+
+        # List subcommand
+        list_parser = subparsers.add_parser("list", help="List all cached AWS services")
+        list_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+        list_parser.add_argument(
+            "--format",
+            choices=["table", "columns", "simple"],
+            default="table",
+            help="Output format (default: table)",
+        )
+
+        # Clear subcommand
+        clear_parser = subparsers.add_parser(
+            "clear", help="Clear all cached AWS service definitions"
+        )
+        clear_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+
+        # Refresh subcommand
+        refresh_parser = subparsers.add_parser(
+            "refresh", help="Clear cache and pre-fetch common AWS services"
+        )
+        refresh_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+
+        # Prefetch subcommand
+        prefetch_parser = subparsers.add_parser(
+            "prefetch", help="Pre-fetch common AWS services (without clearing)"
+        )
+        prefetch_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+
+        # Location subcommand
+        location_parser = subparsers.add_parser("location", help="Show cache directory location")
+        location_parser.add_argument(
+            "--config",
+            type=str,
+            help="Path to configuration file",
+        )
+
+    async def execute(self, args: argparse.Namespace) -> int:
+        """Execute cache command."""
+        if not hasattr(args, "cache_action") or not args.cache_action:
+            console.print("[red]Error:[/red] No cache action specified")
+            console.print("Use 'iam-validator cache --help' for available actions")
+            return 1
+
+        # Load config to get cache settings
+        config_path = getattr(args, "config", None)
+        config = ConfigLoader.load_config(explicit_path=config_path, allow_missing=True)
+
+        cache_enabled = config.get_setting("cache_enabled", True)
+        cache_ttl_hours = config.get_setting("cache_ttl_hours", 168)
+        cache_directory = config.get_setting("cache_directory", None)
+        cache_ttl_seconds = cache_ttl_hours * 3600
+
+        # Get cache directory (even if caching is disabled, for info purposes)
+        cache_dir = AWSServiceFetcher._get_cache_directory(cache_directory)
+
+        action = args.cache_action
+
+        if action == "info":
+            return await self._show_info(cache_dir, cache_enabled, cache_ttl_hours)
+        elif action == "list":
+            output_format = getattr(args, "format", "table")
+            return self._list_cached_services(cache_dir, output_format)
+        elif action == "clear":
+            return await self._clear_cache(cache_dir, cache_enabled)
+        elif action == "refresh":
+            return await self._refresh_cache(cache_enabled, cache_ttl_seconds, cache_directory)
+        elif action == "prefetch":
+            return await self._prefetch_services(cache_enabled, cache_ttl_seconds, cache_directory)
+        elif action == "location":
+            return self._show_location(cache_dir)
+        else:
+            console.print(f"[red]Error:[/red] Unknown cache action: {action}")
+            return 1
+
+    async def _show_info(self, cache_dir: Path, cache_enabled: bool, cache_ttl_hours: int) -> int:
+        """Show cache information and statistics."""
+        table = Table(title="Cache Information")
+        table.add_column("Setting", style="cyan", no_wrap=True)
+        table.add_column("Value", style="white")
+
+        # Cache status
+        table.add_row(
+            "Status", "[green]Enabled[/green]" if cache_enabled else "[red]Disabled[/red]"
+        )
+
+        # Cache location
+        table.add_row("Location", str(cache_dir))
+
+        # Cache exists?
+        exists = cache_dir.exists()
+        table.add_row("Exists", "[green]Yes[/green]" if exists else "[yellow]No[/yellow]")
+
+        # Cache TTL
+        ttl_days = cache_ttl_hours / 24
+        table.add_row("TTL", f"{cache_ttl_hours} hours ({ttl_days:.1f} days)")
+
+        if exists:
+            # Count cached files
+            cache_files = list(cache_dir.glob("*.json"))
+            table.add_row("Cached Services", str(len(cache_files)))
+
+            # Calculate cache size
+            total_size = sum(f.stat().st_size for f in cache_files)
+            size_mb = total_size / (1024 * 1024)
+            table.add_row("Cache Size", f"{size_mb:.2f} MB")
+
+            # Show some cached services
+            if cache_files:
+                service_names = []
+                for f in cache_files[:5]:
+                    name = f.stem.split("_")[0] if "_" in f.stem else f.stem
+                    service_names.append(name)
+                sample = ", ".join(service_names)
+                if len(cache_files) > 5:
+                    sample += f", ... ({len(cache_files) - 5} more)"
+                table.add_row("Sample Services", sample)
+
+        console.print(table)
+        return 0
+
+    def _list_cached_services(self, cache_dir: Path, output_format: str) -> int:
+        """List all cached AWS services."""
+        if not cache_dir.exists():
+            console.print("[yellow]Cache directory does not exist[/yellow]")
+            return 0
+
+        cache_files = list(cache_dir.glob("*.json"))
+
+        if not cache_files:
+            console.print("[yellow]No services cached yet[/yellow]")
+            return 0
+
+        # Extract service names from filenames
+        services = []
+        for f in cache_files:
+            # Handle both formats: "service_hash.json" and "services_list.json"
+            if f.stem == "services_list":
+                continue  # Skip the services list file
+
+            # Extract service name (before underscore or full name)
+            name = f.stem.split("_")[0] if "_" in f.stem else f.stem
+
+            # Get file stats
+            size = f.stat().st_size
+            mtime = f.stat().st_mtime
+
+            services.append({
+                "name": name,
+                "size": size,
+                "file": f.name,
+                "mtime": mtime
+            })
+
+        # Sort by service name
+        services.sort(key=lambda x: x["name"])
+
+        if output_format == "table":
+            self._print_services_table(services)
+        elif output_format == "columns":
+            self._print_services_columns(services)
+        else:  # simple
+            self._print_services_simple(services)
+
+        return 0
+
+    def _print_services_table(self, services: list[dict]) -> None:
+        """Print services in a nice table format."""
+        from datetime import datetime
+
+        table = Table(title=f"Cached AWS Services ({len(services)} total)")
+        table.add_column("Service", style="cyan", no_wrap=True)
+        table.add_column("Cache File", style="white")
+        table.add_column("Size", style="yellow", justify="right")
+        table.add_column("Cached", style="green")
+
+        for svc in services:
+            size_kb = svc["size"] / 1024
+            cached_time = datetime.fromtimestamp(svc["mtime"]).strftime("%Y-%m-%d %H:%M")
+
+            table.add_row(
+                svc["name"],
+                svc["file"],
+                f"{size_kb:.1f} KB",
+                cached_time
+            )
+
+        console.print(table)
+
+    def _print_services_columns(self, services: list[dict]) -> None:
+        """Print services in columns format (like ls)."""
+        from rich.columns import Columns
+
+        console.print(f"[cyan]Cached AWS Services ({len(services)} total):[/cyan]\n")
+
+        service_names = [f"[green]{svc['name']}[/green]" for svc in services]
+        console.print(Columns(service_names, equal=True, expand=False))
+
+    def _print_services_simple(self, services: list[dict]) -> None:
+        """Print services in simple list format."""
+        console.print(f"[cyan]Cached AWS Services ({len(services)} total):[/cyan]\n")
+
+        for svc in services:
+            console.print(svc["name"])
+
+    async def _clear_cache(self, cache_dir: Path, cache_enabled: bool) -> int:
+        """Clear all cached AWS service definitions."""
+        if not cache_enabled:
+            console.print("[yellow]Warning:[/yellow] Cache is disabled in config")
+            return 0
+
+        if not cache_dir.exists():
+            console.print("[yellow]Cache directory does not exist, nothing to clear[/yellow]")
+            return 0
+
+        # Count files before deletion
+        cache_files = list(cache_dir.glob("*.json"))
+        file_count = len(cache_files)
+
+        if file_count == 0:
+            console.print("[yellow]Cache is already empty[/yellow]")
+            return 0
+
+        # Delete cache files
+        deleted = 0
+        failed = 0
+        for cache_file in cache_files:
+            try:
+                cache_file.unlink()
+                deleted += 1
+            except Exception as e:
+                logger.error(f"Failed to delete {cache_file}: {e}")
+                failed += 1
+
+        if failed == 0:
+            console.print(f"[green]✓[/green] Cleared {deleted} cached service definitions")
+        else:
+            console.print(
+                f"[yellow]![/yellow] Cleared {deleted} files, failed to delete {failed} files"
+            )
+            return 1
+
+        return 0
+
+    async def _refresh_cache(
+        self, cache_enabled: bool, cache_ttl_seconds: int, cache_directory: str | None
+    ) -> int:
+        """Clear cache and pre-fetch common services."""
+        if not cache_enabled:
+            console.print("[red]Error:[/red] Cache is disabled in config")
+            console.print("Enable cache by setting 'cache_enabled: true' in your config")
+            return 1
+
+        console.print("[cyan]Refreshing cache...[/cyan]")
+
+        # Create fetcher and clear cache
+        async with AWSServiceFetcher(
+            enable_cache=cache_enabled,
+            cache_ttl=cache_ttl_seconds,
+            cache_dir=cache_directory,
+            prefetch_common=False,  # Don't prefetch yet, we'll do it after clearing
+        ) as fetcher:
+            # Clear existing cache
+            console.print("Clearing old cache...")
+            await fetcher.clear_caches()
+
+            # Fetch the service list
+            console.print("Fetching fresh AWS service definitions...")
+            services = await fetcher.fetch_services()
+            console.print(f"[green]✓[/green] Fetched list of {len(services)} AWS services")
+
+            # Prefetch common services
+            console.print("Pre-fetching common services...")
+            prefetched = 0
+            for service_name in fetcher.COMMON_SERVICES:
+                try:
+                    await fetcher.fetch_service_by_name(service_name)
+                    prefetched += 1
+                except Exception as e:
+                    logger.warning(f"Failed to prefetch {service_name}: {e}")
+
+            console.print(f"[green]✓[/green] Pre-fetched {prefetched} common services")
+
+        console.print("[green]✓[/green] Cache refreshed successfully")
+        return 0
+
+    async def _prefetch_services(
+        self, cache_enabled: bool, cache_ttl_seconds: int, cache_directory: str | None
+    ) -> int:
+        """Pre-fetch common AWS services without clearing cache."""
+        if not cache_enabled:
+            console.print("[red]Error:[/red] Cache is disabled in config")
+            console.print("Enable cache by setting 'cache_enabled: true' in your config")
+            return 1
+
+        console.print("[cyan]Pre-fetching common AWS services...[/cyan]")
+
+        async with AWSServiceFetcher(
+            enable_cache=cache_enabled,
+            cache_ttl=cache_ttl_seconds,
+            cache_dir=cache_directory,
+            prefetch_common=True,  # Enable prefetching
+        ) as fetcher:
+            # Prefetching happens in __aenter__, just wait for it
+            prefetched = len(fetcher._prefetched_services)
+            total = len(fetcher.COMMON_SERVICES)
+
+            console.print(
+                f"[green]✓[/green] Pre-fetched {prefetched}/{total} common services successfully"
+            )
+
+        return 0
+
+    def _show_location(self, cache_dir: Path) -> int:
+        """Show cache directory location."""
+        console.print(f"[cyan]Cache directory:[/cyan] {cache_dir}")
+
+        if cache_dir.exists():
+            console.print("[green]✓[/green] Directory exists")
+        else:
+            console.print("[yellow]![/yellow] Directory does not exist yet")
+            console.print("It will be created automatically when caching is used")
+
+        return 0
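The new `cache` command above plugs into the CLI through the same `Command` interface as the other subcommands: a `name`/`help`/`epilog` surface plus `add_arguments()` and an async `execute()`. A minimal sketch of driving it directly with argparse, assuming the base class requires nothing beyond what is shown above; the config path is a made-up example:

# Illustrative sketch, not part of the released code. Assumes the Command base
# class needs only the members shown above; "./iam-validator.yaml" is hypothetical.
import argparse
import asyncio

from iam_validator.commands.cache import CacheCommand

cmd = CacheCommand()
parser = argparse.ArgumentParser(
    prog=f"iam-validator {cmd.name}",
    description=cmd.help,
    epilog=cmd.epilog,
    formatter_class=argparse.RawDescriptionHelpFormatter,  # preserve the epilog layout
)
cmd.add_arguments(parser)

# Equivalent to: iam-validator cache info --config ./iam-validator.yaml
args = parser.parse_args(["info", "--config", "./iam-validator.yaml"])
exit_code = asyncio.run(cmd.execute(args))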
iam_validator/commands/validate.py

@@ -59,9 +59,9 @@ Examples:
         parser.add_argument(
             "--format",
             "-f",
-            choices=["console", "json", "markdown", "html", "csv", "sarif"],
+            choices=["console", "enhanced", "json", "markdown", "html", "csv", "sarif"],
             default="console",
-            help="Output format (default: console)",
+            help="Output format (default: console). Use 'enhanced' for modern visual output with Rich library",
         )
 
         parser.add_argument(
@@ -177,7 +177,8 @@ Examples:
         report = generator.generate_report(results)
 
         # Output results
-        if args.format
+        if args.format is None:
+            # Default: use classic console output (direct Rich printing)
             generator.print_console_report(report)
         elif args.format == "json":
             if args.output:
@@ -190,7 +191,7 @@ Examples:
             else:
                 print(generator.generate_github_comment(report))
         else:
-            # Use formatter registry for other formats (html, csv, sarif)
+            # Use formatter registry for other formats (enhanced, html, csv, sarif)
            output_content = generator.format_report(report, args.format)
            if args.output:
                with open(args.output, "w", encoding="utf-8") as f:
@@ -285,6 +286,7 @@ Examples:
 
         # Output final results
         if args.format == "console":
+            # Classic console output (direct Rich printing from report.py)
             generator.print_console_report(report)
         elif args.format == "json":
             if args.output:
@@ -297,7 +299,7 @@ Examples:
             else:
                 print(generator.generate_github_comment(report))
         else:
-            # Use formatter registry for other formats (html, csv, sarif)
+            # Use formatter registry for other formats (enhanced, html, csv, sarif)
            output_content = generator.format_report(report, args.format)
            if args.output:
                with open(args.output, "w", encoding="utf-8") as f:
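The first hunk above adds `enhanced` to the accepted `--format` values; the remaining hunks route it, like html/csv/sarif, through the formatter registry instead of the classic console printer. A stand-alone sketch of the resulting flag surface; the parser below is a stand-in for illustration, not the validate command's real parser:

# Illustrative sketch mirroring the choices added above; this parser is a
# hypothetical stand-in, not the validate command's actual argument parser.
import argparse

parser = argparse.ArgumentParser(prog="iam-validator validate")
parser.add_argument(
    "--format",
    "-f",
    choices=["console", "enhanced", "json", "markdown", "html", "csv", "sarif"],
    default="console",
    help="Output format (default: console). Use 'enhanced' for modern visual output with Rich library",
)

args = parser.parse_args(["--format", "enhanced"])
assert args.format == "enhanced"  # the new format is accepted like any other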
iam_validator/core/access_analyzer_report.py

@@ -8,6 +8,7 @@ from rich.panel import Panel
 from rich.table import Table
 from rich.text import Text
 
+from iam_validator.__version__ import __version__
 from iam_validator.core.access_analyzer import (
     AccessAnalyzerFinding,
     AccessAnalyzerReport,
@@ -70,7 +71,7 @@ class AccessAnalyzerReportFormatter:
 
         panel = Panel(
             summary_text,
-            title="[bold]Access Analyzer Validation Summary[/bold]",
+            title=f"[bold]Access Analyzer Validation Summary (iam-validator v{__version__})[/bold]",
             border_style="blue",
         )
         self.console.print(panel)
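The second hunk folds the package version into the Rich panel title. A small illustration of what the title string becomes; `__version__` is whatever `iam_validator/__version__.py` exports, which is 1.1.1 for this release:

# Illustrative sketch of the new title string (not part of the diff).
from iam_validator.__version__ import __version__

title = f"[bold]Access Analyzer Validation Summary (iam-validator v{__version__})[/bold]"
# For this release: "[bold]Access Analyzer Validation Summary (iam-validator v1.1.1)[/bold]"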
iam_validator/core/aws_fetcher.py

@@ -23,7 +23,9 @@ import asyncio
 import hashlib
 import json
 import logging
+import os
 import re
+import sys
 import time
 from collections import OrderedDict
 from pathlib import Path
@@ -122,28 +124,51 @@ class AWSServiceFetcher:
     BASE_URL = "https://servicereference.us-east-1.amazonaws.com/"
 
     # Common AWS services to pre-fetch
+    # All other services will be fetched on-demand (lazy loading if found in policies)
     COMMON_SERVICES = [
-        "
-        "
-        "
+        "acm",
+        "apigateway",
+        "autoscaling",
+        "backup",
+        "batch",
+        "bedrock",
+        "cloudformation",
+        "cloudfront",
+        "cloudtrail",
+        "cloudwatch",
+        "config",
+        "dynamodb",
+        "ec2-instance-connect",
         "ec2",
+        "ecr",
+        "ecs",
+        "eks",
+        "elasticache",
+        "elasticloadbalancing",
+        "events",
+        "firehose",
+        "glacier",
+        "glue",
+        "guardduty",
+        "iam",
+        "imagebuilder",
+        "inspector2",
+        "kinesis",
+        "kms",
         "lambda",
-        "
+        "logs",
         "rds",
-        "
+        "route53",
+        "s3",
+        "scheduler",
+        "secretsmanager",
+        "securityhub",
         "sns",
         "sqs",
-        "
-        "
-        "
-        "
-        "route53",
-        "apigateway",
-        "ecs",
-        "eks",
-        "cloudfront",
-        "logs",
-        "events",
+        "sts",
+        "support",
+        "waf",
+        "wafv2",
     ]
 
     def __init__(
@@ -151,11 +176,12 @@ class AWSServiceFetcher:
         timeout: float = 30.0,
         retries: int = 3,
         enable_cache: bool = True,
-        cache_ttl: int =
+        cache_ttl: int = 604800,
         memory_cache_size: int = 256,
         connection_pool_size: int = 50,
         keepalive_connections: int = 20,
         prefetch_common: bool = True,
+        cache_dir: Path | str | None = None,
     ):
         """Initialize aws service fetcher.
 
@@ -163,11 +189,12 @@ class AWSServiceFetcher:
             timeout: Request timeout in seconds
             retries: Number of retry attempts
             enable_cache: Enable disk caching
-            cache_ttl: Cache time to live in seconds (default:
+            cache_ttl: Cache time to live in seconds (default: 7 days)
             memory_cache_size: Max items in memory cache
             connection_pool_size: Max connections in pool
             keepalive_connections: Number of keepalive connections
             prefetch_common: Pre-fetch common services on init
+            cache_dir: Custom cache directory (defaults to platform-specific user cache dir)
         """
         self.timeout = timeout
         self.retries = retries
@@ -177,7 +204,7 @@ class AWSServiceFetcher:
 
         self._client: httpx.AsyncClient | None = None
         self._memory_cache = LRUCache(maxsize=memory_cache_size, ttl=cache_ttl)
-        self._cache_dir =
+        self._cache_dir = self._get_cache_directory(cache_dir)
         self._patterns = CompiledPatterns()
 
         # Batch request queue
@@ -195,6 +222,39 @@ class AWSServiceFetcher:
         if self.enable_cache:
             self._cache_dir.mkdir(parents=True, exist_ok=True)
 
+    @staticmethod
+    def _get_cache_directory(cache_dir: Path | str | None = None) -> Path:
+        """Get the cache directory path, using platform-appropriate defaults.
+
+        Priority:
+        1. Provided cache_dir parameter
+        2. Platform-specific user cache directory
+           - Linux/Unix: ~/.cache/iam-validator/aws_services
+           - macOS: ~/Library/Caches/iam-validator/aws_services
+           - Windows: %LOCALAPPDATA%/iam-validator/cache/aws_services
+
+        Args:
+            cache_dir: Optional custom cache directory path
+
+        Returns:
+            Path object for the cache directory
+        """
+        if cache_dir is not None:
+            return Path(cache_dir)
+
+        # Determine platform-specific cache directory
+        if sys.platform == "darwin":
+            # macOS
+            base_cache = Path.home() / "Library" / "Caches"
+        elif sys.platform == "win32":
+            # Windows
+            base_cache = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+        else:
+            # Linux and other Unix-like systems
+            base_cache = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+
+        return base_cache / "iam-validator" / "aws_services"
+
     async def __aenter__(self) -> "AWSServiceFetcher":
         """Async context manager entry with optimized settings."""
         self._client = httpx.AsyncClient(
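Because the new `_get_cache_directory` helper is a static method, the cache location can be resolved without constructing a fetcher, which is exactly what the `cache` command shown earlier relies on. A small sketch of its behavior per the code above; the override path is a made-up example:

# Illustrative sketch; return values follow the implementation above, and the
# "/tmp/iam-cache" override is hypothetical.
from pathlib import Path

from iam_validator.core.aws_fetcher import AWSServiceFetcher

# An explicit override is returned as-is:
assert AWSServiceFetcher._get_cache_directory("/tmp/iam-cache") == Path("/tmp/iam-cache")

# With no override, the platform default is used:
#   Linux/Unix: $XDG_CACHE_HOME (or ~/.cache) + /iam-validator/aws_services
#   macOS:      ~/Library/Caches/iam-validator/aws_services
#   Windows:    %LOCALAPPDATA%/iam-validator/aws_services
default_dir = AWSServiceFetcher._get_cache_directory()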
iam_validator/core/check_registry.py

@@ -30,6 +30,7 @@ class CheckConfig:
     severity: str | None = None  # Override default severity
     config: dict[str, Any] = field(default_factory=dict)  # Check-specific config
     description: str = ""
+    root_config: dict[str, Any] = field(default_factory=dict)  # Full config for cross-check access
 
 
 class PolicyCheck(ABC):
@@ -433,6 +434,7 @@ def create_default_registry(
     # Import and register built-in checks
     from iam_validator.checks import (
         ActionConditionEnforcementCheck,
+        ActionResourceConstraintCheck,
         ActionValidationCheck,
         ConditionKeyValidationCheck,
         PolicySizeCheck,
@@ -446,6 +448,7 @@ def create_default_registry(
     registry.register(ResourceValidationCheck())
     registry.register(SecurityBestPracticesCheck())
     registry.register(ActionConditionEnforcementCheck())
+    registry.register(ActionResourceConstraintCheck())
     registry.register(SidUniquenessCheck())
     registry.register(PolicySizeCheck())
 