devguard 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- devguard/INTEGRATION_SUMMARY.md +121 -0
- devguard/__init__.py +3 -0
- devguard/__main__.py +6 -0
- devguard/checkers/__init__.py +41 -0
- devguard/checkers/api_usage.py +523 -0
- devguard/checkers/aws_cost.py +331 -0
- devguard/checkers/aws_iam.py +284 -0
- devguard/checkers/base.py +25 -0
- devguard/checkers/container.py +137 -0
- devguard/checkers/domain.py +189 -0
- devguard/checkers/firecrawl.py +117 -0
- devguard/checkers/fly.py +225 -0
- devguard/checkers/github.py +210 -0
- devguard/checkers/npm.py +327 -0
- devguard/checkers/npm_security.py +244 -0
- devguard/checkers/redteam.py +290 -0
- devguard/checkers/secret.py +279 -0
- devguard/checkers/swarm.py +376 -0
- devguard/checkers/tailscale.py +143 -0
- devguard/checkers/tailsnitch.py +303 -0
- devguard/checkers/tavily.py +179 -0
- devguard/checkers/vercel.py +192 -0
- devguard/cli.py +1510 -0
- devguard/cli_helpers.py +189 -0
- devguard/config.py +249 -0
- devguard/core.py +293 -0
- devguard/dashboard.py +715 -0
- devguard/discovery.py +363 -0
- devguard/http_client.py +142 -0
- devguard/llm_service.py +481 -0
- devguard/mcp_server.py +259 -0
- devguard/metrics.py +144 -0
- devguard/models.py +208 -0
- devguard/reporting.py +1571 -0
- devguard/sarif.py +295 -0
- devguard/scripts/ANALYSIS_SUMMARY.md +141 -0
- devguard/scripts/README.md +221 -0
- devguard/scripts/auto_fix_recommendations.py +145 -0
- devguard/scripts/generate_npmignore.py +175 -0
- devguard/scripts/generate_security_report.py +324 -0
- devguard/scripts/prepublish_check.sh +29 -0
- devguard/scripts/redteam_npm_packages.py +1262 -0
- devguard/scripts/review_all_repos.py +300 -0
- devguard/spec.py +617 -0
- devguard/sweeps/__init__.py +23 -0
- devguard/sweeps/ai_editor_config_audit.py +697 -0
- devguard/sweeps/cargo_publish_audit.py +655 -0
- devguard/sweeps/dependency_audit.py +419 -0
- devguard/sweeps/gitignore_audit.py +336 -0
- devguard/sweeps/local_dev.py +260 -0
- devguard/sweeps/local_dirty_worktree_secrets.py +521 -0
- devguard/sweeps/project_flaudit.py +636 -0
- devguard/sweeps/public_github_secrets.py +680 -0
- devguard/sweeps/publish_audit.py +478 -0
- devguard/sweeps/ssh_key_audit.py +327 -0
- devguard/utils.py +174 -0
- devguard-0.2.0.dist-info/METADATA +225 -0
- devguard-0.2.0.dist-info/RECORD +60 -0
- devguard-0.2.0.dist-info/WHEEL +4 -0
- devguard-0.2.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Comprehensive review of all repos and npm packages."""
|
|
3
|
+
|
|
4
|
+
import asyncio
|
|
5
|
+
import json
|
|
6
|
+
import signal
|
|
7
|
+
import subprocess
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
import httpx
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
from rich.table import Table
|
|
14
|
+
|
|
15
|
+
# Import devguard discovery
|
|
16
|
+
|
|
17
|
+
devguard_path = Path(__file__).parent.parent.parent
|
|
18
|
+
sys.path.insert(0, str(devguard_path))
|
|
19
|
+
|
|
20
|
+
# Import npm security analysis
|
|
21
|
+
from devguard.scripts.redteam_npm_packages import analyze_package
|
|
22
|
+
|
|
23
|
+
console = Console()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
async def discover_github_repos() -> list[str]:
    """Discover all GitHub repositories via the ``gh`` CLI.

    Best-effort: any failure (missing CLI, timeout, non-zero exit, bad
    JSON) is reported as a warning and an empty list is returned.
    """
    discovered: list[str] = []
    try:
        completed = subprocess.run(
            ["gh", "repo", "list", "--json", "nameWithOwner", "--limit", "1000"],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if completed.returncode == 0:
            discovered = [entry["nameWithOwner"] for entry in json.loads(completed.stdout)]
    except Exception as e:
        console.print(f"[yellow]Warning: Could not discover GitHub repos: {e}[/yellow]")
    return discovered
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
async def discover_npm_packages_from_dev(base_path: Path) -> list[dict[str, str]]:
    """Discover npm packages in dev directory.

    Scans *base_path* one and two levels deep for ``package.json`` files and
    returns ``{"name", "version", "path"}`` records for every manifest that
    declares a package name. Best-effort: malformed/unreadable manifests are
    skipped; a fatal scan error is reported as a warning and the packages
    found so far are returned.
    """
    packages: list[dict[str, str]] = []

    def _read_package(manifest: Path, rel_path: str) -> None:
        # Append a record for *manifest* if it names a package; the same
        # parse/append logic was previously duplicated for both scan depths.
        try:
            with open(manifest) as f:
                pkg_data = json.load(f)
            name = pkg_data.get("name")
            if name:
                packages.append(
                    {
                        "name": name,
                        "version": pkg_data.get("version", "unknown"),
                        "path": rel_path,
                    }
                )
        except Exception:
            # Unreadable or malformed package.json — skip silently (best-effort).
            pass

    try:
        # Use a more efficient approach - scan top-level directories first
        console.print("  [dim]Scanning directories...[/dim]")
        dirs_scanned = 0
        for item in base_path.iterdir():
            if not item.is_dir():
                continue
            dirs_scanned += 1
            if dirs_scanned % 10 == 0:
                console.print(f"  [dim]Scanned {dirs_scanned} directories...[/dim]")

            # Check for package.json in this directory
            package_json = item / "package.json"
            if package_json.exists():
                _read_package(package_json, str(item.name))

            # Also check one level deeper for nested packages
            for subdir in item.iterdir():
                if subdir.is_dir():
                    sub_package_json = subdir / "package.json"
                    if sub_package_json.exists():
                        _read_package(sub_package_json, f"{item.name}/{subdir.name}")
    except Exception as e:
        console.print(f"[yellow]Warning: Error scanning dev directory: {e}[/yellow]")
    return packages
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
async def discover_published_npm_packages(packages: list[dict]) -> list[dict]:
    """Check which packages are published to npm.

    Queries the npm registry for each package name; for packages that
    exist, records the registry's ``latest`` dist-tag (falling back to the
    locally discovered version). Timeouts and errors are reported per
    package and the package is simply skipped.
    """
    published: list[dict] = []
    async with httpx.AsyncClient(timeout=httpx.Timeout(5.0, connect=5.0)) as client:
        for pkg in packages:
            name = pkg["name"]
            try:
                # Scoped names contain "/" which must be URL-encoded for the registry.
                response = await asyncio.wait_for(
                    client.get(f"https://registry.npmjs.org/{name.replace('/', '%2F')}"),
                    timeout=5.0,
                )
                if response.status_code == 200:
                    tags = response.json().get("dist-tags", {})
                    published.append(
                        {
                            "name": name,
                            "version": tags.get("latest", pkg.get("version", "unknown")),
                            "path": pkg.get("path", ""),
                        }
                    )
            except (TimeoutError, httpx.TimeoutException):
                console.print(f"  [yellow]Timeout checking {name}[/yellow]")
            except Exception as e:
                console.print(f"  [yellow]Error checking {name}: {e}[/yellow]")
    return published
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
async def review_repos():
    """Run the full review pipeline.

    Steps: discover GitHub repos, scan the local dev directory for npm
    packages, check which are published, then run a security analysis on up
    to 10 published packages. Findings are printed as a rich table and the
    full results are saved to ``repo_review_results.json``.
    """
    console.print("[bold blue]🔍 Comprehensive Repository Review[/bold blue]\n")

    base_path = Path.home() / "Documents" / "dev"

    # Step 1: Discover GitHub repos
    console.print("[cyan]Step 1: Discovering GitHub repositories...[/cyan]")
    github_repos = await discover_github_repos()
    console.print(f"  Found {len(github_repos)} GitHub repositories")

    # Step 2: Discover npm packages in dev directory
    console.print(f"\n[cyan]Step 2: Scanning {base_path} for npm packages...[/cyan]")
    local_packages = await discover_npm_packages_from_dev(base_path)
    console.print(f"  Found {len(local_packages)} npm packages locally")

    # Step 3: Check which are published
    console.print("\n[cyan]Step 3: Checking which packages are published to npm...[/cyan]")
    console.print(f"  [dim]Checking {len(local_packages)} packages...[/dim]")
    published_packages = await discover_published_npm_packages(local_packages)

    # Remove duplicates by package name (first occurrence wins)
    seen = set()
    unique_packages = []
    for pkg in published_packages:
        if pkg["name"] not in seen:
            seen.add(pkg["name"])
            unique_packages.append(pkg)
    published_packages = unique_packages

    console.print(f"  Found {len(published_packages)} published packages")

    # Step 4: Run security analysis on published packages.
    # FIX: results is initialized unconditionally so the JSON dump below
    # never references an undefined name when nothing is published.
    results = []
    if published_packages:
        console.print("\n[cyan]Step 4: Running security analysis on published packages...[/cyan]")
        console.print(
            f"  [dim]Analyzing {min(len(published_packages), 10)} packages (limited to 10 for performance)...[/dim]"
        )

        for i, pkg in enumerate(published_packages[:10], 1):  # Limit to first 10 for now
            name = pkg["name"]
            version = pkg.get("version")
            console.print(f"  [{i}/10] Analyzing {name}@{version}...")
            try:
                # Add timeout to prevent hanging
                result = await asyncio.wait_for(
                    analyze_package(name, version),
                    timeout=120.0,  # 2 minute timeout per package
                )
                results.append(
                    {
                        "package": name,
                        "version": version,
                        "path": pkg.get("path", ""),
                        "result": result,
                    }
                )
                console.print("    [green]✓ Completed[/green]")
            except TimeoutError:
                console.print("    [red]✗ Timeout after 2 minutes[/red]")
                results.append(
                    {
                        "package": name,
                        "version": version,
                        "path": pkg.get("path", ""),
                        "error": "Timeout after 2 minutes",
                    }
                )
            except Exception as e:
                console.print(f"    [red]✗ Error: {e}[/red]")
                results.append(
                    {
                        "package": name,
                        "version": version,
                        "path": pkg.get("path", ""),
                        "error": str(e),
                    }
                )

        # Display summary
        console.print("\n[bold]📊 Security Analysis Summary[/bold]\n")

        table = Table(title="Published Packages Security Review")
        table.add_column("Package", style="cyan")
        table.add_column("Version", style="magenta")
        table.add_column("Secrets", justify="right")
        table.add_column("Sensitive Files", justify="right")
        table.add_column("Obfuscated Code", justify="right")
        table.add_column("Git History", justify="right")
        table.add_column("Status", style="green")

        for result in results:
            if "error" in result:
                table.add_row(
                    result["package"],
                    result.get("version", "?"),
                    "-",
                    "-",
                    "-",
                    "-",
                    "[red]Error[/red]",
                )
            else:
                findings = result.get("result", {}).get("findings", {})
                secrets = len(findings.get("secrets", []))
                sensitive_files = len(findings.get("sensitive_files", []))
                obfuscated = len(findings.get("obfuscated_code", []))
                git_history = "Yes" if findings.get("git_history") else "No"

                # Any secret, sensitive file, or leaked git history flags the package.
                status = "[green]✓[/green]"
                if secrets > 0 or sensitive_files > 0 or findings.get("git_history"):
                    status = "[red]⚠[/red]"

                table.add_row(
                    result["package"],
                    result.get("version", "?"),
                    str(secrets),
                    str(sensitive_files),
                    str(obfuscated),
                    git_history,
                    status,
                )

        console.print(table)

    # Save detailed results (always, even when no packages were analyzed)
    output_file = Path("repo_review_results.json")
    with open(output_file, "w") as f:
        json.dump(
            {
                "github_repos": github_repos,
                "local_packages": local_packages,
                "published_packages": published_packages,
                "security_results": results,
            },
            f,
            indent=2,
        )
    console.print(f"\n[green]✓[/green] Detailed results saved to {output_file}")

    # Summary
    console.print("\n[bold]📋 Review Summary[/bold]")
    console.print(f"  GitHub Repositories: {len(github_repos)}")
    console.print(f"  Local npm Packages: {len(local_packages)}")
    console.print(f"  Published Packages: {len(published_packages)}")

    if published_packages:
        console.print("\n[bold yellow]💡 Next Steps:[/bold yellow]")
        console.print("  1. Review security findings in the table above")
        console.print("  2. Check detailed results in repo_review_results.json")
        console.print("  3. Run: uv run python devguard/scripts/generate_npmignore.py")
        console.print("  4. Fix any critical issues before next publish")
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def signal_handler(sig, frame):
    """Handle Ctrl+C gracefully.

    Registered for SIGINT in the ``__main__`` block; prints a notice and
    exits with status 0 instead of letting a traceback spill to the
    terminal. *sig* and *frame* are the standard signal-handler arguments
    and are intentionally unused.
    """
    console.print("\n[yellow]Interrupted by user. Exiting...[/yellow]")
    sys.exit(0)
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
if __name__ == "__main__":
    # Install the SIGINT handler before starting the event loop so Ctrl+C
    # exits cleanly; the KeyboardInterrupt catch below is a fallback for an
    # interrupt delivered outside the handler's reach.
    signal.signal(signal.SIGINT, signal_handler)
    try:
        asyncio.run(review_repos())
    except KeyboardInterrupt:
        console.print("\n[yellow]Interrupted. Exiting...[/yellow]")
        sys.exit(0)
|