devguard 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- devguard/INTEGRATION_SUMMARY.md +121 -0
- devguard/__init__.py +3 -0
- devguard/__main__.py +6 -0
- devguard/checkers/__init__.py +41 -0
- devguard/checkers/api_usage.py +523 -0
- devguard/checkers/aws_cost.py +331 -0
- devguard/checkers/aws_iam.py +284 -0
- devguard/checkers/base.py +25 -0
- devguard/checkers/container.py +137 -0
- devguard/checkers/domain.py +189 -0
- devguard/checkers/firecrawl.py +117 -0
- devguard/checkers/fly.py +225 -0
- devguard/checkers/github.py +210 -0
- devguard/checkers/npm.py +327 -0
- devguard/checkers/npm_security.py +244 -0
- devguard/checkers/redteam.py +290 -0
- devguard/checkers/secret.py +279 -0
- devguard/checkers/swarm.py +376 -0
- devguard/checkers/tailscale.py +143 -0
- devguard/checkers/tailsnitch.py +303 -0
- devguard/checkers/tavily.py +179 -0
- devguard/checkers/vercel.py +192 -0
- devguard/cli.py +1510 -0
- devguard/cli_helpers.py +189 -0
- devguard/config.py +249 -0
- devguard/core.py +293 -0
- devguard/dashboard.py +715 -0
- devguard/discovery.py +363 -0
- devguard/http_client.py +142 -0
- devguard/llm_service.py +481 -0
- devguard/mcp_server.py +259 -0
- devguard/metrics.py +144 -0
- devguard/models.py +208 -0
- devguard/reporting.py +1571 -0
- devguard/sarif.py +295 -0
- devguard/scripts/ANALYSIS_SUMMARY.md +141 -0
- devguard/scripts/README.md +221 -0
- devguard/scripts/auto_fix_recommendations.py +145 -0
- devguard/scripts/generate_npmignore.py +175 -0
- devguard/scripts/generate_security_report.py +324 -0
- devguard/scripts/prepublish_check.sh +29 -0
- devguard/scripts/redteam_npm_packages.py +1262 -0
- devguard/scripts/review_all_repos.py +300 -0
- devguard/spec.py +617 -0
- devguard/sweeps/__init__.py +23 -0
- devguard/sweeps/ai_editor_config_audit.py +697 -0
- devguard/sweeps/cargo_publish_audit.py +655 -0
- devguard/sweeps/dependency_audit.py +419 -0
- devguard/sweeps/gitignore_audit.py +336 -0
- devguard/sweeps/local_dev.py +260 -0
- devguard/sweeps/local_dirty_worktree_secrets.py +521 -0
- devguard/sweeps/project_flaudit.py +636 -0
- devguard/sweeps/public_github_secrets.py +680 -0
- devguard/sweeps/publish_audit.py +478 -0
- devguard/sweeps/ssh_key_audit.py +327 -0
- devguard/utils.py +174 -0
- devguard-0.2.0.dist-info/METADATA +225 -0
- devguard-0.2.0.dist-info/RECORD +60 -0
- devguard-0.2.0.dist-info/WHEEL +4 -0
- devguard-0.2.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"""GitHub repository security alerts checker."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
from github import Auth, Github
|
|
8
|
+
from github.GithubException import GithubException
|
|
9
|
+
|
|
10
|
+
from devguard.checkers.base import BaseChecker
|
|
11
|
+
from devguard.config import Settings
|
|
12
|
+
from devguard.http_client import create_client, retry_with_backoff
|
|
13
|
+
from devguard.models import CheckResult, RepositoryAlert, Severity
|
|
14
|
+
|
|
15
|
+
logger = logging.getLogger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class GitHubChecker(BaseChecker):
    """Check GitHub repositories for Dependabot security alerts.

    Repository discovery goes through PyGithub; the Dependabot alerts
    themselves are fetched via the REST API directly, since PyGithub does
    not fully support that endpoint.
    """

    check_type = "gh"

    def __init__(self, settings: Settings):
        """Initialize GitHub checker.

        Args:
            settings: Application settings carrying the GitHub token and the
                repositories/organization to monitor.
        """
        super().__init__(settings)
        # Handle SecretStr if using newer pydantic settings.
        token = settings.github_token
        if hasattr(token, "get_secret_value"):
            token = token.get_secret_value()
        # Keep the unwrapped token: _get_dependabot_alerts() interpolates it
        # into a raw Authorization header, and f-string formatting a pydantic
        # SecretStr would send the masked "**********" instead of the secret.
        self._token = token

        auth = Auth.Token(token)
        self.github = Github(auth=auth)

    async def check(self) -> CheckResult:
        """Check GitHub repositories for Dependabot alerts.

        Returns:
            CheckResult whose ``repository_alerts`` holds one entry per
            parsed alert; ``success`` is False when any repository (or the
            repository listing itself) could not be checked.
        """
        alerts: list[RepositoryAlert] = []
        errors: list[str] = []

        # Skip (successfully, with no alerts) if neither repos nor an org
        # were explicitly configured.
        if not self.settings.github_repos_to_monitor and not self.settings.github_org:
            return CheckResult(
                check_type=self.check_type,
                success=True,
                repository_alerts=[],
                errors=[],
            )

        try:
            # Get repositories to check.
            repos = await self._get_repositories()

            for repo in repos:
                try:
                    repo_alerts = await self._get_dependabot_alerts(repo)
                    alerts.extend(repo_alerts)
                except GithubException as e:
                    errors.append(f"Error checking repo {repo.full_name}: {str(e)}")
                except Exception as e:
                    errors.append(f"Unexpected error checking repo {repo.full_name}: {str(e)}")

        except Exception as e:
            # Discovery itself failed: report a hard failure with no alerts.
            errors.append(f"Error getting repositories: {str(e)}")
            return CheckResult(
                check_type=self.check_type,
                success=False,
                repository_alerts=[],
                errors=errors,
            )

        return CheckResult(
            check_type=self.check_type,
            success=len(errors) == 0,
            repository_alerts=alerts,
            errors=errors,
        )

    async def _get_repositories(self) -> list:
        """Get list of repositories to monitor.

        Combines, in order: all repos of the configured organization, the
        explicitly configured repos, and — only when nothing else yielded
        results — the authenticated user's repos. Duplicates are filtered
        by ``full_name``: PyGithub Repository objects do not define value
        equality, so two fetches of the same repo are distinct objects and
        a plain ``in`` membership test would let duplicates through.
        """
        repos = []
        seen: set[str] = set()

        if self.settings.github_org:
            # Get all repos in the organization.
            try:
                org = self.github.get_organization(self.settings.github_org)
                for repo in org.get_repos():
                    if repo.full_name not in seen:
                        seen.add(repo.full_name)
                        repos.append(repo)
            except GithubException as e:
                org_name = self.settings.github_org
                raise Exception(f"Error accessing organization {org_name}: {str(e)}")

        # Add specific repos if configured.
        if self.settings.github_repos_to_monitor:
            for repo_name in self.settings.github_repos_to_monitor:
                try:
                    repo = self.github.get_repo(repo_name)
                    if repo.full_name not in seen:
                        seen.add(repo.full_name)
                        repos.append(repo)
                except GithubException:
                    # Repo might not exist or not be accessible; skip quietly.
                    pass

        # If nothing was found/configured, fall back to the user's repos.
        if not repos:
            try:
                user = self.github.get_user()
                repos.extend(list(user.get_repos()))
            except GithubException:
                pass

        return repos

    async def _get_dependabot_alerts(self, repo) -> list[RepositoryAlert]:
        """Get Dependabot alerts for a repository.

        Requires Dependabot to be enabled on the repo and the token to have
        the ``security_events`` scope. Failures are logged and swallowed so
        one broken repo does not abort the whole check.
        """
        alerts: list[RepositoryAlert] = []

        try:
            # PyGithub doesn't fully support Dependabot alerts, so hit the
            # REST API directly.
            headers = {
                "Accept": "application/vnd.github+json",
                # Use the token unwrapped in __init__: interpolating
                # settings.github_token here could serialize a SecretStr as
                # its masked repr and break authentication.
                "Authorization": f"token {self._token}",
                "X-GitHub-Api-Version": "2022-11-28",
            }

            async with create_client() as client:

                async def fetch_alerts():
                    response = await client.get(
                        f"https://api.github.com/repos/{repo.full_name}/dependabot/alerts",
                        headers=headers,
                    )
                    response.raise_for_status()
                    return response

                try:
                    response = await retry_with_backoff(fetch_alerts, max_retries=3)
                    data = response.json()
                    for alert_data in data:
                        alert = self._parse_alert(alert_data, repo.full_name)
                        if alert:
                            alerts.append(alert)
                except httpx.HTTPStatusError as e:
                    if e.response.status_code == 404:
                        # Dependabot might not be enabled for this repo.
                        logger.debug(f"Dependabot not enabled for {repo.full_name}")
                    else:
                        logger.warning(
                            f"Failed to fetch Dependabot alerts for {repo.full_name}: "
                            f"HTTP {e.response.status_code}"
                        )
                except httpx.RequestError as e:
                    logger.warning(
                        f"Network error fetching Dependabot alerts for {repo.full_name}: {str(e)}"
                    )

        except Exception as e:
            # If we can't get alerts, continue with what we have.
            logger.warning(f"Unexpected error fetching alerts for {repo.full_name}: {str(e)}")

        return alerts

    def _parse_alert(self, alert_data: dict, repo_name: str) -> RepositoryAlert | None:
        """Parse a Dependabot alert from the API response.

        Returns None instead of raising when the payload is malformed, so a
        single bad alert does not abort the repository scan.
        """
        try:
            # Map GitHub advisory severity onto the internal Severity enum;
            # GitHub has used both "medium" and "moderate" spellings.
            severity_str = alert_data.get("security_advisory", {}).get("severity", "low")
            severity_map = {
                "low": Severity.LOW,
                "medium": Severity.MEDIUM,
                "moderate": Severity.MEDIUM,
                "high": Severity.HIGH,
                "critical": Severity.CRITICAL,
            }
            severity = severity_map.get(severity_str.lower(), Severity.LOW)

            # GitHub timestamps are ISO-8601 with a trailing "Z", which
            # datetime.fromisoformat (pre-3.11) does not accept directly.
            created_at = datetime.fromisoformat(
                alert_data.get("created_at", "").replace("Z", "+00:00")
            )
            updated_at = datetime.fromisoformat(
                alert_data.get("updated_at", "").replace("Z", "+00:00")
            )

            dismissed_at = None
            if alert_data.get("dismissed_at"):
                dismissed_at = datetime.fromisoformat(
                    alert_data.get("dismissed_at", "").replace("Z", "+00:00")
                )

            fixed_at = None
            if alert_data.get("fixed_at"):
                fixed_at = datetime.fromisoformat(
                    alert_data.get("fixed_at", "").replace("Z", "+00:00")
                )

            return RepositoryAlert(
                repository=repo_name,
                alert_id=alert_data.get("number", 0),
                state=alert_data.get("state", "open"),
                severity=severity,
                dependency=alert_data.get("dependency", {}),
                security_advisory=alert_data.get("security_advisory", {}),
                created_at=created_at,
                updated_at=updated_at,
                dismissed_at=dismissed_at,
                fixed_at=fixed_at,
            )
        except Exception:
            return None
|
devguard/checkers/npm.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
"""npm package vulnerability checker."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
|
|
8
|
+
from devguard.checkers.base import BaseChecker
|
|
9
|
+
from devguard.http_client import create_client, retry_with_backoff
|
|
10
|
+
from devguard.models import CheckResult, Severity, Vulnerability
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class NpmChecker(BaseChecker):
    """Check npm packages for known vulnerabilities.

    Queries the npm registry audit endpoint for every configured package
    and, when a Snyk token is configured, the Snyk v1 test API as well.
    """

    check_type = "npm"

    async def check(self) -> CheckResult:
        """Check npm packages for vulnerabilities.

        Returns:
            CheckResult aggregating vulnerabilities from npm audit (and
            Snyk when configured); ``success`` is False if any per-package
            check raised.
        """
        vulnerabilities: list[Vulnerability] = []
        errors: list[str] = []

        if not self.settings.npm_packages_to_monitor:
            return CheckResult(
                check_type=self.check_type,
                success=True,
                vulnerabilities=[],
                errors=["No npm packages configured for monitoring"],
            )

        for package in self.settings.npm_packages_to_monitor:
            try:
                # Use the npm audit endpoint for vulnerability checking.
                pkg_vulns = await self._check_package_vulnerabilities(package)
                vulnerabilities.extend(pkg_vulns)
            except Exception as e:
                error_msg = f"Error checking package {package}: {str(e)}"
                errors.append(error_msg)
                logger.warning(error_msg)

        # Also check via Snyk if a token is available.
        if self.settings.snyk_token:
            try:
                snyk_vulns = await self._check_snyk_vulnerabilities()
                vulnerabilities.extend(snyk_vulns)
            except Exception as e:
                errors.append(f"Error checking Snyk: {str(e)}")

        return CheckResult(
            check_type=self.check_type,
            success=len(errors) == 0,
            vulnerabilities=vulnerabilities,
            errors=errors,
        )

    @staticmethod
    def _version_sort_key(version: str) -> tuple:
        """Sort key ordering dotted version strings numerically.

        A plain string max() ranks "9.0.0" above "10.0.0". This key compares
        the leading integer of each dot-separated component first and falls
        back to the raw component text for non-numeric suffixes
        (e.g. "1.0.0-beta").
        """
        key = []
        for component in version.split("."):
            digits = ""
            for ch in component:
                if not ch.isdigit():
                    break
                digits += ch
            key.append((int(digits) if digits else 0, component))
        return tuple(key)

    async def _check_package_vulnerabilities(self, package: str) -> list[Vulnerability]:
        """
        Check a specific package for vulnerabilities using the npm registry API.

        Resolves the package's latest version, then posts a minimal audit
        payload to the registry's security audits endpoint. Audit failures
        are logged and swallowed; failures to resolve the package itself are
        re-raised so check() records them as errors.
        """
        vulnerabilities: list[Vulnerability] = []

        try:
            # First, resolve the latest version of the package.
            async with create_client() as client:

                async def fetch_package_info():
                    # Get package info from the npm registry.
                    response = await client.get(
                        f"https://registry.npmjs.org/{package}",
                    )
                    response.raise_for_status()
                    return response

                response = await retry_with_backoff(fetch_package_info, max_retries=3)
                package_data = response.json()

                # Prefer the "latest" dist-tag; otherwise fall back to the
                # numerically greatest published version (string max() would
                # order versions lexicographically and pick e.g. 9.x over 10.x).
                dist_tags = package_data.get("dist-tags", {})
                latest_version = dist_tags.get("latest")
                if not latest_version:
                    versions = package_data.get("versions", {})
                    if versions:
                        latest_version = max(versions, key=self._version_sort_key)

                if not latest_version:
                    logger.warning(f"Could not determine latest version for {package}")
                    return vulnerabilities

                # Construct a minimal package.json-shaped payload for the
                # npm audit API.
                audit_payload = {
                    "name": f"devguard-check-{package}",
                    "version": "1.0.0",
                    "requires": {package: latest_version},
                    "dependencies": {
                        package: {
                            "version": latest_version,
                        }
                    },
                }

                async def fetch_audit():
                    response = await client.post(
                        "https://registry.npmjs.org/-/npm/v1/security/audits",
                        json=audit_payload,
                    )
                    response.raise_for_status()
                    return response

                try:
                    audit_response = await retry_with_backoff(fetch_audit, max_retries=3)
                    audit_data = audit_response.json()

                    # Map npm audit severity strings onto the internal enum;
                    # built once, not per advisory.
                    severity_map = {
                        "low": Severity.LOW,
                        "moderate": Severity.MEDIUM,
                        "high": Severity.HIGH,
                        "critical": Severity.CRITICAL,
                    }

                    # Parse vulnerabilities from the audit response.
                    advisories = audit_data.get("advisories", {})
                    for advisory_id, advisory_data in advisories.items():
                        severity = severity_map.get(
                            advisory_data.get("severity", "moderate").lower(), Severity.MEDIUM
                        )

                        # Record one Vulnerability per advisory, using the
                        # first finding's first version range.
                        findings = advisory_data.get("findings", [])
                        for finding in findings:
                            for version_range in finding.get("version", []):
                                cves = advisory_data.get("cves", [])
                                cve_id = cves[0] if cves else None

                                created = advisory_data.get("created")
                                published_at = None
                                if created:
                                    iso_str = created.replace("Z", "+00:00")
                                    published_at = datetime.fromisoformat(iso_str)

                                vuln = Vulnerability(
                                    package_name=package,
                                    package_version=latest_version,
                                    severity=severity,
                                    advisory_id=advisory_id,
                                    cve_id=cve_id,
                                    summary=advisory_data.get("title"),
                                    description=advisory_data.get("overview"),
                                    vulnerable_version_range=version_range,
                                    published_at=published_at,
                                    source="npm",
                                )
                                vulnerabilities.append(vuln)
                                break  # Only add once per advisory
                            break

                except httpx.HTTPStatusError as e:
                    if e.response.status_code == 404:
                        # No vulnerabilities found or endpoint not available.
                        logger.debug(f"No vulnerabilities found for {package} via npm audit API")
                    else:
                        status_code = e.response.status_code
                        logger.warning(
                            f"Failed to check vulnerabilities for {package}: HTTP {status_code}"
                        )
                except httpx.RequestError as e:
                    logger.warning(
                        f"Network error checking vulnerabilities for {package}: {str(e)}"
                    )

        except httpx.HTTPStatusError as e:
            status_code = e.response.status_code
            error_msg = f"Failed to fetch package info for {package}: HTTP {status_code}"
            logger.warning(error_msg)
            # Re-raise so the caller can add it to the errors list.
            raise Exception(error_msg) from e
        except httpx.RequestError as e:
            error_msg = f"Network error fetching package info for {package}: {str(e)}"
            logger.warning(error_msg)
            # Re-raise so the caller can add it to the errors list.
            raise Exception(error_msg) from e
        except Exception as e:
            error_msg = f"Unexpected error checking {package}: {str(e)}"
            logger.warning(error_msg)
            # Re-raise so the caller can add it to the errors list.
            raise

        return vulnerabilities

    async def _check_snyk_vulnerabilities(self) -> list[Vulnerability]:
        """
        Check vulnerabilities using the Snyk API.

        Requires a Snyk API token; checks each package configured in
        npm_packages_to_monitor at its latest npm version. Per-package
        failures are logged and swallowed.
        """
        vulnerabilities: list[Vulnerability] = []

        if not self.settings.snyk_token:
            return vulnerabilities

        if not self.settings.npm_packages_to_monitor:
            return vulnerabilities

        # Handle SecretStr if using newer pydantic settings.
        snyk_token = self.settings.snyk_token
        if hasattr(snyk_token, "get_secret_value"):
            snyk_token = snyk_token.get_secret_value()

        try:
            async with create_client() as client:
                headers = {
                    "Authorization": f"token {snyk_token}",
                    "Content-Type": "application/json",
                }

                # Map Snyk severity strings onto the internal enum; built
                # once, not per issue.
                severity_map = {
                    "low": Severity.LOW,
                    "medium": Severity.MEDIUM,
                    "high": Severity.HIGH,
                    "critical": Severity.CRITICAL,
                }

                for package in self.settings.npm_packages_to_monitor:
                    try:
                        # Resolve the latest version from the npm registry.
                        async def fetch_package_info():
                            response = await client.get(
                                f"https://registry.npmjs.org/{package}",
                            )
                            response.raise_for_status()
                            return response

                        package_response = await retry_with_backoff(
                            fetch_package_info, max_retries=3
                        )
                        package_data = package_response.json()
                        dist_tags = package_data.get("dist-tags", {})
                        latest_version = dist_tags.get("latest")

                        if not latest_version:
                            continue

                        # Check vulnerabilities via Snyk's v1 test endpoint
                        # for npm packages.
                        async def fetch_snyk_vulns():
                            response = await client.post(
                                "https://api.snyk.io/v1/test/npm",
                                headers=headers,
                                json={
                                    "package": {
                                        "name": package,
                                        "version": latest_version,
                                    }
                                },
                            )
                            response.raise_for_status()
                            return response

                        try:
                            snyk_response = await retry_with_backoff(
                                fetch_snyk_vulns, max_retries=3
                            )
                            snyk_data = snyk_response.json()

                            # Parse vulnerabilities from the Snyk response.
                            issues = snyk_data.get("issues", {}).get("vulnerabilities", [])
                            for issue in issues:
                                severity = severity_map.get(
                                    issue.get("severity", "medium").lower(), Severity.MEDIUM
                                )

                                identifiers = issue.get("identifiers", {})
                                cves = identifiers.get("CVE", [])
                                cve_id = cves[0] if cves else None

                                semver = issue.get("semver", {})
                                vulnerable = semver.get("vulnerable", [])
                                vulnerable_range = vulnerable[0] if vulnerable else None

                                patched = semver.get("patched", [])
                                patched_version = patched[0] if patched else None

                                pub_time = issue.get("publicationTime")
                                published_at = None
                                if pub_time:
                                    iso_str = pub_time.replace("Z", "+00:00")
                                    published_at = datetime.fromisoformat(iso_str)

                                vuln = Vulnerability(
                                    package_name=package,
                                    package_version=latest_version,
                                    severity=severity,
                                    cve_id=cve_id,
                                    summary=issue.get("title"),
                                    description=issue.get("description"),
                                    vulnerable_version_range=vulnerable_range,
                                    first_patched_version=patched_version,
                                    published_at=published_at,
                                    source="snyk",
                                )
                                vulnerabilities.append(vuln)

                        except httpx.HTTPStatusError as e:
                            if e.response.status_code == 401:
                                logger.warning("Snyk API authentication failed. Check your token.")
                            elif e.response.status_code == 404:
                                logger.debug(f"No vulnerabilities found for {package} via Snyk API")
                            else:
                                status_code = e.response.status_code
                                logger.warning(f"Snyk API error for {package}: HTTP {status_code}")
                        except httpx.RequestError as e:
                            logger.warning(f"Network error checking Snyk for {package}: {str(e)}")

                    except Exception as e:
                        logger.warning(f"Error checking {package} with Snyk: {str(e)}")

        except Exception as e:
            logger.warning(f"Unexpected error checking Snyk vulnerabilities: {str(e)}")

        return vulnerabilities
|