java-dependency-analyzer 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- java_dependency_analyzer/__init__.py +11 -0
- java_dependency_analyzer/cache/__init__.py +11 -0
- java_dependency_analyzer/cache/db.py +101 -0
- java_dependency_analyzer/cache/vulnerability_cache.py +156 -0
- java_dependency_analyzer/cli.py +394 -0
- java_dependency_analyzer/models/__init__.py +11 -0
- java_dependency_analyzer/models/dependency.py +80 -0
- java_dependency_analyzer/models/report.py +108 -0
- java_dependency_analyzer/parsers/__init__.py +11 -0
- java_dependency_analyzer/parsers/base.py +150 -0
- java_dependency_analyzer/parsers/gradle_dep_tree_parser.py +125 -0
- java_dependency_analyzer/parsers/gradle_parser.py +206 -0
- java_dependency_analyzer/parsers/maven_dep_tree_parser.py +123 -0
- java_dependency_analyzer/parsers/maven_parser.py +182 -0
- java_dependency_analyzer/reporters/__init__.py +11 -0
- java_dependency_analyzer/reporters/base.py +33 -0
- java_dependency_analyzer/reporters/html_reporter.py +82 -0
- java_dependency_analyzer/reporters/json_reporter.py +52 -0
- java_dependency_analyzer/reporters/templates/report.html +406 -0
- java_dependency_analyzer/resolvers/__init__.py +11 -0
- java_dependency_analyzer/resolvers/transitive.py +276 -0
- java_dependency_analyzer/scanners/__init__.py +11 -0
- java_dependency_analyzer/scanners/base.py +102 -0
- java_dependency_analyzer/scanners/ghsa_scanner.py +204 -0
- java_dependency_analyzer/scanners/osv_scanner.py +167 -0
- java_dependency_analyzer/util/__init__.py +11 -0
- java_dependency_analyzer/util/logger.py +48 -0
- java_dependency_analyzer-1.0.0.dist-info/METADATA +193 -0
- java_dependency_analyzer-1.0.0.dist-info/RECORD +31 -0
- java_dependency_analyzer-1.0.0.dist-info/WHEEL +4 -0
- java_dependency_analyzer-1.0.0.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"""
|
|
2
|
+
base module.
|
|
3
|
+
|
|
4
|
+
Defines the abstract base class for vulnerability scanners.
|
|
5
|
+
|
|
6
|
+
:author: Ron Webb
|
|
7
|
+
:since: 1.0.0
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
from abc import ABC, abstractmethod
|
|
12
|
+
|
|
13
|
+
from ..cache.vulnerability_cache import VulnerabilityCache
|
|
14
|
+
from ..models.dependency import Dependency, Vulnerability
|
|
15
|
+
|
|
16
|
+
__author__ = "Ron Webb"
|
|
17
|
+
__since__ = "1.0.0"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class VulnerabilityScanner(ABC):
    """
    Common abstract base for every vulnerability scanner implementation.

    Concrete subclasses implement :meth:`scan` and :meth:`_parse_response`;
    this base contributes the cache read/write helpers so each scanner does
    not have to repeat the "check cache, fall back to API, store payload"
    plumbing.

    :author: Ron Webb
    :since: 1.0.0
    """

    # Populated by subclass __init__ implementations; a None value means
    # caching is disabled and the helpers below become no-ops.
    _cache: VulnerabilityCache | None = None

    @abstractmethod
    def scan(self, dependency: Dependency) -> list[Vulnerability]:
        """
        Look up known vulnerabilities for the exact version of *dependency*.

        Returns a list of ``Vulnerability`` objects (possibly empty).

        :author: Ron Webb
        :since: 1.0.0
        """

    @abstractmethod
    def _parse_response(self, data) -> list[Vulnerability]:
        """
        Turn a raw API payload into ``Vulnerability`` objects.

        The payload shape differs per backend (OSV hands back a ``dict``,
        GHSA a ``list``), which is why *data* carries no annotation.

        :author: Ron Webb
        :since: 1.0.0
        """

    def _get_cached(self, source: str, dependency: Dependency) -> str | None:
        """
        Fetch the cached JSON payload for *dependency* under *source*.

        Returns ``None`` when caching is disabled or there is no entry.

        :author: Ron Webb
        :since: 1.0.0
        """
        cache = self._cache
        if cache is None:
            return None
        return cache.get(
            source,
            dependency.group_id,
            dependency.artifact_id,
            dependency.version,
        )

    def _put_cached(self, source: str, dependency: Dependency, payload: str) -> None:
        """
        Store *payload* in the cache keyed by *source* and the dependency
        coordinates. Does nothing when caching is disabled.

        :author: Ron Webb
        :since: 1.0.0
        """
        cache = self._cache
        if cache is None:
            return
        cache.put(
            source,
            dependency.group_id,
            dependency.artifact_id,
            dependency.version,
            payload,
        )

    def _apply_cache_source(
        self, data: str, source: str
    ) -> list[Vulnerability]:
        """
        Decode a cached JSON string, parse it via :meth:`_parse_response`,
        and relabel each result's ``source`` as ``"<source>-cache"`` so
        reports can distinguish cache hits from live API responses.

        :author: Ron Webb
        :since: 1.0.0
        """
        results = self._parse_response(json.loads(data))
        cache_label = f"{source}-cache"
        for item in results:
            item.source = cache_label
        return results
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ghsa_scanner module.
|
|
3
|
+
|
|
4
|
+
Queries the GitHub Advisory Database REST API to get vulnerability information
|
|
5
|
+
for Maven packages.
|
|
6
|
+
|
|
7
|
+
:author: Ron Webb
|
|
8
|
+
:since: 1.0.0
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
|
|
14
|
+
import httpx
|
|
15
|
+
from dotenv import load_dotenv
|
|
16
|
+
|
|
17
|
+
from ..cache.vulnerability_cache import VulnerabilityCache
|
|
18
|
+
from ..models.dependency import Dependency, Vulnerability
|
|
19
|
+
from ..util.logger import setup_logger
|
|
20
|
+
from .base import VulnerabilityScanner
|
|
21
|
+
|
|
22
|
+
__author__ = "Ron Webb"
|
|
23
|
+
__since__ = "1.0.0"
|
|
24
|
+
|
|
25
|
+
load_dotenv()
|
|
26
|
+
|
|
27
|
+
_logger = setup_logger(__name__)
|
|
28
|
+
|
|
29
|
+
_GHSA_API_URL = "https://api.github.com/advisories"
|
|
30
|
+
_ACCEPT_HEADER = "application/vnd.github+json"
|
|
31
|
+
_API_VERSION_HEADER = "2022-11-28"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class GhsaScanner(VulnerabilityScanner):
    """
    Vulnerability scanner backed by the GitHub Advisory Database REST API
    (https://api.github.com/advisories), returning reviewed advisories for
    a specific Maven dependency version.

    When the ``GITHUB_TOKEN`` environment variable is present it is sent as
    a Bearer token, lifting the API rate limit from 60 to 5000 requests/hour.

    :author: Ron Webb
    :since: 1.0.0
    """

    def __init__(
        self,
        client: httpx.Client | None = None,
        cache: VulnerabilityCache | None = None,
    ) -> None:
        """
        Create the scanner, optionally sharing an httpx client and a cache.

        A ``GITHUB_TOKEN`` from the environment, when set, is attached as a
        ``Bearer`` authorization header. A provided *cache* is consulted
        before the API and updated after successful calls.

        :author: Ron Webb
        :since: 1.0.0
        """
        default_headers = {
            "Accept": _ACCEPT_HEADER,
            "X-GitHub-Api-Version": _API_VERSION_HEADER,
        }
        auth_token = os.environ.get("GITHUB_TOKEN")
        if auth_token:
            default_headers["Authorization"] = f"Bearer {auth_token}"

        if client is None:
            client = httpx.Client(timeout=30, headers=default_headers)
        self._client = client
        self._cache = cache

    def scan(self, dependency: Dependency) -> list[Vulnerability]:
        """
        Fetch advisories affecting *dependency* from the GitHub Advisory API.

        A configured cache is consulted first; the API is only called on a
        miss, and its raw response is stored back on success. The query uses
        ``affects=group:artifact@version`` restricted to ``ecosystem=maven``
        and ``type=reviewed``. Rate-limit (HTTP 429) and transport errors are
        logged and yield an empty list rather than raising.

        :author: Ron Webb
        :since: 1.0.0
        """
        _logger.debug("Querying GHSA for %s", dependency.coordinates)
        hit = self._get_cached("ghsa", dependency)
        if hit is not None:
            return self._apply_cache_source(hit, "ghsa")

        query = {
            "ecosystem": "maven",
            "affects": f"{dependency.group_id}:{dependency.artifact_id}@{dependency.version}",
            "type": "reviewed",
        }
        try:
            resp = self._client.get(_GHSA_API_URL, params=query)
            # 429 is handled before raise_for_status so it logs a dedicated
            # rate-limit warning instead of a generic HTTP error.
            if resp.status_code == 429:
                _logger.warning(
                    "GitHub Advisory API rate limit exceeded for %s", dependency.coordinates
                )
                return []
            resp.raise_for_status()
            body = resp.json()
        except httpx.HTTPError as exc:
            _logger.warning("GHSA query failed for %s: %s", dependency.coordinates, exc)
            return []

        self._put_cached("ghsa", dependency, json.dumps(body))
        return self._parse_response(body)

    def _parse_response(self, data: list) -> list[Vulnerability]:
        """
        Map the API's JSON array of advisories to ``Vulnerability`` objects,
        dropping any entries that fail to convert.

        :author: Ron Webb
        :since: 1.0.0
        """
        converted = (self._parse_advisory(entry) for entry in data)
        return [vuln for vuln in converted if vuln is not None]

    def _parse_advisory(self, advisory: dict) -> Vulnerability | None:
        """
        Build a ``Vulnerability`` from one GitHub Advisory entry.

        ``cve_id`` prefers the CVE identifier and falls back to the GHSA id,
        guaranteeing every advisory carries a unique identifier.

        :author: Ron Webb
        :since: 1.0.0
        """
        ghsa_id = advisory.get("ghsa_id", "UNKNOWN")
        return Vulnerability(
            cve_id=advisory.get("cve_id") or ghsa_id,
            summary=advisory.get("summary", "No summary available"),
            severity=self._extract_severity(advisory),
            affected_versions=self._extract_affected_versions(advisory),
            source="ghsa",
            reference_url=advisory.get("html_url", ""),
        )

    def _extract_severity(self, advisory: dict) -> str:
        """
        Derive a severity string from an advisory, trying in order: the
        top-level ``severity`` label, the CVSS v4 then v3 scores under
        ``cvss_severities``, and finally the legacy ``cvss.score`` field.
        Falls back to ``"UNKNOWN"``.

        :author: Ron Webb
        :since: 1.0.0
        """
        label = advisory.get("severity")
        if label and label != "unknown":
            return label.upper()

        severities = advisory.get("cvss_severities", {})
        for version_key in ("cvss_v4", "cvss_v3"):
            score = (severities.get(version_key) or {}).get("score")
            if score is not None:
                return str(score)

        legacy = (advisory.get("cvss") or {}).get("score")
        if legacy is not None:
            return str(legacy)
        return "UNKNOWN"

    def _extract_affected_versions(self, advisory: dict) -> list[str]:
        """
        Collect the individual version-range constraint tokens from an
        advisory's ``vulnerabilities`` entries.

        Compound ranges such as ``">= 2.0.0, < 2.15.0"`` are split on commas;
        duplicates are dropped while preserving first-seen order.

        :author: Ron Webb
        :since: 1.0.0
        """
        # dict preserves insertion order, giving an ordered de-duplication.
        ordered: dict[str, None] = {}
        for entry in advisory.get("vulnerabilities", []):
            version_range = entry.get("vulnerable_version_range")
            if not version_range:
                continue
            for raw_token in version_range.split(","):
                token = raw_token.strip()
                if token:
                    ordered.setdefault(token, None)
        return list(ordered)
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
"""
|
|
2
|
+
osv_scanner module.
|
|
3
|
+
|
|
4
|
+
Queries the OSV.dev API to get vulnerability information for Maven packages.
|
|
5
|
+
|
|
6
|
+
:author: Ron Webb
|
|
7
|
+
:since: 1.0.0
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
|
|
14
|
+
from ..cache.vulnerability_cache import VulnerabilityCache
|
|
15
|
+
from ..models.dependency import Dependency, Vulnerability
|
|
16
|
+
from ..util.logger import setup_logger
|
|
17
|
+
from .base import VulnerabilityScanner
|
|
18
|
+
|
|
19
|
+
__author__ = "Ron Webb"
|
|
20
|
+
__since__ = "1.0.0"
|
|
21
|
+
|
|
22
|
+
_logger = setup_logger(__name__)
|
|
23
|
+
|
|
24
|
+
_OSV_QUERY_URL = "https://api.osv.dev/v1/query"
|
|
25
|
+
_OSV_BATCH_URL = "https://api.osv.dev/v1/querybatch"
|
|
26
|
+
_OSV_VULN_URL = "https://osv.dev/vulnerability/"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class OsvScanner(VulnerabilityScanner):
    """
    Vulnerability scanner backed by the OSV.dev API (https://api.osv.dev),
    resolving vulnerabilities for a specific Maven dependency version.

    :author: Ron Webb
    :since: 1.0.0
    """

    def __init__(
        self,
        client: httpx.Client | None = None,
        cache: VulnerabilityCache | None = None,
    ) -> None:
        """
        Create the scanner, optionally sharing an httpx client and a cache.

        A provided *cache* is consulted before the API and updated after
        successful calls.

        :author: Ron Webb
        :since: 1.0.0
        """
        self._client = httpx.Client(timeout=30) if client is None else client
        self._cache = cache

    def scan(self, dependency: Dependency) -> list[Vulnerability]:
        """
        Query OSV.dev for vulnerabilities affecting this dependency version.

        A configured cache is checked first; transport/HTTP failures are
        logged and produce an empty result instead of raising.

        :author: Ron Webb
        :since: 1.0.0
        """
        _logger.debug("Querying OSV for %s", dependency.coordinates)
        hit = self._get_cached("osv", dependency)
        if hit is not None:
            return self._apply_cache_source(hit, "osv")

        request_body = {
            "version": dependency.version,
            "package": {
                "name": f"{dependency.group_id}:{dependency.artifact_id}",
                "ecosystem": "Maven",
            },
        }
        try:
            resp = self._client.post(_OSV_QUERY_URL, json=request_body)
            resp.raise_for_status()
            data = resp.json()
        except httpx.HTTPError as exc:
            _logger.warning("OSV query failed for %s: %s", dependency.coordinates, exc)
            return []

        self._put_cached("osv", dependency, json.dumps(data))
        return self._parse_response(data)

    def _parse_response(self, data: dict) -> list[Vulnerability]:
        """
        Map the OSV response's ``vulns`` array to ``Vulnerability`` objects,
        dropping any entries that fail to convert.

        :author: Ron Webb
        :since: 1.0.0
        """
        converted = (self._parse_vuln(entry) for entry in data.get("vulns", []))
        return [vuln for vuln in converted if vuln is not None]

    def _parse_vuln(self, vuln: dict) -> Vulnerability | None:
        """
        Build a ``Vulnerability`` from one OSV vulnerability entry.

        :author: Ron Webb
        :since: 1.0.0
        """
        vuln_id = vuln.get("id", "UNKNOWN")
        return Vulnerability(
            cve_id=vuln_id,
            summary=vuln.get("summary", "No summary available"),
            severity=self._extract_severity(vuln),
            affected_versions=self._extract_affected_versions(vuln),
            source="osv",
            reference_url=self._extract_reference_url(vuln, vuln_id),
        )

    def _extract_severity(self, vuln: dict) -> str:
        """
        Pick a severity string: the first ``severity`` entry's score when
        present, otherwise the ``database_specific`` severity, otherwise
        ``"UNKNOWN"``.

        :author: Ron Webb
        :since: 1.0.0
        """
        entries = vuln.get("severity", [])
        if entries:
            return entries[0].get("score", "UNKNOWN")
        return vuln.get("database_specific", {}).get("severity", "UNKNOWN")

    def _extract_affected_versions(self, vuln: dict) -> list[str]:
        """
        Flatten the ``affected`` ranges and explicit version lists into
        constraint strings (``>=introduced`` / ``<fixed``) plus any listed
        versions not already present.

        :author: Ron Webb
        :since: 1.0.0
        """
        collected: list[str] = []
        for affected in vuln.get("affected", []):
            for version_range in affected.get("ranges", []):
                for event in version_range.get("events", []):
                    introduced = event.get("introduced")
                    fixed = event.get("fixed")
                    if introduced:
                        collected.append(f">={introduced}")
                    if fixed:
                        collected.append(f"<{fixed}")
            # Explicit version strings are appended once each.
            for explicit in affected.get("versions", []):
                if explicit not in collected:
                    collected.append(explicit)
        return collected

    def _extract_reference_url(self, vuln: dict, vuln_id: str) -> str:
        """
        Return the first ``WEB``-typed reference URL, falling back to the
        canonical OSV vulnerability page for *vuln_id*.

        :author: Ron Webb
        :since: 1.0.0
        """
        web_ref = next(
            (ref for ref in vuln.get("references", []) if ref.get("type") == "WEB"),
            None,
        )
        if web_ref is not None:
            return web_ref.get("url", "")
        return f"{_OSV_VULN_URL}{vuln_id}"
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""
|
|
2
|
+
logger module.
|
|
3
|
+
|
|
4
|
+
Provides a setup_logger function for consistent logging configuration.
|
|
5
|
+
|
|
6
|
+
:author: Ron Webb
|
|
7
|
+
:since: 1.0.0
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import logging
|
|
11
|
+
import logging.config
|
|
12
|
+
import os
|
|
13
|
+
|
|
14
|
+
__author__ = "Ron Webb"
|
|
15
|
+
__since__ = "1.0.0"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def setup_logger(name: str) -> logging.Logger:
|
|
19
|
+
"""
|
|
20
|
+
Set up and return a logger with consistent configuration.
|
|
21
|
+
|
|
22
|
+
Args:
|
|
23
|
+
name: The name of the logger to create
|
|
24
|
+
|
|
25
|
+
Returns:
|
|
26
|
+
A configured logger instance
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
def find_logging_ini(start_dir: str) -> str | None:
|
|
30
|
+
current = start_dir
|
|
31
|
+
while True:
|
|
32
|
+
candidate = os.path.join(current, "logging.ini")
|
|
33
|
+
if os.path.exists(candidate):
|
|
34
|
+
return candidate
|
|
35
|
+
parent = os.path.dirname(current)
|
|
36
|
+
if parent == current:
|
|
37
|
+
break
|
|
38
|
+
current = parent
|
|
39
|
+
return None
|
|
40
|
+
|
|
41
|
+
search_dir = os.getcwd()
|
|
42
|
+
config_path = find_logging_ini(search_dir)
|
|
43
|
+
if config_path is not None and os.path.exists(config_path):
|
|
44
|
+
logging.config.fileConfig(config_path, disable_existing_loggers=False)
|
|
45
|
+
else:
|
|
46
|
+
logging.basicConfig(level=logging.INFO)
|
|
47
|
+
logger = logging.getLogger(name)
|
|
48
|
+
return logger
|