security-use 0.1.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- security_use/__init__.py +15 -0
- security_use/cli.py +348 -0
- security_use/dependency_scanner.py +199 -0
- security_use/fixers/__init__.py +6 -0
- security_use/fixers/dependency_fixer.py +196 -0
- security_use/fixers/iac_fixer.py +191 -0
- security_use/iac/__init__.py +9 -0
- security_use/iac/base.py +69 -0
- security_use/iac/cloudformation.py +207 -0
- security_use/iac/rules/__init__.py +29 -0
- security_use/iac/rules/aws.py +338 -0
- security_use/iac/rules/base.py +96 -0
- security_use/iac/rules/registry.py +115 -0
- security_use/iac/terraform.py +177 -0
- security_use/iac_scanner.py +215 -0
- security_use/models.py +139 -0
- security_use/osv_client.py +386 -0
- security_use/parsers/__init__.py +16 -0
- security_use/parsers/base.py +43 -0
- security_use/parsers/pipfile.py +133 -0
- security_use/parsers/poetry_lock.py +42 -0
- security_use/parsers/pyproject.py +178 -0
- security_use/parsers/requirements.py +86 -0
- security_use/py.typed +0 -0
- security_use/reporter.py +368 -0
- security_use/scanner.py +74 -0
- security_use-0.1.1.dist-info/METADATA +92 -0
- security_use-0.1.1.dist-info/RECORD +30 -0
- security_use-0.1.1.dist-info/WHEEL +4 -0
- security_use-0.1.1.dist-info/entry_points.txt +2 -0
security_use/osv_client.py
@@ -0,0 +1,386 @@
"""OSV API client for vulnerability database queries."""

import hashlib
import time
from dataclasses import dataclass
from typing import Any, Optional

import httpx

from security_use.models import Severity, Vulnerability


@dataclass
class CacheEntry:
    """Cache entry with TTL."""

    data: Any
    expires_at: float


class OSVClient:
    """Client for the OSV (Open Source Vulnerabilities) API.

    Documentation: https://google.github.io/osv.dev/api/
    """

    BASE_URL = "https://api.osv.dev/v1"
    CACHE_TTL = 300  # 5 minutes
    BATCH_SIZE = 1000  # Max queries per batch request

    def __init__(self, timeout: float = 30.0, cache_ttl: int = 300) -> None:
        """Initialize the OSV client.

        Args:
            timeout: HTTP request timeout in seconds.
            cache_ttl: Cache time-to-live in seconds.
        """
        self._client = httpx.Client(timeout=timeout)
        self._cache: dict[str, CacheEntry] = {}
        self.cache_ttl = cache_ttl

    def query_package(
        self, package: str, version: str, ecosystem: str = "PyPI"
    ) -> list[Vulnerability]:
        """Query vulnerabilities for a single package.

        Args:
            package: Package name.
            version: Package version.
            ecosystem: Package ecosystem (default: PyPI).

        Returns:
            List of vulnerabilities affecting this package version.
        """
        cache_key = self._cache_key(package, version, ecosystem)
        cached = self._get_cached(cache_key)
        if cached is not None:
            return cached

        payload = {
            "package": {"name": package, "ecosystem": ecosystem},
            "version": version,
        }

        try:
            response = self._client.post(f"{self.BASE_URL}/query", json=payload)
            response.raise_for_status()
            data = response.json()
        except httpx.HTTPError:
            return []

        vulns = self._parse_vulnerabilities(data.get("vulns", []), package, version)
        self._set_cached(cache_key, vulns)

        return vulns

    def query_batch(
        self,
        packages: list[tuple[str, str]],
        ecosystem: str = "PyPI",
    ) -> dict[tuple[str, str], list[Vulnerability]]:
        """Query vulnerabilities for multiple packages at once.

        Args:
            packages: List of (package_name, version) tuples.
            ecosystem: Package ecosystem (default: PyPI).

        Returns:
            Dict mapping (normalized_name, version) to list of vulnerabilities.
        """
        results: dict[tuple[str, str], list[Vulnerability]] = {}

        if not packages:
            return results

        # Check cache first
        uncached_packages = []
        for name, version in packages:
            normalized = self._normalize_name(name)
            cache_key = self._cache_key(name, version, ecosystem)
            cached = self._get_cached(cache_key)
            if cached is not None:
                results[(normalized, version)] = cached
            else:
                uncached_packages.append((name, version))

        if not uncached_packages:
            return results

        # Build batch queries
        queries = [
            {"package": {"name": name, "ecosystem": ecosystem}, "version": version}
            for name, version in uncached_packages
        ]

        # Process in batches
        for i in range(0, len(queries), self.BATCH_SIZE):
            batch = queries[i : i + self.BATCH_SIZE]
            batch_packages = uncached_packages[i : i + self.BATCH_SIZE]

            try:
                response = self._client.post(
                    f"{self.BASE_URL}/querybatch",
                    json={"queries": batch},
                )
                response.raise_for_status()
                data = response.json()
            except httpx.HTTPError:
                # On error, skip this batch
                continue

            # Parse results - batch API returns minimal data, need to fetch full details
            for idx, result in enumerate(data.get("results", [])):
                if idx >= len(batch_packages):
                    break

                name, version = batch_packages[idx]
                normalized = self._normalize_name(name)

                # Batch API only returns IDs, fetch full vulnerability data
                vuln_ids = [v.get("id") for v in result.get("vulns", []) if v.get("id")]
                full_vulns = []
                for vuln_id in vuln_ids:
                    vuln_data = self.get_vulnerability(vuln_id)
                    if vuln_data:
                        full_vulns.append(vuln_data)

                vulns = self._parse_vulnerabilities(full_vulns, name, version)

                results[(normalized, version)] = vulns
                cache_key = self._cache_key(name, version, ecosystem)
                self._set_cached(cache_key, vulns)

        return results

    def get_vulnerability(self, vuln_id: str) -> Optional[dict[str, Any]]:
        """Get details for a specific vulnerability.

        Args:
            vuln_id: Vulnerability ID (e.g., CVE-2021-1234, GHSA-xxxx).

        Returns:
            Vulnerability details or None if not found.
        """
        cache_key = f"vuln:{vuln_id}"
        cached = self._get_cached(cache_key)
        if cached is not None:
            return cached

        try:
            response = self._client.get(f"{self.BASE_URL}/vulns/{vuln_id}")
            response.raise_for_status()
            data = response.json()
            self._set_cached(cache_key, data)
            return data
        except httpx.HTTPError:
            return None

    def get_fix_version(
        self, vulnerability_id: str, package: str, ecosystem: str = "PyPI"
    ) -> Optional[str]:
        """Get the recommended fix version for a vulnerability.

        Args:
            vulnerability_id: The vulnerability ID.
            package: Package name.
            ecosystem: Package ecosystem.

        Returns:
            Recommended version to upgrade to, or None if unknown.
        """
        vuln_data = self.get_vulnerability(vulnerability_id)
        if not vuln_data:
            return None

        # Look for fixed version in affected ranges
        for affected in vuln_data.get("affected", []):
            pkg = affected.get("package", {})
            if (
                pkg.get("name", "").lower() == package.lower()
                and pkg.get("ecosystem", "").lower() == ecosystem.lower()
            ):
                for range_info in affected.get("ranges", []):
                    for event in range_info.get("events", []):
                        if "fixed" in event:
                            return event["fixed"]

        return None

    def _parse_vulnerabilities(
        self, vulns: list[dict[str, Any]], package: str, version: str
    ) -> list[Vulnerability]:
        """Parse OSV vulnerability data into Vulnerability objects.

        Args:
            vulns: Raw vulnerability data from OSV.
            package: Package name.
            version: Package version.

        Returns:
            List of Vulnerability objects.
        """
        result = []

        for vuln in vulns:
            # Get CVE ID if available, otherwise use OSV ID
            vuln_id = vuln.get("id", "")
            aliases = vuln.get("aliases", [])
            cve_id = next((a for a in aliases if a.startswith("CVE-")), vuln_id)

            # Get severity from CVSS or severity field
            severity = self._get_severity(vuln)

            # Get fixed version
            fixed_version = self._get_fixed_version(vuln, package)

            # Get affected versions string
            affected_versions = self._get_affected_versions(vuln, package)

            result.append(
                Vulnerability(
                    id=cve_id,
                    package=package,
                    installed_version=version,
                    severity=severity,
                    title=vuln.get("summary", "Unknown vulnerability"),
                    description=vuln.get("details", ""),
                    affected_versions=affected_versions,
                    fixed_version=fixed_version,
                    cvss_score=self._get_cvss_score(vuln),
                    references=[
                        ref.get("url", "") for ref in vuln.get("references", [])
                    ],
                )
            )

        return result

    def _get_severity(self, vuln: dict[str, Any]) -> Severity:
        """Extract severity from vulnerability data."""
        # Try to get from severity field
        for sev in vuln.get("severity", []):
            if sev.get("type") == "CVSS_V3":
                score = self._parse_cvss_score(sev.get("score", ""))
                if score is not None:
                    return Severity.from_cvss(score)

        # Try database_specific severity
        db_specific = vuln.get("database_specific", {})
        severity_str = db_specific.get("severity", "").upper()
        if severity_str in ("CRITICAL", "HIGH", "MEDIUM", "LOW"):
            return Severity(severity_str)

        return Severity.UNKNOWN

    def _get_cvss_score(self, vuln: dict[str, Any]) -> Optional[float]:
        """Extract CVSS score from vulnerability data."""
        for sev in vuln.get("severity", []):
            if sev.get("type") == "CVSS_V3":
                return self._parse_cvss_score(sev.get("score", ""))
        return None

    def _parse_cvss_score(self, score_str: str) -> Optional[float]:
        """Parse CVSS score from vector or score string."""
        if not score_str:
            return None

        # If it's a CVSS vector, extract the base score
        if score_str.startswith("CVSS:"):
            # For now, return None as parsing CVSS vectors is complex
            return None

        try:
            return float(score_str)
        except ValueError:
            return None

    def _get_fixed_version(
        self, vuln: dict[str, Any], package: str
    ) -> Optional[str]:
        """Extract fixed version from vulnerability data."""
        for affected in vuln.get("affected", []):
            pkg = affected.get("package", {})
            if pkg.get("name", "").lower() == package.lower():
                for range_info in affected.get("ranges", []):
                    for event in range_info.get("events", []):
                        if "fixed" in event:
                            return event["fixed"]
        return None

    def _get_affected_versions(
        self, vuln: dict[str, Any], package: str
    ) -> str:
        """Build affected versions string from vulnerability data."""
        for affected in vuln.get("affected", []):
            pkg = affected.get("package", {})
            if pkg.get("name", "").lower() == package.lower():
                versions = affected.get("versions", [])
                if versions:
                    if len(versions) <= 3:
                        return ", ".join(versions)
                    return f"{versions[0]} - {versions[-1]}"

                # Build from ranges
                ranges = []
                for range_info in affected.get("ranges", []):
                    events = range_info.get("events", [])
                    introduced = None
                    fixed = None
                    for event in events:
                        if "introduced" in event:
                            introduced = event["introduced"]
                        if "fixed" in event:
                            fixed = event["fixed"]
                    if introduced:
                        if fixed:
                            ranges.append(f">={introduced}, <{fixed}")
                        else:
                            ranges.append(f">={introduced}")
                if ranges:
                    return "; ".join(ranges)

        return "unknown"

    def _normalize_name(self, name: str) -> str:
        """Normalize package name for cache key matching."""
        return name.lower().replace("-", "_").replace(".", "_")

    def _cache_key(self, package: str, version: str, ecosystem: str) -> str:
        """Generate cache key for a package query."""
        normalized = self._normalize_name(package)
        key_str = f"{ecosystem}:{normalized}:{version}"
        return hashlib.sha256(key_str.encode()).hexdigest()[:16]

    def _get_cached(self, key: str) -> Optional[Any]:
        """Get value from cache if not expired."""
        entry = self._cache.get(key)
        if entry is None:
            return None
        if time.time() > entry.expires_at:
            del self._cache[key]
            return None
        return entry.data

    def _set_cached(self, key: str, value: Any) -> None:
        """Set value in cache with TTL."""
        self._cache[key] = CacheEntry(
            data=value,
            expires_at=time.time() + self.cache_ttl,
        )

    def clear_cache(self) -> None:
        """Clear the vulnerability cache."""
        self._cache.clear()

    def close(self) -> None:
        """Close the HTTP client."""
        self._client.close()

    def __enter__(self) -> "OSVClient":
        """Context manager entry."""
        return self

    def __exit__(self, *args: Any) -> None:
        """Context manager exit."""
        self.close()
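
For orientation, a minimal usage sketch of the client above (not part of the package). It assumes security_use.models.Vulnerability exposes the constructor fields shown earlier (id, severity, fixed_version) as attributes; the package/version pairs are arbitrary example inputs.

from security_use.osv_client import OSVClient

# Example inputs; any PyPI name/version pair can be substituted.
with OSVClient(timeout=10.0) as client:
    vulns = client.query_package("requests", "2.25.0")
    for v in vulns:
        # id, severity and fixed_version mirror the Vulnerability fields built above.
        print(v.id, v.severity, v.fixed_version)

    # Batch form: dict keyed by (normalized_name, version).
    results = client.query_batch([("requests", "2.25.0"), ("flask", "1.0")])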
security_use/parsers/__init__.py
@@ -0,0 +1,16 @@
"""Dependency file parsers."""

from security_use.parsers.base import Dependency, DependencyParser
from security_use.parsers.requirements import RequirementsParser
from security_use.parsers.pyproject import PyProjectParser
from security_use.parsers.pipfile import PipfileParser
from security_use.parsers.poetry_lock import PoetryLockParser

__all__ = [
    "Dependency",
    "DependencyParser",
    "RequirementsParser",
    "PyProjectParser",
    "PipfileParser",
    "PoetryLockParser",
]
security_use/parsers/base.py
@@ -0,0 +1,43 @@
"""Base classes for dependency parsers."""

from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional


@dataclass
class Dependency:
    """Represents a parsed dependency."""

    name: str
    version: Optional[str]
    version_spec: Optional[str] = None
    line_number: Optional[int] = None
    extras: list[str] | None = None

    @property
    def normalized_name(self) -> str:
        """Return normalized package name (lowercase; hyphens and dots to underscores)."""
        return self.name.lower().replace("-", "_").replace(".", "_")


class DependencyParser(ABC):
    """Abstract base class for dependency file parsers."""

    @abstractmethod
    def parse(self, content: str) -> list[Dependency]:
        """Parse dependency file content and return list of dependencies.

        Args:
            content: The file content to parse.

        Returns:
            List of Dependency objects.
        """
        pass

    @classmethod
    @abstractmethod
    def supported_filenames(cls) -> list[str]:
        """Return list of filenames this parser supports."""
        pass
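
To illustrate the contract DependencyParser defines, here is a hypothetical subclass (not part of this package) for a plain name==version line format; the class name and the filename it claims are invented for the example.

from security_use.parsers.base import Dependency, DependencyParser


class PinnedListParser(DependencyParser):
    """Hypothetical parser for a plain 'name==version' line format."""

    def parse(self, content: str) -> list[Dependency]:
        deps = []
        for line_no, line in enumerate(content.splitlines(), start=1):
            line = line.strip()
            if not line or "==" not in line:
                continue
            name, _, version = line.partition("==")
            deps.append(
                Dependency(
                    name=name.strip(),
                    version=version.strip(),
                    version_spec=f"=={version.strip()}",
                    line_number=line_no,
                )
            )
        return deps

    @classmethod
    def supported_filenames(cls) -> list[str]:
        return ["pinned.txt"]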
security_use/parsers/pipfile.py
@@ -0,0 +1,133 @@
"""Parser for Pipfile and Pipfile.lock files."""

import json
import re
from typing import Any, Optional

from security_use.parsers.base import Dependency, DependencyParser

try:
    import tomllib
except ImportError:
    import tomli as tomllib  # type: ignore[import-not-found]


class PipfileParser(DependencyParser):
    """Parser for Pipfile (TOML format)."""

    def parse(self, content: str) -> list[Dependency]:
        """Parse Pipfile content."""
        try:
            data = tomllib.loads(content)
        except Exception:
            return []

        dependencies: list[Dependency] = []

        # Parse packages
        dependencies.extend(self._parse_section(data.get("packages", {})))

        # Parse dev-packages
        dependencies.extend(self._parse_section(data.get("dev-packages", {})))

        return dependencies

    def _parse_section(self, packages: dict[str, Any]) -> list[Dependency]:
        """Parse a packages section."""
        dependencies = []

        for name, spec in packages.items():
            version = None
            version_spec = None
            extras = None

            if isinstance(spec, str):
                version, version_spec = self._parse_version_spec(spec)
            elif isinstance(spec, dict):
                ver = spec.get("version")
                if ver:
                    version, version_spec = self._parse_version_spec(ver)
                extras = spec.get("extras")

            dependencies.append(
                Dependency(
                    name=name,
                    version=version,
                    version_spec=version_spec,
                    extras=extras,
                )
            )

        return dependencies

    def _parse_version_spec(self, spec: str) -> tuple[Optional[str], str]:
        """Parse a version specifier."""
        spec = spec.strip()

        if spec == "*":
            return None, spec

        # Extract exact version
        if spec.startswith("=="):
            return spec[2:].strip(), spec

        # For ranges, try to extract a version number
        match = re.search(r"[\d.]+", spec)
        if match:
            return match.group(0), spec

        return None, spec

    @classmethod
    def supported_filenames(cls) -> list[str]:
        """Return supported filenames."""
        return ["Pipfile"]


class PipfileLockParser(DependencyParser):
    """Parser for Pipfile.lock (JSON format)."""

    def parse(self, content: str) -> list[Dependency]:
        """Parse Pipfile.lock content."""
        try:
            data = json.loads(content)
        except json.JSONDecodeError:
            return []

        dependencies: list[Dependency] = []

        # Parse default packages
        dependencies.extend(self._parse_section(data.get("default", {})))

        # Parse develop packages
        dependencies.extend(self._parse_section(data.get("develop", {})))

        return dependencies

    def _parse_section(self, packages: dict[str, Any]) -> list[Dependency]:
        """Parse a packages section."""
        dependencies = []

        for name, info in packages.items():
            version = None
            if isinstance(info, dict):
                ver = info.get("version", "")
                if ver.startswith("=="):
                    version = ver[2:]
                else:
                    version = ver

            dependencies.append(
                Dependency(
                    name=name,
                    version=version,
                    version_spec=info.get("version") if isinstance(info, dict) else None,
                )
            )

        return dependencies

    @classmethod
    def supported_filenames(cls) -> list[str]:
        """Return supported filenames."""
        return ["Pipfile.lock"]
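
A small self-contained sketch of how PipfileParser behaves on example input (the Pipfile text below is invented for illustration). Per the version-spec handling above, a pinned "==" spec yields an exact version, a range spec yields the first version number found in it, and "*" yields no version.

from security_use.parsers.pipfile import PipfileParser

# Invented example Pipfile content.
PIPFILE = """
[packages]
requests = ">=2.20"
flask = {version = "==1.1.2", extras = ["async"]}

[dev-packages]
pytest = "*"
"""

for dep in PipfileParser().parse(PIPFILE):
    print(dep.name, dep.version, dep.version_spec, dep.extras)
# requests  2.20   >=2.20   None
# flask     1.1.2  ==1.1.2  ['async']
# pytest    None   *        None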
security_use/parsers/poetry_lock.py
@@ -0,0 +1,42 @@
"""Parser for poetry.lock files."""

from security_use.parsers.base import Dependency, DependencyParser

try:
    import tomllib
except ImportError:
    import tomli as tomllib  # type: ignore[import-not-found]


class PoetryLockParser(DependencyParser):
    """Parser for poetry.lock files (TOML format)."""

    def parse(self, content: str) -> list[Dependency]:
        """Parse poetry.lock content."""
        try:
            data = tomllib.loads(content)
        except Exception:
            return []

        dependencies: list[Dependency] = []

        # Parse package entries
        for package in data.get("package", []):
            name = package.get("name")
            version = package.get("version")

            if name:
                dependencies.append(
                    Dependency(
                        name=name,
                        version=version,
                        version_spec=f"=={version}" if version else None,
                    )
                )

        return dependencies

    @classmethod
    def supported_filenames(cls) -> list[str]:
        """Return supported filenames."""
        return ["poetry.lock"]
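
Finally, a sketch of how the parsers and the OSV client shown in this diff appear intended to compose (the lock-file path is a placeholder): parsed dependencies become the (name, version) tuples that query_batch expects.

from pathlib import Path

from security_use.osv_client import OSVClient
from security_use.parsers.poetry_lock import PoetryLockParser

# Placeholder path; point it at a real poetry.lock.
deps = PoetryLockParser().parse(Path("poetry.lock").read_text())

# query_batch takes (package_name, version) tuples; skip entries with no pinned version.
pairs = [(d.name, d.version) for d in deps if d.version]

with OSVClient() as client:
    for (name, version), vulns in client.query_batch(pairs).items():
        if vulns:
            print(f"{name} {version}: {len(vulns)} known vulnerabilities")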