cve-sentinel 0.1.2__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- cve_sentinel/__init__.py +4 -0
- cve_sentinel/__main__.py +18 -0
- cve_sentinel/analyzers/__init__.py +19 -0
- cve_sentinel/analyzers/base.py +274 -0
- cve_sentinel/analyzers/go.py +186 -0
- cve_sentinel/analyzers/maven.py +291 -0
- cve_sentinel/analyzers/npm.py +586 -0
- cve_sentinel/analyzers/php.py +238 -0
- cve_sentinel/analyzers/python.py +435 -0
- cve_sentinel/analyzers/ruby.py +182 -0
- cve_sentinel/analyzers/rust.py +199 -0
- cve_sentinel/cli.py +517 -0
- cve_sentinel/config.py +347 -0
- cve_sentinel/fetchers/__init__.py +22 -0
- cve_sentinel/fetchers/nvd.py +544 -0
- cve_sentinel/fetchers/osv.py +719 -0
- cve_sentinel/matcher.py +496 -0
- cve_sentinel/reporter.py +549 -0
- cve_sentinel/scanner.py +513 -0
- cve_sentinel/scanners/__init__.py +13 -0
- cve_sentinel/scanners/import_scanner.py +1121 -0
- cve_sentinel/utils/__init__.py +5 -0
- cve_sentinel/utils/cache.py +61 -0
- cve_sentinel-0.1.2.dist-info/METADATA +454 -0
- cve_sentinel-0.1.2.dist-info/RECORD +28 -0
- cve_sentinel-0.1.2.dist-info/WHEEL +4 -0
- cve_sentinel-0.1.2.dist-info/entry_points.txt +2 -0
- cve_sentinel-0.1.2.dist-info/licenses/LICENSE +21 -0
cve_sentinel/__init__.py
ADDED
cve_sentinel/__main__.py
ADDED
@@ -0,0 +1,18 @@
+"""CVE Sentinel CLI entry point.
+
+Allows running:
+    python -m cve_sentinel [command] [args]
+
+Commands:
+    scan - Scan project for CVE vulnerabilities (default)
+    init - Initialize CVE Sentinel in a project
+    update - Update CVE Sentinel to the latest version
+    uninstall - Uninstall CVE Sentinel
+"""
+
+import sys
+
+from cve_sentinel.cli import main
+
+if __name__ == "__main__":
+    sys.exit(main())
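
The entry point above only shows a zero-argument call to `main()` whose return value is passed to `sys.exit()`; how the subcommands listed in the docstring are parsed is not visible in this diff. A minimal sketch of driving the same entry point in-process, assuming `main()` reads `sys.argv` like a typical argparse-style CLI (the `scan` subcommand name comes from the docstring; everything else here is illustrative, not part of the package):

import sys

from cve_sentinel.cli import main

if __name__ == "__main__":
    # Hypothetical in-process equivalent of "python -m cve_sentinel scan".
    # Assumption: main() inspects sys.argv; only the bare main() call and the
    # sys.exit(main()) pattern are confirmed by the diff.
    sys.argv = ["cve-sentinel", "scan"]
    raise SystemExit(main())
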
cve_sentinel/analyzers/__init__.py
ADDED
@@ -0,0 +1,19 @@
+"""Dependency analyzers for various package managers."""
+
+from cve_sentinel.analyzers.base import (
+    AnalysisResult,
+    AnalyzerRegistry,
+    BaseAnalyzer,
+    FileDetector,
+    Package,
+)
+from cve_sentinel.analyzers.python import PythonAnalyzer
+
+__all__ = [
+    "AnalysisResult",
+    "AnalyzerRegistry",
+    "BaseAnalyzer",
+    "FileDetector",
+    "Package",
+    "PythonAnalyzer",
+]
cve_sentinel/analyzers/base.py
ADDED
@@ -0,0 +1,274 @@
+"""Base analyzer class and common data models."""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Optional, Set
+
+
+@dataclass
+class Package:
+    """Represents a detected package dependency."""
+
+    name: str
+    version: str
+    ecosystem: str  # npm, pypi, go, maven, rubygems, crates.io, packagist
+    source_file: Path
+    source_line: Optional[int] = None
+    is_direct: bool = True  # True for direct dependency, False for transitive
+
+    def __hash__(self) -> int:
+        return hash((self.name, self.version, self.ecosystem))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Package):
+            return False
+        return (
+            self.name == other.name
+            and self.version == other.version
+            and self.ecosystem == other.ecosystem
+        )
+
+
+@dataclass
+class AnalysisResult:
+    """Result of dependency analysis."""
+
+    packages: List[Package] = field(default_factory=list)
+    errors: List[str] = field(default_factory=list)
+    scanned_files: List[Path] = field(default_factory=list)
+
+    @property
+    def package_count(self) -> int:
+        """Return total number of packages found."""
+        return len(self.packages)
+
+    @property
+    def direct_packages(self) -> List[Package]:
+        """Return only direct dependencies."""
+        return [p for p in self.packages if p.is_direct]
+
+    @property
+    def transitive_packages(self) -> List[Package]:
+        """Return only transitive dependencies."""
+        return [p for p in self.packages if not p.is_direct]
+
+    def merge(self, other: AnalysisResult) -> AnalysisResult:
+        """Merge another result into this one."""
+        return AnalysisResult(
+            packages=self.packages + other.packages,
+            errors=self.errors + other.errors,
+            scanned_files=self.scanned_files + other.scanned_files,
+        )
+
+
+class BaseAnalyzer(ABC):
+    """Abstract base class for dependency analyzers."""
+
+    @property
+    @abstractmethod
+    def ecosystem(self) -> str:
+        """Return the ecosystem name (npm, pypi, etc.)."""
+        ...
+
+    @property
+    @abstractmethod
+    def manifest_patterns(self) -> List[str]:
+        """Return glob patterns for manifest files (e.g., package.json)."""
+        ...
+
+    @property
+    @abstractmethod
+    def lock_patterns(self) -> List[str]:
+        """Return glob patterns for lock files (e.g., package-lock.json)."""
+        ...
+
+    @abstractmethod
+    def detect_files(self, path: Path) -> List[Path]:
+        """Detect dependency files in the given path."""
+        ...
+
+    @abstractmethod
+    def parse(self, file_path: Path) -> List[Package]:
+        """Parse a dependency file and return list of packages."""
+        ...
+
+    def analyze(self, path: Path, exclude_patterns: Optional[List[str]] = None) -> AnalysisResult:
+        """Analyze dependencies in the given path.
+
+        Args:
+            path: Directory to analyze
+            exclude_patterns: Glob patterns to exclude from scanning
+
+        Returns:
+            AnalysisResult with found packages and any errors
+        """
+        result = AnalysisResult()
+        try:
+            files = self.detect_files(path)
+            # Filter excluded paths
+            if exclude_patterns:
+                files = [f for f in files if not _matches_any_pattern(f, exclude_patterns)]
+
+            for file_path in files:
+                result.scanned_files.append(file_path)
+                try:
+                    packages = self.parse(file_path)
+                    result.packages.extend(packages)
+                except Exception as e:
+                    result.errors.append(f"Error parsing {file_path}: {e}")
+        except Exception as e:
+            result.errors.append(f"Error detecting files: {e}")
+
+        return result
+
+
+def _matches_any_pattern(path: Path, patterns: List[str]) -> bool:
+    """Check if path matches any of the given glob patterns."""
+    path_str = str(path)
+    for pattern in patterns:
+        # Support common patterns like node_modules/**, .git/**, etc.
+        if pattern.endswith("/**"):
+            dir_pattern = pattern[:-3]
+            if dir_pattern in path_str:
+                return True
+        elif pattern in path_str:
+            return True
+    return False
+
+
+class AnalyzerRegistry:
+    """Registry for managing dependency analyzers."""
+
+    _instance: Optional[AnalyzerRegistry] = None
+    _analyzers: List[BaseAnalyzer]
+
+    def __init__(self) -> None:
+        self._analyzers = []
+
+    @classmethod
+    def get_instance(cls) -> AnalyzerRegistry:
+        """Get singleton instance."""
+        if cls._instance is None:
+            cls._instance = AnalyzerRegistry()
+        return cls._instance
+
+    @classmethod
+    def reset(cls) -> None:
+        """Reset the registry (mainly for testing)."""
+        cls._instance = None
+
+    def register(self, analyzer: BaseAnalyzer) -> None:
+        """Register an analyzer."""
+        # Avoid duplicate registration
+        for existing in self._analyzers:
+            if existing.ecosystem == analyzer.ecosystem:
+                return
+        self._analyzers.append(analyzer)
+
+    def get_all(self) -> List[BaseAnalyzer]:
+        """Get all registered analyzers."""
+        return self._analyzers.copy()
+
+    def get_by_ecosystem(self, ecosystem: str) -> Optional[BaseAnalyzer]:
+        """Get analyzer by ecosystem name."""
+        for analyzer in self._analyzers:
+            if analyzer.ecosystem == ecosystem:
+                return analyzer
+        return None
+
+    def clear(self) -> None:
+        """Clear all registered analyzers."""
+        self._analyzers = []
+
+
+class FileDetector:
+    """Utility for detecting dependency files in a directory."""
+
+    def __init__(
+        self,
+        exclude_patterns: Optional[List[str]] = None,
+        max_depth: Optional[int] = None,
+    ) -> None:
+        """Initialize file detector.
+
+        Args:
+            exclude_patterns: Glob patterns to exclude (e.g., node_modules/**)
+            max_depth: Maximum directory depth to search (None for unlimited)
+        """
+        self.exclude_patterns = exclude_patterns or []
+        self.max_depth = max_depth
+        # Default exclusions
+        self._default_excludes: Set[str] = {
+            "node_modules",
+            ".git",
+            ".svn",
+            ".hg",
+            "__pycache__",
+            ".pytest_cache",
+            ".mypy_cache",
+            ".tox",
+            ".venv",
+            "venv",
+            ".env",
+            "dist",
+            "build",
+            ".eggs",
+        }
+
+    def find_files(
+        self,
+        root: Path,
+        patterns: List[str],
+        include_default_excludes: bool = True,
+    ) -> List[Path]:
+        """Find files matching patterns in the given root directory.
+
+        Args:
+            root: Root directory to search
+            patterns: Glob patterns to match (e.g., ["package.json", "*/package.json"])
+            include_default_excludes: Whether to exclude common directories
+
+        Returns:
+            List of matching file paths
+        """
+        if not root.is_dir():
+            return []
+
+        found_files: List[Path] = []
+        exclude_dirs = self._default_excludes if include_default_excludes else set()
+
+        # Add user-specified exclusions
+        for pattern in self.exclude_patterns:
+            if pattern.endswith("/**"):
+                exclude_dirs.add(pattern[:-3])
+
+        self._search_recursive(root, patterns, found_files, exclude_dirs, 0)
+        return sorted(found_files)
+
+    def _search_recursive(
+        self,
+        current: Path,
+        patterns: List[str],
+        found: List[Path],
+        exclude_dirs: Set[str],
+        depth: int,
+    ) -> None:
+        """Recursively search for files."""
+        if self.max_depth is not None and depth > self.max_depth:
+            return
+
+        try:
+            for item in current.iterdir():
+                if item.is_dir():
+                    if item.name not in exclude_dirs:
+                        self._search_recursive(item, patterns, found, exclude_dirs, depth + 1)
+                elif item.is_file():
+                    for pattern in patterns:
+                        if item.match(pattern):
+                            found.append(item)
+                            break
+        except PermissionError:
+            pass  # Skip directories we can't access
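
To make the contract in base.py concrete, here is a minimal sketch (not part of the package) of a toy analyzer that fills in the abstract members and registers itself, following the same pattern the bundled analyzers use. The `ToyAnalyzer` class, the `deps.txt` file name, and its "name==version" line format are invented for illustration; everything imported from `cve_sentinel.analyzers.base` is taken from the diff above.

from pathlib import Path
from typing import List

from cve_sentinel.analyzers.base import AnalyzerRegistry, BaseAnalyzer, FileDetector, Package


class ToyAnalyzer(BaseAnalyzer):
    """Toy analyzer for a hypothetical deps.txt manifest of "name==version" lines."""

    @property
    def ecosystem(self) -> str:
        return "toy"

    @property
    def manifest_patterns(self) -> List[str]:
        return ["deps.txt"]

    @property
    def lock_patterns(self) -> List[str]:
        return []

    def detect_files(self, path: Path) -> List[Path]:
        # FileDetector skips node_modules, .git, build dirs, etc. by default.
        return FileDetector().find_files(path, self.manifest_patterns)

    def parse(self, file_path: Path) -> List[Package]:
        packages: List[Package] = []
        for line_num, line in enumerate(file_path.read_text().splitlines(), start=1):
            if "==" in line:
                name, version = line.split("==", 1)
                packages.append(
                    Package(
                        name=name.strip(),
                        version=version.strip(),
                        ecosystem=self.ecosystem,
                        source_file=file_path,
                        source_line=line_num,
                    )
                )
        return packages


# Register once per ecosystem; analyze() walks detect_files() output and
# collects per-file parse errors instead of raising.
AnalyzerRegistry.get_instance().register(ToyAnalyzer())
result = ToyAnalyzer().analyze(Path("."))
print(result.package_count, result.direct_packages, result.errors)
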
cve_sentinel/analyzers/go.py
ADDED
@@ -0,0 +1,186 @@
+"""Go dependency analyzer."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from cve_sentinel.analyzers.base import (
+    AnalyzerRegistry,
+    BaseAnalyzer,
+    FileDetector,
+    Package,
+)
+
+
+class GoAnalyzer(BaseAnalyzer):
+    """Analyzer for Go modules.
+
+    Supports:
+    - go.mod (Level 1: direct dependencies)
+    - go.sum (Level 2: transitive dependencies)
+    """
+
+    @property
+    def ecosystem(self) -> str:
+        """Return the ecosystem name."""
+        return "go"
+
+    @property
+    def manifest_patterns(self) -> List[str]:
+        """Return glob patterns for manifest files."""
+        default_patterns = ["go.mod"]
+        custom = self._custom_patterns.get("manifests", [])
+        return default_patterns + custom
+
+    @property
+    def lock_patterns(self) -> List[str]:
+        """Return glob patterns for lock files."""
+        default_patterns = ["go.sum"]
+        custom = self._custom_patterns.get("locks", [])
+        return default_patterns + custom
+
+    def __init__(
+        self,
+        analysis_level: int = 2,
+        custom_patterns: Optional[Dict[str, List[str]]] = None,
+    ) -> None:
+        """Initialize Go analyzer.
+
+        Args:
+            analysis_level: Analysis depth (1=manifest only, 2=include lock files)
+            custom_patterns: Optional custom file patterns {"manifests": [...], "locks": [...]}
+        """
+        self.analysis_level = analysis_level
+        self._custom_patterns = custom_patterns or {}
+        self._file_detector = FileDetector()
+
+    def detect_files(self, path: Path) -> List[Path]:
+        """Detect Go dependency files."""
+        patterns = self.manifest_patterns.copy()
+        if self.analysis_level >= 2:
+            patterns.extend(self.lock_patterns)
+        return self._file_detector.find_files(path, patterns)
+
+    def parse(self, file_path: Path) -> List[Package]:
+        """Parse a Go dependency file."""
+        if file_path.name == "go.mod":
+            return self._parse_go_mod(file_path)
+        elif file_path.name == "go.sum":
+            return self._parse_go_sum(file_path)
+        return []
+
+    def _parse_go_mod(self, file_path: Path) -> List[Package]:
+        """Parse go.mod file."""
+        packages: List[Package] = []
+        content = file_path.read_text(encoding="utf-8")
+        lines = content.split("\n")
+
+        in_require_block = False
+        line_num = 0
+
+        for line in lines:
+            line_num += 1
+            stripped = line.strip()
+
+            # Skip comments and empty lines
+            if not stripped or stripped.startswith("//"):
+                continue
+
+            # Check for require block start
+            if stripped == "require (":
+                in_require_block = True
+                continue
+
+            # Check for block end
+            if stripped == ")":
+                in_require_block = False
+                continue
+
+            # Parse single-line require
+            if stripped.startswith("require ") and "(" not in stripped:
+                match = re.match(r"require\s+(\S+)\s+(\S+)", stripped)
+                if match:
+                    module_path = match.group(1)
+                    version = self._normalize_version(match.group(2))
+                    packages.append(
+                        Package(
+                            name=module_path,
+                            version=version,
+                            ecosystem=self.ecosystem,
+                            source_file=file_path,
+                            source_line=line_num,
+                            is_direct=True,
+                        )
+                    )
+                continue
+
+            # Parse require block entries
+            if in_require_block:
+                # Format: module/path v1.2.3 [// indirect]
+                match = re.match(r"(\S+)\s+(\S+)(?:\s+//\s*indirect)?", stripped)
+                if match:
+                    module_path = match.group(1)
+                    version = self._normalize_version(match.group(2))
+                    is_indirect = "// indirect" in stripped
+                    packages.append(
+                        Package(
+                            name=module_path,
+                            version=version,
+                            ecosystem=self.ecosystem,
+                            source_file=file_path,
+                            source_line=line_num,
+                            is_direct=not is_indirect,
+                        )
+                    )
+
+        return packages
+
+    def _parse_go_sum(self, file_path: Path) -> List[Package]:
+        """Parse go.sum file."""
+        packages: List[Package] = []
+        content = file_path.read_text(encoding="utf-8")
+        seen: set = set()
+
+        for line in content.split("\n"):
+            stripped = line.strip()
+            if not stripped:
+                continue
+
+            # Format: module/path v1.2.3 h1:hash=
+            # or: module/path v1.2.3/go.mod h1:hash=
+            match = re.match(r"(\S+)\s+(\S+?)(?:/go\.mod)?\s+", stripped)
+            if match:
+                module_path = match.group(1)
+                version = self._normalize_version(match.group(2))
+
+                pkg_key = (module_path, version)
+                if pkg_key not in seen:
+                    seen.add(pkg_key)
+                    packages.append(
+                        Package(
+                            name=module_path,
+                            version=version,
+                            ecosystem=self.ecosystem,
+                            source_file=file_path,
+                            source_line=None,
+                            is_direct=False,
+                        )
+                    )
+
+        return packages
+
+    def _normalize_version(self, version: str) -> str:
+        """Normalize Go version string."""
+        # Remove v prefix if present
+        if version.startswith("v"):
+            version = version[1:]
+        # Handle +incompatible suffix
+        version = version.replace("+incompatible", "")
+        return version
+
+
+def register() -> None:
+    """Register the Go analyzer."""
+    AnalyzerRegistry.get_instance().register(GoAnalyzer())
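
A short sketch of how GoAnalyzer's go.mod parsing behaves, exercising the public parse() method on a temporary file. The module names and versions in GO_MOD are made up for illustration; the GoAnalyzer API itself is taken from the diff above. Direct requirements keep is_direct=True, "// indirect" entries are marked transitive, and _normalize_version strips the leading "v".

import tempfile
from pathlib import Path

from cve_sentinel.analyzers.go import GoAnalyzer

GO_MOD = """\
module example.com/demo

go 1.22

require (
    github.com/sirupsen/logrus v1.9.3
    golang.org/x/sys v0.15.0 // indirect
)
"""

with tempfile.TemporaryDirectory() as tmp:
    go_mod = Path(tmp) / "go.mod"
    go_mod.write_text(GO_MOD, encoding="utf-8")
    # analysis_level=1 would limit detect_files() to go.mod; parse() is called directly here.
    for pkg in GoAnalyzer(analysis_level=1).parse(go_mod):
        print(pkg.name, pkg.version, pkg.is_direct)
    # Expected output:
    #   github.com/sirupsen/logrus 1.9.3 True
    #   golang.org/x/sys 0.15.0 False
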