gate-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gate/__init__.py +1 -0
- gate/checks/__init__.py +0 -0
- gate/checks/cve.py +42 -0
- gate/checks/integrity.py +59 -0
- gate/checks/maintainer.py +32 -0
- gate/checks/quarantine.py +18 -0
- gate/checks/scripts.py +51 -0
- gate/cli.py +283 -0
- gate/config.py +27 -0
- gate/hooks/__init__.py +0 -0
- gate/hooks/precommit.py +56 -0
- gate/output.py +59 -0
- gate/registry/__init__.py +0 -0
- gate/registry/npm.py +85 -0
- gate/registry/pypi.py +81 -0
- gate/sbom.py +86 -0
- gate_cli-0.1.0.dist-info/METADATA +149 -0
- gate_cli-0.1.0.dist-info/RECORD +21 -0
- gate_cli-0.1.0.dist-info/WHEEL +4 -0
- gate_cli-0.1.0.dist-info/entry_points.txt +2 -0
- gate_cli-0.1.0.dist-info/licenses/LICENSE +21 -0
gate/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.0"
|
gate/checks/__init__.py
ADDED
|
File without changes
|
gate/checks/cve.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import urllib.request
|
|
3
|
+
import urllib.error
|
|
4
|
+
|
|
5
|
+
OSV_API = "https://api.osv.dev/v1/query"


def check_cve(name: str, version: str, ecosystem: str) -> list[dict]:
    """Query the OSV database for known vulnerabilities in one package version.

    Returns a deduplicated list of ``{"id", "summary"}`` dicts, preferring a
    CVE alias over the native OSV identifier when one exists. Any network or
    decoding failure is treated as "no findings" — this is a best-effort check.
    """
    body = json.dumps({
        "package": {"name": name, "ecosystem": ecosystem},
        "version": version,
    }).encode()

    request = urllib.request.Request(
        OSV_API,
        data=body,
        headers={"Content-Type": "application/json"},
        method="POST",
    )

    try:
        with urllib.request.urlopen(request, timeout=10) as response:
            parsed = json.loads(response.read())
    except Exception:
        # Offline / API down: report nothing rather than break the scan.
        return []

    reported: set[str] = set()
    findings: list[dict] = []
    for entry in parsed.get("vulns", []):
        # Prefer the first CVE alias; fall back to the OSV/GHSA id.
        identifier = next(
            (alias for alias in entry.get("aliases", []) if alias.startswith("CVE-")),
            entry.get("id", ""),
        )
        if identifier in reported:
            continue
        reported.add(identifier)
        findings.append({
            "id": identifier,
            "summary": entry.get("summary", "No description"),
        })

    return findings
|
gate/checks/integrity.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
def check_integrity(local: str, remote: str) -> dict:
    """
    Compare a locally stored integrity hash against the value from the registry.

    Both values are expected in standard format:
        npm:  "sha512-<base64>"
        PyPI: "sha256:<hex>"
    """
    # Nothing to verify when either side is missing — skip rather than fail.
    if not (local and remote):
        return {"ok": True, "skipped": True, "message": "No hash to compare"}

    same = remote.strip() == local.strip()
    return {
        "ok": same,
        "skipped": False,
        "local": local,
        "remote": remote,
        "message": None if same else "Hash mismatch — package may have been tampered with",
    }
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def parse_requirements_hashes(path) -> dict[str, dict[str, str]]:
    """
    Parse hashes from a requirements.txt file that was generated with
    pip-compile --generate-hashes or pip install --require-hashes.

    Returns: {package_name_lower: {version: "sha256:<hex>"}}

    Example line:
        requests==2.28.0 \\
            --hash=sha256:ae72a32d...
    """
    import re
    from pathlib import Path

    raw = Path(path).read_text(encoding="utf-8")
    # Fold backslash continuations so each requirement sits on one line.
    joined = re.sub(r"\\\n\s*", " ", raw)

    hashes: dict[str, dict[str, str]] = {}
    for entry in joined.splitlines():
        entry = entry.strip()
        # Only pinned (==) requirement lines can carry a usable hash.
        if not entry or entry.startswith("#") or "==" not in entry:
            continue

        requirement = entry.split()[0]
        pkg, _, ver = requirement.partition("==")

        # Keep just the first sha256 hash on the line.
        found = re.search(r"--hash=sha256:([a-f0-9]+)", entry)
        if found:
            hashes.setdefault(pkg.strip().lower(), {})[ver.strip()] = f"sha256:{found.group(1)}"

    return hashes
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
def check_maintainer_change(
    current: list[str],
    previous: list[str],
) -> dict:
    """
    Compare maintainer lists between two versions.
    Returns a dict describing any changes found.
    """
    now, before = set(current), set(previous)
    gained = now - before
    lost = before - now

    if gained or lost:
        return {
            "ok": False,
            "added": sorted(gained),
            "removed": sorted(lost),
            "message": _format_message(gained, lost),
        }

    return {"ok": True, "added": [], "removed": []}


def _format_message(added: set[str], removed: set[str]) -> str:
    """Build a one-line human-readable summary of maintainer churn."""
    pieces = []
    if added:
        pieces.append(f"new maintainer(s): {', '.join(sorted(added))}")
    if removed:
        pieces.append(f"removed maintainer(s): {', '.join(sorted(removed))}")
    return "; ".join(pieces)
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
from datetime import datetime, timezone
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def check_quarantine(published: datetime | None, quarantine_days: int = 7) -> dict:
|
|
5
|
+
if published is None:
|
|
6
|
+
return {"ok": True, "days_old": None, "message": None}
|
|
7
|
+
|
|
8
|
+
now = datetime.now(timezone.utc)
|
|
9
|
+
days_old = (now - published).days
|
|
10
|
+
|
|
11
|
+
if days_old < quarantine_days:
|
|
12
|
+
return {
|
|
13
|
+
"ok": False,
|
|
14
|
+
"days_old": days_old,
|
|
15
|
+
"message": f"Published {days_old} day(s) ago (quarantine window: {quarantine_days} days)",
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return {"ok": True, "days_old": days_old, "message": None}
|
gate/checks/scripts.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import re

# Patterns that indicate a script is doing something suspicious
_SUSPICIOUS: list[tuple[str, str]] = [
    (r"\bcurl\b", "network fetch (curl)"),
    (r"\bwget\b", "network fetch (wget)"),
    (r"\bfetch\b", "network fetch (fetch)"),
    (r"\bbase64\b", "base64 encoding"),
    (r"atob\s*\(", "base64 decode (atob)"),
    (r"Buffer\.from\b", "binary decoding (Buffer.from)"),
    (r"\beval\s*\(", "eval execution"),
    (r"Function\s*\(", "dynamic code execution (Function)"),
    (r"\bexec\s*\(", "shell execution (exec)"),
    (r"\bspawn\s*\(", "shell execution (spawn)"),
    (r"\bchild_process\b", "child process usage"),
    (r"https?://", "hardcoded URL"),
    (r"\b(?:\d{1,3}\.){3}\d{1,3}\b", "hardcoded IP address"),
    (r"\bpowershell\b", "PowerShell execution"),
    (r"\bchmod\b", "permission change"),
    (r"process\.env\b", "environment variable access"),
]


def analyze_script(script: str) -> list[str]:
    """Return a list of suspicious pattern descriptions found in the script."""
    # Preserve table order so output is deterministic; matching is
    # case-insensitive to catch e.g. "CURL" or "PowerShell".
    return [
        label
        for regex, label in _SUSPICIOUS
        if re.search(regex, script, re.IGNORECASE)
    ]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def check_install_scripts(install_scripts: dict[str, str]) -> dict:
    """
    Analyze install scripts for suspicious patterns.
    Returns a dict with 'ok', 'findings' (per script), and — when any scripts
    were supplied — the raw script map under 'raw'.
    """
    if not install_scripts:
        return {"ok": True, "findings": {}}

    # Keep only scripts that triggered at least one pattern.
    findings = {
        script_name: hits
        for script_name, hits in (
            (script_name, analyze_script(body))
            for script_name, body in install_scripts.items()
        )
        if hits
    }

    return {
        "ok": not findings,
        "findings": findings,
        "raw": install_scripts,
    }
|
gate/cli.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import json
|
|
3
|
+
import sys
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
from gate import __version__
|
|
7
|
+
from gate.config import load_config, Config
|
|
8
|
+
from gate.registry import pypi, npm
|
|
9
|
+
from gate.checks.quarantine import check_quarantine
|
|
10
|
+
from gate.checks.cve import check_cve
|
|
11
|
+
from gate.checks.scripts import check_install_scripts
|
|
12
|
+
from gate.checks.maintainer import check_maintainer_change
|
|
13
|
+
from gate.checks.integrity import check_integrity, parse_requirements_hashes
|
|
14
|
+
from gate.hooks.precommit import install_hook, uninstall_hook
|
|
15
|
+
from gate import sbom as sbom_mod
|
|
16
|
+
import gate.output as out
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# ── Result helpers ────────────────────────────────────────────────────────────
|
|
20
|
+
|
|
21
|
+
def _check_package(
    name: str,
    version: str | None,
    ecosystem: str,
    config: Config,
    local_integrity: str | None = None,
) -> dict:
    """Run every check for one package and collect human-readable findings.

    Args:
        name: package name as it appears in the registry.
        version: exact version, or None to resolve the registry's latest.
        ecosystem: "PyPI" routes to the PyPI client; anything else to npm.
        config: loaded gate settings; config.fail_on decides which failing
            checks become errors instead of warnings.
        local_integrity: locally recorded hash (lock file / --hash), if any.

    Returns:
        {"errors": [...], "warnings": [...], "version": resolved version or
        None when the package was not found}.
    """
    result: dict = {"errors": [], "warnings": [], "version": None}

    if ecosystem == "PyPI":
        info = pypi.get_package_info(name, version)
    else:
        info = npm.get_package_info(name, version)

    if info is None:
        # Registry miss (or network failure inside the client): single error.
        result["errors"].append("Package not found in registry")
        return result

    result["version"] = info["version"]

    # Quarantine: too-recent releases are flagged; severity follows fail_on.
    q = check_quarantine(info.get("published"), config.quarantine_days)
    if not q["ok"]:
        if "recent_release" in config.fail_on:
            result["errors"].append(q["message"])
        else:
            result["warnings"].append(q["message"])

    # CVE: any known vulnerability is always an error (summary truncated).
    for v in check_cve(name, info["version"], ecosystem):
        result["errors"].append(f"{v['id']}: {v['summary'][:72]}")

    # Maintainer change: only meaningful when the registry client could
    # determine the previous version's maintainer list.
    current_m = info.get("maintainers") or []
    previous_m = info.get("previous_maintainers")
    if previous_m is not None:
        m = check_maintainer_change(current_m, previous_m)
        if not m["ok"]:
            if "maintainer_change" in config.fail_on:
                result["errors"].append(f"maintainer change: {m['message']}")
            else:
                result["warnings"].append(f"maintainer change: {m['message']}")

    # Integrity / hash verification: only when both sides have a hash;
    # a mismatch is always an error regardless of fail_on.
    remote_integrity = info.get("remote_integrity")
    if local_integrity and remote_integrity:
        integ = check_integrity(local_integrity, remote_integrity)
        if not integ["ok"] and not integ.get("skipped"):
            result["errors"].append(integ["message"])
            result["errors"].append(f" local: {local_integrity}")
            result["errors"].append(f" remote: {remote_integrity}")

    # Install scripts (npm): suspicious scripts follow fail_on severity;
    # benign install scripts are still surfaced as warnings.
    if ecosystem == "npm":
        script_result = check_install_scripts(info.get("install_scripts", {}))
        if not script_result["ok"]:
            for script_name, patterns in script_result["findings"].items():
                cmd = script_result["raw"][script_name]
                msg = f"install script [{script_name}]: {cmd[:60]}"
                detail = f" suspicious: {', '.join(patterns)}"
                if "install_script" in config.fail_on:
                    result["errors"].append(msg)
                    result["errors"].append(detail)
                else:
                    result["warnings"].append(msg)
                    result["warnings"].append(detail)
        elif info.get("install_scripts"):
            # Has install scripts but no suspicious patterns — still worth noting
            for script_name, cmd in info["install_scripts"].items():
                result["warnings"].append(f"install script [{script_name}]: {cmd[:60]}")

    return result
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _print_result(name: str, result: dict) -> None:
    """Render one package's check outcome: fail > warn > ok."""
    version = result.get("version") or ""
    label = out.bold(name)
    if version:
        label += f" {out.dim(version)}"

    if result["errors"]:
        out.fail(label)
        for line in result["errors"]:
            out.error(line)
        return

    if result["warnings"]:
        out.warn(label)
        for line in result["warnings"]:
            out.warning(line)
        return

    out.ok(label)
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
# ── Parsers ───────────────────────────────────────────────────────────────────
|
|
112
|
+
|
|
113
|
+
def _parse_requirements(path: Path) -> list[tuple[str, str | None]]:
|
|
114
|
+
packages = []
|
|
115
|
+
for line in path.read_text(encoding="utf-8").splitlines():
|
|
116
|
+
line = line.strip()
|
|
117
|
+
if not line or line.startswith(("#", "-", "git+", "http")):
|
|
118
|
+
continue
|
|
119
|
+
for sep in ("==", ">=", "<=", "~=", "!=", ">", "<"):
|
|
120
|
+
if sep in line:
|
|
121
|
+
name = line.split(sep)[0].strip()
|
|
122
|
+
version = line.split("==")[1].strip() if "==" in line else None
|
|
123
|
+
packages.append((name, version))
|
|
124
|
+
break
|
|
125
|
+
else:
|
|
126
|
+
packages.append((line, None))
|
|
127
|
+
return packages
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _parse_package_lock(path: Path) -> list[tuple[str, str | None, str | None]]:
|
|
131
|
+
data = json.loads(path.read_text(encoding="utf-8"))
|
|
132
|
+
packages = []
|
|
133
|
+
for pkg_path, pkg_data in data.get("packages", {}).items():
|
|
134
|
+
if not pkg_path:
|
|
135
|
+
continue
|
|
136
|
+
name = pkg_path.removeprefix("node_modules/")
|
|
137
|
+
packages.append((name, pkg_data.get("version"), pkg_data.get("integrity")))
|
|
138
|
+
return packages
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _detect_project() -> tuple[list[tuple[str, str | None, str | None]], str] | None:
    """Detect the lock/manifest file in the CWD and parse it.

    Returns ([(name, version, local_hash)], ecosystem) or None when neither
    requirements.txt nor package-lock.json is present. requirements.txt wins
    when both exist.
    """
    req_path = Path("requirements.txt")
    if req_path.exists():
        known_hashes = parse_requirements_hashes(req_path)
        triples = [
            # A hash is only usable when the requirement is pinned.
            (pkg, ver, known_hashes.get(pkg.lower(), {}).get(ver) if ver else None)
            for pkg, ver in _parse_requirements(req_path)
        ]
        return triples, "PyPI"

    lock_path = Path("package-lock.json")
    if lock_path.exists():
        return _parse_package_lock(lock_path), "npm"

    return None
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
# ── Commands ──────────────────────────────────────────────────────────────────
|
|
156
|
+
|
|
157
|
+
def cmd_init(args: argparse.Namespace) -> None:
    """Install the git pre-commit hook; exit 1 on failure."""
    succeeded, message = install_hook()
    if not succeeded:
        print(out.red(f"✗ {message}"), file=sys.stderr)
        sys.exit(1)
    out.ok(message)
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def cmd_uninstall(args: argparse.Namespace) -> None:
    """Remove the git pre-commit hook; a missing hook is only a warning."""
    succeeded, message = uninstall_hook()
    if succeeded:
        out.ok(message)
    else:
        out.warn(message)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def cmd_check(args: argparse.Namespace) -> None:
    """Check a single package ("name" or "name==version"); exit 1 on errors."""
    config = load_config()
    ecosystem = "npm" if args.npm else "PyPI"

    name, separator, version = args.package.partition("==")
    if not separator:
        name, version = args.package, None

    print()
    outcome = _check_package(name, version, ecosystem, config)
    _print_result(name, outcome)
    print()

    if outcome["errors"] and not args.force:
        sys.exit(1)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def cmd_scan(args: argparse.Namespace) -> None:
    """Scan every package from the detected lock/manifest file.

    Exit codes: 0 when no errors (or --force), 1 when any check errored.
    When --sbom was passed the SBOM is written even on the failing path,
    just before exiting — note the deliberate ordering below.
    """
    config = load_config()
    detected = _detect_project()

    if detected is None:
        # Nothing to scan is not a failure (keeps the pre-commit hook quiet).
        out.warn("No requirements.txt or package-lock.json found")
        sys.exit(0)

    packages, ecosystem = detected

    # --hook suppresses the banner so git commit output stays clean.
    if not args.hook:
        print(f"\nScanning {out.bold(str(len(packages)))} {ecosystem} packages...\n")

    errors = 0
    warnings = 0
    sbom_entries: list[dict] = []

    for name, version, local_integrity in packages:
        result = _check_package(name, version, ecosystem, config, local_integrity)
        _print_result(name, result)
        errors += len(result["errors"])
        warnings += len(result["warnings"])
        # args.sbom is None when --sbom was absent; "" means "to stdout".
        if args.sbom is not None:
            sbom_entries.append({"name": name, **result})

    print()
    if errors:
        print(out.red(f"✗ {errors} error(s)") + f", {warnings} warning(s)")
        # NOTE(review): this hint prints even when --force was passed — cosmetic.
        print(out.dim("Use --force to override and proceed anyway"))
        if not args.force:
            # Write the SBOM before exiting so a failing scan still exports it;
            # the trailing write below is skipped on this path.
            if args.sbom is not None:
                _write_sbom(sbom_entries, ecosystem, args.sbom)
            sys.exit(1)
    elif warnings:
        print(out.green("✓ 0 errors") + f", {out.yellow(f'{warnings} warning(s)')}")
    else:
        print(out.green(f"✓ All {len(packages)} packages passed"))

    # Reached on success, warnings-only, or errors with --force.
    if args.sbom is not None:
        _write_sbom(sbom_entries, ecosystem, args.sbom)
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def _write_sbom(entries: list[dict], ecosystem: str, path: str) -> None:
    """Generate the CycloneDX document and write it to *path* (or stdout for "")."""
    document = sbom_mod.generate(entries, ecosystem)
    destination = path or None
    sbom_mod.write(document, destination)
    if destination:
        print(out.dim(f"SBOM written to {destination}"))
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
# ── Entry point ───────────────────────────────────────────────────────────────
|
|
243
|
+
|
|
244
|
+
def main() -> None:
    """CLI entry point: build the argument parser and dispatch to a command."""
    parser = argparse.ArgumentParser(
        prog="gate",
        description="Supply chain security scanner for npm and pip packages",
    )
    parser.add_argument("--version", action="version", version=f"gate {__version__}")
    subparsers = parser.add_subparsers(dest="command", required=True)

    # Commands without options of their own.
    subparsers.add_parser("init", help="Install git pre-commit hook")
    subparsers.add_parser("uninstall", help="Remove git pre-commit hook")

    check_parser = subparsers.add_parser("check", help="Check a single package")
    check_parser.add_argument("package", help="Package name or name==version")
    check_parser.add_argument("--npm", action="store_true", help="Treat as npm package")
    check_parser.add_argument("--force", action="store_true", help="Exit 0 even on errors")

    scan_parser = subparsers.add_parser("scan", help="Scan all packages in lock file")
    scan_parser.add_argument("--force", action="store_true", help="Exit 0 even on errors")
    # --hook is set by the generated pre-commit hook; hidden from --help.
    scan_parser.add_argument("--hook", action="store_true", help=argparse.SUPPRESS)
    scan_parser.add_argument(
        "--sbom",
        nargs="?",
        const="",
        metavar="FILE",
        help="Export CycloneDX SBOM (optional: path to output file, default: stdout)",
    )

    args = parser.parse_args()

    dispatch = {
        "init": cmd_init,
        "uninstall": cmd_uninstall,
        "check": cmd_check,
        "scan": cmd_scan,
    }
    dispatch[args.command](args)


if __name__ == "__main__":
    main()
|
gate/config.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
import tomllib
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
@dataclass
class Config:
    """User-tunable gate settings, loaded from .gate.toml by load_config()."""

    # Minimum age (days) a release must reach before the quarantine check passes.
    quarantine_days: int = 7
    # Check names whose failures become hard errors (non-zero exit).
    fail_on: list[str] = field(default_factory=lambda: ["critical_cve", "install_script"])
    # Check names whose failures only produce warnings.
    warn_on: list[str] = field(default_factory=lambda: ["recent_release", "maintainer_change"])
+
|
|
12
|
+
|
|
13
|
+
def load_config(path: Path | None = None) -> Config:
    """Load gate settings from a TOML file (default: ./.gate.toml).

    A missing file yields all defaults; missing keys fall back to the
    corresponding Config default. Fix over the original: fallbacks are taken
    from the Config dataclass itself instead of being duplicated here — the
    duplicated warn_on fallback had already drifted (it dropped
    "maintainer_change").
    """
    if path is None:
        path = Path(".gate.toml")

    defaults = Config()
    if not path.exists():
        return defaults

    with path.open("rb") as f:
        data = tomllib.load(f)

    return Config(
        quarantine_days=data.get("quarantine_days", defaults.quarantine_days),
        fail_on=data.get("fail_on", defaults.fail_on),
        warn_on=data.get("warn_on", defaults.warn_on),
    )
|
gate/hooks/__init__.py
ADDED
|
File without changes
|
gate/hooks/precommit.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import stat
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
GATE_MARKER = "# gate pre-commit hook"
# Shell fragment appended to .git/hooks/pre-commit. It only triggers a scan
# when a dependency manifest/lock file is part of the staged changes.
HOOK_SCRIPT = f"""{GATE_MARKER}
if git diff --cached --name-only | grep -qE '(package-lock\\.json|requirements\\.txt|Pipfile\\.lock|poetry\\.lock|pyproject\\.toml)'; then
gate scan --hook || exit 1
fi
"""


def install_hook() -> tuple[bool, str]:
    """Install (or append) the gate pre-commit hook in the current repository.

    Returns (ok, message). Idempotent: a hook already containing GATE_MARKER
    is left untouched. Fix over the original: the hooks directory is created
    if missing, instead of crashing on write.
    """
    git_dir = Path(".git")
    if not git_dir.exists():
        return False, "Not a git repository"
    # NOTE(review): when .git is a worktree/submodule *file* this path is
    # wrong; the original did not handle that case either.

    hooks_dir = git_dir / "hooks"
    # Repositories initialized without templates may lack the hooks directory.
    hooks_dir.mkdir(parents=True, exist_ok=True)
    hook_path = hooks_dir / "pre-commit"

    if hook_path.exists():
        content = hook_path.read_text()
        if GATE_MARKER in content:
            return True, "Hook already installed"
        # Preserve the user's existing hook; append our section after it.
        with hook_path.open("a") as f:
            f.write("\n" + HOOK_SCRIPT)
    else:
        hook_path.write_text("#!/bin/sh\n" + HOOK_SCRIPT)

    # git only runs executable hooks.
    current = hook_path.stat().st_mode
    hook_path.chmod(current | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    return True, "Hook installed"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def uninstall_hook() -> tuple[bool, str]:
    """Strip the gate section from .git/hooks/pre-commit, keeping the rest.

    The section spans from the GATE_MARKER line through the closing ``fi``.
    Returns (ok, message).
    """
    hook_path = Path(".git") / "hooks" / "pre-commit"
    if not hook_path.exists():
        return False, "No pre-commit hook found"

    content = hook_path.read_text()
    if GATE_MARKER not in content:
        return False, "Gate hook not found in pre-commit"

    kept: list[str] = []
    in_gate_section = False
    for line in content.splitlines(keepends=True):
        if GATE_MARKER in line:
            in_gate_section = True
        if in_gate_section:
            # The section ends at its closing "fi"; drop that line too.
            if line.strip() == "fi":
                in_gate_section = False
            continue
        kept.append(line)

    hook_path.write_text("".join(kept))
    return True, "Hook removed"
|
gate/output.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import sys

# ANSI escape codes — stdlib only, zero dependencies
_RED = "\033[31m"
_GREEN = "\033[32m"
_YELLOW = "\033[33m"
_BOLD = "\033[1m"
_DIM = "\033[2m"
_RESET = "\033[0m"

# Colorize only when stdout is attached to an interactive terminal.
_COLOR = sys.stdout.isatty()


def _c(code: str, text: str) -> str:
    """Wrap text in an ANSI escape, or return it untouched when color is off."""
    if not _COLOR:
        return text
    return f"{code}{text}{_RESET}"


def red(text: str) -> str:
    """Render text in red (errors)."""
    return _c(_RED, text)


def green(text: str) -> str:
    """Render text in green (success)."""
    return _c(_GREEN, text)


def yellow(text: str) -> str:
    """Render text in yellow (warnings)."""
    return _c(_YELLOW, text)


def bold(text: str) -> str:
    """Render text in bold."""
    return _c(_BOLD, text)


def dim(text: str) -> str:
    """Render text dimmed."""
    return _c(_DIM, text)


def ok(msg: str) -> None:
    """Print a success line with a green check mark."""
    print(f" {green('✓')} {msg}")


def warn(msg: str) -> None:
    """Print a warning line with a yellow sign."""
    print(f" {yellow('⚠')} {msg}")


def fail(msg: str) -> None:
    """Print a failure line with a red cross."""
    print(f" {red('✗')} {msg}")


def info(msg: str) -> None:
    """Print an informational line, dimmed."""
    print(f" {dim(msg)}")


def error(msg: str) -> None:
    """Print an error detail line in red."""
    print(f" {red(msg)}")


def warning(msg: str) -> None:
    """Print a warning detail line in yellow."""
    print(f" {yellow(msg)}")
|
File without changes
|
gate/registry/npm.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import urllib.request
|
|
3
|
+
import urllib.error
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_package_info(name: str, version: str | None = None) -> dict | None:
    """Fetch one version's metadata from the npm registry.

    Args:
        name: npm package name; "/" is percent-encoded for scoped packages.
        version: exact version, or None to resolve the "latest" dist-tag.

    Returns:
        A dict with name/version/published/install_scripts/maintainers/
        previous_maintainers/remote_integrity, or None on any fetch failure
        or when the version cannot be resolved.
    """
    encoded = name.replace("/", "%2F")
    url = f"https://registry.npmjs.org/{encoded}"
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            data = json.loads(resp.read())
    except urllib.error.HTTPError:
        # 404 etc. — package does not exist (or registry rejected the request).
        return None
    except Exception:
        # Network/timeout/decode failure: treat as not found (best effort).
        return None

    if version is None:
        version = data.get("dist-tags", {}).get("latest")

    if not version or version not in data.get("versions", {}):
        return None

    version_data = data["versions"][version]
    time_str = data.get("time", {}).get(version)
    published = None
    if time_str:
        # Registry timestamps look like "2021-01-01T00:00:00.000Z";
        # fromisoformat needs the offset form. Assumed ISO-8601 — TODO confirm.
        published = datetime.fromisoformat(time_str.replace("Z", "+00:00"))

    # Only lifecycle scripts that run at (un)install time are interesting.
    all_scripts = version_data.get("scripts", {})
    install_scripts = {
        k: v for k, v in all_scripts.items()
        if k in ("install", "preinstall", "postinstall", "preuninstall", "postuninstall")
    }

    current_maintainers = _extract_maintainers(version_data.get("maintainers", []))
    previous_maintainers = _get_previous_maintainers(data, version)

    # dist.integrity is the canonical hash from the registry
    remote_integrity = version_data.get("dist", {}).get("integrity")

    return {
        "name": data["name"],
        "version": version,
        "published": published,
        "install_scripts": install_scripts,
        "maintainers": current_maintainers,
        "previous_maintainers": previous_maintainers,
        "remote_integrity": remote_integrity,
    }
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _extract_maintainers(maintainers: list) -> list[str]:
|
|
54
|
+
return [
|
|
55
|
+
m["name"] if isinstance(m, dict) else str(m)
|
|
56
|
+
for m in maintainers
|
|
57
|
+
]
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _get_previous_maintainers(data: dict, current_version: str) -> list[str] | None:
|
|
61
|
+
"""Return maintainer list from the version published just before current_version."""
|
|
62
|
+
times: dict[str, str] = data.get("time", {})
|
|
63
|
+
versions_with_time = {
|
|
64
|
+
v: t for v, t in times.items()
|
|
65
|
+
if v not in ("created", "modified") and v in data.get("versions", {})
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
if not versions_with_time:
|
|
69
|
+
return None
|
|
70
|
+
|
|
71
|
+
current_time = versions_with_time.get(current_version)
|
|
72
|
+
if not current_time:
|
|
73
|
+
return None
|
|
74
|
+
|
|
75
|
+
earlier = [
|
|
76
|
+
v for v, t in versions_with_time.items()
|
|
77
|
+
if t < current_time and v != current_version
|
|
78
|
+
]
|
|
79
|
+
|
|
80
|
+
if not earlier:
|
|
81
|
+
return None
|
|
82
|
+
|
|
83
|
+
prev_version = max(earlier, key=lambda v: versions_with_time[v])
|
|
84
|
+
prev_data = data["versions"].get(prev_version, {})
|
|
85
|
+
return _extract_maintainers(prev_data.get("maintainers", []))
|
gate/registry/pypi.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import urllib.request
|
|
3
|
+
import urllib.error
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_package_info(name: str, version: str | None = None) -> dict | None:
    """Fetch one release's metadata from the PyPI JSON API.

    Args:
        name: project name on PyPI.
        version: exact version, or None to resolve the latest version.

    Returns:
        A dict with name/version/published/home_page/maintainers/
        previous_maintainers/remote_integrity, or None on any fetch failure
        or when the release has no files.
    """
    url = f"https://pypi.org/pypi/{name}/json"
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            data = json.loads(resp.read())
    except urllib.error.HTTPError:
        # 404 etc. — project does not exist.
        return None
    except Exception:
        # Network/timeout/decode failure: treat as not found (best effort).
        return None

    if version is None:
        version = data["info"]["version"]

    releases = data.get("releases", {})
    if version not in releases or not releases[version]:
        return None

    files = releases[version]
    # Fix: min() over an empty sequence raised ValueError when no file carried
    # an upload_time; fall back to None (check_quarantine accepts None).
    upload_times = [
        datetime.fromisoformat(f["upload_time"]).replace(tzinfo=timezone.utc)
        for f in files
        if f.get("upload_time")
    ]
    upload_time = min(upload_times) if upload_times else None

    # PyPI exposes the uploader username per file (when present) —
    # TODO confirm the field is populated for the target deployment.
    uploaders = list({f["uploader"] for f in files if f.get("uploader")})

    # Prefer the source distribution hash, fall back to first wheel
    remote_integrity = None
    sdist = next((f for f in files if f.get("packagetype") == "sdist"), None)
    ref = sdist or files[0]
    sha256 = ref.get("digests", {}).get("sha256")
    if sha256:
        remote_integrity = f"sha256:{sha256}"

    previous_uploaders = _get_previous_uploaders(releases, version)

    return {
        "name": data["info"]["name"],
        "version": version,
        "published": upload_time,
        "home_page": data["info"].get("home_page"),
        "maintainers": uploaders,
        "previous_maintainers": previous_uploaders,
        "remote_integrity": remote_integrity,
    }
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _get_previous_uploaders(releases: dict, current_version: str) -> list[str] | None:
|
|
56
|
+
"""Return uploaders from the release published just before current_version."""
|
|
57
|
+
def earliest_upload(files: list) -> datetime | None:
|
|
58
|
+
times = [
|
|
59
|
+
datetime.fromisoformat(f["upload_time"]).replace(tzinfo=timezone.utc)
|
|
60
|
+
for f in files
|
|
61
|
+
if f.get("upload_time")
|
|
62
|
+
]
|
|
63
|
+
return min(times) if times else None
|
|
64
|
+
|
|
65
|
+
current_time = earliest_upload(releases.get(current_version, []))
|
|
66
|
+
if not current_time:
|
|
67
|
+
return None
|
|
68
|
+
|
|
69
|
+
candidates = []
|
|
70
|
+
for ver, files in releases.items():
|
|
71
|
+
if ver == current_version or not files:
|
|
72
|
+
continue
|
|
73
|
+
t = earliest_upload(files)
|
|
74
|
+
if t and t < current_time:
|
|
75
|
+
candidates.append((t, files))
|
|
76
|
+
|
|
77
|
+
if not candidates:
|
|
78
|
+
return None
|
|
79
|
+
|
|
80
|
+
_, prev_files = max(candidates, key=lambda x: x[0])
|
|
81
|
+
return list({f["uploader"] for f in prev_files if f.get("uploader")}) or None
|
gate/sbom.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import uuid
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
|
|
5
|
+
from gate import __version__
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _purl(name: str, version: str, ecosystem: str) -> str:
|
|
9
|
+
"""Generate a Package URL (purl) per the purl spec."""
|
|
10
|
+
if ecosystem == "npm":
|
|
11
|
+
# Scoped packages: @scope/name → pkg:npm/%40scope%2Fname@version
|
|
12
|
+
encoded = name.replace("@", "%40").replace("/", "%2F")
|
|
13
|
+
return f"pkg:npm/{encoded}@{version}"
|
|
14
|
+
return f"pkg:pypi/{name.lower()}@{version}"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def generate(
    packages: list[dict],
    ecosystem: str,
) -> dict:
    """
    Generate a CycloneDX 1.6 SBOM document.

    Each entry in packages is a result dict from _check_package(), extended with
    'name' and 'version' keys added by cmd_scan before calling this function.
    """
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    components: list[dict] = []
    vulnerabilities: list[dict] = []

    for pkg in packages:
        pkg_name = pkg["name"]
        pkg_version = pkg.get("version") or "unknown"
        bom_ref = f"urn:cdx:{uuid.uuid4()}"

        components.append({
            "type": "library",
            "bom-ref": bom_ref,
            "name": pkg_name,
            "version": pkg_version,
            "purl": _purl(pkg_name, pkg_version, ecosystem),
        })

        # Attach CVEs as top-level vulnerabilities linked to this component.
        # Messages are formatted like "CVE-2023-xxx: description".
        for msg in pkg.get("errors", []) + pkg.get("warnings", []):
            if not msg.startswith(("CVE-", "GHSA-")):
                continue
            vuln_id, _, rest = msg.partition(":")
            vulnerabilities.append({
                "id": vuln_id.strip(),
                "description": rest.strip(),
                "affects": [{"ref": bom_ref}],
            })

    doc: dict = {
        "bomFormat": "CycloneDX",
        "specVersion": "1.6",
        "serialNumber": f"urn:uuid:{uuid.uuid4()}",
        "version": 1,
        "metadata": {
            "timestamp": timestamp,
            "tools": [{"name": "gate", "version": __version__}],
        },
        "components": components,
    }

    if vulnerabilities:
        doc["vulnerabilities"] = vulnerabilities

    return doc
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def write(doc: dict, path: str | None) -> None:
|
|
80
|
+
"""Write SBOM to file or stdout."""
|
|
81
|
+
output = json.dumps(doc, indent=2)
|
|
82
|
+
if path:
|
|
83
|
+
with open(path, "w", encoding="utf-8") as f:
|
|
84
|
+
f.write(output)
|
|
85
|
+
else:
|
|
86
|
+
print(output)
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: gate-cli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Supply chain security scanner for npm and pip packages
|
|
5
|
+
Project-URL: Homepage, https://github.com/Mhacker1020/gate
|
|
6
|
+
Project-URL: Repository, https://github.com/Mhacker1020/gate
|
|
7
|
+
Project-URL: Issues, https://github.com/Mhacker1020/gate/issues
|
|
8
|
+
License: MIT
|
|
9
|
+
License-File: LICENSE
|
|
10
|
+
Keywords: cve,npm,pip,sbom,security,supply-chain
|
|
11
|
+
Classifier: Development Status :: 3 - Alpha
|
|
12
|
+
Classifier: Environment :: Console
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
17
|
+
Classifier: Topic :: Security
|
|
18
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
19
|
+
Requires-Python: >=3.12
|
|
20
|
+
Provides-Extra: dev
|
|
21
|
+
Requires-Dist: pytest; extra == 'dev'
|
|
22
|
+
Description-Content-Type: text/markdown
|
|
23
|
+
|
|
24
|
+
# gate
|
|
25
|
+
|
|
26
|
+
Supply chain security scanner for npm and pip packages.
|
|
27
|
+
|
|
28
|
+
Checks packages for known CVEs, flags newly published versions until they clear a quarantine window, and warns about suspicious install scripts — before they hit your project.
|
|
29
|
+
|
|
30
|
+
```
|
|
31
|
+
✓ flask 3.1.1
|
|
32
|
+
✗ requests 2.28.0
|
|
33
|
+
CVE-2023-32681: Unintended leak of Proxy-Authorization header
|
|
34
|
+
⚠ urllib3 2.3.0
|
|
35
|
+
Published 2 day(s) ago (quarantine window: 7 days)
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
## Why
|
|
39
|
+
|
|
40
|
+
Supply chain attacks increasingly target the window between a package being published and being detected as malicious. Existing tools (Trivy, Snyk, Dependabot) catch *known* CVEs but miss:
|
|
41
|
+
|
|
42
|
+
- Newly published malicious versions not yet in any database
|
|
43
|
+
- Install scripts that run arbitrary code on `pip install`
|
|
44
|
+
|
|
45
|
+
Gate adds a quarantine window — new versions are flagged until the community has had time to catch problems.
|
|
46
|
+
|
|
47
|
+
**Zero runtime dependencies.** A supply chain security tool that trusts its own supply chain is not a security tool.
|
|
48
|
+
|
|
49
|
+
## Checks
|
|
50
|
+
|
|
51
|
+
| Check | What it catches |
|
|
52
|
+
|-------|----------------|
|
|
53
|
+
| CVE scan | Known vulnerabilities via OSV.dev |
|
|
54
|
+
| Quarantine window | Versions published within N days |
|
|
55
|
+
| Install scripts | npm packages running suspicious install hooks |
|
|
56
|
+
| Hash verification | Detects tampered packages via lock file integrity checks |
|
|
57
|
+
| Maintainer change | Flags when a package owner has changed between versions |
|
|
58
|
+
| SBOM export | Generates a CycloneDX 1.6 Software Bill of Materials |
|
|
59
|
+
|
|
60
|
+
## Installation
|
|
61
|
+
|
|
62
|
+
```bash
|
|
63
|
+
pip install gate-cli
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
Requires Python 3.12+.
|
|
67
|
+
|
|
68
|
+
## Usage
|
|
69
|
+
|
|
70
|
+
### Check a single package
|
|
71
|
+
|
|
72
|
+
```bash
|
|
73
|
+
gate check requests
|
|
74
|
+
gate check requests==2.28.0
|
|
75
|
+
gate check lodash --npm
|
|
76
|
+
gate check lodash==4.17.15 --npm
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### Scan all packages in a project
|
|
80
|
+
|
|
81
|
+
Automatically detects `requirements.txt` or `package-lock.json`:
|
|
82
|
+
|
|
83
|
+
```bash
|
|
84
|
+
gate scan
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
Exit code is non-zero if errors are found — suitable for CI pipelines.
|
|
88
|
+
|
|
89
|
+
### Export a CycloneDX SBOM
|
|
90
|
+
|
|
91
|
+
```bash
|
|
92
|
+
gate scan --sbom # print to stdout
|
|
93
|
+
gate scan --sbom report.cdx.json # write to file
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### Install as a git pre-commit hook
|
|
97
|
+
|
|
98
|
+
```bash
|
|
99
|
+
gate init
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
Gate will run automatically on every `git commit` when lock files change. To remove:
|
|
103
|
+
|
|
104
|
+
```bash
|
|
105
|
+
gate uninstall
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
## Configuration
|
|
109
|
+
|
|
110
|
+
Create `.gate.toml` in your project root to override defaults:
|
|
111
|
+
|
|
112
|
+
```toml
|
|
113
|
+
quarantine_days = 14
|
|
114
|
+
|
|
115
|
+
fail_on = ["critical_cve", "install_script"]
|
|
116
|
+
warn_on = ["recent_release"]
|
|
117
|
+
```
|
|
118
|
+
|
|
119
|
+
| Option | Default | Description |
|
|
120
|
+
|--------|---------|-------------|
|
|
121
|
+
| `quarantine_days` | `7` | Days a new release must age before passing |
|
|
122
|
+
| `fail_on` | `["critical_cve", "install_script"]` | Conditions that block the commit / exit 1 |
|
|
123
|
+
| `warn_on` | `["recent_release"]` | Conditions that warn but allow through |
|
|
124
|
+
|
|
125
|
+
Move `recent_release` from `warn_on` to `fail_on` to enforce the quarantine window strictly.
|
|
126
|
+
|
|
127
|
+
## Supported ecosystems
|
|
128
|
+
|
|
129
|
+
| Ecosystem | Lock file | Registry |
|
|
130
|
+
|-----------|-----------|----------|
|
|
131
|
+
| PyPI | `requirements.txt` | pypi.org |
|
|
132
|
+
| npm | `package-lock.json` | registry.npmjs.org |
|
|
133
|
+
|
|
134
|
+
CVE data is sourced from [OSV.dev](https://osv.dev) — Google's open vulnerability database.
|
|
135
|
+
|
|
136
|
+
## Contributing
|
|
137
|
+
|
|
138
|
+
Gate is open source and built for the community. Contributions welcome.
|
|
139
|
+
|
|
140
|
+
```bash
|
|
141
|
+
git clone https://github.com/Mhacker1020/gate
|
|
142
|
+
cd gate
|
|
143
|
+
pip install -e ".[dev]"
|
|
144
|
+
python -m pytest
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
## License
|
|
148
|
+
|
|
149
|
+
MIT
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
gate/__init__.py,sha256=kUR5RAFc7HCeiqdlX36dZOHkUI5wI6V_43RpEcD8b-0,22
|
|
2
|
+
gate/cli.py,sha256=CL4zhP1fu9UEr6k9byHgAGHEhrmh_vmpvjGRDB2c5P0,10296
|
|
3
|
+
gate/config.py,sha256=aLkf4kGs4X1XAwelfAeFepFedC8YPRjOR2ayOouCiaY,760
|
|
4
|
+
gate/output.py,sha256=rntZW0A07W-OrzFFKwYrI_AHSLDpQK7uUJrkpehGM4w,973
|
|
5
|
+
gate/sbom.py,sha256=hQNlb_LWiOtjaZPnMRs1oSHF6ZRQBdZazk9sFxk88SU,2643
|
|
6
|
+
gate/checks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
+
gate/checks/cve.py,sha256=B9_goX6OpxKWStRA3MuQMPjFi7tlFlXhKEW8O-e04WM,1044
|
|
8
|
+
gate/checks/integrity.py,sha256=JK7wZOdScMm1buFH3QentWwRtKWRhXFAL7GDBDL_cNI,1782
|
|
9
|
+
gate/checks/maintainer.py,sha256=bfbHXkDnH0NuKKCvvfjSRNh-M8lG8L-QQ5u6o9euHoU,889
|
|
10
|
+
gate/checks/quarantine.py,sha256=nHb6Im9rxIRvmqK6P9jsgDwcuO3v9c_nSqmw1UVmnck,580
|
|
11
|
+
gate/checks/scripts.py,sha256=sR4CbOGWpW4TqFWMMzF6pU9s6OhanOGEtOA1oRD_xYY,1903
|
|
12
|
+
gate/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
13
|
+
gate/hooks/precommit.py,sha256=15t26EBWHYUAsfni370uHYNIuxTJHAov_X2MBkBAg_o,1617
|
|
14
|
+
gate/registry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
15
|
+
gate/registry/npm.py,sha256=bl-7TVAkDkSpuzSDskqQd81mUXyTIuZDM5ki47Qw8hU,2696
|
|
16
|
+
gate/registry/pypi.py,sha256=HUx5BXRzZddSz80C-NJ2MGJzNm7yW6jK9_apbzHefeI,2608
|
|
17
|
+
gate_cli-0.1.0.dist-info/METADATA,sha256=v9-ylvkqWVZfo4xSxdS515ipGX3pwovalT-ff6WkkXA,4162
|
|
18
|
+
gate_cli-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
19
|
+
gate_cli-0.1.0.dist-info/entry_points.txt,sha256=VD6pWoNQ1iGwnUY9KaUsXgXe4jjzHW9w9O4EHocZW8E,39
|
|
20
|
+
gate_cli-0.1.0.dist-info/licenses/LICENSE,sha256=pbTeGA7N7sxgwJJyrMjNItTfD41_Cze5md-vI0zeAHI,1061
|
|
21
|
+
gate_cli-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Mika
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|