@raghulm/aegis-mcp 1.0.4 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +286 -290
- package/audit/audit_logger.py +62 -62
- package/package.json +5 -6
- package/policies/roles.yaml +34 -34
- package/policies/scope_rules.yaml +16 -16
- package/requirements.txt +8 -7
- package/run_stdio.py +22 -22
- package/server/auth.py +69 -69
- package/server/config.py +82 -82
- package/server/health.py +19 -19
- package/server/logging.py +33 -33
- package/server/main.py +212 -144
- package/server/stdio.py +7 -7
- package/tools/aws/ec2.py +26 -26
- package/tools/aws/s3.py +54 -54
- package/tools/cicd/jenkins.py +256 -0
- package/tools/cicd/pipeline.py +33 -33
- package/tools/git/repo.py +22 -22
- package/tools/kubernetes/audit.py +108 -108
- package/tools/kubernetes/pods.py +27 -27
- package/tools/network/headers.py +99 -99
- package/tools/network/port_scanner.py +66 -66
- package/tools/network/ssl_checker.py +65 -65
- package/tools/security/deps.py +103 -103
- package/tools/security/secrets.py +91 -91
- package/tools/security/semgrep.py +261 -261
- package/tools/security/trivy.py +19 -19
package/tools/security/deps.py
CHANGED
|
@@ -1,103 +1,103 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import os
|
|
5
|
-
import re
|
|
6
|
-
from typing import Any
|
|
7
|
-
|
|
8
|
-
import requests
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
def _parse_requirements_txt(file_path: str) -> list[dict[str, str]]:
|
|
12
|
-
packages: list[dict[str, str]] = []
|
|
13
|
-
with open(file_path, "r", encoding="utf-8") as fh:
|
|
14
|
-
for line in fh:
|
|
15
|
-
line = line.strip()
|
|
16
|
-
if not line or line.startswith("#") or line.startswith("-"):
|
|
17
|
-
continue
|
|
18
|
-
match = re.match(r"^([A-Za-z0-9_\-\.]+)\s*(?:[=<>!~]+\s*(.+))?", line)
|
|
19
|
-
if match:
|
|
20
|
-
name = match.group(1)
|
|
21
|
-
version = match.group(2).strip() if match.group(2) else ""
|
|
22
|
-
packages.append({"name": name, "version": version})
|
|
23
|
-
return packages
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
def _parse_package_json(file_path: str) -> list[dict[str, str]]:
|
|
27
|
-
with open(file_path, "r", encoding="utf-8") as fh:
|
|
28
|
-
data = json.load(fh)
|
|
29
|
-
packages: list[dict[str, str]] = []
|
|
30
|
-
for dep_key in ("dependencies", "devDependencies"):
|
|
31
|
-
for name, version in data.get(dep_key, {}).items():
|
|
32
|
-
clean_version = re.sub(r"^[\^~>=<]", "", version)
|
|
33
|
-
packages.append({"name": name, "version": clean_version})
|
|
34
|
-
return packages
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
def _query_osv(ecosystem: str, package_name: str, version: str) -> list[dict[str, Any]]:
|
|
38
|
-
"""Query the OSV.dev API for known vulnerabilities."""
|
|
39
|
-
payload: dict[str, Any] = {
|
|
40
|
-
"package": {"name": package_name, "ecosystem": ecosystem},
|
|
41
|
-
}
|
|
42
|
-
if version:
|
|
43
|
-
payload["version"] = version
|
|
44
|
-
|
|
45
|
-
try:
|
|
46
|
-
resp = requests.post(
|
|
47
|
-
"https://api.osv.dev/v1/query",
|
|
48
|
-
json=payload,
|
|
49
|
-
timeout=10,
|
|
50
|
-
)
|
|
51
|
-
resp.raise_for_status()
|
|
52
|
-
except requests.RequestException:
|
|
53
|
-
return []
|
|
54
|
-
|
|
55
|
-
vulns = resp.json().get("vulns", [])
|
|
56
|
-
results: list[dict[str, Any]] = []
|
|
57
|
-
for vuln in vulns:
|
|
58
|
-
aliases = vuln.get("aliases", [])
|
|
59
|
-
cve = next((a for a in aliases if a.startswith("CVE-")), aliases[0] if aliases else vuln.get("id", ""))
|
|
60
|
-
severity_list = vuln.get("severity", [])
|
|
61
|
-
severity = severity_list[0].get("score", "unknown") if severity_list else "unknown"
|
|
62
|
-
results.append({
|
|
63
|
-
"id": vuln.get("id", ""),
|
|
64
|
-
"cve": cve,
|
|
65
|
-
"summary": vuln.get("summary", "No summary available"),
|
|
66
|
-
"severity": severity,
|
|
67
|
-
})
|
|
68
|
-
return results
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
def check_dependencies(file_path: str) -> list[dict[str, Any]]:
|
|
72
|
-
"""Scan a dependency file for known vulnerabilities via OSV.dev.
|
|
73
|
-
|
|
74
|
-
Args:
|
|
75
|
-
file_path: Path to requirements.txt or package.json.
|
|
76
|
-
|
|
77
|
-
Returns:
|
|
78
|
-
List of packages with their vulnerability status.
|
|
79
|
-
"""
|
|
80
|
-
if not os.path.exists(file_path):
|
|
81
|
-
raise RuntimeError(f"File does not exist: {file_path}")
|
|
82
|
-
|
|
83
|
-
basename = os.path.basename(file_path).lower()
|
|
84
|
-
if basename == "requirements.txt":
|
|
85
|
-
packages = _parse_requirements_txt(file_path)
|
|
86
|
-
ecosystem = "PyPI"
|
|
87
|
-
elif basename == "package.json":
|
|
88
|
-
packages = _parse_package_json(file_path)
|
|
89
|
-
ecosystem = "npm"
|
|
90
|
-
else:
|
|
91
|
-
raise RuntimeError(f"Unsupported dependency file: {basename}. Use requirements.txt or package.json.")
|
|
92
|
-
|
|
93
|
-
results: list[dict[str, Any]] = []
|
|
94
|
-
for pkg in packages:
|
|
95
|
-
vulns = _query_osv(ecosystem, pkg["name"], pkg["version"])
|
|
96
|
-
results.append({
|
|
97
|
-
"package": pkg["name"],
|
|
98
|
-
"version": pkg["version"] or "unspecified",
|
|
99
|
-
"vulnerabilities_found": len(vulns),
|
|
100
|
-
"vulnerabilities": vulns,
|
|
101
|
-
})
|
|
102
|
-
|
|
103
|
-
return results
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import re
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import requests
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _parse_requirements_txt(file_path: str) -> list[dict[str, str]]:
|
|
12
|
+
packages: list[dict[str, str]] = []
|
|
13
|
+
with open(file_path, "r", encoding="utf-8") as fh:
|
|
14
|
+
for line in fh:
|
|
15
|
+
line = line.strip()
|
|
16
|
+
if not line or line.startswith("#") or line.startswith("-"):
|
|
17
|
+
continue
|
|
18
|
+
match = re.match(r"^([A-Za-z0-9_\-\.]+)\s*(?:[=<>!~]+\s*(.+))?", line)
|
|
19
|
+
if match:
|
|
20
|
+
name = match.group(1)
|
|
21
|
+
version = match.group(2).strip() if match.group(2) else ""
|
|
22
|
+
packages.append({"name": name, "version": version})
|
|
23
|
+
return packages
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _parse_package_json(file_path: str) -> list[dict[str, str]]:
|
|
27
|
+
with open(file_path, "r", encoding="utf-8") as fh:
|
|
28
|
+
data = json.load(fh)
|
|
29
|
+
packages: list[dict[str, str]] = []
|
|
30
|
+
for dep_key in ("dependencies", "devDependencies"):
|
|
31
|
+
for name, version in data.get(dep_key, {}).items():
|
|
32
|
+
clean_version = re.sub(r"^[\^~>=<]", "", version)
|
|
33
|
+
packages.append({"name": name, "version": clean_version})
|
|
34
|
+
return packages
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _query_osv(ecosystem: str, package_name: str, version: str) -> list[dict[str, Any]]:
    """Look up known vulnerabilities for one package via the OSV.dev API.

    The lookup is best-effort: any network or HTTP error yields an empty
    list rather than an exception.

    Args:
        ecosystem: OSV ecosystem identifier (e.g. "PyPI", "npm").
        package_name: Package to query.
        version: Exact version to query; omitted from the payload when empty.

    Returns:
        One dict per advisory with ``id``, ``cve``, ``summary`` and
        ``severity`` keys.
    """
    payload: dict[str, Any] = {"package": {"name": package_name, "ecosystem": ecosystem}}
    if version:
        payload["version"] = version

    try:
        resp = requests.post("https://api.osv.dev/v1/query", json=payload, timeout=10)
        resp.raise_for_status()
    except requests.RequestException:
        return []

    findings: list[dict[str, Any]] = []
    for vuln in resp.json().get("vulns", []):
        aliases = vuln.get("aliases", [])
        # Prefer a CVE alias; fall back to the first alias, then the OSV id.
        cve_aliases = [alias for alias in aliases if alias.startswith("CVE-")]
        if cve_aliases:
            cve = cve_aliases[0]
        elif aliases:
            cve = aliases[0]
        else:
            cve = vuln.get("id", "")

        severity_entries = vuln.get("severity", [])
        if severity_entries:
            severity = severity_entries[0].get("score", "unknown")
        else:
            severity = "unknown"

        findings.append(
            {
                "id": vuln.get("id", ""),
                "cve": cve,
                "summary": vuln.get("summary", "No summary available"),
                "severity": severity,
            }
        )
    return findings
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def check_dependencies(file_path: str) -> list[dict[str, Any]]:
    """Scan a dependency file for known vulnerabilities via OSV.dev.

    Args:
        file_path: Path to requirements.txt or package.json.

    Returns:
        List of packages with their vulnerability status.

    Raises:
        RuntimeError: If the file is missing or not a supported manifest type.
    """
    if not os.path.exists(file_path):
        raise RuntimeError(f"File does not exist: {file_path}")

    basename = os.path.basename(file_path).lower()
    # Dispatch table: manifest filename -> (parser, OSV ecosystem label).
    parsers = {
        "requirements.txt": (_parse_requirements_txt, "PyPI"),
        "package.json": (_parse_package_json, "npm"),
    }
    if basename not in parsers:
        raise RuntimeError(f"Unsupported dependency file: {basename}. Use requirements.txt or package.json.")
    parse, ecosystem = parsers[basename]
    packages = parse(file_path)

    report: list[dict[str, Any]] = []
    for pkg in packages:
        advisories = _query_osv(ecosystem, pkg["name"], pkg["version"])
        report.append(
            {
                "package": pkg["name"],
                "version": pkg["version"] or "unspecified",
                "vulnerabilities_found": len(advisories),
                "vulnerabilities": advisories,
            }
        )

    return report
|
|
@@ -1,91 +1,91 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import os
|
|
4
|
-
import re
|
|
5
|
-
from typing import Any
|
|
6
|
-
|
|
7
|
-
# Common secret patterns with descriptive names
|
|
8
|
-
_PATTERNS: list[tuple[str, re.Pattern[str]]] = [
|
|
9
|
-
("AWS Access Key", re.compile(r"(?:^|[^A-Z0-9])(?:AKIA[0-9A-Z]{16})(?:[^A-Z0-9]|$)")),
|
|
10
|
-
("AWS Secret Key", re.compile(
|
|
11
|
-
r"""(?:aws_secret_access_key|secret_access_key|aws_secret)\s*[=:]\s*['"]?([A-Za-z0-9/+=]{40})['"]?""",
|
|
12
|
-
re.IGNORECASE,
|
|
13
|
-
)),
|
|
14
|
-
("Generic API Key", re.compile(
|
|
15
|
-
r"""(?:api[_-]?key|apikey|api[_-]?secret)\s*[=:]\s*['"]?([A-Za-z0-9_\-]{20,60})['"]?""",
|
|
16
|
-
re.IGNORECASE,
|
|
17
|
-
)),
|
|
18
|
-
("Generic Token", re.compile(
|
|
19
|
-
r"""(?:token|auth[_-]?token|access[_-]?token|bearer)\s*[=:]\s*['"]?([A-Za-z0-9_\-\.]{20,200})['"]?""",
|
|
20
|
-
re.IGNORECASE,
|
|
21
|
-
)),
|
|
22
|
-
("Generic Password", re.compile(
|
|
23
|
-
r"""(?:password|passwd|pwd|secret)\s*[=:]\s*['"]?([^\s'"]{8,})['"]?""",
|
|
24
|
-
re.IGNORECASE,
|
|
25
|
-
)),
|
|
26
|
-
("Private Key", re.compile(r"-----BEGIN (?:RSA |EC |DSA |OPENSSH )?PRIVATE KEY-----")),
|
|
27
|
-
("GitHub Token", re.compile(r"(?:ghp|gho|ghu|ghs|ghr)_[A-Za-z0-9_]{36,}")),
|
|
28
|
-
("Slack Token", re.compile(r"xox[bporas]-[A-Za-z0-9\-]{10,}")),
|
|
29
|
-
("Stripe Key", re.compile(r"(?:sk|pk)_(?:test|live)_[A-Za-z0-9]{20,}")),
|
|
30
|
-
("SendGrid Key", re.compile(r"SG\.[A-Za-z0-9_\-]{22,}\.[A-Za-z0-9_\-]{43,}")),
|
|
31
|
-
]
|
|
32
|
-
|
|
33
|
-
_SKIP_DIRS = {".git", "__pycache__", "node_modules", ".venv", "venv", ".env", ".tox", "dist", "build"}
|
|
34
|
-
_SKIP_EXTENSIONS = {".pyc", ".pyo", ".so", ".dll", ".exe", ".bin", ".jpg", ".png", ".gif", ".ico", ".woff", ".ttf"}
|
|
35
|
-
_MAX_FILE_SIZE = 1_048_576 # 1 MB
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
def _redact(match_text: str, keep: int = 6) -> str:
|
|
39
|
-
if len(match_text) <= keep:
|
|
40
|
-
return "***REDACTED***"
|
|
41
|
-
return match_text[:keep] + "***REDACTED***"
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
def _scan_file(file_path: str) -> list[dict[str, Any]]:
|
|
45
|
-
findings: list[dict[str, Any]] = []
|
|
46
|
-
try:
|
|
47
|
-
if os.path.getsize(file_path) > _MAX_FILE_SIZE:
|
|
48
|
-
return findings
|
|
49
|
-
with open(file_path, "r", encoding="utf-8", errors="ignore") as fh:
|
|
50
|
-
for line_no, line in enumerate(fh, start=1):
|
|
51
|
-
for pattern_name, pattern in _PATTERNS:
|
|
52
|
-
match = pattern.search(line)
|
|
53
|
-
if match:
|
|
54
|
-
matched_text = match.group(0).strip()
|
|
55
|
-
findings.append({
|
|
56
|
-
"file": file_path,
|
|
57
|
-
"line": line_no,
|
|
58
|
-
"pattern": pattern_name,
|
|
59
|
-
"match": _redact(matched_text),
|
|
60
|
-
})
|
|
61
|
-
except (OSError, UnicodeDecodeError):
|
|
62
|
-
pass
|
|
63
|
-
return findings
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
def scan_secrets(path: str) -> list[dict[str, Any]]:
|
|
67
|
-
"""Scan a file or directory for exposed secrets using regex patterns.
|
|
68
|
-
|
|
69
|
-
Args:
|
|
70
|
-
path: Path to a file or directory to scan.
|
|
71
|
-
|
|
72
|
-
Returns:
|
|
73
|
-
List of findings, each with file, line, pattern name, and redacted match.
|
|
74
|
-
"""
|
|
75
|
-
if not os.path.exists(path):
|
|
76
|
-
raise RuntimeError(f"Path does not exist: {path}")
|
|
77
|
-
|
|
78
|
-
findings: list[dict[str, Any]] = []
|
|
79
|
-
if os.path.isfile(path):
|
|
80
|
-
return _scan_file(path)
|
|
81
|
-
|
|
82
|
-
for root, dirs, files in os.walk(path):
|
|
83
|
-
dirs[:] = [d for d in dirs if d not in _SKIP_DIRS]
|
|
84
|
-
for fname in files:
|
|
85
|
-
ext = os.path.splitext(fname)[1].lower()
|
|
86
|
-
if ext in _SKIP_EXTENSIONS:
|
|
87
|
-
continue
|
|
88
|
-
full_path = os.path.join(root, fname)
|
|
89
|
-
findings.extend(_scan_file(full_path))
|
|
90
|
-
|
|
91
|
-
return findings
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
# Common secret patterns with descriptive names.
# Each entry pairs a human-readable label with a compiled regex; the scanner
# reports one finding per (line, pattern) hit.  Broad "Generic" patterns are
# deliberately loose and may overlap the vendor-specific ones.
_PATTERNS: list[tuple[str, re.Pattern[str]]] = [
    ("AWS Access Key", re.compile(r"(?:^|[^A-Z0-9])(?:AKIA[0-9A-Z]{16})(?:[^A-Z0-9]|$)")),
    ("AWS Secret Key", re.compile(
        r"""(?:aws_secret_access_key|secret_access_key|aws_secret)\s*[=:]\s*['"]?([A-Za-z0-9/+=]{40})['"]?""",
        re.IGNORECASE,
    )),
    ("Generic API Key", re.compile(
        r"""(?:api[_-]?key|apikey|api[_-]?secret)\s*[=:]\s*['"]?([A-Za-z0-9_\-]{20,60})['"]?""",
        re.IGNORECASE,
    )),
    ("Generic Token", re.compile(
        r"""(?:token|auth[_-]?token|access[_-]?token|bearer)\s*[=:]\s*['"]?([A-Za-z0-9_\-\.]{20,200})['"]?""",
        re.IGNORECASE,
    )),
    ("Generic Password", re.compile(
        r"""(?:password|passwd|pwd|secret)\s*[=:]\s*['"]?([^\s'"]{8,})['"]?""",
        re.IGNORECASE,
    )),
    ("Private Key", re.compile(r"-----BEGIN (?:RSA |EC |DSA |OPENSSH )?PRIVATE KEY-----")),
    ("GitHub Token", re.compile(r"(?:ghp|gho|ghu|ghs|ghr)_[A-Za-z0-9_]{36,}")),
    ("Slack Token", re.compile(r"xox[bporas]-[A-Za-z0-9\-]{10,}")),
    ("Stripe Key", re.compile(r"(?:sk|pk)_(?:test|live)_[A-Za-z0-9]{20,}")),
]

# Directory names pruned from recursive walks (VCS metadata, virtualenvs,
# dependency trees, build output) — scanning these is noise.
_SKIP_DIRS = {".git", "__pycache__", "node_modules", ".venv", "venv", ".env", ".tox", "dist", "build"}
# File extensions never scanned (compiled artifacts and binary media).
_SKIP_EXTENSIONS = {".pyc", ".pyo", ".so", ".dll", ".exe", ".bin", ".jpg", ".png", ".gif", ".ico", ".woff", ".ttf"}
# Files larger than this are skipped entirely to bound scan time.
_MAX_FILE_SIZE = 1_048_576  # 1 MB
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _redact(match_text: str, keep: int = 6) -> str:
|
|
39
|
+
if len(match_text) <= keep:
|
|
40
|
+
return "***REDACTED***"
|
|
41
|
+
return match_text[:keep] + "***REDACTED***"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _scan_file(file_path: str) -> list[dict[str, Any]]:
    """Scan a single file line-by-line against every pattern in ``_PATTERNS``.

    Best-effort: files larger than ``_MAX_FILE_SIZE`` and files that cannot
    be read are skipped silently.  Each matching pattern on a line produces
    one finding with a redacted excerpt of the match.

    Args:
        file_path: Path of the file to scan.

    Returns:
        Findings with ``file``, ``line``, ``pattern`` and ``match`` keys.
    """
    results: list[dict[str, Any]] = []
    try:
        # Size gate keeps the scanner from chewing through huge artifacts.
        if os.path.getsize(file_path) > _MAX_FILE_SIZE:
            return results
        with open(file_path, "r", encoding="utf-8", errors="ignore") as handle:
            for line_no, line in enumerate(handle, start=1):
                for label, regex in _PATTERNS:
                    hit = regex.search(line)
                    if hit is None:
                        continue
                    results.append(
                        {
                            "file": file_path,
                            "line": line_no,
                            "pattern": label,
                            "match": _redact(hit.group(0).strip()),
                        }
                    )
    except (OSError, UnicodeDecodeError):
        # Unreadable file: report nothing rather than abort the whole scan.
        pass
    return results
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def scan_secrets(path: str) -> list[dict[str, Any]]:
    """Scan a file or directory for exposed secrets using regex patterns.

    Args:
        path: Path to a file or directory to scan.

    Returns:
        List of findings, each with file, line, pattern name, and redacted match.

    Raises:
        RuntimeError: If *path* does not exist.
    """
    if not os.path.exists(path):
        raise RuntimeError(f"Path does not exist: {path}")

    if os.path.isfile(path):
        return _scan_file(path)

    all_findings: list[dict[str, Any]] = []
    for root, dirs, files in os.walk(path):
        # Prune noisy/vendor directories in place so os.walk never descends.
        dirs[:] = [d for d in dirs if d not in _SKIP_DIRS]
        for fname in files:
            if os.path.splitext(fname)[1].lower() in _SKIP_EXTENSIONS:
                continue
            all_findings.extend(_scan_file(os.path.join(root, fname)))

    return all_findings
|