@raghulm/aegis-mcp 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +69 -77
- package/audit/__init__.py +0 -0
- package/audit/audit_logger.py +62 -0
- package/bin/aegis-mcp.js +44 -164
- package/bin/prepare-python-env.js +176 -0
- package/package.json +53 -39
- package/policies/roles.yaml +34 -0
- package/policies/scope_rules.yaml +16 -0
- package/requirements.txt +7 -0
- package/run_stdio.py +22 -0
- package/server/__init__.py +0 -0
- package/server/auth.py +69 -0
- package/server/config.py +82 -0
- package/server/health.py +19 -0
- package/server/logging.py +33 -0
- package/server/main.py +144 -0
- package/server/stdio.py +7 -0
- package/tools/__init__.py +0 -0
- package/tools/aws/__init__.py +0 -0
- package/tools/aws/ec2.py +26 -0
- package/tools/aws/s3.py +54 -0
- package/tools/cicd/__init__.py +0 -0
- package/tools/cicd/pipeline.py +33 -0
- package/tools/git/__init__.py +0 -0
- package/tools/git/repo.py +22 -0
- package/tools/kubernetes/__init__.py +0 -0
- package/tools/kubernetes/audit.py +108 -0
- package/tools/kubernetes/pods.py +27 -0
- package/tools/network/__init__.py +0 -0
- package/tools/network/headers.py +99 -0
- package/tools/network/port_scanner.py +66 -0
- package/tools/network/ssl_checker.py +65 -0
- package/tools/security/__init__.py +0 -0
- package/tools/security/deps.py +103 -0
- package/tools/security/secrets.py +91 -0
- package/tools/security/semgrep.py +261 -0
- package/tools/security/trivy.py +19 -0
- package/bin/aegis-mcp.cmd +0 -2
package/package.json
CHANGED
|
@@ -1,39 +1,53 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "@raghulm/aegis-mcp",
|
|
3
|
-
"version": "1.0.
|
|
4
|
-
"description": "
|
|
5
|
-
"
|
|
6
|
-
"
|
|
7
|
-
|
|
8
|
-
"
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
"
|
|
15
|
-
"
|
|
16
|
-
|
|
17
|
-
"
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
"
|
|
21
|
-
"
|
|
22
|
-
"
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
"
|
|
26
|
-
"
|
|
27
|
-
"
|
|
28
|
-
"
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
"
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
"
|
|
38
|
-
|
|
39
|
-
|
|
1
|
+
{
|
|
2
|
+
"name": "@raghulm/aegis-mcp",
|
|
3
|
+
"version": "1.0.4",
|
|
4
|
+
"description": "DevSecOps-focused MCP server for AWS, Kubernetes, CI/CD, and security tooling.",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"author": "Raghul M",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "git+https://github.com/raghulvj01/aegis-mcp.git"
|
|
10
|
+
},
|
|
11
|
+
"bugs": {
|
|
12
|
+
"url": "https://github.com/raghulvj01/aegis-mcp/issues"
|
|
13
|
+
},
|
|
14
|
+
"homepage": "https://github.com/raghulvj01/aegis-mcp#readme",
|
|
15
|
+
"type": "commonjs",
|
|
16
|
+
"bin": {
|
|
17
|
+
"aegis-mcp": "bin/aegis-mcp.js"
|
|
18
|
+
},
|
|
19
|
+
"scripts": {
|
|
20
|
+
"setup:python": "node ./bin/prepare-python-env.js",
|
|
21
|
+
"start": "node ./bin/aegis-mcp.js",
|
|
22
|
+
"pack:check": "npm pack --dry-run"
|
|
23
|
+
},
|
|
24
|
+
"files": [
|
|
25
|
+
"audit/**/*.py",
|
|
26
|
+
"bin/*.js",
|
|
27
|
+
"policies/*.yaml",
|
|
28
|
+
"server/**/*.py",
|
|
29
|
+
"tools/**/*.py",
|
|
30
|
+
"requirements.txt",
|
|
31
|
+
"run_stdio.py",
|
|
32
|
+
"README.md",
|
|
33
|
+
"LICENSE"
|
|
34
|
+
],
|
|
35
|
+
"keywords": [
|
|
36
|
+
"mcp",
|
|
37
|
+
"model-context-protocol",
|
|
38
|
+
"devsecops",
|
|
39
|
+
"security",
|
|
40
|
+
"aws",
|
|
41
|
+
"kubernetes",
|
|
42
|
+
"claude"
|
|
43
|
+
],
|
|
44
|
+
"publishConfig": {
|
|
45
|
+
"access": "public"
|
|
46
|
+
},
|
|
47
|
+
"engines": {
|
|
48
|
+
"node": ">=18"
|
|
49
|
+
},
|
|
50
|
+
"dependencies": {
|
|
51
|
+
"@raghulm/aegis-mcp": "^1.0.3"
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
roles:
|
|
2
|
+
viewer:
|
|
3
|
+
- aws_list_ec2_instances
|
|
4
|
+
- k8s_list_pods
|
|
5
|
+
- git_recent_commits
|
|
6
|
+
- security_check_ssl_certificate
|
|
7
|
+
- security_check_http_headers
|
|
8
|
+
- network_port_scan
|
|
9
|
+
- security_semgrep_scan
|
|
10
|
+
security:
|
|
11
|
+
- k8s_security_audit
|
|
12
|
+
- security_run_trivy_scan
|
|
13
|
+
- git_recent_commits
|
|
14
|
+
- security_scan_secrets
|
|
15
|
+
- security_check_ssl_certificate
|
|
16
|
+
- security_check_dependencies
|
|
17
|
+
- security_check_http_headers
|
|
18
|
+
- security_semgrep_scan
|
|
19
|
+
- aws_check_s3_public_access
|
|
20
|
+
- network_port_scan
|
|
21
|
+
admin:
|
|
22
|
+
- aws_list_ec2_instances
|
|
23
|
+
- k8s_list_pods
|
|
24
|
+
- k8s_security_audit
|
|
25
|
+
- security_run_trivy_scan
|
|
26
|
+
- git_recent_commits
|
|
27
|
+
- cicd_pipeline_status
|
|
28
|
+
- security_scan_secrets
|
|
29
|
+
- security_check_ssl_certificate
|
|
30
|
+
- security_check_dependencies
|
|
31
|
+
- security_check_http_headers
|
|
32
|
+
- security_semgrep_scan
|
|
33
|
+
- aws_check_s3_public_access
|
|
34
|
+
- network_port_scan
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
scopes:
|
|
2
|
+
aegis.read:
|
|
3
|
+
- aws_list_ec2_instances
|
|
4
|
+
- k8s_list_pods
|
|
5
|
+
- git_recent_commits
|
|
6
|
+
- cicd_pipeline_status
|
|
7
|
+
- security_check_ssl_certificate
|
|
8
|
+
- security_check_http_headers
|
|
9
|
+
- network_port_scan
|
|
10
|
+
- aws_check_s3_public_access
|
|
11
|
+
- security_semgrep_scan
|
|
12
|
+
aegis.security:
|
|
13
|
+
- security_run_trivy_scan
|
|
14
|
+
- security_scan_secrets
|
|
15
|
+
- security_check_dependencies
|
|
16
|
+
- security_semgrep_scan
|
package/requirements.txt
ADDED
package/run_stdio.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Launcher script for Claude Desktop — ensures the project root is on sys.path."""
|
|
2
|
+
import sys
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
# Set working directory and sys.path to the project root so that
|
|
6
|
+
# relative policy file paths (policies/roles.yaml etc.) resolve correctly
|
|
7
|
+
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
|
|
8
|
+
os.chdir(PROJECT_ROOT)
|
|
9
|
+
sys.path.insert(0, PROJECT_ROOT)
|
|
10
|
+
|
|
11
|
+
# Disable JWT auth for local stdio sessions (Claude Desktop cannot supply tokens)
|
|
12
|
+
os.environ.setdefault("MCP_AUTH_DISABLED", "true")
|
|
13
|
+
|
|
14
|
+
# Ensure the venv Scripts dir is on PATH so pysemgrep / semgrep-core are found
|
|
15
|
+
venv_scripts = os.path.join(PROJECT_ROOT, ".venv", "Scripts")
|
|
16
|
+
if os.path.isdir(venv_scripts) and venv_scripts not in os.environ.get("PATH", ""):
|
|
17
|
+
os.environ["PATH"] = venv_scripts + os.pathsep + os.environ.get("PATH", "")
|
|
18
|
+
|
|
19
|
+
from server.main import mcp
|
|
20
|
+
|
|
21
|
+
if __name__ == "__main__":
|
|
22
|
+
mcp.run(transport="stdio")
|
|
File without changes
|
package/server/auth.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import base64
|
|
4
|
+
import json
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
|
|
7
|
+
from server.config import Settings
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass(frozen=True)
class Principal:
    """Authenticated caller identity derived from a bearer token.

    Instances are produced by ``decode_bearer_token``.
    """

    # JWT "sub" claim; "unknown" when the claim is absent.
    subject: str
    # JWT "role" claim; defaults to "viewer" when absent.
    role: str
    # JWT "scope" claim (space-separated string), split into a list.
    scopes: list[str]


class AuthorizationError(PermissionError):
    """Raised when a principal is not permitted to invoke a requested tool."""

    pass
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _decode_jwt_payload(token: str) -> dict:
|
|
23
|
+
parts = token.split(".")
|
|
24
|
+
if len(parts) < 2:
|
|
25
|
+
return {}
|
|
26
|
+
payload = parts[1]
|
|
27
|
+
padding = "=" * ((4 - len(payload) % 4) % 4)
|
|
28
|
+
decoded = base64.urlsafe_b64decode(payload + padding)
|
|
29
|
+
return json.loads(decoded.decode("utf-8"))
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def decode_bearer_token(token: str, settings: Settings) -> Principal:
    """Build a Principal from the claims in a bearer token payload.

    This implementation parses JWT payload claims and is intended as a scaffold.
    Replace with strict signature/JWKS validation in production.
    """
    claims = _decode_jwt_payload(token)

    issuer = settings.oidc_issuer
    if issuer and claims.get("iss") != issuer:
        raise AuthorizationError("token issuer mismatch")
    audience = settings.oidc_audience
    if audience and audience not in str(claims.get("aud", "")):
        raise AuthorizationError("token audience mismatch")

    raw_scope = claims.get("scope", "")
    # The "scope" claim is a space-separated string per OAuth convention.
    scope_list = raw_scope.split() if isinstance(raw_scope, str) else []
    return Principal(
        subject=str(claims.get("sub", "unknown")),
        role=str(claims.get("role", "viewer")),
        scopes=scope_list,
    )
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def authorize_tool(
    principal: Principal,
    tool_name: str,
    role_policies: dict[str, list[str]],
    scope_policies: dict[str, list[str]],
) -> None:
    """Allow the call if the principal's role OR any of its scopes grants the tool.

    Raises:
        AuthorizationError: when neither the role nor any scope permits *tool_name*.
    """
    # Role grant is sufficient on its own.
    if tool_name in role_policies.get(principal.role, []):
        return

    # Otherwise any one scope that lists the tool is enough.
    for scope in principal.scopes:
        if tool_name in scope_policies.get(scope, []):
            return

    raise AuthorizationError(
        f"principal '{principal.subject}' with role '{principal.role}' is not allowed to call '{tool_name}'"
    )
|
package/server/config.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass(frozen=True)
class Settings:
    """Runtime configuration for the MCP service."""

    # Logical service name; also used as the FastMCP server name.
    service_name: str = "aegis"
    # Deployment environment label (e.g. "dev").
    environment: str = "dev"
    # Path to the role -> allowed-tools policy file.
    policy_roles_path: Path = Path("policies/roles.yaml")
    # Path to the scope -> allowed-tools policy file.
    policy_scopes_path: Path = Path("policies/scope_rules.yaml")
    # Expected JWT "iss" claim; None disables the issuer check in auth.
    oidc_issuer: str | None = None
    # Substring matched against the JWT "aud" claim; None disables the check.
    oidc_audience: str | None = None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _parse_simple_yaml(raw: str) -> dict[str, Any]:
|
|
24
|
+
"""Very small YAML subset parser for key/list policy files."""
|
|
25
|
+
root: dict[str, Any] = {}
|
|
26
|
+
current_section: dict[str, list[str]] | None = None
|
|
27
|
+
current_key: str | None = None
|
|
28
|
+
for line in raw.splitlines():
|
|
29
|
+
stripped = line.rstrip()
|
|
30
|
+
if not stripped or stripped.lstrip().startswith("#"):
|
|
31
|
+
continue
|
|
32
|
+
if not line.startswith(" ") and stripped.endswith(":"):
|
|
33
|
+
section = stripped[:-1]
|
|
34
|
+
root[section] = {}
|
|
35
|
+
current_section = root[section]
|
|
36
|
+
current_key = None
|
|
37
|
+
elif current_section is not None and line.startswith(" ") and stripped.endswith(":"):
|
|
38
|
+
current_key = stripped[:-1]
|
|
39
|
+
current_section[current_key] = []
|
|
40
|
+
elif current_section is not None and current_key and line.strip().startswith("-"):
|
|
41
|
+
value = line.split("-", maxsplit=1)[1].strip().strip('"')
|
|
42
|
+
current_section[current_key].append(value)
|
|
43
|
+
return root
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _load_yaml(path: Path) -> dict[str, Any]:
|
|
48
|
+
if not path.exists():
|
|
49
|
+
return {}
|
|
50
|
+
content = path.read_text(encoding="utf-8")
|
|
51
|
+
try:
|
|
52
|
+
import yaml # type: ignore
|
|
53
|
+
|
|
54
|
+
return yaml.safe_load(content) or {}
|
|
55
|
+
except Exception:
|
|
56
|
+
return _parse_simple_yaml(content)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def load_settings() -> Settings:
    """Build Settings from environment variables, with sensible defaults."""
    env = os.getenv
    return Settings(
        service_name=env("MCP_SERVICE_NAME", "aegis"),
        environment=env("MCP_ENV", "dev"),
        policy_roles_path=Path(env("MCP_ROLES_FILE", "policies/roles.yaml")),
        policy_scopes_path=Path(env("MCP_SCOPES_FILE", "policies/scope_rules.yaml")),
        oidc_issuer=env("OIDC_ISSUER"),
        oidc_audience=env("OIDC_AUDIENCE"),
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def load_role_policies(settings: Settings) -> dict[str, list[str]]:
    """Return the role -> allowed tool names mapping from the roles policy file."""
    data = _load_yaml(settings.policy_roles_path)
    return {
        str(role): [str(tool) for tool in tools]
        for role, tools in data.get("roles", {}).items()
    }
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def load_scope_policies(settings: Settings) -> dict[str, list[str]]:
    """Return the scope -> allowed tool names mapping from the scopes policy file."""
    data = _load_yaml(settings.policy_scopes_path)
    return {
        str(scope): [str(tool) for tool in tools]
        for scope, tools in data.get("scopes", {}).items()
    }
|
package/server/health.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from __future__ import annotations

from fastapi import FastAPI
from fastapi.responses import JSONResponse
from mcp.server.fastmcp import FastMCP

from server.config import load_settings

# Module-level singletons: settings, the MCP server, and the HTTP wrapper app.
settings = load_settings()
mcp = FastMCP(settings.service_name, json_response=True)
app = FastAPI(title="aegis-mcp")


@app.get("/health")
def health() -> JSONResponse:
    """Liveness endpoint: reports service status and name."""
    return JSONResponse({"status": "ok", "service": settings.service_name})


# Expose the MCP server under /mcp using the streamable HTTP transport.
app.mount("/mcp", mcp.streamable_http_app())
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import sys
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class JsonFormatter(logging.Formatter):
    """Formatter that renders each log record as a single JSON object."""

    def format(self, record: logging.LogRecord) -> str:
        entry: dict[str, Any] = {
            "timestamp": datetime.now(tz=timezone.utc).isoformat(),
            "level": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
        }
        # Callers may attach structured fields via an "extra_payload" dict.
        extra_fields = getattr(record, "extra_payload", None)
        if isinstance(extra_fields, dict):
            entry.update(extra_fields)
        return json.dumps(entry, default=str)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def get_logger(name: str = "mcp.aegis") -> logging.Logger:
|
|
25
|
+
logger = logging.getLogger(name)
|
|
26
|
+
if logger.handlers:
|
|
27
|
+
return logger
|
|
28
|
+
logger.setLevel(logging.INFO)
|
|
29
|
+
handler = logging.StreamHandler(sys.stderr)
|
|
30
|
+
handler.setFormatter(JsonFormatter())
|
|
31
|
+
logger.addHandler(handler)
|
|
32
|
+
logger.propagate = False
|
|
33
|
+
return logger
|
package/server/main.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
from mcp.server.fastmcp import FastMCP
|
|
6
|
+
|
|
7
|
+
from audit.audit_logger import audit_tool_call
|
|
8
|
+
from server.auth import authorize_tool, decode_bearer_token
|
|
9
|
+
from server.config import load_role_policies, load_scope_policies, load_settings
|
|
10
|
+
from tools.aws.ec2 import list_ec2_instances
|
|
11
|
+
from tools.aws.s3 import check_s3_public_access
|
|
12
|
+
from tools.cicd.pipeline import pipeline_status
|
|
13
|
+
from tools.git.repo import get_recent_commits
|
|
14
|
+
from tools.kubernetes.pods import list_pods
|
|
15
|
+
from tools.kubernetes.audit import k8s_security_audit
|
|
16
|
+
from tools.network.headers import check_http_headers
|
|
17
|
+
from tools.network.port_scanner import port_scan
|
|
18
|
+
from tools.network.ssl_checker import check_ssl_certificate
|
|
19
|
+
from tools.security.deps import check_dependencies
|
|
20
|
+
from tools.security.secrets import scan_secrets
|
|
21
|
+
from tools.security.semgrep import run_semgrep_scan
|
|
22
|
+
from tools.security.trivy import run_trivy_scan
|
|
23
|
+
|
|
24
|
+
settings = load_settings()
|
|
25
|
+
role_policies = load_role_policies(settings)
|
|
26
|
+
scope_policies = load_scope_policies(settings)
|
|
27
|
+
|
|
28
|
+
mcp = FastMCP(settings.service_name, json_response=True)
|
|
29
|
+
|
|
30
|
+
AUTH_DISABLED = os.getenv("MCP_AUTH_DISABLED", "false").lower() == "true"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _authorize(token: str, tool_name: str) -> None:
    """Enforce role/scope policy for a tool invocation.

    When MCP_AUTH_DISABLED is set (local stdio sessions), all calls are
    allowed. Otherwise the bearer token is decoded and checked against the
    role and scope policies. An absent/empty token is decoded like any
    other (it yields the default "viewer" role, or fails the issuer check
    when one is configured) — the previous `or not token` short-circuit let
    any caller bypass authorization entirely by simply omitting the token.

    Raises:
        AuthorizationError: if the principal may not call *tool_name*.
    """
    if AUTH_DISABLED:
        return
    principal = decode_bearer_token(token, settings)
    authorize_tool(principal, tool_name, role_policies, scope_policies)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@mcp.tool()
@audit_tool_call("aws_list_ec2_instances")
def aws_list_ec2_instances(region: str, token: str = "") -> list[dict]:
    """List EC2 instances (instance id, state, type) in the given AWS region."""
    # `token` is the caller's bearer token; empty in local stdio sessions.
    _authorize(token, "aws_list_ec2_instances")
    return list_ec2_instances(region)


@mcp.tool()
@audit_tool_call("k8s_list_pods")
def k8s_list_pods(namespace: str = "default", token: str = "") -> list[dict]:
    """List Kubernetes pods in a namespace (delegates to tools.kubernetes.pods)."""
    _authorize(token, "k8s_list_pods")
    return list_pods(namespace)


@mcp.tool()
@audit_tool_call("k8s_security_audit")
def k8s_security_audit_tool(namespace: str = "", token: str = "") -> list[dict]:
    """Audit Kubernetes clusters for misconfigurations and security risks (privileged containers, hostNetwork, exposed NodePorts, cluster-admin service accounts)."""
    _authorize(token, "k8s_security_audit")
    return k8s_security_audit(namespace)


@mcp.tool()
@audit_tool_call("security_run_trivy_scan")
def security_run_trivy_scan(image: str, token: str = "") -> dict:
    """Scan a container image (delegates to tools.security.trivy.run_trivy_scan)."""
    _authorize(token, "security_run_trivy_scan")
    return run_trivy_scan(image)


@mcp.tool()
@audit_tool_call("git_recent_commits")
def git_recent_commits(limit: int = 10, token: str = "") -> list[dict[str, str]]:
    """Return recent commits (delegates to tools.git.repo.get_recent_commits)."""
    _authorize(token, "git_recent_commits")
    return get_recent_commits(limit)


@mcp.tool()
@audit_tool_call("cicd_pipeline_status")
def cicd_pipeline_status(base_url: str, pipeline_id: str, api_token: str, token: str = "") -> dict:
    """Fetch the status of a CI/CD pipeline."""
    # `api_token` authenticates to the CI/CD service; `token` is this MCP
    # server's own bearer token for policy checks.
    _authorize(token, "cicd_pipeline_status")
    return pipeline_status(base_url, pipeline_id, api_token)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# ── New zero-install tools ─────────────────────────────────────────
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@mcp.tool()
@audit_tool_call("security_scan_secrets")
def security_scan_secrets(path: str, token: str = "") -> list[dict]:
    """Scan local files or directories for exposed secrets (API keys, tokens, passwords). This tool runs locally and has full access to the user's local filesystem (e.g. C:\\... paths)."""
    # `token` is the caller's bearer token; empty in local stdio sessions.
    _authorize(token, "security_scan_secrets")
    return scan_secrets(path)


@mcp.tool()
@audit_tool_call("security_check_ssl_certificate")
def security_check_ssl_certificate(hostname: str, port: int = 443, token: str = "") -> dict:
    """Check SSL/TLS certificate details and expiry for a domain."""
    _authorize(token, "security_check_ssl_certificate")
    return check_ssl_certificate(hostname, port)


@mcp.tool()
@audit_tool_call("security_check_dependencies")
def security_check_dependencies(file_path: str, token: str = "") -> list[dict]:
    """Scan dependency files (requirements.txt, package.json) for known vulnerabilities via OSV.dev."""
    _authorize(token, "security_check_dependencies")
    return check_dependencies(file_path)


@mcp.tool()
@audit_tool_call("security_check_http_headers")
def security_check_http_headers(url: str, token: str = "") -> dict:
    """Audit HTTP security headers (HSTS, CSP, X-Frame-Options, etc.) for a URL."""
    _authorize(token, "security_check_http_headers")
    return check_http_headers(url)


@mcp.tool()
@audit_tool_call("aws_check_s3_public_access")
def aws_check_s3_public_access(region: str = "us-east-1", token: str = "") -> list[dict]:
    """Audit S3 buckets for public access settings."""
    _authorize(token, "aws_check_s3_public_access")
    return check_s3_public_access(region)


@mcp.tool()
@audit_tool_call("network_port_scan")
def network_port_scan(host: str, ports: str = "", token: str = "") -> list[dict]:
    """Perform a TCP port scan on common service ports for a host."""
    # NOTE(review): the expected `ports` string format is defined by
    # tools.network.port_scanner.port_scan — confirm before documenting it here.
    _authorize(token, "network_port_scan")
    return port_scan(host, ports)


@mcp.tool()
@audit_tool_call("security_semgrep_scan")
def security_semgrep_scan(path: str, config: str = "auto", token: str = "") -> dict:
    """Run a Semgrep SAST scan on a local directory or file. This tool runs locally and has full access to the user's local filesystem (e.g. C:\\... paths). Config can be 'auto', 'p/python', 'p/javascript', etc."""
    _authorize(token, "security_semgrep_scan")
    return run_semgrep_scan(path, config)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
if __name__ == "__main__":
|
|
144
|
+
mcp.run(transport="streamable-http")
|
package/server/stdio.py
ADDED
|
File without changes
|
|
File without changes
|
package/tools/aws/ec2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def list_ec2_instances(region: str) -> list[dict[str, Any]]:
    """Return id/state/type summaries for every EC2 instance in *region*.

    Raises:
        RuntimeError: when the AWS API call fails.
    """
    import boto3
    from botocore.exceptions import BotoCoreError, ClientError

    try:
        ec2 = boto3.client("ec2", region_name=region)
        reservations = ec2.describe_instances().get("Reservations", [])
    except (BotoCoreError, ClientError) as exc:
        raise RuntimeError(f"AWS EC2 error in region '{region}': {exc}") from exc

    return [
        {
            "instance_id": inst.get("InstanceId"),
            "state": inst.get("State", {}).get("Name"),
            "type": inst.get("InstanceType"),
        }
        for res in reservations
        for inst in res.get("Instances", [])
    ]
|
package/tools/aws/s3.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def check_s3_public_access(region: str = "us-east-1") -> list[dict[str, Any]]:
|
|
7
|
+
"""Audit S3 buckets for public access settings.
|
|
8
|
+
|
|
9
|
+
Args:
|
|
10
|
+
region: AWS region for the S3 client (default us-east-1).
|
|
11
|
+
|
|
12
|
+
Returns:
|
|
13
|
+
List of buckets with their public access block configuration.
|
|
14
|
+
"""
|
|
15
|
+
import boto3
|
|
16
|
+
from botocore.exceptions import BotoCoreError, ClientError
|
|
17
|
+
|
|
18
|
+
try:
|
|
19
|
+
s3 = boto3.client("s3", region_name=region)
|
|
20
|
+
buckets_resp = s3.list_buckets()
|
|
21
|
+
except (BotoCoreError, ClientError) as exc:
|
|
22
|
+
raise RuntimeError(f"AWS S3 error: {exc}") from exc
|
|
23
|
+
|
|
24
|
+
results: list[dict[str, Any]] = []
|
|
25
|
+
for bucket in buckets_resp.get("Buckets", []):
|
|
26
|
+
name = bucket["Name"]
|
|
27
|
+
entry: dict[str, Any] = {"bucket": name, "public_access_block": None, "is_potentially_public": False}
|
|
28
|
+
|
|
29
|
+
try:
|
|
30
|
+
pab = s3.get_public_access_block(Bucket=name)
|
|
31
|
+
config = pab.get("PublicAccessBlockConfiguration", {})
|
|
32
|
+
entry["public_access_block"] = {
|
|
33
|
+
"block_public_acls": config.get("BlockPublicAcls", False),
|
|
34
|
+
"ignore_public_acls": config.get("IgnorePublicAcls", False),
|
|
35
|
+
"block_public_policy": config.get("BlockPublicPolicy", False),
|
|
36
|
+
"restrict_public_buckets": config.get("RestrictPublicBuckets", False),
|
|
37
|
+
}
|
|
38
|
+
all_blocked = all(entry["public_access_block"].values())
|
|
39
|
+
entry["is_potentially_public"] = not all_blocked
|
|
40
|
+
except ClientError as exc:
|
|
41
|
+
error_code = exc.response.get("Error", {}).get("Code", "")
|
|
42
|
+
if error_code == "NoSuchPublicAccessBlockConfiguration":
|
|
43
|
+
entry["public_access_block"] = "not_configured"
|
|
44
|
+
entry["is_potentially_public"] = True
|
|
45
|
+
elif error_code == "AccessDenied":
|
|
46
|
+
entry["public_access_block"] = "access_denied"
|
|
47
|
+
entry["is_potentially_public"] = "unknown"
|
|
48
|
+
else:
|
|
49
|
+
entry["public_access_block"] = f"error: {error_code}"
|
|
50
|
+
entry["is_potentially_public"] = "unknown"
|
|
51
|
+
|
|
52
|
+
results.append(entry)
|
|
53
|
+
|
|
54
|
+
return results
|
|
File without changes
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import requests
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def pipeline_status(base_url: str, pipeline_id: str, api_token: str) -> dict:
    """Fetch the status of a CI/CD pipeline by its ID.

    Args:
        base_url: Base URL of the CI/CD service API.
        pipeline_id: Unique identifier of the pipeline.
        api_token: API token for authenticating with the CI/CD service.

    Returns:
        Pipeline status as a JSON-compatible dict.

    Raises:
        RuntimeError: on connection failure, HTTP error status, or timeout.
    """
    endpoint = f"{base_url.rstrip('/')}/pipelines/{pipeline_id}"
    auth_headers = {"Authorization": f"Bearer {api_token}"}
    try:
        response = requests.get(endpoint, headers=auth_headers, timeout=15)
        response.raise_for_status()
    except requests.ConnectionError as exc:
        raise RuntimeError(f"Cannot connect to CI/CD service at '{base_url}': {exc}") from exc
    except requests.HTTPError as exc:
        raise RuntimeError(
            f"CI/CD API error for pipeline '{pipeline_id}': {exc.response.status_code}"
        ) from exc
    except requests.Timeout as exc:
        raise RuntimeError(f"CI/CD API request timed out for pipeline '{pipeline_id}'") from exc

    return response.json()
|
|
File without changes
|