kubectl_mcp_server-1.14.0-py3-none-any.whl → kubectl_mcp_server-1.16.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kubectl_mcp_server-1.16.0.dist-info/METADATA +1047 -0
- kubectl_mcp_server-1.16.0.dist-info/RECORD +61 -0
- kubectl_mcp_tool/__init__.py +1 -1
- kubectl_mcp_tool/crd_detector.py +247 -0
- kubectl_mcp_tool/k8s_config.py +304 -63
- kubectl_mcp_tool/mcp_server.py +27 -0
- kubectl_mcp_tool/tools/__init__.py +20 -0
- kubectl_mcp_tool/tools/backup.py +881 -0
- kubectl_mcp_tool/tools/capi.py +727 -0
- kubectl_mcp_tool/tools/certs.py +709 -0
- kubectl_mcp_tool/tools/cilium.py +582 -0
- kubectl_mcp_tool/tools/cluster.py +395 -121
- kubectl_mcp_tool/tools/core.py +157 -60
- kubectl_mcp_tool/tools/cost.py +97 -41
- kubectl_mcp_tool/tools/deployments.py +173 -56
- kubectl_mcp_tool/tools/diagnostics.py +40 -13
- kubectl_mcp_tool/tools/gitops.py +552 -0
- kubectl_mcp_tool/tools/helm.py +133 -46
- kubectl_mcp_tool/tools/keda.py +464 -0
- kubectl_mcp_tool/tools/kiali.py +652 -0
- kubectl_mcp_tool/tools/kubevirt.py +803 -0
- kubectl_mcp_tool/tools/networking.py +106 -32
- kubectl_mcp_tool/tools/operations.py +176 -50
- kubectl_mcp_tool/tools/pods.py +162 -50
- kubectl_mcp_tool/tools/policy.py +554 -0
- kubectl_mcp_tool/tools/rollouts.py +790 -0
- kubectl_mcp_tool/tools/security.py +89 -36
- kubectl_mcp_tool/tools/storage.py +35 -16
- tests/test_browser.py +2 -2
- tests/test_ecosystem.py +331 -0
- tests/test_tools.py +73 -10
- kubectl_mcp_server-1.14.0.dist-info/METADATA +0 -780
- kubectl_mcp_server-1.14.0.dist-info/RECORD +0 -49
- {kubectl_mcp_server-1.14.0.dist-info → kubectl_mcp_server-1.16.0.dist-info}/WHEEL +0 -0
- {kubectl_mcp_server-1.14.0.dist-info → kubectl_mcp_server-1.16.0.dist-info}/entry_points.txt +0 -0
- {kubectl_mcp_server-1.14.0.dist-info → kubectl_mcp_server-1.16.0.dist-info}/licenses/LICENSE +0 -0
- {kubectl_mcp_server-1.14.0.dist-info → kubectl_mcp_server-1.16.0.dist-info}/top_level.txt +0 -0

kubectl_mcp_tool/tools/policy.py (new file)
@@ -0,0 +1,554 @@
"""Policy toolset for kubectl-mcp-server.

Provides tools for managing Kyverno and Gatekeeper policies.
"""

import subprocess
import json
from typing import Dict, Any, List, Optional

try:
    from fastmcp import FastMCP
    from fastmcp.tools import ToolAnnotations
except ImportError:
    from mcp.server.fastmcp import FastMCP
    from mcp.types import ToolAnnotations

from ..k8s_config import _get_kubectl_context_args
from ..crd_detector import crd_exists


KYVERNO_CLUSTER_POLICY_CRD = "clusterpolicies.kyverno.io"
KYVERNO_POLICY_CRD = "policies.kyverno.io"
KYVERNO_POLICY_REPORT_CRD = "policyreports.wgpolicyk8s.io"
KYVERNO_CLUSTER_POLICY_REPORT_CRD = "clusterpolicyreports.wgpolicyk8s.io"
GATEKEEPER_CONSTRAINT_TEMPLATE_CRD = "constrainttemplates.templates.gatekeeper.sh"
GATEKEEPER_CONFIG_CRD = "configs.config.gatekeeper.sh"


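# All kubectl access below goes through _run_kubectl, which prepends the context
# arguments from _get_kubectl_context_args, enforces a 60-second timeout, and
# normalizes every outcome into {"success": bool, "output"/"error": str}.
# _get_resources builds on it -- illustratively, _get_resources("clusterpolicies.kyverno.io")
# runs `kubectl get clusterpolicies.kyverno.io -A -o json` and returns the parsed
# "items" list, or [] on any error.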
def _run_kubectl(args: List[str], context: str = "") -> Dict[str, Any]:
    """Run kubectl command and return result."""
    cmd = ["kubectl"] + _get_kubectl_context_args(context) + args
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
        if result.returncode == 0:
            return {"success": True, "output": result.stdout}
        return {"success": False, "error": result.stderr}
    except subprocess.TimeoutExpired:
        return {"success": False, "error": "Command timed out"}
    except Exception as e:
        return {"success": False, "error": str(e)}


def _get_resources(kind: str, namespace: str = "", context: str = "", label_selector: str = "") -> List[Dict]:
    """Get Kubernetes resources of a specific kind."""
    args = ["get", kind, "-o", "json"]
    if namespace:
        args.extend(["-n", namespace])
    else:
        args.append("-A")
    if label_selector:
        args.extend(["-l", label_selector])

    result = _run_kubectl(args, context)
    if result["success"]:
        try:
            data = json.loads(result["output"])
            return data.get("items", [])
        except json.JSONDecodeError:
            return []
    return []


def _get_condition(conditions: List[Dict], condition_type: str) -> Optional[Dict]:
    """Get a specific condition from conditions list."""
    return next((c for c in conditions if c.get("type") == condition_type), None)


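# policy_list below flattens Kyverno ClusterPolicies/Policies and Gatekeeper
# ConstraintTemplates/constraints into a single list. Illustrative result shape:
#   {"context": "current", "total": 3, "enforcing": 1,
#    "policies": [{"name": "...", "kind": "ClusterPolicy", "engine": "kyverno", ...}]}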
def policy_list(
    namespace: str = "",
    context: str = "",
    engine: str = "all",
    label_selector: str = ""
) -> Dict[str, Any]:
    """List policies from Kyverno or Gatekeeper.

    Args:
        namespace: Filter by namespace (empty for cluster-wide)
        context: Kubernetes context to use (optional)
        engine: Policy engine filter (kyverno, gatekeeper, all)
        label_selector: Label selector to filter policies

    Returns:
        List of policies with their status
    """
    policies = []

    if engine in ("kyverno", "all"):
        if crd_exists(KYVERNO_CLUSTER_POLICY_CRD, context):
            for item in _get_resources("clusterpolicies.kyverno.io", "", context, label_selector):
                status = item.get("status", {})
                conditions = status.get("conditions", [])
                ready_cond = _get_condition(conditions, "Ready")
                spec = item.get("spec", {})

                policies.append({
                    "name": item["metadata"]["name"],
                    "namespace": "",
                    "kind": "ClusterPolicy",
                    "engine": "kyverno",
                    "ready": ready_cond.get("status") == "True" if ready_cond else True,
                    "validation_failure_action": spec.get("validationFailureAction", "Audit"),
                    "background": spec.get("background", True),
                    "rules_count": len(spec.get("rules", [])),
                    "message": ready_cond.get("message", "") if ready_cond else "",
                })

        if crd_exists(KYVERNO_POLICY_CRD, context):
            for item in _get_resources("policies.kyverno.io", namespace, context, label_selector):
                status = item.get("status", {})
                conditions = status.get("conditions", [])
                ready_cond = _get_condition(conditions, "Ready")
                spec = item.get("spec", {})

                policies.append({
                    "name": item["metadata"]["name"],
                    "namespace": item["metadata"]["namespace"],
                    "kind": "Policy",
                    "engine": "kyverno",
                    "ready": ready_cond.get("status") == "True" if ready_cond else True,
                    "validation_failure_action": spec.get("validationFailureAction", "Audit"),
                    "background": spec.get("background", True),
                    "rules_count": len(spec.get("rules", [])),
                    "message": ready_cond.get("message", "") if ready_cond else "",
                })

    if engine in ("gatekeeper", "all"):
        if crd_exists(GATEKEEPER_CONSTRAINT_TEMPLATE_CRD, context):
            for item in _get_resources("constrainttemplates.templates.gatekeeper.sh", "", context, label_selector):
                status = item.get("status", {})
                spec = item.get("spec", {})

                created = status.get("created", False)
                policies.append({
                    "name": item["metadata"]["name"],
                    "namespace": "",
                    "kind": "ConstraintTemplate",
                    "engine": "gatekeeper",
                    "ready": created,
                    "crd_kind": spec.get("crd", {}).get("spec", {}).get("names", {}).get("kind", ""),
                    "targets": [t.get("target", "") for t in spec.get("targets", [])],
                })

            constraints = _get_gatekeeper_constraints(context)
            for constraint in constraints:
                policies.append(constraint)

    enforce_count = sum(1 for p in policies if p.get("validation_failure_action") == "Enforce" or p.get("kind") == "Constraint")

    return {
        "context": context or "current",
        "total": len(policies),
        "enforcing": enforce_count,
        "policies": policies,
    }


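# Gatekeeper constraints have no fixed kind: each ConstraintTemplate generates its
# own CRD. The helper below therefore lists the templates first and then queries
# each generated kind by its lowercased name -- e.g. a (hypothetical) template whose
# crd.spec.names.kind is "K8sRequiredLabels" is read back via
# _get_resources("k8srequiredlabels"). Any kind that fails to resolve is skipped.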
def _get_gatekeeper_constraints(context: str = "") -> List[Dict]:
    """Get all Gatekeeper constraints dynamically."""
    constraints = []

    templates = _get_resources("constrainttemplates.templates.gatekeeper.sh", "", context)
    for template in templates:
        crd_kind = template.get("spec", {}).get("crd", {}).get("spec", {}).get("names", {}).get("kind", "")
        if not crd_kind:
            continue

        try:
            constraint_items = _get_resources(crd_kind.lower(), "", context)
            for item in constraint_items:
                status = item.get("status", {})
                spec = item.get("spec", {})
                match = spec.get("match", {})

                total_violations = status.get("totalViolations", 0)

                constraints.append({
                    "name": item["metadata"]["name"],
                    "namespace": "",
                    "kind": "Constraint",
                    "constraint_kind": crd_kind,
                    "engine": "gatekeeper",
                    "ready": True,
                    "enforcement_action": spec.get("enforcementAction", "deny"),
                    "total_violations": total_violations,
                    "match_kinds": match.get("kinds", []),
                    "match_namespaces": match.get("namespaces", []),
                    "excluded_namespaces": match.get("excludedNamespaces", []),
                })
        except Exception:
            continue

    return constraints


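# policy_get resolves friendly kind names to fully qualified resource names before
# shelling out. Illustrative mapping: kind="Policy" -> "policies.kyverno.io";
# anything not in kind_map (for example a Gatekeeper constraint kind such as
# "K8sRequiredLabels") is simply lowercased and passed to kubectl as-is.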
def policy_get(
    name: str,
    namespace: str = "",
    kind: str = "ClusterPolicy",
    context: str = ""
) -> Dict[str, Any]:
    """Get detailed information about a policy.

    Args:
        name: Name of the policy
        namespace: Namespace (for namespaced policies)
        kind: Kind of policy (ClusterPolicy, Policy, ConstraintTemplate, or constraint kind)
        context: Kubernetes context to use (optional)

    Returns:
        Detailed policy information
    """
    kind_map = {
        "clusterpolicy": "clusterpolicies.kyverno.io",
        "policy": "policies.kyverno.io",
        "constrainttemplate": "constrainttemplates.templates.gatekeeper.sh",
    }

    k8s_kind = kind_map.get(kind.lower(), kind.lower())

    if namespace:
        args = ["get", k8s_kind, name, "-n", namespace, "-o", "json"]
    else:
        args = ["get", k8s_kind, name, "-o", "json"]

    result = _run_kubectl(args, context)

    if result["success"]:
        try:
            data = json.loads(result["output"])
            return {
                "success": True,
                "context": context or "current",
                "policy": data,
            }
        except json.JSONDecodeError:
            return {"success": False, "error": "Failed to parse response"}

    return {"success": False, "error": result.get("error", "Unknown error")}


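# Violations come from two different sources: Kyverno writes them into
# PolicyReport/ClusterPolicyReport "results" entries (only "fail"/"error" results
# are kept here), while Gatekeeper records them on each constraint's
# status.violations. Both are normalized into one flat list below.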
def policy_violations_list(
    namespace: str = "",
    context: str = "",
    engine: str = "all",
    severity: str = ""
) -> Dict[str, Any]:
    """List policy violations from PolicyReports or Gatekeeper.

    Args:
        namespace: Filter by namespace (empty for all)
        context: Kubernetes context to use (optional)
        engine: Policy engine filter (kyverno, gatekeeper, all)
        severity: Filter by severity (high, medium, low)

    Returns:
        List of policy violations
    """
    violations = []

    if engine in ("kyverno", "all"):
        if crd_exists(KYVERNO_POLICY_REPORT_CRD, context):
            for report in _get_resources("policyreports.wgpolicyk8s.io", namespace, context):
                results = report.get("results", [])
                for result in results:
                    if result.get("result") in ("fail", "error"):
                        if severity and result.get("severity", "").lower() != severity.lower():
                            continue
                        violations.append({
                            "source": "PolicyReport",
                            "engine": "kyverno",
                            "namespace": report["metadata"]["namespace"],
                            "policy": result.get("policy", ""),
                            "rule": result.get("rule", ""),
                            "result": result.get("result", ""),
                            "severity": result.get("severity", ""),
                            "message": result.get("message", ""),
                            "category": result.get("category", ""),
                            "resources": result.get("resources", []),
                        })

        if crd_exists(KYVERNO_CLUSTER_POLICY_REPORT_CRD, context):
            for report in _get_resources("clusterpolicyreports.wgpolicyk8s.io", "", context):
                results = report.get("results", [])
                for result in results:
                    if result.get("result") in ("fail", "error"):
                        if severity and result.get("severity", "").lower() != severity.lower():
                            continue
                        violations.append({
                            "source": "ClusterPolicyReport",
                            "engine": "kyverno",
                            "namespace": "",
                            "policy": result.get("policy", ""),
                            "rule": result.get("rule", ""),
                            "result": result.get("result", ""),
                            "severity": result.get("severity", ""),
                            "message": result.get("message", ""),
                            "category": result.get("category", ""),
                            "resources": result.get("resources", []),
                        })

    if engine in ("gatekeeper", "all"):
        constraints = _get_gatekeeper_constraints(context)
        for constraint in constraints:
            if constraint.get("total_violations", 0) > 0:
                constraint_detail = policy_get(
                    constraint["name"], "", constraint["constraint_kind"], context
                )
                if constraint_detail.get("success"):
                    policy_data = constraint_detail["policy"]
                    status_violations = policy_data.get("status", {}).get("violations", [])
                    for v in status_violations:
                        violations.append({
                            "source": "GatekeeperConstraint",
                            "engine": "gatekeeper",
                            "constraint": constraint["name"],
                            "constraint_kind": constraint["constraint_kind"],
                            "enforcement_action": v.get("enforcementAction", "deny"),
                            "kind": v.get("kind", ""),
                            "name": v.get("name", ""),
                            "namespace": v.get("namespace", ""),
                            "message": v.get("message", ""),
                        })

    critical = sum(1 for v in violations if v.get("severity", "").lower() == "high")

    return {
        "context": context or "current",
        "total": len(violations),
        "critical": critical,
        "violations": violations,
    }


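# Denial messages are matched by simple case-insensitive substring search: if a
# policy, rule, or constraint name appears in the message it is reported as a
# match with a fixed confidence score (0.9 for name hits, 0.85 for rule hits).
# Illustrative input: "admission webhook denied the request: [require-labels] ..."
# would match a Kyverno ClusterPolicy named "require-labels", were one installed.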
def policy_explain_denial(
    message: str,
    context: str = ""
) -> Dict[str, Any]:
    """Explain an admission denial message by matching against policies.

    Args:
        message: The denial message from Kubernetes admission
        context: Kubernetes context to use (optional)

    Returns:
        Explanation with matched policies and recommendations
    """
    matches = []
    recommendations = []

    message_lower = message.lower()

    if crd_exists(KYVERNO_CLUSTER_POLICY_CRD, context):
        for policy in _get_resources("clusterpolicies.kyverno.io", "", context):
            policy_name = policy["metadata"]["name"]
            if policy_name.lower() in message_lower:
                spec = policy.get("spec", {})
                matches.append({
                    "engine": "kyverno",
                    "type": "ClusterPolicy",
                    "name": policy_name,
                    "confidence": 0.9,
                    "validation_failure_action": spec.get("validationFailureAction", "Audit"),
                    "rules": [r.get("name", "") for r in spec.get("rules", [])],
                })

            for rule in policy.get("spec", {}).get("rules", []):
                rule_name = rule.get("name", "")
                if rule_name.lower() in message_lower:
                    matches.append({
                        "engine": "kyverno",
                        "type": "ClusterPolicy",
                        "name": policy_name,
                        "rule": rule_name,
                        "confidence": 0.85,
                    })

    if crd_exists(GATEKEEPER_CONSTRAINT_TEMPLATE_CRD, context):
        constraints = _get_gatekeeper_constraints(context)
        for constraint in constraints:
            if constraint["name"].lower() in message_lower:
                matches.append({
                    "engine": "gatekeeper",
                    "type": "Constraint",
                    "name": constraint["name"],
                    "constraint_kind": constraint.get("constraint_kind", ""),
                    "confidence": 0.9,
                    "enforcement_action": constraint.get("enforcement_action", "deny"),
                })

    if "kyverno" in message_lower:
        recommendations.append("This appears to be a Kyverno policy denial")
        recommendations.append("Check policy with: kubectl get clusterpolicy -o yaml")
        recommendations.append("View violations: kubectl get policyreport -A")
    elif "gatekeeper" in message_lower or "admission webhook" in message_lower:
        recommendations.append("This appears to be a Gatekeeper/OPA policy denial")
        recommendations.append("Check constraints with: kubectl get constraints")
        recommendations.append("View constraint templates: kubectl get constrainttemplates")

    if not matches:
        recommendations.append("No exact policy match found")
        recommendations.append("Try listing all policies: policy_list()")
        recommendations.append("Check admission webhooks: kubectl get validatingwebhookconfigurations")

    return {
        "context": context or "current",
        "original_message": message,
        "matches": matches,
        "recommendations": recommendations,
    }


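# policy_audit is a pure aggregation over policy_violations_list and policy_list:
# it tallies violations per policy, per namespace, and per resource kind. Note
# that the resource_kind filter only affects the per-kind tally, and Kyverno
# report entries carry their subjects under "resources" rather than a top-level
# "kind", so they land in the "unknown" kind bucket here.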
def policy_audit(
    namespace: str = "",
    context: str = "",
    resource_kind: str = ""
) -> Dict[str, Any]:
    """Audit resources against installed policies.

    Args:
        namespace: Namespace to audit (empty for all)
        context: Kubernetes context to use (optional)
        resource_kind: Filter by resource kind

    Returns:
        Audit results with violation summary
    """
    violations = policy_violations_list(namespace, context)
    policies = policy_list(namespace, context)

    by_policy = {}
    by_namespace = {}
    by_kind = {}

    for v in violations.get("violations", []):
        policy_name = v.get("policy", v.get("constraint", "unknown"))
        if policy_name not in by_policy:
            by_policy[policy_name] = 0
        by_policy[policy_name] += 1

        ns = v.get("namespace", "cluster-scoped")
        if ns not in by_namespace:
            by_namespace[ns] = 0
        by_namespace[ns] += 1

        kind = v.get("kind", "unknown")
        if resource_kind and kind.lower() != resource_kind.lower():
            continue
        if kind not in by_kind:
            by_kind[kind] = 0
        by_kind[kind] += 1

    return {
        "context": context or "current",
        "summary": {
            "total_policies": policies.get("total", 0),
            "enforcing_policies": policies.get("enforcing", 0),
            "total_violations": violations.get("total", 0),
            "critical_violations": violations.get("critical", 0),
        },
        "violations_by_policy": by_policy,
        "violations_by_namespace": by_namespace,
        "violations_by_kind": by_kind,
        "top_violating_policies": sorted(by_policy.items(), key=lambda x: x[1], reverse=True)[:5],
        "top_violating_namespaces": sorted(by_namespace.items(), key=lambda x: x[1], reverse=True)[:5],
    }


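# Engine detection below is purely CRD-based: the presence of the Kyverno or
# Gatekeeper CRDs (via crd_exists) is treated as "installed"; no pods or
# deployments are inspected.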
def policy_detect(context: str = "") -> Dict[str, Any]:
    """Detect which policy engines are installed in the cluster.

    Args:
        context: Kubernetes context to use (optional)

    Returns:
        Detection results for Kyverno and Gatekeeper
    """
    kyverno_installed = any([
        crd_exists(KYVERNO_CLUSTER_POLICY_CRD, context),
        crd_exists(KYVERNO_POLICY_CRD, context),
    ])

    gatekeeper_installed = crd_exists(GATEKEEPER_CONSTRAINT_TEMPLATE_CRD, context)

    return {
        "context": context or "current",
        "kyverno": {
            "installed": kyverno_installed,
            "cluster_policies": crd_exists(KYVERNO_CLUSTER_POLICY_CRD, context),
            "policies": crd_exists(KYVERNO_POLICY_CRD, context),
            "policy_reports": crd_exists(KYVERNO_POLICY_REPORT_CRD, context),
            "cluster_policy_reports": crd_exists(KYVERNO_CLUSTER_POLICY_REPORT_CRD, context),
        },
        "gatekeeper": {
            "installed": gatekeeper_installed,
            "constraint_templates": crd_exists(GATEKEEPER_CONSTRAINT_TEMPLATE_CRD, context),
            "config": crd_exists(GATEKEEPER_CONFIG_CRD, context),
        },
    }


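# Every tool below is registered with readOnlyHint=True and only wraps the query
# functions above, so the non_destructive flag is accepted (presumably for parity
# with the other register_* helpers in this package) but is not consulted here.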
def register_policy_tools(mcp: FastMCP, non_destructive: bool = False):
    """Register policy tools with the MCP server."""

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_list_tool(
        namespace: str = "",
        context: str = "",
        engine: str = "all",
        label_selector: str = ""
    ) -> str:
        """List policies from Kyverno or Gatekeeper."""
        return json.dumps(policy_list(namespace, context, engine, label_selector), indent=2)

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_get_tool(
        name: str,
        namespace: str = "",
        kind: str = "ClusterPolicy",
        context: str = ""
    ) -> str:
        """Get detailed information about a policy."""
        return json.dumps(policy_get(name, namespace, kind, context), indent=2)

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_violations_list_tool(
        namespace: str = "",
        context: str = "",
        engine: str = "all",
        severity: str = ""
    ) -> str:
        """List policy violations from PolicyReports or Gatekeeper."""
        return json.dumps(policy_violations_list(namespace, context, engine, severity), indent=2)

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_explain_denial_tool(
        message: str,
        context: str = ""
    ) -> str:
        """Explain an admission denial message by matching against policies."""
        return json.dumps(policy_explain_denial(message, context), indent=2)

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_audit_tool(
        namespace: str = "",
        context: str = "",
        resource_kind: str = ""
    ) -> str:
        """Audit resources against installed policies."""
        return json.dumps(policy_audit(namespace, context, resource_kind), indent=2)

    @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True))
    def policy_detect_tool(context: str = "") -> str:
        """Detect which policy engines are installed in the cluster."""
        return json.dumps(policy_detect(context), indent=2)
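
# Minimal wiring sketch (illustrative; the actual server entry point lives in
# mcp_server.py, which is not shown in this hunk):
#
#   from fastmcp import FastMCP
#   from kubectl_mcp_tool.tools.policy import register_policy_tools
#
#   mcp = FastMCP("kubectl-mcp-server")
#   register_policy_tools(mcp)
#   # policy_list_tool, policy_detect_tool, etc. are now exposed to MCP clients
#   # and return pretty-printed JSON strings.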