@techwavedev/agi-agent-kit 1.1.7 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of @techwavedev/agi-agent-kit might be problematic. Click here for more details.
- package/CHANGELOG.md +82 -1
- package/README.md +190 -12
- package/bin/init.js +30 -2
- package/package.json +6 -3
- package/templates/base/AGENTS.md +54 -23
- package/templates/base/README.md +325 -0
- package/templates/base/directives/memory_integration.md +95 -0
- package/templates/base/execution/memory_manager.py +309 -0
- package/templates/base/execution/session_boot.py +218 -0
- package/templates/base/execution/session_init.py +320 -0
- package/templates/base/skill-creator/SKILL_skillcreator.md +23 -36
- package/templates/base/skill-creator/scripts/init_skill.py +18 -135
- package/templates/skills/ec/README.md +31 -0
- package/templates/skills/ec/aws/SKILL.md +1020 -0
- package/templates/skills/ec/aws/defaults.yaml +13 -0
- package/templates/skills/ec/aws/references/common_patterns.md +80 -0
- package/templates/skills/ec/aws/references/mcp_servers.md +98 -0
- package/templates/skills/ec/aws-terraform/SKILL.md +349 -0
- package/templates/skills/ec/aws-terraform/references/best_practices.md +394 -0
- package/templates/skills/ec/aws-terraform/references/checkov_reference.md +337 -0
- package/templates/skills/ec/aws-terraform/scripts/configure_mcp.py +150 -0
- package/templates/skills/ec/confluent-kafka/SKILL.md +655 -0
- package/templates/skills/ec/confluent-kafka/references/ansible_playbooks.md +792 -0
- package/templates/skills/ec/confluent-kafka/references/ec_deployment.md +579 -0
- package/templates/skills/ec/confluent-kafka/references/kraft_migration.md +490 -0
- package/templates/skills/ec/confluent-kafka/references/troubleshooting.md +778 -0
- package/templates/skills/ec/confluent-kafka/references/upgrade_7x_to_8x.md +488 -0
- package/templates/skills/ec/confluent-kafka/scripts/kafka_health_check.py +435 -0
- package/templates/skills/ec/confluent-kafka/scripts/upgrade_preflight.py +568 -0
- package/templates/skills/ec/confluent-kafka/scripts/validate_config.py +455 -0
- package/templates/skills/ec/consul/SKILL.md +427 -0
- package/templates/skills/ec/consul/references/acl_setup.md +168 -0
- package/templates/skills/ec/consul/references/ha_config.md +196 -0
- package/templates/skills/ec/consul/references/troubleshooting.md +267 -0
- package/templates/skills/ec/consul/references/upgrades.md +213 -0
- package/templates/skills/ec/consul/scripts/consul_health_report.py +530 -0
- package/templates/skills/ec/consul/scripts/consul_status.py +264 -0
- package/templates/skills/ec/consul/scripts/generate_values.py +170 -0
- package/templates/skills/ec/documentation/SKILL.md +351 -0
- package/templates/skills/ec/documentation/references/best_practices.md +201 -0
- package/templates/skills/ec/documentation/scripts/analyze_code.py +307 -0
- package/templates/skills/ec/documentation/scripts/detect_changes.py +460 -0
- package/templates/skills/ec/documentation/scripts/generate_changelog.py +312 -0
- package/templates/skills/ec/documentation/scripts/sync_docs.py +272 -0
- package/templates/skills/ec/documentation/scripts/update_skill_docs.py +366 -0
- package/templates/skills/ec/gitlab/SKILL.md +529 -0
- package/templates/skills/ec/gitlab/references/agent_installation.md +416 -0
- package/templates/skills/ec/gitlab/references/api_reference.md +508 -0
- package/templates/skills/ec/gitlab/references/gitops_flux.md +465 -0
- package/templates/skills/ec/gitlab/references/troubleshooting.md +518 -0
- package/templates/skills/ec/gitlab/scripts/generate_agent_values.py +329 -0
- package/templates/skills/ec/gitlab/scripts/gitlab_agent_status.py +414 -0
- package/templates/skills/ec/jira/SKILL.md +484 -0
- package/templates/skills/ec/jira/references/jql_reference.md +148 -0
- package/templates/skills/ec/jira/scripts/add_comment.py +91 -0
- package/templates/skills/ec/jira/scripts/bulk_log_work.py +124 -0
- package/templates/skills/ec/jira/scripts/create_ticket.py +162 -0
- package/templates/skills/ec/jira/scripts/get_ticket.py +191 -0
- package/templates/skills/ec/jira/scripts/jira_client.py +383 -0
- package/templates/skills/ec/jira/scripts/log_work.py +154 -0
- package/templates/skills/ec/jira/scripts/search_tickets.py +104 -0
- package/templates/skills/ec/jira/scripts/update_comment.py +67 -0
- package/templates/skills/ec/jira/scripts/update_ticket.py +161 -0
- package/templates/skills/ec/karpenter/SKILL.md +301 -0
- package/templates/skills/ec/karpenter/references/ec2nodeclasses.md +421 -0
- package/templates/skills/ec/karpenter/references/migration.md +396 -0
- package/templates/skills/ec/karpenter/references/nodepools.md +400 -0
- package/templates/skills/ec/karpenter/references/troubleshooting.md +359 -0
- package/templates/skills/ec/karpenter/scripts/generate_ec2nodeclass.py +187 -0
- package/templates/skills/ec/karpenter/scripts/generate_nodepool.py +245 -0
- package/templates/skills/ec/karpenter/scripts/karpenter_status.py +359 -0
- package/templates/skills/ec/opensearch/SKILL.md +720 -0
- package/templates/skills/ec/opensearch/references/ml_neural_search.md +576 -0
- package/templates/skills/ec/opensearch/references/operator.md +532 -0
- package/templates/skills/ec/opensearch/references/query_dsl.md +532 -0
- package/templates/skills/ec/opensearch/scripts/configure_mcp.py +148 -0
- package/templates/skills/ec/victoriametrics/SKILL.md +598 -0
- package/templates/skills/ec/victoriametrics/references/kubernetes.md +531 -0
- package/templates/skills/ec/victoriametrics/references/prometheus_migration.md +333 -0
- package/templates/skills/ec/victoriametrics/references/troubleshooting.md +442 -0
- package/templates/skills/knowledge/SKILLS_CATALOG.md +274 -4
- package/templates/skills/knowledge/intelligent-routing/SKILL.md +237 -164
- package/templates/skills/knowledge/parallel-agents/SKILL.md +345 -73
- package/templates/skills/knowledge/plugin-discovery/SKILL.md +582 -0
- package/templates/skills/knowledge/plugin-discovery/scripts/platform_setup.py +1083 -0
- package/templates/skills/knowledge/design-md/README.md +0 -34
- package/templates/skills/knowledge/design-md/SKILL.md +0 -193
- package/templates/skills/knowledge/design-md/examples/DESIGN.md +0 -154
- package/templates/skills/knowledge/notebooklm-mcp/SKILL.md +0 -71
- package/templates/skills/knowledge/notebooklm-mcp/assets/example_asset.txt +0 -24
- package/templates/skills/knowledge/notebooklm-mcp/references/api_reference.md +0 -34
- package/templates/skills/knowledge/notebooklm-mcp/scripts/example.py +0 -19
- package/templates/skills/knowledge/react-components/README.md +0 -36
- package/templates/skills/knowledge/react-components/SKILL.md +0 -53
- package/templates/skills/knowledge/react-components/examples/gold-standard-card.tsx +0 -80
- package/templates/skills/knowledge/react-components/package-lock.json +0 -231
- package/templates/skills/knowledge/react-components/package.json +0 -16
- package/templates/skills/knowledge/react-components/resources/architecture-checklist.md +0 -15
- package/templates/skills/knowledge/react-components/resources/component-template.tsx +0 -37
- package/templates/skills/knowledge/react-components/resources/stitch-api-reference.md +0 -14
- package/templates/skills/knowledge/react-components/resources/style-guide.json +0 -27
- package/templates/skills/knowledge/react-components/scripts/fetch-stitch.sh +0 -30
- package/templates/skills/knowledge/react-components/scripts/validate.js +0 -68
- package/templates/skills/knowledge/self-update/SKILL.md +0 -60
- package/templates/skills/knowledge/self-update/scripts/update_kit.py +0 -103
- package/templates/skills/knowledge/stitch-loop/README.md +0 -54
- package/templates/skills/knowledge/stitch-loop/SKILL.md +0 -235
- package/templates/skills/knowledge/stitch-loop/examples/SITE.md +0 -73
- package/templates/skills/knowledge/stitch-loop/examples/next-prompt.md +0 -25
- package/templates/skills/knowledge/stitch-loop/resources/baton-schema.md +0 -61
- package/templates/skills/knowledge/stitch-loop/resources/site-template.md +0 -104
|
@@ -0,0 +1,568 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Confluent Kafka Upgrade Pre-flight Check Script
|
|
4
|
+
|
|
5
|
+
Validates cluster readiness for upgrade from 7.x to 8.x versions:
|
|
6
|
+
- Version compatibility check
|
|
7
|
+
- Cluster health verification
|
|
8
|
+
- Deprecated configuration detection
|
|
9
|
+
- Java version validation
|
|
10
|
+
- ZooKeeper/KRaft mode assessment
|
|
11
|
+
- Backup recommendations
|
|
12
|
+
|
|
13
|
+
Usage:
|
|
14
|
+
python upgrade_preflight.py --current-version 7.6 --target-version 8.0 --bootstrap-servers kafka-01:9092
|
|
15
|
+
python upgrade_preflight.py -c 7.5 -t 8.0 -b localhost:9092 --config /opt/confluent/etc/kafka/server.properties
|
|
16
|
+
|
|
17
|
+
Arguments:
|
|
18
|
+
--current-version, -c Current Confluent Platform version (required)
|
|
19
|
+
--target-version, -t Target Confluent Platform version (required)
|
|
20
|
+
--bootstrap-servers, -b Bootstrap servers for cluster checks (required)
|
|
21
|
+
--config Path to server.properties for config validation
|
|
22
|
+
--skip-cluster Skip cluster health checks (offline validation only)
|
|
23
|
+
--json Output results as JSON
|
|
24
|
+
|
|
25
|
+
Exit Codes:
|
|
26
|
+
0 - Ready for upgrade
|
|
27
|
+
1 - Invalid arguments
|
|
28
|
+
2 - Connection failed
|
|
29
|
+
3 - Pre-flight checks failed (blockers found)
|
|
30
|
+
4 - Critical issues (do not proceed)
|
|
31
|
+
"""
|
|
32
|
+
|
|
33
|
+
import argparse
|
|
34
|
+
import json
|
|
35
|
+
import os
|
|
36
|
+
import subprocess
|
|
37
|
+
import sys
|
|
38
|
+
from dataclasses import dataclass, field
|
|
39
|
+
from pathlib import Path
|
|
40
|
+
from typing import Dict, List, Optional, Tuple
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class PreflightResult:
    """Outcome of a single pre-flight check."""
    # Human-readable name of the check (e.g. "Java Version", "Upgrade Path").
    name: str
    # Check verdict: "pass", "fail", "warn", "skip"
    status: str  # "pass", "fail", "warn", "skip"
    # One-line summary shown in the report output.
    message: str
    # Optional longer explanation or remediation hint; omitted when None.
    details: Optional[str] = None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass
class PreflightReport:
    """Complete pre-flight report aggregating all check results."""
    # Confluent Platform version being upgraded from.
    current_version: str
    # Confluent Platform version being upgraded to.
    target_version: str
    # "ready", "warnings", or "blocked" once checks have run; "unknown" before.
    overall_status: str = "unknown"
    # Failed checks ("fail") that must be resolved before upgrading.
    blockers: List[PreflightResult] = field(default_factory=list)
    # Checks ("warn") to review; they do not block the upgrade.
    warnings: List[PreflightResult] = field(default_factory=list)
    # Checks that passed.
    passed: List[PreflightResult] = field(default_factory=list)
    # Operator guidance generated for this upgrade path.
    recommendations: List[str] = field(default_factory=list)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class UpgradePreflight:
    """Confluent Kafka upgrade pre-flight checker.

    Runs a series of checks (upgrade path, Java version, configuration,
    cluster health) and accumulates the results into a PreflightReport.
    """

    # Known upgrade paths: (current, target) -> whether a direct hop is
    # supported and, if not, which intermediate version to pass through.
    UPGRADE_PATHS = {
        ("7.3", "8.0"): {"direct": False, "via": "7.6"},
        ("7.4", "8.0"): {"direct": True},
        ("7.5", "8.0"): {"direct": True},
        ("7.6", "8.0"): {"direct": True},
        ("7.6", "8.1"): {"direct": True},
        ("8.0", "8.1"): {"direct": True},
    }

    def __init__(
        self,
        current_version: str,
        target_version: str,
        bootstrap_servers: Optional[str] = None,
        config_path: Optional[str] = None,
    ):
        self.current_version = current_version
        self.target_version = target_version
        self.bootstrap_servers = bootstrap_servers
        self.config_path = Path(config_path) if config_path else None
        self.report = PreflightReport(current_version, target_version)
        self.kafka_bin = self._find_kafka_bin()

    def _find_kafka_bin(self) -> str:
        """Find the Kafka CLI binary directory.

        Probes common install locations; falls back to the conventional
        Confluent path so that a missing installation surfaces later as a
        command failure inside the individual checks.
        """
        paths = [
            "/opt/confluent/bin",
            "/opt/kafka/bin",
            os.path.expanduser("~/confluent/bin"),
        ]
        for path in paths:
            if os.path.exists(os.path.join(path, "kafka-topics")):
                return path
        return "/opt/confluent/bin"

    def _run_cmd(self, cmd: List[str], timeout: int = 30) -> Tuple[int, str, str]:
        """Run a command and return (exit_code, stdout, stderr).

        Never raises: timeouts and execution errors are reported as exit
        code -1 with the error text in the stderr slot, so every caller can
        treat all failure modes uniformly.
        """
        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
            return result.returncode, result.stdout, result.stderr
        except subprocess.TimeoutExpired:
            return -1, "", "Command timed out"
        except Exception as e:  # deliberate best-effort: tooling may be absent
            return -1, "", str(e)

    def check_upgrade_path(self) -> PreflightResult:
        """Verify that current → target is a supported upgrade path."""
        key = (self.current_version, self.target_version)

        if key in self.UPGRADE_PATHS:
            path_info = self.UPGRADE_PATHS[key]
            if path_info["direct"]:
                return PreflightResult(
                    name="Upgrade Path",
                    status="pass",
                    message=f"Direct upgrade from {self.current_version} to {self.target_version} is supported",
                )
            else:
                return PreflightResult(
                    name="Upgrade Path",
                    status="fail",
                    message=f"Direct upgrade not supported. Upgrade to {path_info['via']} first.",
                    details=f"Required path: {self.current_version} → {path_info['via']} → {self.target_version}",
                )
        else:
            # Unknown pair: treat a same-major hop as a supported minor
            # upgrade; anything crossing majors needs a documentation check.
            current_major = self.current_version.split(".")[0]
            target_major = self.target_version.split(".")[0]

            if current_major == target_major:
                return PreflightResult(
                    name="Upgrade Path",
                    status="pass",
                    message=f"Minor version upgrade within {current_major}.x is supported",
                )
            else:
                return PreflightResult(
                    name="Upgrade Path",
                    status="warn",
                    message=f"Upgrade path {self.current_version} → {self.target_version} not in known matrix",
                    details="Consult Confluent documentation for compatibility",
                )

    def check_java_version(self) -> PreflightResult:
        """Check the installed Java against the target version's requirement.

        Target 8.x requires Java 17+; earlier targets require Java 11+.
        """
        code, stdout, stderr = self._run_cmd(["java", "-version"])

        if code != 0:
            return PreflightResult(
                name="Java Version",
                status="fail",
                message="Cannot determine Java version",
                details=stderr,
            )

        # `java -version` traditionally writes its banner to stderr; fall
        # back to stdout for distributions that print it there instead.
        version_output = (stderr or stdout).lower()

        # Extract the major version number. Handle both the modern scheme
        # ('version "17.0.9"') and the legacy one ('version "1.8.0_292"',
        # which means Java 8 — the old regex reported it as Java 1).
        java_version = None
        if "version" in version_output:
            import re
            match = re.search(r'version ["\']?(\d+)(?:\.(\d+))?', version_output)
            if match:
                major = int(match.group(1))
                if major == 1 and match.group(2):
                    java_version = int(match.group(2))
                else:
                    java_version = major

        if java_version is None:
            return PreflightResult(
                name="Java Version",
                status="warn",
                message="Could not parse Java version",
                details=version_output[:200],
            )

        # Compare against the requirement for the target line.
        if self.target_version.startswith("8."):
            if java_version >= 17:
                return PreflightResult(
                    name="Java Version",
                    status="pass",
                    message=f"Java {java_version} meets requirement (Java 17+ required for 8.x)",
                )
            else:
                return PreflightResult(
                    name="Java Version",
                    status="fail",
                    message=f"Java {java_version} is below requirement. Java 17+ required for version 8.x",
                    details="Install Amazon Corretto 17 or OpenJDK 17+",
                )
        else:
            if java_version >= 11:
                return PreflightResult(
                    name="Java Version",
                    status="pass",
                    message=f"Java {java_version} meets requirement for 7.x",
                )
            else:
                return PreflightResult(
                    name="Java Version",
                    status="fail",
                    message=f"Java {java_version} is below requirement. Java 11+ required.",
                )

    def check_cluster_health(self) -> List[PreflightResult]:
        """Check cluster health indicators via the Kafka CLI tools.

        Verifies broker connectivity, then looks for under-replicated and
        offline partitions. Skipped entirely when no bootstrap servers were
        provided.
        """
        results = []

        if not self.bootstrap_servers:
            results.append(PreflightResult(
                name="Cluster Health",
                status="skip",
                message="Bootstrap servers not provided, skipping cluster checks",
            ))
            return results

        # Connectivity probe: if this fails there is no point running the
        # partition checks below.
        cmd = [
            f"{self.kafka_bin}/kafka-broker-api-versions",
            "--bootstrap-server", self.bootstrap_servers,
        ]
        code, stdout, stderr = self._run_cmd(cmd)

        if code != 0:
            results.append(PreflightResult(
                name="Broker Connectivity",
                status="fail",
                message="Cannot connect to Kafka brokers",
                details=stderr[:200],
            ))
            return results

        results.append(PreflightResult(
            name="Broker Connectivity",
            status="pass",
            message="Successfully connected to Kafka cluster",
        ))

        # Under-replicated partitions: any non-empty output line is a URP.
        cmd = [
            f"{self.kafka_bin}/kafka-topics",
            "--bootstrap-server", self.bootstrap_servers,
            "--describe", "--under-replicated-partitions",
        ]
        code, stdout, stderr = self._run_cmd(cmd)

        if code == 0:
            urp_lines = [l for l in stdout.strip().split("\n") if l.strip()]
            if urp_lines:
                results.append(PreflightResult(
                    name="Under-replicated Partitions",
                    status="fail",
                    message=f"{len(urp_lines)} under-replicated partitions found",
                    details="Resolve URP before upgrade to ensure data safety",
                ))
            else:
                results.append(PreflightResult(
                    name="Under-replicated Partitions",
                    status="pass",
                    message="No under-replicated partitions",
                ))

        # Offline partitions: these make a rolling upgrade unsafe.
        cmd = [
            f"{self.kafka_bin}/kafka-topics",
            "--bootstrap-server", self.bootstrap_servers,
            "--describe", "--unavailable-partitions",
        ]
        code, stdout, stderr = self._run_cmd(cmd)

        if code == 0:
            offline_lines = [l for l in stdout.strip().split("\n") if l.strip()]
            if offline_lines:
                results.append(PreflightResult(
                    name="Offline Partitions",
                    status="fail",
                    message=f"CRITICAL: {len(offline_lines)} offline partitions found",
                    details="DO NOT PROCEED. Resolve offline partitions first.",
                ))
            else:
                results.append(PreflightResult(
                    name="Offline Partitions",
                    status="pass",
                    message="No offline partitions",
                ))

        return results

    def check_config_compatibility(self) -> List[PreflightResult]:
        """Check server.properties for deprecated settings and metadata mode.

        Skipped when no config path was given; fails when the file is
        missing or unreadable.
        """
        results = []

        if not self.config_path:
            results.append(PreflightResult(
                name="Configuration Check",
                status="skip",
                message="No config file provided, skipping config validation",
            ))
            return results

        if not self.config_path.exists():
            results.append(PreflightResult(
                name="Configuration Check",
                status="fail",
                message=f"Config file not found: {self.config_path}",
            ))
            return results

        # Parse the properties file: simple "key=value" lines, '#' comments.
        config = {}
        try:
            with open(self.config_path) as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith("#") and "=" in line:
                        key, value = line.split("=", 1)
                        config[key.strip()] = value.strip()
        except Exception as e:
            results.append(PreflightResult(
                name="Configuration Check",
                status="fail",
                message=f"Cannot parse config: {e}",
            ))
            return results

        # Settings removed/deprecated in the 8.x line.
        if self.target_version.startswith("8."):
            deprecated = ["log.message.format.version", "inter.broker.protocol.version"]
            found_deprecated = [d for d in deprecated if d in config]

            if found_deprecated:
                results.append(PreflightResult(
                    name="Deprecated Settings",
                    status="warn",
                    message=f"Deprecated settings found: {', '.join(found_deprecated)}",
                    details="These should be removed before or during upgrade",
                ))
            else:
                results.append(PreflightResult(
                    name="Deprecated Settings",
                    status="pass",
                    message="No deprecated settings found",
                ))

        # Metadata mode: ZooKeeper-only, ZK→KRaft migration, or pure KRaft.
        # NOTE(review): a config with both zookeeper.connect and
        # process.roles but no migration flag produces no result here —
        # confirm whether that combination should be flagged.
        has_zk = "zookeeper.connect" in config
        has_kraft = "process.roles" in config
        has_migration = "zookeeper.metadata.migration.enable" in config

        if has_zk and not has_kraft and not has_migration:
            results.append(PreflightResult(
                name="Metadata Mode",
                status="warn",
                message="ZooKeeper mode detected. Consider migrating to KRaft.",
                details="ZooKeeper is deprecated in 8.x. Plan KRaft migration.",
            ))
        elif has_zk and has_kraft and has_migration:
            results.append(PreflightResult(
                name="Metadata Mode",
                status="pass",
                message="Migration mode configured (ZK → KRaft)",
            ))
        elif has_kraft and not has_zk:
            results.append(PreflightResult(
                name="Metadata Mode",
                status="pass",
                message="KRaft mode active",
            ))

        return results

    def generate_recommendations(self) -> List[str]:
        """Generate operator recommendations for this upgrade."""
        recs = []

        # Always recommend a backup regardless of check outcomes.
        recs.append("Create full backup of configurations and ZooKeeper/KRaft data before upgrade")

        # Extra guidance specific to the 8.x line.
        if self.target_version.startswith("8."):
            recs.append("Verify Java 17+ is installed on all nodes")
            recs.append("Review Confluent 8.x release notes for breaking changes")
            recs.append("Plan for ZooKeeper to KRaft migration if not already using KRaft")
            recs.append("Remove 'log.message.format.version' and 'inter.broker.protocol.version' settings")

        # General upgrade hygiene.
        recs.append("Test upgrade in non-production environment first")
        recs.append("Schedule upgrade during low-traffic period")
        recs.append("Prepare rollback plan and verify backup restorability")
        recs.append("Monitor cluster metrics during and after upgrade")

        return recs

    def run_checks(self, skip_cluster: bool = False) -> PreflightReport:
        """Run all pre-flight checks and return the populated report.

        Prints progress to stdout as it goes. Overall status is "blocked"
        if any check failed, "warnings" if any warned, otherwise "ready".
        """
        print("🔍 Running upgrade pre-flight checks...")
        print(f" Current: {self.current_version} → Target: {self.target_version}")
        print()

        # Upgrade path
        print(" ├── Checking upgrade path...")
        path_result = self.check_upgrade_path()
        self._categorize_result(path_result)

        # Java version
        print(" ├── Checking Java version...")
        java_result = self.check_java_version()
        self._categorize_result(java_result)

        # Configuration
        print(" ├── Checking configuration...")
        config_results = self.check_config_compatibility()
        for result in config_results:
            self._categorize_result(result)

        # Cluster health (optional)
        if not skip_cluster:
            print(" ├── Checking cluster health...")
            health_results = self.check_cluster_health()
            for result in health_results:
                self._categorize_result(result)

        # Recommendations
        print(" └── Generating recommendations...")
        self.report.recommendations = self.generate_recommendations()

        # Overall status: blockers trump warnings trump ready.
        if self.report.blockers:
            self.report.overall_status = "blocked"
        elif self.report.warnings:
            self.report.overall_status = "warnings"
        else:
            self.report.overall_status = "ready"

        return self.report

    def _categorize_result(self, result: PreflightResult) -> None:
        """File a result under blockers, warnings, or passed ("skip" is dropped)."""
        if result.status == "fail":
            self.report.blockers.append(result)
        elif result.status == "warn":
            self.report.warnings.append(result)
        elif result.status == "pass":
            self.report.passed.append(result)
|
|
451
|
+
|
|
452
|
+
|
|
453
|
+
def print_report(report: PreflightReport, as_json: bool = False) -> None:
    """Render the pre-flight report.

    Emits a machine-readable JSON document when *as_json* is true; otherwise
    prints a human-readable summary of blockers, warnings, passed checks,
    and recommendations.
    """
    if as_json:
        payload = {
            "current_version": report.current_version,
            "target_version": report.target_version,
            "overall_status": report.overall_status,
            "blockers": [
                {"name": r.name, "message": r.message, "details": r.details}
                for r in report.blockers
            ],
            "warnings": [
                {"name": r.name, "message": r.message, "details": r.details}
                for r in report.warnings
            ],
            "passed": [{"name": r.name, "message": r.message} for r in report.passed],
            "recommendations": report.recommendations,
        }
        print(json.dumps(payload, indent=2))
        return

    separator = "=" * 60
    status_banner = {
        "ready": "🟢 READY",
        "warnings": "🟡 WARNINGS",
    }.get(report.overall_status, "🔴 BLOCKED")

    print("\n" + separator)
    print("📋 UPGRADE PRE-FLIGHT REPORT")
    print(separator)
    print(f" From: Confluent {report.current_version}")
    print(f" To: Confluent {report.target_version}")
    print(f" Status: {status_banner}")
    print()

    def emit_section(header, items, show_details):
        # Shared renderer for the blockers/warnings/passed sections.
        print(header)
        for item in items:
            print(f" • {item.name}: {item.message}")
            if show_details and item.details:
                print(f" → {item.details}")
        print()

    if report.blockers:
        emit_section("❌ BLOCKERS (must resolve before upgrade):", report.blockers, True)

    if report.warnings:
        emit_section("⚠️ WARNINGS (review before upgrade):", report.warnings, True)

    if report.passed:
        emit_section("✅ PASSED:", report.passed, False)

    print("📝 RECOMMENDATIONS:")
    for index, recommendation in enumerate(report.recommendations, 1):
        print(f" {index}. {recommendation}")
    print()

    print(separator)
    verdict = {
        "ready": "✅ Cluster is ready for upgrade. Proceed with caution.",
        "warnings": "⚠️ Cluster can be upgraded, but review warnings first.",
    }.get(report.overall_status, "🛑 DO NOT PROCEED. Resolve blockers before upgrade.")
    print(verdict)
    print(separator)
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def main():
    """CLI entry point: parse arguments, run the checks, print the report,
    and exit with a status code reflecting the overall result."""
    parser = argparse.ArgumentParser(
        description="Confluent Kafka Upgrade Pre-flight Check",
    )
    parser.add_argument(
        "--current-version", "-c",
        required=True,
        help="Current Confluent Platform version (e.g., 7.6)",
    )
    parser.add_argument(
        "--target-version", "-t",
        required=True,
        help="Target Confluent Platform version (e.g., 8.0)",
    )
    parser.add_argument(
        "--bootstrap-servers", "-b",
        help="Bootstrap servers for cluster checks",
    )
    parser.add_argument(
        "--config",
        help="Path to server.properties for config validation",
    )
    parser.add_argument(
        "--skip-cluster",
        action="store_true",
        help="Skip cluster health checks",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results as JSON",
    )
    args = parser.parse_args()

    checker = UpgradePreflight(
        current_version=args.current_version,
        target_version=args.target_version,
        bootstrap_servers=args.bootstrap_servers,
        config_path=args.config,
    )

    report = checker.run_checks(skip_cluster=args.skip_cluster)
    print_report(report, args.json)

    # NOTE(review): the module docstring maps exit code 3 to "blockers" and
    # 4 to "critical", while the implementation exits 4 when blocked and 3
    # on warnings — confirm the intended contract before changing either.
    exit_codes = {"blocked": 4, "warnings": 3}
    sys.exit(exit_codes.get(report.overall_status, 0))
|
|
565
|
+
|
|
566
|
+
|
|
567
|
+
if __name__ == "__main__":
|
|
568
|
+
main()
|