@techwavedev/agi-agent-kit 1.1.7 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of @techwavedev/agi-agent-kit might be problematic. Click here for more details.

Files changed (111)
  1. package/CHANGELOG.md +82 -1
  2. package/README.md +190 -12
  3. package/bin/init.js +30 -2
  4. package/package.json +6 -3
  5. package/templates/base/AGENTS.md +54 -23
  6. package/templates/base/README.md +325 -0
  7. package/templates/base/directives/memory_integration.md +95 -0
  8. package/templates/base/execution/memory_manager.py +309 -0
  9. package/templates/base/execution/session_boot.py +218 -0
  10. package/templates/base/execution/session_init.py +320 -0
  11. package/templates/base/skill-creator/SKILL_skillcreator.md +23 -36
  12. package/templates/base/skill-creator/scripts/init_skill.py +18 -135
  13. package/templates/skills/ec/README.md +31 -0
  14. package/templates/skills/ec/aws/SKILL.md +1020 -0
  15. package/templates/skills/ec/aws/defaults.yaml +13 -0
  16. package/templates/skills/ec/aws/references/common_patterns.md +80 -0
  17. package/templates/skills/ec/aws/references/mcp_servers.md +98 -0
  18. package/templates/skills/ec/aws-terraform/SKILL.md +349 -0
  19. package/templates/skills/ec/aws-terraform/references/best_practices.md +394 -0
  20. package/templates/skills/ec/aws-terraform/references/checkov_reference.md +337 -0
  21. package/templates/skills/ec/aws-terraform/scripts/configure_mcp.py +150 -0
  22. package/templates/skills/ec/confluent-kafka/SKILL.md +655 -0
  23. package/templates/skills/ec/confluent-kafka/references/ansible_playbooks.md +792 -0
  24. package/templates/skills/ec/confluent-kafka/references/ec_deployment.md +579 -0
  25. package/templates/skills/ec/confluent-kafka/references/kraft_migration.md +490 -0
  26. package/templates/skills/ec/confluent-kafka/references/troubleshooting.md +778 -0
  27. package/templates/skills/ec/confluent-kafka/references/upgrade_7x_to_8x.md +488 -0
  28. package/templates/skills/ec/confluent-kafka/scripts/kafka_health_check.py +435 -0
  29. package/templates/skills/ec/confluent-kafka/scripts/upgrade_preflight.py +568 -0
  30. package/templates/skills/ec/confluent-kafka/scripts/validate_config.py +455 -0
  31. package/templates/skills/ec/consul/SKILL.md +427 -0
  32. package/templates/skills/ec/consul/references/acl_setup.md +168 -0
  33. package/templates/skills/ec/consul/references/ha_config.md +196 -0
  34. package/templates/skills/ec/consul/references/troubleshooting.md +267 -0
  35. package/templates/skills/ec/consul/references/upgrades.md +213 -0
  36. package/templates/skills/ec/consul/scripts/consul_health_report.py +530 -0
  37. package/templates/skills/ec/consul/scripts/consul_status.py +264 -0
  38. package/templates/skills/ec/consul/scripts/generate_values.py +170 -0
  39. package/templates/skills/ec/documentation/SKILL.md +351 -0
  40. package/templates/skills/ec/documentation/references/best_practices.md +201 -0
  41. package/templates/skills/ec/documentation/scripts/analyze_code.py +307 -0
  42. package/templates/skills/ec/documentation/scripts/detect_changes.py +460 -0
  43. package/templates/skills/ec/documentation/scripts/generate_changelog.py +312 -0
  44. package/templates/skills/ec/documentation/scripts/sync_docs.py +272 -0
  45. package/templates/skills/ec/documentation/scripts/update_skill_docs.py +366 -0
  46. package/templates/skills/ec/gitlab/SKILL.md +529 -0
  47. package/templates/skills/ec/gitlab/references/agent_installation.md +416 -0
  48. package/templates/skills/ec/gitlab/references/api_reference.md +508 -0
  49. package/templates/skills/ec/gitlab/references/gitops_flux.md +465 -0
  50. package/templates/skills/ec/gitlab/references/troubleshooting.md +518 -0
  51. package/templates/skills/ec/gitlab/scripts/generate_agent_values.py +329 -0
  52. package/templates/skills/ec/gitlab/scripts/gitlab_agent_status.py +414 -0
  53. package/templates/skills/ec/jira/SKILL.md +484 -0
  54. package/templates/skills/ec/jira/references/jql_reference.md +148 -0
  55. package/templates/skills/ec/jira/scripts/add_comment.py +91 -0
  56. package/templates/skills/ec/jira/scripts/bulk_log_work.py +124 -0
  57. package/templates/skills/ec/jira/scripts/create_ticket.py +162 -0
  58. package/templates/skills/ec/jira/scripts/get_ticket.py +191 -0
  59. package/templates/skills/ec/jira/scripts/jira_client.py +383 -0
  60. package/templates/skills/ec/jira/scripts/log_work.py +154 -0
  61. package/templates/skills/ec/jira/scripts/search_tickets.py +104 -0
  62. package/templates/skills/ec/jira/scripts/update_comment.py +67 -0
  63. package/templates/skills/ec/jira/scripts/update_ticket.py +161 -0
  64. package/templates/skills/ec/karpenter/SKILL.md +301 -0
  65. package/templates/skills/ec/karpenter/references/ec2nodeclasses.md +421 -0
  66. package/templates/skills/ec/karpenter/references/migration.md +396 -0
  67. package/templates/skills/ec/karpenter/references/nodepools.md +400 -0
  68. package/templates/skills/ec/karpenter/references/troubleshooting.md +359 -0
  69. package/templates/skills/ec/karpenter/scripts/generate_ec2nodeclass.py +187 -0
  70. package/templates/skills/ec/karpenter/scripts/generate_nodepool.py +245 -0
  71. package/templates/skills/ec/karpenter/scripts/karpenter_status.py +359 -0
  72. package/templates/skills/ec/opensearch/SKILL.md +720 -0
  73. package/templates/skills/ec/opensearch/references/ml_neural_search.md +576 -0
  74. package/templates/skills/ec/opensearch/references/operator.md +532 -0
  75. package/templates/skills/ec/opensearch/references/query_dsl.md +532 -0
  76. package/templates/skills/ec/opensearch/scripts/configure_mcp.py +148 -0
  77. package/templates/skills/ec/victoriametrics/SKILL.md +598 -0
  78. package/templates/skills/ec/victoriametrics/references/kubernetes.md +531 -0
  79. package/templates/skills/ec/victoriametrics/references/prometheus_migration.md +333 -0
  80. package/templates/skills/ec/victoriametrics/references/troubleshooting.md +442 -0
  81. package/templates/skills/knowledge/SKILLS_CATALOG.md +274 -4
  82. package/templates/skills/knowledge/intelligent-routing/SKILL.md +237 -164
  83. package/templates/skills/knowledge/parallel-agents/SKILL.md +345 -73
  84. package/templates/skills/knowledge/plugin-discovery/SKILL.md +582 -0
  85. package/templates/skills/knowledge/plugin-discovery/scripts/platform_setup.py +1083 -0
  86. package/templates/skills/knowledge/design-md/README.md +0 -34
  87. package/templates/skills/knowledge/design-md/SKILL.md +0 -193
  88. package/templates/skills/knowledge/design-md/examples/DESIGN.md +0 -154
  89. package/templates/skills/knowledge/notebooklm-mcp/SKILL.md +0 -71
  90. package/templates/skills/knowledge/notebooklm-mcp/assets/example_asset.txt +0 -24
  91. package/templates/skills/knowledge/notebooklm-mcp/references/api_reference.md +0 -34
  92. package/templates/skills/knowledge/notebooklm-mcp/scripts/example.py +0 -19
  93. package/templates/skills/knowledge/react-components/README.md +0 -36
  94. package/templates/skills/knowledge/react-components/SKILL.md +0 -53
  95. package/templates/skills/knowledge/react-components/examples/gold-standard-card.tsx +0 -80
  96. package/templates/skills/knowledge/react-components/package-lock.json +0 -231
  97. package/templates/skills/knowledge/react-components/package.json +0 -16
  98. package/templates/skills/knowledge/react-components/resources/architecture-checklist.md +0 -15
  99. package/templates/skills/knowledge/react-components/resources/component-template.tsx +0 -37
  100. package/templates/skills/knowledge/react-components/resources/stitch-api-reference.md +0 -14
  101. package/templates/skills/knowledge/react-components/resources/style-guide.json +0 -27
  102. package/templates/skills/knowledge/react-components/scripts/fetch-stitch.sh +0 -30
  103. package/templates/skills/knowledge/react-components/scripts/validate.js +0 -68
  104. package/templates/skills/knowledge/self-update/SKILL.md +0 -60
  105. package/templates/skills/knowledge/self-update/scripts/update_kit.py +0 -103
  106. package/templates/skills/knowledge/stitch-loop/README.md +0 -54
  107. package/templates/skills/knowledge/stitch-loop/SKILL.md +0 -235
  108. package/templates/skills/knowledge/stitch-loop/examples/SITE.md +0 -73
  109. package/templates/skills/knowledge/stitch-loop/examples/next-prompt.md +0 -25
  110. package/templates/skills/knowledge/stitch-loop/resources/baton-schema.md +0 -61
  111. package/templates/skills/knowledge/stitch-loop/resources/site-template.md +0 -104
@@ -0,0 +1,435 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Confluent Kafka Health Check Script
4
+
5
+ Performs comprehensive health assessment of a Confluent Kafka cluster including:
6
+ - Broker connectivity and status
7
+ - Controller quorum health (KRaft)
8
+ - Partition health (under-replicated, offline)
9
+ - Consumer group lag
10
+ - Disk usage analysis
11
+
12
+ Usage:
13
+ python kafka_health_check.py --bootstrap-servers kafka-01:9092,kafka-02:9092 --output ./reports/
14
+ python kafka_health_check.py --bootstrap-servers localhost:9092 --quick
15
+ python kafka_health_check.py --bootstrap-servers kafka-01:9092 --format pdf
16
+
17
+ Arguments:
18
+ --bootstrap-servers, -b Comma-separated list of broker addresses (required)
19
+ --output, -o Output directory for reports (default: current dir)
20
+ --format, -f Output format: pdf, markdown, both, json (default: both)
21
+ --quick Quick check only, no detailed reports
22
+ --timeout, -t Connection timeout in seconds (default: 30)
23
+
24
+ Exit Codes:
25
+ 0 - Healthy cluster
26
+ 1 - Invalid arguments
27
+ 2 - Connection failed
28
+ 3 - Cluster unhealthy (warnings)
29
+ 4 - Cluster critical (offline partitions)
30
+ """
31
+
32
import argparse
import json
import os
import shutil
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Tuple
40
+
41
+
42
class KafkaHealthChecker:
    """Confluent Kafka cluster health checker.

    Shells out to the Kafka CLI tools (kafka-broker-api-versions,
    kafka-topics, kafka-consumer-groups) against the configured bootstrap
    servers and accumulates findings in ``self.results``.
    """

    def __init__(self, bootstrap_servers: str, timeout: int = 30):
        """Initialize the checker.

        Args:
            bootstrap_servers: Comma-separated broker addresses.
            timeout: Default per-command timeout in seconds.
        """
        self.bootstrap_servers = bootstrap_servers
        self.timeout = timeout
        self.kafka_bin = self._find_kafka_bin()
        # Mutable result document, filled in by the check_* methods.
        self.results: Dict = {
            "timestamp": datetime.now().isoformat(),
            "bootstrap_servers": bootstrap_servers,
            "status": "unknown",
            "brokers": [],
            "topics": {},
            "partitions": {},
            "consumer_groups": [],
            "alerts": [],
            "warnings": [],
        }

    def _find_kafka_bin(self) -> str:
        """Locate the Kafka CLI bin directory.

        Probes well-known install locations first, then falls back to a
        PATH lookup via shutil.which (portable; no extra subprocess, unlike
        shelling out to `which`), and finally to the Confluent default.
        """
        candidates = [
            "/opt/confluent/bin",
            "/opt/kafka/bin",
            os.path.expanduser("~/confluent/bin"),
        ]
        kafka_home = os.environ.get("KAFKA_HOME")
        if kafka_home:
            # Only probe $KAFKA_HOME/bin when the variable is actually set;
            # the previous "" + "/bin" fallback probed the bogus path /bin.
            candidates.append(os.path.join(kafka_home, "bin"))
        for path in candidates:
            if os.path.exists(os.path.join(path, "kafka-topics")):
                return path
        located = shutil.which("kafka-topics")
        if located:
            return os.path.dirname(located)
        return "/opt/confluent/bin"  # Default fallback

    def _run_kafka_cmd(self, cmd: List[str], timeout: Optional[int] = None) -> Tuple[int, str, str]:
        """Run a Kafka CLI command and return (exit_code, stdout, stderr).

        Never raises: timeouts and launch failures are reported as exit
        code -1 with the error text in the stderr slot.
        """
        timeout = timeout or self.timeout
        try:
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=timeout,
            )
            return result.returncode, result.stdout, result.stderr
        except subprocess.TimeoutExpired:
            return -1, "", f"Command timed out after {timeout}s"
        except Exception as e:
            return -1, "", str(e)

    @staticmethod
    def _nonempty_lines(text: str) -> List[str]:
        """Return the non-blank lines of *text* (whitespace-only excluded)."""
        return [line for line in text.strip().split("\n") if line.strip()]

    def check_broker_connectivity(self) -> bool:
        """Check connectivity; record reachable brokers. True on success."""
        code, stdout, stderr = self._run_kafka_cmd([
            f"{self.kafka_bin}/kafka-broker-api-versions",
            "--bootstrap-server", self.bootstrap_servers,
        ])
        if code != 0:
            self.results["alerts"].append(f"Cannot connect to brokers: {stderr}")
            return False
        # Broker header lines look like "host:port (id: N ...)"; the
        # per-API detail lines all contain "ApiVersion" and are skipped.
        for line in stdout.strip().split("\n"):
            if ":" in line and "ApiVersion" not in line:
                self.results["brokers"].append(line.strip())
        return True

    def check_under_replicated_partitions(self) -> int:
        """Count under-replicated partitions; -1 when the CLI call fails."""
        code, stdout, stderr = self._run_kafka_cmd([
            f"{self.kafka_bin}/kafka-topics",
            "--bootstrap-server", self.bootstrap_servers,
            "--describe",
            "--under-replicated-partitions",
        ])
        if code != 0:
            self.results["alerts"].append(f"Failed to check URP: {stderr}")
            return -1
        # Each output line describes one under-replicated partition.
        urp_count = len(self._nonempty_lines(stdout))
        self.results["partitions"]["under_replicated"] = urp_count
        if urp_count > 0:
            self.results["warnings"].append(f"{urp_count} under-replicated partitions")
        return urp_count

    def check_offline_partitions(self) -> int:
        """Count offline/unavailable partitions; -1 when the call fails."""
        code, stdout, stderr = self._run_kafka_cmd([
            f"{self.kafka_bin}/kafka-topics",
            "--bootstrap-server", self.bootstrap_servers,
            "--describe",
            "--unavailable-partitions",
        ])
        if code != 0:
            self.results["alerts"].append(f"Failed to check offline partitions: {stderr}")
            return -1
        offline_count = len(self._nonempty_lines(stdout))
        self.results["partitions"]["offline"] = offline_count
        if offline_count > 0:
            self.results["alerts"].append(f"CRITICAL: {offline_count} offline partitions")
        return offline_count

    def check_topic_count(self) -> int:
        """Record total topic count (and first 50 names); -1 on failure."""
        code, stdout, stderr = self._run_kafka_cmd([
            f"{self.kafka_bin}/kafka-topics",
            "--bootstrap-server", self.bootstrap_servers,
            "--list",
        ])
        if code != 0:
            return -1
        topics = self._nonempty_lines(stdout)
        self.results["topics"]["count"] = len(topics)
        self.results["topics"]["list"] = topics[:50]  # First 50 topics
        return len(topics)

    @staticmethod
    def _parse_consumer_groups(stdout: str) -> Tuple[List[Dict], int]:
        """Parse `kafka-consumer-groups --describe` output.

        Pure function (extracted for unit-testability without a cluster).
        Aggregates per-partition rows into one dict per group with keys
        "name", "lag" (summed), and "partitions" (row count).

        Returns:
            (groups, total_lag) — total_lag sums lag across all groups.
        """
        groups: List[Dict] = []
        current_group: Optional[Dict] = None
        total_lag = 0
        for line in stdout.strip().split("\n"):
            if not line.strip():
                # Blank line separates groups in the CLI output.
                if current_group:
                    groups.append(current_group)
                    current_group = None
                continue
            parts = line.split()
            # Skip headers and short/malformed rows. Data rows are
            # GROUP TOPIC PARTITION CURRENT-OFFSET LOG-END-OFFSET LAG ...
            if len(parts) < 7 or parts[0] in ("GROUP", "Consumer"):
                continue
            group_name = parts[0]
            try:
                lag = int(parts[5]) if parts[5] != "-" else 0
                total_lag += lag
            except (ValueError, IndexError):
                lag = 0
            if current_group is None or current_group["name"] != group_name:
                if current_group:
                    groups.append(current_group)
                current_group = {
                    "name": group_name,
                    "lag": lag,
                    "partitions": 1,
                }
            else:
                current_group["lag"] += lag
                current_group["partitions"] += 1
        if current_group:
            groups.append(current_group)
        return groups, total_lag

    def check_consumer_groups(self) -> List[Dict]:
        """Collect aggregated consumer-group lag; warn on high-lag groups."""
        code, stdout, stderr = self._run_kafka_cmd(
            [
                f"{self.kafka_bin}/kafka-consumer-groups",
                "--bootstrap-server", self.bootstrap_servers,
                "--all-groups",
                "--describe",
            ],
            timeout=60,  # describing all groups can be slow on big clusters
        )
        if code != 0:
            # Previously a silent failure; record it like the other checks.
            self.results["warnings"].append(f"Failed to describe consumer groups: {stderr}")
            return []

        groups, total_lag = self._parse_consumer_groups(stdout)
        self.results["consumer_groups"] = groups[:20]  # Top 20
        self.results["total_consumer_lag"] = total_lag

        # Alert on high lag
        high_lag_groups = [g for g in groups if g["lag"] > 100000]
        if high_lag_groups:
            self.results["warnings"].append(
                f"{len(high_lag_groups)} consumer groups have high lag (>100k)"
            )
        return groups

    def run_checks(self, quick: bool = False) -> Dict:
        """Run all health checks and return the results document.

        Status escalation: connectivity failure or offline partitions set
        "critical"; under-replicated partitions set "warning"; otherwise
        the cluster is "healthy". With quick=True, topic and consumer-group
        checks are skipped.
        """
        print("🔍 Running Kafka health checks...")

        print(" ├── Checking broker connectivity...")
        if not self.check_broker_connectivity():
            self.results["status"] = "critical"
            return self.results

        print(" ├── Checking offline partitions...")
        if self.check_offline_partitions() > 0:
            self.results["status"] = "critical"

        print(" ├── Checking under-replicated partitions...")
        urp = self.check_under_replicated_partitions()
        if urp > 0 and self.results["status"] != "critical":
            self.results["status"] = "warning"

        if not quick:
            print(" ├── Getting topic count...")
            self.check_topic_count()

            print(" ├── Checking consumer groups...")
            self.check_consumer_groups()

        # No alerts escalated the status: the cluster is healthy.
        if self.results["status"] == "unknown":
            self.results["status"] = "healthy"

        print(f" └── Status: {self.results['status'].upper()}")
        return self.results
264
+
265
+
266
def generate_markdown_report(results: Dict, output_path: Path) -> str:
    """Render *results* as a Markdown health report.

    Writes ``kafka_health_<timestamp>.md`` into *output_path* (which must
    already exist) and returns the report file path as a string.

    Args:
        results: Results document produced by KafkaHealthChecker.run_checks;
            must contain "timestamp", "bootstrap_servers" and "status".
        output_path: Directory the report file is written into.
    """
    status = results["status"]
    status_icon = "🟢" if status == "healthy" else "🟡" if status == "warning" else "🔴"
    report = []
    report.append("# Confluent Kafka Health Report")
    report.append("")
    report.append(f"**Generated:** {results['timestamp']}")
    report.append(f"**Bootstrap Servers:** `{results['bootstrap_servers']}`")
    report.append(f"**Status:** {status_icon} **{status.upper()}**")
    report.append("")

    # Alerts
    if results.get("alerts"):
        report.append("## 🚨 Alerts")
        report.append("")
        for alert in results["alerts"]:
            report.append(f"- ❌ {alert}")
        report.append("")

    # Warnings
    if results.get("warnings"):
        report.append("## ⚠️ Warnings")
        report.append("")
        for warning in results["warnings"]:
            report.append(f"- ⚠️ {warning}")
        report.append("")

    # Brokers
    report.append("## Broker Status")
    report.append("")
    report.append(f"**Active Brokers:** {len(results.get('brokers', []))}")
    report.append("")

    # Partitions
    report.append("## Partition Health")
    report.append("")
    partitions = results.get("partitions", {})
    report.append("| Metric | Count |")
    report.append("| ------ | ----- |")
    report.append(f"| Under-replicated | {partitions.get('under_replicated', 'N/A')} |")
    report.append(f"| Offline | {partitions.get('offline', 'N/A')} |")
    report.append("")

    # Topics
    if results.get("topics"):
        report.append("## Topics")
        report.append("")
        report.append(f"**Total Topics:** {results['topics'].get('count', 'N/A')}")
        report.append("")

    # Consumer Groups (top 10 by the order the checker collected them)
    if results.get("consumer_groups"):
        report.append("## Consumer Groups")
        report.append("")
        report.append(f"**Total Lag:** {results.get('total_consumer_lag', 0):,}")
        report.append("")
        report.append("| Group | Lag | Partitions |")
        report.append("| ----- | --- | ---------- |")
        for group in results["consumer_groups"][:10]:
            report.append(f"| {group['name']} | {group['lag']:,} | {group['partitions']} |")
        report.append("")

    # Write report. Explicit UTF-8: the report contains emoji and must not
    # depend on the platform default encoding (write_text without an
    # encoding raises UnicodeEncodeError on e.g. Windows cp1252).
    report_content = "\n".join(report)
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = output_path / f"kafka_health_{timestamp}.md"
    filename.write_text(report_content, encoding="utf-8")

    return str(filename)
334
+
335
+
336
def generate_pdf_report(results: Dict, output_path: Path) -> Optional[str]:
    """Generate a PDF health report via an external Markdown converter.

    Writes the Markdown report first, then tries each known converter in
    order until one succeeds.

    Args:
        results: Results document produced by KafkaHealthChecker.run_checks.
        output_path: Directory the report files are written into.

    Returns:
        The PDF file path, or None when no converter is available/succeeds.
    """
    # First generate markdown
    md_file = generate_markdown_report(results, output_path)

    # with_suffix swaps only the final extension; the previous
    # str.replace(".md", ".pdf") rewrote the FIRST ".md" anywhere in the
    # path (e.g. a directory named "reports.md") and could corrupt it.
    pdf_file = str(Path(md_file).with_suffix(".pdf"))

    converters = [
        # pandoc (most reliable)
        ["pandoc", md_file, "-o", pdf_file, "--pdf-engine=xelatex"],
        # md-to-pdf (npm package)
        ["md-to-pdf", md_file, "--dest", pdf_file],
    ]

    for cmd in converters:
        try:
            result = subprocess.run(cmd, capture_output=True, timeout=60)
        except (subprocess.SubprocessError, FileNotFoundError):
            # Converter not installed or failed to run — try the next one.
            continue
        if result.returncode == 0:
            print(f" 📄 PDF generated: {pdf_file}")
            return pdf_file

    print(" ⚠️ PDF generation failed (pandoc or md-to-pdf not available)")
    return None
362
+
363
+
364
def main():
    """CLI entry point: parse arguments, run checks, emit reports, exit.

    Exit codes: 0 healthy, 3 warnings, 4 critical (argparse handles
    invalid arguments itself).
    """
    parser = argparse.ArgumentParser(
        description="Confluent Kafka Health Check",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,
    )
    parser.add_argument("--bootstrap-servers", "-b", required=True,
                        help="Comma-separated list of broker addresses")
    parser.add_argument("--output", "-o", default=".",
                        help="Output directory for reports (default: current dir)")
    parser.add_argument("--format", "-f", default="both",
                        choices=["pdf", "markdown", "both", "json"],
                        help="Output format (default: both)")
    parser.add_argument("--quick", action="store_true",
                        help="Quick check only, no detailed reports")
    parser.add_argument("--timeout", "-t", type=int, default=30,
                        help="Connection timeout in seconds (default: 30)")
    args = parser.parse_args()

    # Make sure the report directory exists before any file is written.
    report_dir = Path(args.output)
    report_dir.mkdir(parents=True, exist_ok=True)

    # Run the health checks.
    checker = KafkaHealthChecker(args.bootstrap_servers, args.timeout)
    results = checker.run_checks(quick=args.quick)

    # Emit the requested report formats.
    if args.format in ("json", "both"):
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        json_file = report_dir / f"kafka_health_{stamp}.json"
        json_file.write_text(json.dumps(results, indent=2))
        print(f" 📄 JSON saved: {json_file}")

    if not args.quick:
        if args.format in ("markdown", "both"):
            md_file = generate_markdown_report(results, report_dir)
            print(f" 📄 Markdown saved: {md_file}")
        if args.format in ("pdf", "both"):
            generate_pdf_report(results, report_dir)

    # Map cluster status to the process exit code.
    status = results["status"]
    if status == "critical":
        print("\n❌ CRITICAL: Cluster has critical issues!")
        sys.exit(4)
    if status == "warning":
        print("\n⚠️ WARNING: Cluster has warnings")
        sys.exit(3)
    print("\n✅ Cluster is healthy")
    sys.exit(0)


if __name__ == "__main__":
    main()