kubeagent 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +72 -0
- package/README.md +154 -0
- package/dist/auth.d.ts +23 -0
- package/dist/auth.js +162 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +447 -0
- package/dist/config.d.ts +50 -0
- package/dist/config.js +79 -0
- package/dist/debug.d.ts +10 -0
- package/dist/debug.js +18 -0
- package/dist/diagnoser/index.d.ts +17 -0
- package/dist/diagnoser/index.js +251 -0
- package/dist/diagnoser/tools.d.ts +119 -0
- package/dist/diagnoser/tools.js +108 -0
- package/dist/kb/loader.d.ts +1 -0
- package/dist/kb/loader.js +41 -0
- package/dist/kb/writer.d.ts +11 -0
- package/dist/kb/writer.js +36 -0
- package/dist/kubectl-config.d.ts +7 -0
- package/dist/kubectl-config.js +47 -0
- package/dist/kubectl.d.ts +13 -0
- package/dist/kubectl.js +57 -0
- package/dist/monitor/checks.d.ts +71 -0
- package/dist/monitor/checks.js +167 -0
- package/dist/monitor/index.d.ts +7 -0
- package/dist/monitor/index.js +126 -0
- package/dist/monitor/types.d.ts +11 -0
- package/dist/monitor/types.js +1 -0
- package/dist/notify/index.d.ts +5 -0
- package/dist/notify/index.js +40 -0
- package/dist/notify/setup.d.ts +4 -0
- package/dist/notify/setup.js +88 -0
- package/dist/notify/slack.d.ts +4 -0
- package/dist/notify/slack.js +76 -0
- package/dist/notify/telegram.d.ts +8 -0
- package/dist/notify/telegram.js +63 -0
- package/dist/notify/webhook.d.ts +3 -0
- package/dist/notify/webhook.js +49 -0
- package/dist/onboard/cluster-scan.d.ts +42 -0
- package/dist/onboard/cluster-scan.js +103 -0
- package/dist/onboard/code-scan.d.ts +9 -0
- package/dist/onboard/code-scan.js +114 -0
- package/dist/onboard/index.d.ts +1 -0
- package/dist/onboard/index.js +328 -0
- package/dist/onboard/interview.d.ts +12 -0
- package/dist/onboard/interview.js +71 -0
- package/dist/onboard/project-matcher.d.ts +25 -0
- package/dist/onboard/project-matcher.js +149 -0
- package/dist/orchestrator.d.ts +3 -0
- package/dist/orchestrator.js +222 -0
- package/dist/proxy-client.d.ts +15 -0
- package/dist/proxy-client.js +72 -0
- package/dist/render.d.ts +5 -0
- package/dist/render.js +143 -0
- package/dist/verifier.d.ts +9 -0
- package/dist/verifier.js +17 -0
- package/package.json +39 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,447 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from "commander";
|
|
3
|
+
import chalk from "chalk";
|
|
4
|
+
import ora from "ora";
|
|
5
|
+
import { homedir } from "node:os";
|
|
6
|
+
// Expand a leading tilde to the user's home directory.
// Handles both "~/sub/path" and a bare "~" (the original only handled the
// former, so `kubeagent scan ~` resolved to a literal "~" path).
// Any other path is returned unchanged.
function expandPath(p) {
    if (p === "~")
        return homedir();
    return p.startsWith("~/") ? homedir() + p.slice(1) : p;
}
|
|
9
|
+
import { loadConfig, saveConfig, configDir } from "./config.js";
|
|
10
|
+
import { onboard } from "./onboard/index.js";
|
|
11
|
+
import { runChecks } from "./monitor/index.js";
|
|
12
|
+
import { startMonitor } from "./monitor/index.js";
|
|
13
|
+
import { handleIssues } from "./orchestrator.js";
|
|
14
|
+
import { diagnose } from "./diagnoser/index.js";
|
|
15
|
+
import { buildSystemPrompt } from "./kb/loader.js";
|
|
16
|
+
import { join } from "node:path";
|
|
17
|
+
import { loadAuth, loginBrowser, createApiKey, showAccount, clearAuth } from "./auth.js";
|
|
18
|
+
import { fetchSlackWebhook } from "./proxy-client.js";
|
|
19
|
+
import { createInterface } from "node:readline";
|
|
20
|
+
import { scanProjectDirectory, matchProjectsToWorkloads, bestMatches } from "./onboard/project-matcher.js";
|
|
21
|
+
import { scanCluster } from "./onboard/cluster-scan.js";
|
|
22
|
+
import { formatProjectMarkdown } from "./onboard/code-scan.js";
|
|
23
|
+
import { writeProjectKb, ensureKbDir } from "./kb/writer.js";
|
|
24
|
+
// Root commander program. Subcommands are registered below; a catch-all
// default action near the bottom of the file handles free-form prompts.
const program = new Command();
program
    .name("kubeagent")
    .description("AI-powered Kubernetes management CLI")
    .version("0.1.0");
|
|
29
|
+
// kubeagent status — fast, LLM-free health pass: run the monitor checks once
// and print each detected issue, color-coded by severity.
program
    .command("status")
    .description("Quick cluster health summary (no LLM)")
    .option("-c, --context <context>", "Kubernetes context")
    .action(async (opts) => {
        try {
            const issues = await runChecks({ context: opts.context });
            if (!issues.length) {
                console.log(chalk.green("All clear. No issues detected."));
                return;
            }
            for (const issue of issues) {
                const paint = issue.severity === "critical" ? chalk.red : chalk.yellow;
                console.log(paint(`[${issue.severity}] ${issue.message}`));
            }
        }
        catch (err) {
            console.error(chalk.red(`Error: ${err.message}`));
            process.exit(1);
        }
    });
|
|
51
|
+
// kubeagent onboard — guided first-run flow (cluster scan, codebase scan,
// interview, knowledge-base generation); all logic lives in ./onboard/index.js.
program
    .command("onboard")
    .description("Scan cluster + codebases, interview user, generate knowledge base")
    .action(async () => {
        await onboard();
    });
|
|
57
|
+
// kubeagent watch — continuous monitoring loop: pre-flight connectivity
// check, optional Slack webhook merge from the hosted dashboard, then
// startMonitor() drives handleIssues() on every interval tick.
program
    .command("watch")
    .description("Continuous monitoring with auto-remediation")
    .option("-c, --context <context>", "Kubernetes context (skips prompt)")
    .option("-i, --interval <seconds>", "Override check interval in seconds")
    .option("--no-interactive", "Auto-deny all approvals and skip questions (safe for background use)")
    .action(async (opts) => {
        const config = loadConfig();
        const { pickContext } = await import("./kubectl-config.js");
        // Resolve context: flag skips prompt, otherwise always ask
        const context = opts.context ?? await pickContext();
        // Validate the context is onboarded
        const clusterCfg = config.clusters.find((c) => c.context === context);
        if (!clusterCfg) {
            console.log("\n " + chalk.red("✖ ") + chalk.bold(`"${context}"`) + " has not been onboarded yet.");
            console.log(chalk.dim(" Run: ") + chalk.cyan("kubeagent onboard") + chalk.dim(" to set it up first."));
            process.exit(1);
        }
        // Interval: CLI flag > saved cluster interval > 300s default.
        // Guard against a non-numeric or non-positive --interval value, which
        // previously produced a NaN delay for the monitor timer.
        const flagInterval = opts.interval ? parseInt(opts.interval, 10) : NaN;
        const intervalSec = Number.isFinite(flagInterval) && flagInterval > 0
            ? flagInterval
            : (clusterCfg.interval ?? 300);
        const interval = intervalSec * 1000;
        // Pre-flight: verify cluster connectivity before entering the watch loop
        const preflight = ora("Connecting to cluster...").start();
        try {
            await import("./kubectl.js").then(({ kubectl }) => kubectl(["cluster-info", "--request-timeout=5s"], { context }));
            preflight.succeed(`Connected to ${context}`);
        }
        catch (err) {
            preflight.fail(`Cannot reach cluster: ${err.message}`);
            process.exit(1);
        }
        const intervalLabel = intervalSec >= 60
            ? `${Math.floor(intervalSec / 60)}m`
            : `${intervalSec}s`;
        const bannerWidth = 44;
        const bannerInner = ` KubeAgent ${chalk.cyan(context)} ${chalk.dim(intervalLabel)} `;
        const pad = Math.max(0, bannerWidth - context.length - intervalLabel.length - 18);
        console.log("\n" + chalk.dim(" ┌" + "─".repeat(bannerWidth) + "┐"));
        console.log(chalk.dim(" │") + bannerInner + " ".repeat(pad) + chalk.dim("│"));
        console.log(chalk.dim(" └" + "─".repeat(bannerWidth) + "┘"));
        console.log(chalk.dim(" Ctrl+C to stop\n"));
        // BUG FIX: commander exposes "--no-interactive" as opts.interactive === false,
        // not as opts.noInteractive — the old `opts.noInteractive === true` check was
        // always false, so non-interactive mode could never be enabled.
        const noInteractive = opts.interactive === false;
        if (noInteractive) {
            console.log(chalk.dim(" Non-interactive mode — approvals auto-denied, questions skipped\n"));
        }
        // Merge server-stored Slack webhook (from dashboard "Add to Slack") into local config channels
        const auth = loadAuth();
        if (auth?.apiKey) {
            const serverSlack = await fetchSlackWebhook(auth);
            if (serverSlack.connected && serverSlack.webhookUrl) {
                const alreadyConfigured = config.notifications.channels.some((ch) => ch.type === "slack" && ch.webhook_url === serverSlack.webhookUrl);
                if (!alreadyConfigured) {
                    config.notifications.channels.unshift({
                        type: "slack",
                        webhook_url: serverSlack.webhookUrl,
                        label: serverSlack.teamName ?? serverSlack.channelName ?? "KubeAgent",
                        severity: "warning",
                    });
                    console.log(chalk.dim(` Slack: ${serverSlack.teamName ?? ""}${serverSlack.channelName ? ` ${serverSlack.channelName}` : ""} (from dashboard)\n`));
                }
            }
        }
        const { stop } = startMonitor({ context }, interval, async (issues) => {
            await handleIssues(issues, config, context, noInteractive);
        });
        // Handle graceful shutdown
        process.on("SIGINT", () => {
            console.log("\n" + chalk.dim(" Stopping monitor..."));
            stop();
            process.exit(0);
        });
        // Keep process alive
        await new Promise(() => { });
    });
|
|
133
|
+
// kubeagent diagnose — wrap the named resource in one synthetic "pod_error"
// issue and run the diagnoser a single time, with all fixes disabled
// (autoFix off, every approval denied). Prints only the analysis text.
program
    .command("diagnose <resource>")
    .description("One-shot diagnosis of a specific pod/deployment/service")
    .option("-c, --context <context>", "Kubernetes context")
    .option("-n, --namespace <namespace>", "Kubernetes namespace", "default")
    .action(async (resource, opts) => {
        const kbDir = join(configDir(), "clusters", opts.context ?? "default");
        const config = loadConfig();
        // Synthetic issue describing the user's request.
        const syntheticIssue = {
            kind: "pod_error",
            severity: "warning",
            namespace: opts.namespace,
            resource,
            message: `User requested diagnosis of ${resource} in ${opts.namespace}`,
            details: {},
            timestamp: new Date(),
        };
        const result = await diagnose([syntheticIssue], kbDir, opts.context, {
            autoFix: false,
            onApproval: async () => false,
        });
        console.log(result.analysis);
    });
|
|
159
|
+
// kubeagent scan — match local project directories against cluster workloads.
// Flow: load saved mappings → scan disk → scan cluster → score matches →
// print saved/new/unmatched groups → optionally persist high-confidence
// matches to config + knowledge base (--write-kb).
program
    .command("scan <directory>")
    .description("Scan a directory and suggest which subdirectory maps to which cluster deployment")
    .option("-c, --context <context>", "Kubernetes context")
    .option("--write-kb", "Save high-confidence matches to knowledge base and config")
    .action(async (directory, opts) => {
        // Matches scoring >= this (out of 100) are auto-saved under --write-kb.
        const HIGH_CONFIDENCE = 60;
        // Absolute and ~-prefixed paths pass through expandPath; everything
        // else is resolved relative to the current working directory.
        const resolvedDir = expandPath(directory.startsWith("/") || directory.startsWith("~") ? directory : join(process.cwd(), directory));
        const contextName = opts.context ?? "default";
        // Load existing saved mappings for this context
        const config = loadConfig();
        const clusterCfg = config.clusters.find((c) => c.context === contextName);
        // Keyed by project dir so saved entries can be skipped below.
        const existingMappings = new Map((clusterCfg?.projects ?? []).map((p) => [p.dir, p]));
        if (existingMappings.size > 0) {
            console.log(chalk.dim(`\nLoaded ${existingMappings.size} saved mapping(s) for context "${contextName}"`));
        }
        // Scan local projects
        const spinner = ora("Scanning project directories...").start();
        const projects = scanProjectDirectory(resolvedDir);
        spinner.succeed(`Found ${projects.length} project(s) in ${resolvedDir}`);
        if (projects.length === 0) {
            console.log(chalk.yellow("No projects found. Make sure subdirectories contain package.json, go.mod, Dockerfile, etc."));
            return;
        }
        // Scan cluster
        const kubectlOpts = opts.context ? { context: opts.context } : {};
        const clusterSpinner = ora("Scanning cluster deployments...").start();
        let clusterInfo;
        try {
            clusterInfo = await scanCluster(kubectlOpts);
            clusterSpinner.succeed(`Found ${clusterInfo.deployments.length} deployments, ${clusterInfo.statefulsets.length} statefulsets`);
        }
        catch (err) {
            clusterSpinner.fail(`Cluster scan failed: ${err.message}`);
            return;
        }
        // Match
        const allSuggestions = matchProjectsToWorkloads(projects, clusterInfo.deployments, clusterInfo.statefulsets);
        const topMatches = bestMatches(allSuggestions, 30);
        // Group by project for display
        const grouped = new Map();
        for (const m of topMatches) {
            if (!grouped.has(m.project.dir))
                grouped.set(m.project.dir, []);
            grouped.get(m.project.dir).push(m);
        }
        // Show saved mappings first
        const alreadySaved = projects.filter((p) => existingMappings.has(p.dir));
        if (alreadySaved.length > 0) {
            console.log(chalk.bold("\nAlready mapped (saved in config):\n"));
            for (const p of alreadySaved) {
                const saved = existingMappings.get(p.dir);
                console.log(` ${chalk.green("✓")} ${p.name}/ → ${saved.kind}/${saved.namespace}/${saved.deployment}`);
            }
        }
        // New suggestions
        const newMatches = [...grouped.entries()].filter(([dir]) => !existingMappings.has(dir));
        if (newMatches.length === 0 && alreadySaved.length > 0) {
            console.log(chalk.dim("\nAll projects are already mapped."));
        }
        else if (newMatches.length > 0) {
            console.log(chalk.bold("\nNew suggestions:\n"));
            const toSave = [];
            const skipped = [];
            for (const [, matches] of newMatches) {
                // matches come pre-sorted by bestMatches(); index 0 is the top score.
                const best = matches[0];
                const isHigh = best.score >= HIGH_CONFIDENCE;
                const confidence = isHigh ? chalk.green("high") : best.score >= 40 ? chalk.yellow("medium") : chalk.red("low");
                console.log(chalk.bold(` ${best.project.name}/`));
                console.log(` → ${best.workload.kind}/${best.workload.namespace}/${best.workload.name}`);
                console.log(` Confidence: ${confidence} (${best.score}/100) ${best.reasons[0]}`);
                if (matches.length > 1) {
                    console.log(chalk.dim(` Alternatives: ${matches.slice(1, 3).map((m) => m.workload.name).join(", ")}`));
                }
                if (opts.writeKb) {
                    if (isHigh) {
                        toSave.push(best);
                        console.log(chalk.dim(" → auto-saved (high confidence)"));
                    }
                    else {
                        skipped.push(best);
                        console.log(chalk.dim(` → skipped (below ${HIGH_CONFIDENCE} threshold)`));
                    }
                }
                console.log();
            }
            // Show unmatched
            const matchedDirs = new Set(topMatches.map((m) => m.project.dir));
            const unmatched = projects.filter((p) => !matchedDirs.has(p.dir) && !existingMappings.has(p.dir));
            if (unmatched.length > 0) {
                console.log(chalk.dim("No match found for:"));
                for (const p of unmatched) {
                    console.log(chalk.dim(` ${p.name}/ (${p.stack.language}/${p.stack.framework})`));
                }
                console.log();
            }
            // Save high-confidence matches
            if (opts.writeKb && toSave.length > 0) {
                const kbDir = join(configDir(), "clusters", contextName);
                ensureKbDir(kbDir);
                // Update config
                const newMappings = toSave.map((m) => ({
                    name: m.project.name,
                    dir: m.project.dir,
                    deployment: m.workload.name,
                    namespace: m.workload.namespace,
                    kind: m.workload.kind,
                }));
                const existingClusterIdx = config.clusters.findIndex((c) => c.context === contextName);
                if (existingClusterIdx >= 0) {
                    const existing = config.clusters[existingClusterIdx].projects ?? [];
                    config.clusters[existingClusterIdx].projects = [...existing, ...newMappings];
                }
                else {
                    // Context was never onboarded: create a minimal cluster entry.
                    config.clusters.push({ context: contextName, interval: 60, codepaths: [], projects: newMappings });
                }
                saveConfig(config);
                // Write KB
                for (const m of toSave) {
                    const md = formatProjectMarkdown(m.project.name, m.project.dir, m.project.stack, `**Deployment:** ${m.workload.name} (${m.workload.namespace})\n**Image:** \`${m.workload.image}\``);
                    writeProjectKb(kbDir, m.project.name, md);
                    console.log(chalk.green(`Saved: ${m.project.name} → ${m.workload.name}`));
                }
                if (skipped.length > 0) {
                    console.log(chalk.yellow(`\nSkipped ${skipped.length} low-confidence match(es). Review manually if needed.`));
                }
            }
            else if (!opts.writeKb) {
                console.log(chalk.dim(`Run with --write-kb to save high-confidence matches (score ≥ ${HIGH_CONFIDENCE}).`));
            }
        }
    });
|
|
291
|
+
// Notify commands
// Parent for `kubeagent notify <list|add|remove|test>` — alert destinations.
const notifyCmd = program.command("notify").description("Manage notification channels");
|
|
293
|
+
// kubeagent notify list — print each configured channel, 1-indexed so the
// numbers can be fed straight into `notify remove <index>`.
notifyCmd
    .command("list")
    .description("List configured notification channels")
    .action(async () => {
        const config = loadConfig();
        // BUG FIX: this file is an ES module, so CommonJS `require` is not
        // defined at runtime and this command always crashed. Use a dynamic
        // import instead (matching the other commands in this file).
        const { describeChannel } = await import("./notify/index.js");
        const channels = config.notifications.channels;
        if (channels.length === 0) {
            console.log(chalk.dim("No notification channels configured."));
            console.log(chalk.dim("Run: kubeagent notify add"));
            return;
        }
        console.log(chalk.bold(`\n${channels.length} channel(s):\n`));
        channels.forEach((ch, i) => {
            console.log(` ${chalk.cyan(String(i + 1))}. ${describeChannel(ch)}`);
        });
        console.log();
    });
|
|
311
|
+
// kubeagent notify add — run the interactive channel wizard; persist the
// result only if the user completed it (wizard returns a falsy value on abort).
notifyCmd
    .command("add")
    .description("Add a Slack or Telegram notification channel")
    .action(async () => {
        const { interactiveAddChannel } = await import("./notify/setup.js");
        const channel = await interactiveAddChannel();
        if (!channel) {
            return;
        }
        const config = loadConfig();
        config.notifications.channels.push(channel);
        saveConfig(config);
        console.log(chalk.green("\n ✔ Channel saved."));
    });
|
|
324
|
+
// kubeagent notify remove — delete channel N, where N is the 1-based number
// printed by `kubeagent notify list`.
notifyCmd
    .command("remove <index>")
    .description("Remove a channel by number (from: kubeagent notify list)")
    .action((indexStr) => {
        const config = loadConfig();
        const idx = parseInt(indexStr, 10) - 1;
        const total = config.notifications.channels.length;
        if (isNaN(idx) || idx < 0 || idx >= total) {
            console.error(chalk.red(`Invalid index. Run: kubeagent notify list`));
            process.exit(1);
        }
        const [removed] = config.notifications.channels.splice(idx, 1);
        saveConfig(config);
        // Capitalize the channel type ("slack" → "Slack") for the message.
        const type = removed.type.charAt(0).toUpperCase() + removed.type.slice(1);
        console.log(chalk.green(` ✔ Removed ${type} channel.`));
    });
|
|
339
|
+
// kubeagent notify test — push two synthetic issues (one critical, one
// warning) through the real notification pipeline so users can verify their
// channels end to end, including per-channel severity filtering.
notifyCmd
    .command("test")
    .description("Send a test alert to all configured channels")
    .option("-c, --context <context>", "Cluster context label to include in test message", "test-cluster")
    .action(async (opts) => {
        const config = loadConfig();
        if (config.notifications.channels.length === 0) {
            console.log(chalk.yellow("No channels configured. Run: kubeagent notify add"));
            return;
        }
        const { sendNotification } = await import("./notify/index.js");
        // Both severity tiers are represented so channels configured with a
        // "critical"-only threshold still receive at least one message.
        const testIssues = [
            { kind: "pod_crashloop", severity: "critical", namespace: "prod", resource: "my-app-abc123", message: "Test alert: Container my-app in my-app-abc123 is CrashLoopBackOff (5 restarts)", details: {}, timestamp: new Date() },
            { kind: "pod_pending", severity: "warning", namespace: "staging", resource: "worker-xyz", message: "Test alert: Pod worker-xyz is Pending", details: {}, timestamp: new Date() },
        ];
        console.log(chalk.dim(`\nSending test to ${config.notifications.channels.length} channel(s)...`));
        await sendNotification(testIssues, config, opts.context);
        console.log(chalk.green(" ✔ Done. Check your channels."));
    });
|
|
358
|
+
// Auth commands
/**
 * Ask one question on stdin and resolve with the trimmed answer.
 * The readline interface is closed as soon as the answer arrives.
 */
function prompt(question) {
    const rl = createInterface({ input: process.stdin, output: process.stdout });
    return new Promise((resolve) => rl.question(question, (answer) => {
        rl.close();
        resolve(answer.trim());
    }));
}
|
|
368
|
+
// Hosted KubeAgent endpoints; both are overridable per-invocation via the
// `login` command's --server / --app-url flags.
const DEFAULT_SERVER = "https://api.kubeagent.net";
const DEFAULT_APP_URL = "https://app.kubeagent.net";
|
|
370
|
+
// kubeagent login — browser-based login against the dashboard, then mint a
// default CLI API key. Credentials land in ~/.kubeagent/auth.json.
program
    .command("login")
    .description("Log in to KubeAgent (opens browser)")
    .option("-s, --server <url>", "API server URL", DEFAULT_SERVER)
    .option("--app-url <url>", "Dashboard URL (for browser login)", DEFAULT_APP_URL)
    .action(async (opts) => {
        try {
            const auth = await loginBrowser(opts.server, opts.appUrl);
            const who = auth.email ?? auth.name ?? "unknown";
            console.log(chalk.green(`Logged in as ${who}`));
            const { prefix } = await createApiKey(auth, "cli-default");
            console.log(chalk.dim(`API key created: ${prefix}...`));
            console.log(chalk.dim("Credentials saved to ~/.kubeagent/auth.json"));
        }
        catch (err) {
            console.error(chalk.red(err.message));
            process.exit(1);
        }
    });
|
|
388
|
+
// kubeagent account — show the remaining token balance for the saved
// credentials. Missing credentials is a soft hint, not an error exit.
program
    .command("account")
    .description("Show account info and token balance")
    .action(async () => {
        const auth = loadAuth();
        if (!auth) {
            console.log(chalk.yellow("Not logged in. Run: kubeagent login"));
            return;
        }
        try {
            const { balance: b } = await showAccount(auth);
            console.log(chalk.bold("Token Balance"));
            console.log(` Monthly remaining: ${b.monthlyRemaining.toLocaleString()}`);
            console.log(` Extra credits: ${b.extraRemaining.toLocaleString()}`);
            console.log(` Total: ${b.totalRemaining.toLocaleString()}`);
        }
        catch (err) {
            console.error(chalk.red(err.message));
        }
    });
|
|
408
|
+
// kubeagent logout — delete the saved credentials file.
program
    .command("logout")
    .description("Clear saved credentials")
    .action(() => {
        clearAuth();
        console.log(chalk.dim("Credentials removed."));
    });
|
|
415
|
+
// Default: interactive mode
// With no subcommand, treat the remaining args as a one-shot question:
// build the system prompt from the context's knowledge base and send a
// single model request through the authenticated proxy.
program
    .argument("[prompt...]", "Ask a question about your cluster")
    .option("-c, --context <context>", "Kubernetes context")
    .action(async (prompt, opts) => {
        if (prompt.length === 0) {
            program.help();
            return;
        }
        const context = opts.context;
        const kbDir = join(configDir(), "clusters", context ?? "default");
        const systemPrompt = buildSystemPrompt(kbDir);
        const auth = loadAuth();
        if (!auth?.apiKey) {
            console.error(chalk.red("Not logged in. Run 'kubeagent login' to get started."));
            process.exit(1);
        }
        const { proxyRequest } = await import("./proxy-client.js");
        const result = await proxyRequest(auth, {
            model: "claude-sonnet-4-6",
            max_tokens: 16000,
            system: systemPrompt,
            thinking: { type: "adaptive" },
            messages: [{ role: "user", content: prompt.join(" ") }],
        });
        // Print only text blocks (removed the redundant `response` alias of
        // `result`; it served no purpose).
        for (const block of result.content) {
            if (block.type === "text") {
                console.log(block.text);
            }
        }
    });
program.parse();
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/** Links a local project directory to the cluster workload it deploys as. */
export interface ProjectMapping {
    /** Project display name (the matcher uses the directory basename). */
    name: string;
    /** Path to the project directory on disk. */
    dir: string;
    /** Name of the matched workload. */
    deployment: string;
    /** Namespace the workload lives in. */
    namespace: string;
    /** Workload kind the mapping points at. */
    kind: "Deployment" | "StatefulSet";
}
/** Per-context settings for one onboarded cluster. */
export interface ClusterConfig {
    /** kubeconfig context name. */
    context: string;
    /** Watch-loop check interval, in seconds. */
    interval: number;
    /** Directories scanned for project code during onboarding. */
    codepaths: string[];
    /** Saved project → workload mappings, if any have been written. */
    projects?: ProjectMapping[];
}
/** Severity threshold attached to a notification channel. */
export type NotificationSeverity = "info" | "warning" | "critical";
/** Slack incoming-webhook notification target. */
export interface SlackChannel {
    type: "slack";
    webhook_url: string;
    severity: NotificationSeverity;
    label?: string;
}
/** Telegram bot notification target. */
export interface TelegramChannel {
    type: "telegram";
    bot_token: string;
    chat_id: string;
    severity: NotificationSeverity;
    label?: string;
}
/** Discriminated union over the supported channel kinds (tag: `type`). */
export type NotificationChannel = SlackChannel | TelegramChannel;
/** Every remediation action the agent knows how to run. */
export declare const ALL_ACTIONS: readonly ["restart_pod", "rollout_restart", "scale_deployment", "set_resources"];
export type RemediationAction = (typeof ALL_ACTIONS)[number];
/** Top-level shape of ~/.kubeagent/config.yaml. */
export interface KubeAgentConfig {
    clusters: ClusterConfig[];
    notifications: {
        /** Whether alerts are also printed to the terminal. */
        terminal: boolean;
        channels: NotificationChannel[];
    };
    remediation: {
        /** Run safe actions without asking for confirmation. */
        auto_fix: boolean;
        max_retries: number;
        /** Cooldown between remediation attempts — presumably seconds; confirm in orchestrator.js. */
        cooldown: number;
        /** Actions allowed to run without explicit approval. */
        safe_actions: RemediationAction[];
    };
}
/** Default safe-action subset (see config.js). */
export declare const DEFAULT_SAFE_ACTIONS: RemediationAction[];
/** Fresh config with defaults: no clusters, terminal alerts on, auto-fix on. */
export declare function defaultConfig(): KubeAgentConfig;
/** Normalize a raw parsed-YAML object into a complete config, migrating legacy fields. */
export declare function parseConfig(raw: Record<string, unknown>): KubeAgentConfig;
/** Directory holding all KubeAgent state (~/.kubeagent). */
export declare function configDir(): string;
/** Full path of the YAML config file inside configDir(). */
export declare function configPath(): string;
/** Read + parse the config file, falling back to defaults when it is absent. */
export declare function loadConfig(): KubeAgentConfig;
/** Serialize the config as YAML to configPath(). */
export declare function saveConfig(config: KubeAgentConfig): void;
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import { readFileSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { homedir } from "node:os";
|
|
4
|
+
import yaml from "js-yaml";
|
|
5
|
+
// Actions that are auto-approved without user confirmation.
// Valid values: restart_pod, rollout_restart, scale_deployment, set_resources
export const ALL_ACTIONS = [
    "restart_pod",
    "rollout_restart",
    "scale_deployment",
    "set_resources",
];
export const DEFAULT_SAFE_ACTIONS = ["restart_pod", "rollout_restart"];
/**
 * Build a fresh config object with sensible defaults: no clusters, terminal
 * notifications on, auto-fix enabled with the default safe actions.
 *
 * BUG FIX: `safe_actions` is now a copy of DEFAULT_SAFE_ACTIONS. Previously
 * the shared module-level array was returned by reference, so mutating one
 * config's safe_actions list corrupted the constant and leaked into every
 * subsequent defaultConfig() result.
 */
export function defaultConfig() {
    return {
        clusters: [],
        notifications: { terminal: true, channels: [] },
        remediation: {
            auto_fix: true,
            max_retries: 2,
            cooldown: 30,
            safe_actions: [...DEFAULT_SAFE_ACTIONS],
        },
    };
}
|
|
26
|
+
/**
 * Normalize a raw parsed-YAML object into a complete config, filling any
 * missing notification/remediation fields from defaultConfig(). Cluster
 * entries pass through unvalidated.
 */
export function parseConfig(raw) {
    const defaults = defaultConfig();
    const notifications = (raw.notifications ?? {});
    const remediation = (raw.remediation ?? {});
    const clusters = (raw.clusters ?? []);
    // Migrate legacy webhook field → channels array
    const channels = [];
    if (Array.isArray(notifications.channels)) {
        channels.push(...notifications.channels);
    }
    else if (notifications.webhook) {
        // One-time migration from old single-webhook format
        // NOTE(review): legacy "mattermost" webhooks map to type "slack" —
        // presumably the payload formats were compatible; confirm in notify/slack.js.
        const w = notifications.webhook;
        if (w.url && (w.type === "slack" || w.type === "mattermost")) {
            channels.push({ type: "slack", webhook_url: w.url, severity: w.severity ?? "warning", label: "migrated" });
        }
    }
    return {
        clusters,
        notifications: {
            terminal: notifications.terminal ?? defaults.notifications.terminal,
            channels,
        },
        remediation: {
            auto_fix: remediation.auto_fix ?? defaults.remediation.auto_fix,
            max_retries: remediation.max_retries ?? defaults.remediation.max_retries,
            cooldown: remediation.cooldown ?? defaults.remediation.cooldown,
            // Drop unknown action names so a stale or hand-edited config
            // cannot enable actions this version does not implement.
            safe_actions: (remediation.safe_actions
                ?.filter((a) => ALL_ACTIONS.includes(a))
                ?? defaults.remediation.safe_actions),
        },
    };
}
|
|
59
|
+
/** Directory holding all KubeAgent state: ~/.kubeagent */
export function configDir() {
    const home = homedir();
    return join(home, ".kubeagent");
}
/** Full path of the YAML config file inside configDir(). */
export function configPath() {
    return join(configDir(), "config.yaml");
}
|
|
65
|
+
/**
 * Read and parse ~/.kubeagent/config.yaml.
 * Returns built-in defaults when the file does not exist; an empty file
 * (yaml.load → null/undefined) is treated as an empty object.
 */
export function loadConfig() {
    const path = configPath();
    if (!existsSync(path)) {
        return defaultConfig();
    }
    const contents = readFileSync(path, "utf-8");
    return parseConfig(yaml.load(contents) ?? {});
}
|
|
73
|
+
/**
 * Serialize the config as YAML into configDir(), creating the directory
 * first if it is missing.
 */
export function saveConfig(config) {
    const dir = configDir();
    if (!existsSync(dir)) {
        mkdirSync(dir, { recursive: true });
    }
    const serialized = yaml.dump(config, { lineWidth: 120 });
    writeFileSync(configPath(), serialized);
}
|
package/dist/debug.d.ts
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/**
 * Debug logging — enabled with KUBEAGENT_DEBUG=1 (or any truthy value).
 *
 * Usage:
 * KUBEAGENT_DEBUG=1 kubeagent watch
 *
 * Logs to stderr so it doesn't interfere with piped output.
 */
/** True when KUBEAGENT_DEBUG or DEBUG is set in the environment. */
export declare const DEBUG: boolean;
/** Write a timestamped `[tag] message` line (plus optional JSON-serialized data) to stderr; no-op unless DEBUG. */
export declare function dbg(tag: string, message: string, data?: unknown): void;
|
package/dist/debug.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Debug logging — enabled with KUBEAGENT_DEBUG=1 (or any truthy value).
|
|
3
|
+
*
|
|
4
|
+
* Usage:
|
|
5
|
+
* KUBEAGENT_DEBUG=1 kubeagent watch
|
|
6
|
+
*
|
|
7
|
+
* Logs to stderr so it doesn't interfere with piped output.
|
|
8
|
+
*/
|
|
9
|
+
// Debug flag: any non-empty KUBEAGENT_DEBUG (or generic DEBUG) enables output.
export const DEBUG = Boolean(process.env.KUBEAGENT_DEBUG || process.env.DEBUG);
// Current wall-clock time formatted as HH:MM:SS.mmm (UTC slice of ISO-8601).
function ts() {
    return new Date().toISOString().slice(11, 23);
}
/**
 * Write a dimmed, timestamped `[tag] message` line to stderr.
 * Does nothing unless DEBUG is enabled. Optional `data` is appended as
 * pretty-printed JSON.
 */
export function dbg(tag, message, data) {
    if (!DEBUG)
        return;
    const suffix = data === undefined ? "" : ` ${JSON.stringify(data, null, 2)}`;
    process.stderr.write(`\x1b[2m[${ts()}] [${tag}] ${message}${suffix}\x1b[0m\n`);
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { Issue } from "../monitor/types.js";
/** Outcome of one diagnoser run. */
export interface DiagnosisResult {
    /** Human-readable analysis text (printed verbatim by the CLI). */
    analysis: string;
    /** Proposed remediation, if the diagnoser produced one. */
    action?: {
        name: string;
        /** Presumably true when the action may run without approval — confirm in diagnoser/index.js. */
        safe: boolean;
        params: Record<string, unknown>;
    };
    /** Optional description of how to verify the fix afterwards. */
    verificationContract?: string;
}
/**
 * Diagnose a batch of issues against the knowledge base in `kbDir`.
 * `onApproval` gates any proposed action; `onQuestion` lets the diagnoser
 * ask the user follow-up questions (skipped under `noInteractive`).
 */
export declare function diagnose(issues: Issue[], kbDir: string, clusterContext?: string, options?: {
    autoFix?: boolean;
    safeActions?: string[];
    noInteractive?: boolean;
    onApproval?: (action: string, params: Record<string, unknown>) => Promise<boolean>;
    onQuestion?: (question: string, choices: string[] | undefined) => Promise<void>;
}): Promise<DiagnosisResult>;
|