@meshxdata/fops 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of @meshxdata/fops might be problematic. Click here for more details.

Files changed (57):
  1. package/README.md +98 -0
  2. package/STRUCTURE.md +43 -0
  3. package/foundation.mjs +16 -0
  4. package/package.json +52 -0
  5. package/src/agent/agent.js +367 -0
  6. package/src/agent/agent.test.js +233 -0
  7. package/src/agent/context.js +143 -0
  8. package/src/agent/context.test.js +81 -0
  9. package/src/agent/index.js +2 -0
  10. package/src/agent/llm.js +127 -0
  11. package/src/agent/llm.test.js +139 -0
  12. package/src/auth/index.js +4 -0
  13. package/src/auth/keychain.js +58 -0
  14. package/src/auth/keychain.test.js +185 -0
  15. package/src/auth/login.js +421 -0
  16. package/src/auth/login.test.js +192 -0
  17. package/src/auth/oauth.js +203 -0
  18. package/src/auth/oauth.test.js +118 -0
  19. package/src/auth/resolve.js +78 -0
  20. package/src/auth/resolve.test.js +153 -0
  21. package/src/commands/index.js +268 -0
  22. package/src/config.js +24 -0
  23. package/src/config.test.js +70 -0
  24. package/src/doctor.js +487 -0
  25. package/src/doctor.test.js +134 -0
  26. package/src/plugins/api.js +37 -0
  27. package/src/plugins/api.test.js +95 -0
  28. package/src/plugins/discovery.js +78 -0
  29. package/src/plugins/discovery.test.js +92 -0
  30. package/src/plugins/hooks.js +13 -0
  31. package/src/plugins/hooks.test.js +118 -0
  32. package/src/plugins/index.js +3 -0
  33. package/src/plugins/loader.js +110 -0
  34. package/src/plugins/manifest.js +26 -0
  35. package/src/plugins/manifest.test.js +106 -0
  36. package/src/plugins/registry.js +14 -0
  37. package/src/plugins/registry.test.js +43 -0
  38. package/src/plugins/skills.js +126 -0
  39. package/src/plugins/skills.test.js +173 -0
  40. package/src/project.js +61 -0
  41. package/src/project.test.js +196 -0
  42. package/src/setup/aws.js +369 -0
  43. package/src/setup/aws.test.js +280 -0
  44. package/src/setup/index.js +3 -0
  45. package/src/setup/setup.js +161 -0
  46. package/src/setup/wizard.js +119 -0
  47. package/src/shell.js +9 -0
  48. package/src/shell.test.js +72 -0
  49. package/src/skills/foundation/SKILL.md +107 -0
  50. package/src/ui/banner.js +56 -0
  51. package/src/ui/banner.test.js +97 -0
  52. package/src/ui/confirm.js +97 -0
  53. package/src/ui/index.js +5 -0
  54. package/src/ui/input.js +199 -0
  55. package/src/ui/spinner.js +170 -0
  56. package/src/ui/spinner.test.js +29 -0
  57. package/src/ui/streaming.js +106 -0
package/src/doctor.js ADDED
@@ -0,0 +1,487 @@
1
+ import fs from "node:fs";
2
+ import https from "node:https";
3
+ import os from "node:os";
4
+ import net from "node:net";
5
+ import path from "node:path";
6
+ import chalk from "chalk";
7
+ import { execa } from "execa";
8
+ import { rootDir } from "./project.js";
9
+ import { detectEcrRegistry, detectAwsSsoProfiles, fixAwsSso, fixEcr } from "./setup/aws.js";
10
+ import { confirm } from "./ui/index.js";
11
+
12
+ const KEY_PORTS = {
13
+ 5432: "Postgres",
14
+ 9092: "Kafka",
15
+ 9001: "Backend",
16
+ 3002: "Frontend",
17
+ 8081: "Trino",
18
+ 9083: "Hive Metastore",
19
+ 8181: "OPA",
20
+ 18201: "Vault",
21
+ };
22
+
23
+ function header(title) {
24
+ console.log(chalk.bold.cyan(`\n ${title}`));
25
+ console.log(chalk.gray(" " + "─".repeat(40)));
26
+ }
27
+
28
+ async function checkPort(port) {
29
+ return new Promise((resolve) => {
30
+ const srv = net.createServer();
31
+ srv.once("error", () => resolve(true)); // port in use
32
+ srv.once("listening", () => { srv.close(); resolve(false); });
33
+ srv.listen(port, "127.0.0.1");
34
+ });
35
+ }
36
+
37
+ async function cmdVersion(cmd, args = ["--version"]) {
38
+ try {
39
+ const { stdout } = await execa(cmd, args, { reject: false, timeout: 5000 });
40
+ return stdout?.split("\n")[0]?.trim() || null;
41
+ } catch {
42
+ return null;
43
+ }
44
+ }
45
+
46
+ /**
47
+ * Parse ~/.netrc and return the password/token for a given machine, or null.
48
+ */
49
+ function readNetrcToken(content, machine) {
50
+ // Handles both multi-line and single-line netrc formats
51
+ const lines = content.replace(/\r\n/g, "\n").split("\n");
52
+ let inMachine = false;
53
+ for (const line of lines) {
54
+ const tokens = line.trim().split(/\s+/);
55
+ for (let i = 0; i < tokens.length; i++) {
56
+ if (tokens[i] === "machine" && tokens[i + 1] === machine) {
57
+ inMachine = true;
58
+ } else if (tokens[i] === "machine" && tokens[i + 1] !== machine) {
59
+ if (inMachine) return null; // entered a different machine block
60
+ }
61
+ if (inMachine && tokens[i] === "password" && tokens[i + 1]) {
62
+ return tokens[i + 1];
63
+ }
64
+ }
65
+ }
66
+ return null;
67
+ }
68
+
69
+ /**
70
+ * Make an authenticated GitHub API GET request. Returns { status, body }.
71
+ */
72
+ function ghApiGet(path, token) {
73
+ return new Promise((resolve) => {
74
+ const req = https.get(`https://api.github.com${path}`, {
75
+ headers: {
76
+ Authorization: `token ${token}`,
77
+ "User-Agent": "fops-doctor",
78
+ Accept: "application/vnd.github+json",
79
+ },
80
+ timeout: 5000,
81
+ }, (res) => {
82
+ let body = "";
83
+ res.on("data", (chunk) => { body += chunk; });
84
+ res.on("end", () => {
85
+ try { resolve({ status: res.statusCode, body: JSON.parse(body) }); }
86
+ catch { resolve({ status: res.statusCode, body: {} }); }
87
+ });
88
+ });
89
+ req.on("error", () => resolve({ status: 0, body: {} }));
90
+ req.on("timeout", () => { req.destroy(); resolve({ status: 0, body: {} }); });
91
+ });
92
+ }
93
+
94
+ export async function runDoctor(opts = {}, registry = null) {
95
+ const dir = rootDir() || null;
96
+ let passed = 0;
97
+ let warned = 0;
98
+ let failed = 0;
99
+
100
+ const fixes = []; // collect fix actions to run at the end
101
+
102
+ const ok = (name, detail) => {
103
+ console.log(chalk.green(" ✓ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
104
+ passed++;
105
+ };
106
+ const warn = (name, detail) => {
107
+ console.log(chalk.yellow(" ⚠ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
108
+ warned++;
109
+ };
110
+ const fail = (name, detail, fixFn) => {
111
+ console.log(chalk.red(" ✗ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
112
+ failed++;
113
+ if (fixFn) fixes.push({ name, fn: fixFn });
114
+ };
115
+
116
+ // ── Prerequisites ──────────────────────────────────
117
+ header("Prerequisites");
118
+
119
+ // Docker
120
+ const dockerVer = await cmdVersion("docker");
121
+ if (dockerVer) {
122
+ // Check if daemon is actually running
123
+ try {
124
+ await execa("docker", ["info"], { timeout: 5000 });
125
+ ok("Docker running", dockerVer);
126
+ } catch {
127
+ fail("Docker daemon not running", "start Docker Desktop or dockerd");
128
+ }
129
+ } else {
130
+ fail("Docker not found", "install from docker.com");
131
+ }
132
+
133
+ // Git
134
+ const gitVer = await cmdVersion("git");
135
+ if (gitVer) ok("Git available", gitVer);
136
+ else fail("Git not found", "install git");
137
+
138
+ // Node.js version
139
+ const nodeVer = process.versions.node;
140
+ const nodeMajor = parseInt(nodeVer.split(".")[0], 10);
141
+ if (nodeMajor >= 18) ok(`Node.js v${nodeVer}`, ">=18 required");
142
+ else fail(`Node.js v${nodeVer}`, "upgrade to >=18");
143
+
144
+ // AWS CLI (optional)
145
+ const awsVer = await cmdVersion("aws");
146
+ if (awsVer) ok("AWS CLI", awsVer);
147
+ else warn("AWS CLI not found", "optional — needed for ECR login");
148
+
149
+ // ~/.netrc GitHub credentials (optional — validate against API + repo access)
150
+ const netrcPath = path.join(os.homedir(), ".netrc");
151
+ if (fs.existsSync(netrcPath)) {
152
+ try {
153
+ const content = fs.readFileSync(netrcPath, "utf8");
154
+ if (!content.includes("github.com")) {
155
+ warn("~/.netrc exists but no github.com entry");
156
+ } else {
157
+ const token = readNetrcToken(content, "github.com");
158
+ if (!token) {
159
+ warn("~/.netrc has github.com but no password/token");
160
+ } else {
161
+ const userRes = await ghApiGet("/user", token);
162
+ if (userRes.status !== 200) {
163
+ fail("~/.netrc GitHub token invalid or expired", "regenerate at github.com/settings/tokens");
164
+ } else {
165
+ const login = userRes.body.login || "authenticated";
166
+ ok("~/.netrc GitHub credentials", `authenticated as ${login}`);
167
+ const repoRes = await ghApiGet("/repos/meshxdata/foundation-compose", token);
168
+ if (repoRes.status === 200) {
169
+ ok("GitHub repo access", "meshxdata/foundation-compose readable");
170
+ } else if (repoRes.status === 404) {
171
+ fail("GitHub repo access", "meshxdata/foundation-compose not found — token may lack repo scope");
172
+ } else {
173
+ fail("GitHub repo access", `meshxdata/foundation-compose returned ${repoRes.status}`);
174
+ }
175
+ }
176
+ }
177
+ }
178
+ } catch {
179
+ warn("~/.netrc not readable");
180
+ }
181
+ } else {
182
+ warn("~/.netrc not found", "optional — used for private repo access");
183
+ }
184
+
185
+ // ~/.fops.json config (optional)
186
+ const fopsConfig = path.join(os.homedir(), ".fops.json");
187
+ if (fs.existsSync(fopsConfig)) ok("~/.fops.json config");
188
+ else warn("~/.fops.json not found", "optional — run fops init to create");
189
+
190
+ // ── AWS / ECR ──────────────────────────────────────
191
+ header("AWS / ECR");
192
+
193
+ const awsConfigPath = path.join(os.homedir(), ".aws", "config");
194
+ let awsSessionValid = false;
195
+
196
+ if (fs.existsSync(awsConfigPath)) {
197
+ ok("~/.aws/config exists");
198
+
199
+ // Check for SSO session — use detected profile
200
+ const ssoProfiles = awsVer ? detectAwsSsoProfiles() : [];
201
+ const defaultProfile = ssoProfiles[0];
202
+
203
+ if (awsVer) {
204
+ const profileArgs = defaultProfile ? ["--profile", defaultProfile.name] : [];
205
+ try {
206
+ const { stdout } = await execa("aws", ["sts", "get-caller-identity", "--output", "json", ...profileArgs], {
207
+ timeout: 10000, reject: false,
208
+ });
209
+ if (stdout && stdout.includes("Account")) {
210
+ ok("AWS SSO session valid");
211
+ awsSessionValid = true;
212
+ } else {
213
+ fail("AWS SSO session expired or invalid", "run: aws sso login", fixAwsSso);
214
+ }
215
+ } catch {
216
+ fail("AWS SSO session check failed", "run: aws sso login", fixAwsSso);
217
+ }
218
+ }
219
+ } else {
220
+ warn("~/.aws/config not found", "optional — needed for ECR");
221
+ }
222
+
223
+ // Validate ECR access if project references ECR images
224
+ const ecrInfo = dir ? detectEcrRegistry(dir) : null;
225
+ if (ecrInfo) {
226
+ const ecrUrl = `${ecrInfo.accountId}.dkr.ecr.${ecrInfo.region}.amazonaws.com`;
227
+ if (!awsSessionValid) {
228
+ fail(`ECR registry ${ecrUrl}`, "fix AWS session first", () => fixEcr(ecrInfo));
229
+ } else {
230
+ // Check we can get an ECR login password (same call the actual login uses)
231
+ const ssoProfiles = detectAwsSsoProfiles();
232
+ const ecrProfile = ssoProfiles.find((p) => p.region === ecrInfo.region) || ssoProfiles[0];
233
+ const ecrProfileArgs = ecrProfile ? ["--profile", ecrProfile.name] : [];
234
+ try {
235
+ const { stdout, exitCode } = await execa("aws", [
236
+ "ecr", "get-login-password", "--region", ecrInfo.region, ...ecrProfileArgs,
237
+ ], { timeout: 10000, reject: false });
238
+ if (exitCode === 0 && stdout?.trim()) {
239
+ ok(`ECR registry accessible`, ecrUrl);
240
+ } else {
241
+ fail(`ECR registry not accessible`, `${ecrUrl} — run: fops doctor --fix`, () => fixEcr(ecrInfo));
242
+ }
243
+ } catch {
244
+ fail(`ECR registry check failed`, ecrUrl, () => fixEcr(ecrInfo));
245
+ }
246
+ }
247
+ } else if (dir) {
248
+ warn("No ECR images detected in docker-compose.yaml", "skipping ECR check");
249
+ }
250
+
251
+ // ── Project ────────────────────────────────────────
252
+ header("Project");
253
+
254
+ if (dir) {
255
+ // .env — only report if missing (the CLI creates it automatically)
256
+ const envPath = path.join(dir, ".env");
257
+ const envExample = path.join(dir, ".env.example");
258
+ if (!fs.existsSync(envPath)) {
259
+ if (opts.fix && fs.existsSync(envExample)) {
260
+ fs.copyFileSync(envExample, envPath);
261
+ ok(".env created from .env.example", "--fix applied");
262
+ } else {
263
+ fail(".env missing", fs.existsSync(envExample) ? "run: cp .env.example .env" : "create .env");
264
+ }
265
+ }
266
+ } else {
267
+ fail("No Foundation project found", "run: fops init", async () => {
268
+ console.log(chalk.cyan(" ▶ fops init"));
269
+ const { runInitWizard } = await import("./setup/index.js");
270
+ await runInitWizard();
271
+ });
272
+ }
273
+
274
+ // ── Docker Resources ───────────────────────────────
275
+ if (dockerVer) {
276
+ header("Docker Resources");
277
+
278
+ // CPU check
279
+ try {
280
+ const { stdout: cpuOut } = await execa("docker", ["info", "--format", "{{.NCPU}}"], {
281
+ timeout: 5000, reject: false,
282
+ });
283
+ if (cpuOut) {
284
+ const cpus = parseInt(cpuOut.trim(), 10);
285
+ if (cpus >= 4) ok(`Docker CPUs: ${cpus}`);
286
+ else warn(`Docker CPUs: ${cpus}`, "recommend 4+ for full stack");
287
+ }
288
+ } catch {}
289
+
290
+ // Memory check
291
+ try {
292
+ const { stdout } = await execa("docker", ["info", "--format", "{{.MemTotal}}"], {
293
+ timeout: 5000, reject: false,
294
+ });
295
+ if (stdout) {
296
+ const bytes = parseInt(stdout.trim(), 10);
297
+ const gb = (bytes / (1024 ** 3)).toFixed(1);
298
+ if (bytes >= 7.5 * (1024 ** 3)) {
299
+ ok(`Docker memory: ${gb} GB`);
300
+ } else {
301
+ warn(`Docker memory: ${gb} GB`, "recommend 8 GB+ for full stack");
302
+ }
303
+ }
304
+ } catch {}
305
+
306
+ // Docker disk usage (images + containers + volumes + build cache)
307
+ try {
308
+ const { stdout: dfOut } = await execa("docker", ["system", "df", "--format", "{{.Size}}"], {
309
+ timeout: 10000, reject: false,
310
+ });
311
+ if (dfOut?.trim()) {
312
+ const sizes = dfOut.trim().split("\n").filter(Boolean);
313
+ ok(`Docker disk used`, sizes.join(" + "));
314
+ }
315
+ } catch {}
316
+
317
+ // Host disk available
318
+ try {
319
+ const { stdout: dfHost } = await execa("df", ["-h", "/"], {
320
+ timeout: 5000, reject: false,
321
+ });
322
+ if (dfHost) {
323
+ const lines = dfHost.trim().split("\n");
324
+ if (lines.length >= 2) {
325
+ const cols = lines[lines.length - 1].split(/\s+/);
326
+ const size = cols[1];
327
+ const avail = cols[3];
328
+ const pct = parseInt(cols[4], 10);
329
+ if (pct >= 90) fail(`Host disk: ${avail} free of ${size}`, "critically low — Docker needs room");
330
+ else if (pct >= 80) warn(`Host disk: ${avail} free of ${size}`, "getting low");
331
+ else ok(`Host disk: ${avail} free of ${size}`);
332
+ }
333
+ }
334
+ } catch {}
335
+
336
+ // Port conflicts
337
+ header("Ports");
338
+ for (const [port, service] of Object.entries(KEY_PORTS)) {
339
+ const inUse = await checkPort(parseInt(port, 10));
340
+ if (inUse) {
341
+ // If project containers are running, ports in use is expected
342
+ if (dir) ok(`:${port} ${service} — in use`);
343
+ else warn(`:${port} ${service} — in use`, "potential conflict");
344
+ } else {
345
+ ok(`:${port} ${service} — available`);
346
+ }
347
+ }
348
+ }
349
+
350
+ // ── Services ───────────────────────────────────────
351
+ if (dir && dockerVer) {
352
+ header("Services");
353
+
354
+ try {
355
+ const { stdout } = await execa("docker", ["compose", "ps", "--format", "json"], {
356
+ cwd: dir, reject: false, timeout: 10000,
357
+ });
358
+ if (stdout && stdout.trim()) {
359
+ const lines = stdout.trim().split("\n").filter(Boolean);
360
+ let anyService = false;
361
+ for (const line of lines) {
362
+ try {
363
+ const svc = JSON.parse(line);
364
+ const name = svc.Name || svc.name || "?";
365
+ const state = svc.State || svc.state || "?";
366
+ const health = svc.Health || svc.health || "";
367
+ anyService = true;
368
+
369
+ if (state === "running" && (!health || health === "healthy")) {
370
+ ok(name, state + (health ? ` (${health})` : ""));
371
+ } else if (state === "running" && health === "unhealthy") {
372
+ warn(name, `${state} (${health})`);
373
+ } else {
374
+ fail(name, state + (health ? ` (${health})` : ""));
375
+ }
376
+ } catch {
377
+ // skip unparseable line
378
+ }
379
+ }
380
+ if (!anyService) warn("No containers found", "run: fops up");
381
+ } else {
382
+ warn("No containers running", "run: fops up");
383
+ }
384
+ } catch {
385
+ warn("Could not check containers", "Docker Compose error");
386
+ }
387
+ }
388
+
389
+ // ── Images ──────────────────────────────────────────
390
+ if (dir && dockerVer) {
391
+ header("Images");
392
+ const STALE_DAYS = 7;
393
+
394
+ try {
395
+ const { stdout: imgOut } = await execa("docker", [
396
+ "compose", "images", "--format", "json",
397
+ ], { cwd: dir, reject: false, timeout: 10000 });
398
+
399
+ if (imgOut?.trim()) {
400
+ const stale = [];
401
+ const lines = imgOut.trim().split("\n").filter(Boolean);
402
+ for (const line of lines) {
403
+ try {
404
+ const img = JSON.parse(line);
405
+ const repo = img.Repository || img.repository || "";
406
+ const tag = img.Tag || img.tag || "";
407
+ const id = img.ID || img.id || "";
408
+ if (!id) continue;
409
+
410
+ // Inspect for creation date
411
+ const { stdout: inspectOut } = await execa("docker", [
412
+ "image", "inspect", id, "--format", "{{.Created}}",
413
+ ], { reject: false, timeout: 5000 });
414
+
415
+ if (inspectOut?.trim()) {
416
+ const created = new Date(inspectOut.trim());
417
+ const ageDays = Math.floor((Date.now() - created.getTime()) / (1000 * 60 * 60 * 24));
418
+ const name = `${repo}:${tag}`.replace(/^:/, "").replace(/:$/, "") || id.slice(0, 12);
419
+
420
+ if (ageDays > STALE_DAYS) {
421
+ warn(name, `${ageDays}d old — consider rebuilding`);
422
+ stale.push(name);
423
+ } else {
424
+ ok(name, ageDays === 0 ? "today" : `${ageDays}d ago`);
425
+ }
426
+ }
427
+ } catch {}
428
+ }
429
+
430
+ if (stale.length > 0) {
431
+ fixes.push({
432
+ name: `Rebuild ${stale.length} stale image(s)`,
433
+ fn: async () => {
434
+ console.log(chalk.cyan(" ▶ docker compose build --pull"));
435
+ await execa("docker", ["compose", "build", "--pull"], {
436
+ cwd: dir, stdio: "inherit", reject: false, timeout: 600_000,
437
+ });
438
+ },
439
+ });
440
+ }
441
+ }
442
+ } catch {}
443
+ }
444
+
445
+ // ── Plugins ─────────────────────────────────────────
446
+ if (registry && registry.doctorChecks.length > 0) {
447
+ header("Plugins");
448
+ for (const check of registry.doctorChecks) {
449
+ await check.fn(ok, warn, fail);
450
+ }
451
+ }
452
+
453
+ // ── Summary ────────────────────────────────────────
454
+ console.log(chalk.gray("\n " + "─".repeat(40)));
455
+ const parts = [];
456
+ if (passed) parts.push(chalk.green(`${passed} passed`));
457
+ if (warned) parts.push(chalk.yellow(`${warned} warnings`));
458
+ if (failed) parts.push(chalk.red(`${failed} failed`));
459
+ console.log(" " + parts.join(chalk.gray(" · ")));
460
+
461
+ if (failed > 0 && fixes.length > 0) {
462
+ const shouldFix = opts.fix || await confirm(`\n Fix ${fixes.length} issue(s) automatically?`, true);
463
+ if (shouldFix) {
464
+ console.log("");
465
+ for (const fix of fixes) {
466
+ try {
467
+ console.log(chalk.bold(` Fixing: ${fix.name}`));
468
+ await fix.fn();
469
+ console.log(chalk.green(" ✓ Fixed\n"));
470
+ } catch (err) {
471
+ console.log(chalk.red(` ✗ Fix failed: ${err.message}\n`));
472
+ }
473
+ }
474
+ console.log(chalk.gray(" Run fops doctor again to verify.\n"));
475
+ } else {
476
+ console.log("");
477
+ process.exit(1);
478
+ }
479
+ } else if (failed > 0) {
480
+ console.log(chalk.yellow("\n No automatic fix available. Resolve the issues above manually.\n"));
481
+ process.exit(1);
482
+ } else if (warned > 0) {
483
+ console.log(chalk.green("\n Looking good. Warnings are optional.\n"));
484
+ } else {
485
+ console.log(chalk.green("\n All systems operational. Run: fops up\n"));
486
+ }
487
+ }
@@ -0,0 +1,134 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
4
+ import { fileURLToPath } from "node:url";
5
+
6
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
7
+ const doctorSource = fs.readFileSync(path.join(__dirname, "doctor.js"), "utf8");
8
+
9
+ // readNetrcToken is not exported, so we inline a copy for testing
10
+ function readNetrcToken(content, machine) {
11
+ const lines = content.replace(/\r\n/g, "\n").split("\n");
12
+ let inMachine = false;
13
+ for (const line of lines) {
14
+ const tokens = line.trim().split(/\s+/);
15
+ for (let i = 0; i < tokens.length; i++) {
16
+ if (tokens[i] === "machine" && tokens[i + 1] === machine) {
17
+ inMachine = true;
18
+ } else if (tokens[i] === "machine" && tokens[i + 1] !== machine) {
19
+ if (inMachine) return null;
20
+ }
21
+ if (inMachine && tokens[i] === "password" && tokens[i + 1]) {
22
+ return tokens[i + 1];
23
+ }
24
+ }
25
+ }
26
+ return null;
27
+ }
28
+
29
+ describe("doctor", () => {
30
+ describe("readNetrcToken", () => {
31
+ it("returns token for multi-line netrc", () => {
32
+ const content = `machine github.com\nlogin user\npassword ghp_abc123\n`;
33
+ expect(readNetrcToken(content, "github.com")).toBe("ghp_abc123");
34
+ });
35
+
36
+ it("returns token for single-line netrc", () => {
37
+ const content = `machine github.com login user password ghp_abc123`;
38
+ expect(readNetrcToken(content, "github.com")).toBe("ghp_abc123");
39
+ });
40
+
41
+ it("returns null for wrong machine", () => {
42
+ const content = `machine gitlab.com\nlogin user\npassword glpat_xyz\n`;
43
+ expect(readNetrcToken(content, "github.com")).toBe(null);
44
+ });
45
+
46
+ it("returns null for missing password", () => {
47
+ const content = `machine github.com\nlogin user\n`;
48
+ expect(readNetrcToken(content, "github.com")).toBe(null);
49
+ });
50
+
51
+ it("handles multiple machines", () => {
52
+ const content = [
53
+ "machine gitlab.com login a password glpat_xyz",
54
+ "machine github.com login b password ghp_123",
55
+ ].join("\n");
56
+ expect(readNetrcToken(content, "github.com")).toBe("ghp_123");
57
+ expect(readNetrcToken(content, "gitlab.com")).toBe("glpat_xyz");
58
+ });
59
+
60
+ it("stops at next machine block", () => {
61
+ const content = [
62
+ "machine github.com",
63
+ "login user",
64
+ "machine other.com",
65
+ "password secret",
66
+ ].join("\n");
67
+ expect(readNetrcToken(content, "github.com")).toBe(null);
68
+ });
69
+
70
+ it("handles windows line endings", () => {
71
+ const content = "machine github.com\r\nlogin user\r\npassword ghp_win\r\n";
72
+ expect(readNetrcToken(content, "github.com")).toBe("ghp_win");
73
+ });
74
+
75
+ it("handles empty content", () => {
76
+ expect(readNetrcToken("", "github.com")).toBe(null);
77
+ });
78
+
79
+ it("handles whitespace-only content", () => {
80
+ expect(readNetrcToken(" \n \n ", "github.com")).toBe(null);
81
+ });
82
+
83
+ it("handles machine with no following tokens", () => {
84
+ const content = "machine\ngithub.com\npassword ghp_123";
85
+ // "machine" without immediate next token matching
86
+ expect(readNetrcToken(content, "github.com")).toBe(null);
87
+ });
88
+
89
+ it("handles mixed multi-line and single-line entries", () => {
90
+ const content = [
91
+ "machine gitlab.com login gl password glpat_abc",
92
+ "",
93
+ "machine github.com",
94
+ "login gh",
95
+ "password ghp_xyz",
96
+ ].join("\n");
97
+ expect(readNetrcToken(content, "github.com")).toBe("ghp_xyz");
98
+ expect(readNetrcToken(content, "gitlab.com")).toBe("glpat_abc");
99
+ });
100
+
101
+ it("returns first password for a machine", () => {
102
+ const content = "machine github.com login user password first_token";
103
+ expect(readNetrcToken(content, "github.com")).toBe("first_token");
104
+ });
105
+ });
106
+
107
+ describe("KEY_PORTS", () => {
108
+ it("defines all expected ports", () => {
109
+ expect(doctorSource).toContain("5432");
110
+ expect(doctorSource).toContain("Postgres");
111
+ expect(doctorSource).toContain("9092");
112
+ expect(doctorSource).toContain("Kafka");
113
+ expect(doctorSource).toContain("9001");
114
+ expect(doctorSource).toContain("Backend");
115
+ expect(doctorSource).toContain("3002");
116
+ expect(doctorSource).toContain("Frontend");
117
+ expect(doctorSource).toContain("8081");
118
+ expect(doctorSource).toContain("Trino");
119
+ expect(doctorSource).toContain("9083");
120
+ expect(doctorSource).toContain("Hive Metastore");
121
+ expect(doctorSource).toContain("8181");
122
+ expect(doctorSource).toContain("OPA");
123
+ expect(doctorSource).toContain("18201");
124
+ expect(doctorSource).toContain("Vault");
125
+ });
126
+ });
127
+
128
+ describe("runDoctor export", () => {
129
+ it("is exported as a function", async () => {
130
+ const { runDoctor } = await import("./doctor.js");
131
+ expect(typeof runDoctor).toBe("function");
132
+ });
133
+ });
134
+ });
@@ -0,0 +1,37 @@
1
+ import fs from "node:fs";
2
+ import os from "node:os";
3
+ import path from "node:path";
4
+
5
+ /**
6
+ * FopsPluginApi — the single API surface passed to each plugin's register() function.
7
+ */
8
+ export function createPluginApi(pluginId, registry) {
9
+ // Load plugin-specific config from ~/.fops.json → plugins.entries.<id>.config
10
+ let config = {};
11
+ try {
12
+ const fopsConfig = path.join(os.homedir(), ".fops.json");
13
+ if (fs.existsSync(fopsConfig)) {
14
+ const raw = JSON.parse(fs.readFileSync(fopsConfig, "utf8"));
15
+ config = raw?.plugins?.entries?.[pluginId]?.config || {};
16
+ }
17
+ } catch {
18
+ // ignore parse errors
19
+ }
20
+
21
+ return {
22
+ id: pluginId,
23
+ config,
24
+
25
+ registerCommand(spec) {
26
+ registry.commands.push({ pluginId, spec });
27
+ },
28
+
29
+ registerDoctorCheck(check) {
30
+ registry.doctorChecks.push({ pluginId, name: check.name, fn: check.fn });
31
+ },
32
+
33
+ registerHook(event, handler, priority = 0) {
34
+ registry.hooks.push({ pluginId, event, handler, priority });
35
+ },
36
+ };
37
+ }