@meshxdata/fops 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -1
- package/src/agent/agent.js +139 -38
- package/src/agent/agents.js +224 -0
- package/src/agent/context.js +146 -12
- package/src/agent/index.js +1 -0
- package/src/agent/llm.js +84 -13
- package/src/auth/coda.js +10 -10
- package/src/auth/login.js +13 -13
- package/src/auth/oauth.js +4 -4
- package/src/commands/index.js +121 -15
- package/src/config.js +2 -2
- package/src/doctor.js +215 -26
- package/src/feature-flags.js +197 -0
- package/src/plugins/api.js +14 -0
- package/src/plugins/builtins/stack-api.js +36 -0
- package/src/plugins/loader.js +67 -0
- package/src/plugins/registry.js +2 -0
- package/src/project.js +20 -1
- package/src/setup/aws.js +58 -45
- package/src/setup/setup.js +10 -9
- package/src/setup/wizard.js +195 -15
- package/src/ui/confirm.js +3 -2
- package/src/ui/input.js +2 -2
- package/src/ui/spinner.js +4 -4
- package/src/ui/streaming.js +2 -2
package/src/commands/index.js
CHANGED
@@ -4,12 +4,14 @@ import path from "node:path";
 import chalk from "chalk";
 import { Command } from "commander";
 import { PKG } from "../config.js";
-import { rootDir, requireRoot, hasComposeInDir, isFoundationRoot, findComposeRootUp } from "../project.js";
-import {
+import { rootDir, requireRoot, hasComposeInDir, isFoundationRoot, findComposeRootUp, checkInitState } from "../project.js";
+import { execa } from "execa";
+import { make, dockerCompose } from "../shell.js";
 import { runSetup, runInitWizard } from "../setup/index.js";
 import { ensureEcrAuth } from "../setup/aws.js";
 import { runAgentSingleTurn, runAgentInteractive } from "../agent/index.js";
 import { runDoctor } from "../doctor.js";
+import { runFeatureFlags } from "../feature-flags.js";
 import { runLogin, runCodaLogin } from "../auth/index.js";
 import { runHook, loadSkills } from "../plugins/index.js";
 
@@ -104,14 +106,53 @@ export function registerCommands(program, registry) {
     .option("--no-chat", "Skip interactive AI assistant after startup")
     .action(async (opts) => {
       const root = requireRoot(program);
+
+      // Pre-flight: check if project is initialised
+      const initIssue = checkInitState(root);
+      if (initIssue) {
+        console.error(chalk.red(`\n Project not ready: ${initIssue}.`));
+        console.error(chalk.dim(" Run `fops init` first to set up the project.\n"));
+        process.exit(1);
+      }
+
       await ensureEcrAuth(root);
       await runHook(registry, "before:up", { root });
-
+
+      // Detect stuck containers (restarting / unhealthy) and force-recreate them
+      const forceRecreate = [];
+      try {
+        const { stdout } = await execa("docker", ["compose", "ps", "--format", "json"], {
+          cwd: root, reject: false, timeout: 10000,
+        });
+        if (stdout?.trim()) {
+          for (const line of stdout.trim().split("\n").filter(Boolean)) {
+            try {
+              const svc = JSON.parse(line);
+              const state = (svc.State || "").toLowerCase();
+              const health = (svc.Health || "").toLowerCase();
+              if (state === "restarting" || health === "unhealthy") {
+                forceRecreate.push(svc.Service || svc.Name);
+              }
+            } catch {}
+          }
+        }
+      } catch {}
+
+      if (forceRecreate.length > 0) {
+        console.log(chalk.yellow(` Recreating stuck containers: ${forceRecreate.join(", ")}`));
+        await dockerCompose(root, ["rm", "-f", "-s", ...forceRecreate]);
+      }
+
+      console.log(chalk.green(" Starting services..."));
+      const result = await dockerCompose(root, ["up", "-d", "--remove-orphans", "--pull", "always"]);
+      // Clear any trailing \r progress line from docker compose output
+      process.stdout.write("\x1b[2K\r");
       await runHook(registry, "after:up", { root });
       if (result.exitCode !== 0) {
         console.error(chalk.red(`\n Some services failed to start (exit code ${result.exitCode}).`));
-        console.error(chalk.
-
+        console.error(chalk.dim(" Dropping into debug agent to diagnose...\n"));
+        await runAgentInteractive(root, { registry, initialAgent: "debug" });
+        return;
       }
       if (opts.chat !== false) await runAgentInteractive(root, { registry });
     });
@@ -164,10 +205,28 @@ export function registerCommands(program, registry) {
 
   program
     .command("config")
-    .description("
+    .description("Toggle MX_FF_* feature flags and restart affected services")
+    .action(async () => {
+      const root = requireRoot(program);
+      await runFeatureFlags(root);
+    });
+
+  program
+    .command("build")
+    .description("Build all Foundation service images from source")
+    .action(async () => {
+      const root = requireRoot(program);
+      await ensureEcrAuth(root);
+      await make(root, "build");
+    });
+
+  program
+    .command("download")
+    .description("Pull all container images from registry (requires ECR auth)")
     .action(async () => {
       const root = requireRoot(program);
-      await
+      await ensureEcrAuth(root);
+      await make(root, "download");
     });
 
   program
@@ -197,14 +256,14 @@ export function registerCommands(program, registry) {
     .action(async () => {
       const skills = await loadSkills(registry);
       if (skills.length === 0) {
-        console.log(chalk.
+        console.log(chalk.dim(" No skills available."));
         return;
       }
       console.log(chalk.bold.cyan("\n Agent Skills\n"));
       for (const s of skills) {
-        const source = s.pluginId ? chalk.
+        const source = s.pluginId ? chalk.dim(`(plugin: ${s.pluginId})`) : chalk.dim("(built-in)");
         console.log(` ${chalk.green("●")} ${chalk.bold(s.name)} ${source}`);
-        if (s.description) console.log(chalk.
+        if (s.description) console.log(chalk.dim(` ${s.description}`));
       }
       console.log("");
     });
@@ -214,20 +273,41 @@ export function registerCommands(program, registry) {
     .command("plugin")
     .description("Manage fops plugins");
 
+  // Helper: read/write plugin enabled state in ~/.fops.json
+  const fopsConfigPath = path.join(os.homedir(), ".fops.json");
+  const readFopsConfig = () => {
+    try { return fs.existsSync(fopsConfigPath) ? JSON.parse(fs.readFileSync(fopsConfigPath, "utf8")) : {}; } catch { return {}; }
+  };
+  const setPluginEnabled = (id, enabled) => {
+    const cfg = readFopsConfig();
+    if (!cfg.plugins) cfg.plugins = {};
+    if (!cfg.plugins.entries) cfg.plugins.entries = {};
+    if (!cfg.plugins.entries[id]) cfg.plugins.entries[id] = {};
+    cfg.plugins.entries[id].enabled = enabled;
+    fs.writeFileSync(fopsConfigPath, JSON.stringify(cfg, null, 2) + "\n");
+  };
+  const isPluginEnabled = (id) => {
+    const cfg = readFopsConfig();
+    return cfg?.plugins?.entries?.[id]?.enabled !== false;
+  };
+
   pluginCmd
     .command("list")
     .description("List installed plugins with status")
    .action(async () => {
       if (registry.plugins.length === 0) {
-        console.log(chalk.
-        console.log(chalk.
+        console.log(chalk.dim(" No plugins installed."));
+        console.log(chalk.dim(" Install plugins to ~/.fops/plugins/ or via npm (fops-plugin-*)."));
         return;
       }
       console.log(chalk.bold.cyan("\n Installed Plugins\n"));
       for (const p of registry.plugins) {
-        const
-
-
+        const enabled = isPluginEnabled(p.id);
+        const dot = enabled ? chalk.green("●") : chalk.red("○");
+        const status = enabled ? "" : chalk.red(" (disabled)");
+        const source = chalk.dim(`(${p.source})`);
+        console.log(` ${dot} ${chalk.bold(p.name)} ${chalk.dim("v" + p.version)} ${source}${status}`);
+        console.log(chalk.dim(` id: ${p.id} path: ${p.path}`));
       }
       console.log("");
     });
@@ -279,4 +359,30 @@ export function registerCommands(program, registry) {
       fs.rmSync(pluginDir, { recursive: true, force: true });
       console.log(chalk.green(` ✓ Removed plugin "${id}"`));
     });
+
+  pluginCmd
+    .command("enable <id>")
+    .description("Enable a plugin")
+    .action(async (id) => {
+      const found = registry.plugins.find((p) => p.id === id);
+      if (!found) {
+        console.error(chalk.red(`Plugin "${id}" not found. Run: fops plugin list`));
+        process.exit(1);
+      }
+      setPluginEnabled(id, true);
+      console.log(chalk.green(` ✓ Enabled plugin "${id}". Restart fops to apply.`));
+    });
+
+  pluginCmd
+    .command("disable <id>")
+    .description("Disable a plugin without removing it")
+    .action(async (id) => {
+      const found = registry.plugins.find((p) => p.id === id);
+      if (!found) {
+        console.error(chalk.red(`Plugin "${id}" not found. Run: fops plugin list`));
+        process.exit(1);
+      }
+      setPluginEnabled(id, false);
+      console.log(chalk.yellow(` ○ Disabled plugin "${id}". Restart fops to apply.`));
+    });
 }
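Note: the stuck-container pre-flight added to `fops up` above can be exercised on its own. The sketch below mirrors that logic as a standalone script; it is illustrative only — the file name is hypothetical, it assumes an ESM project with `execa` installed, and it relies on Docker Compose v2 emitting one JSON object per line from `docker compose ps --format json`, which is what the diff's parser also assumes.

// check-stuck.js — hypothetical helper mirroring the detection added to `fops up`.
// Lists services that docker compose reports as restarting or unhealthy.
import { execa } from "execa";

const root = process.argv[2] ?? process.cwd();
const { stdout } = await execa("docker", ["compose", "ps", "--format", "json"], {
  cwd: root,
  reject: false, // do not throw if compose exits non-zero
  timeout: 10000,
});

const stuck = [];
for (const line of (stdout ?? "").trim().split("\n").filter(Boolean)) {
  try {
    const svc = JSON.parse(line); // one JSON object per line (Compose v2)
    const state = (svc.State || "").toLowerCase();
    const health = (svc.Health || "").toLowerCase();
    if (state === "restarting" || health === "unhealthy") {
      stuck.push(svc.Service || svc.Name);
    }
  } catch {
    // skip lines that are not valid JSON (older compose releases emit a single array)
  }
}

console.log(stuck.length ? `Stuck services: ${stuck.join(", ")}` : "No stuck services detected.");

Run it as `node check-stuck.js /path/to/project`; the new `fops up` flow then passes the resulting service names to `docker compose rm -f -s` before recreating them, as the hunk above shows.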
package/src/config.js
CHANGED
@@ -18,7 +18,7 @@ export const CLI_BRAND = {
 export function printFoundationBanner(cwd) {
   const cwdShort = cwd.replace(os.homedir(), "~");
   console.log(chalk.cyan(` ${CLI_BRAND.title} ${CLI_BRAND.version}`));
-  console.log(chalk.
-  console.log(chalk.
+  console.log(chalk.dim(` ${CLI_BRAND.byline}`));
+  console.log(chalk.dim(` ${cwdShort}`));
   console.log("");
 }
package/src/doctor.js
CHANGED
@@ -6,8 +6,8 @@ import path from "node:path";
 import chalk from "chalk";
 import { execa } from "execa";
 import { rootDir } from "./project.js";
+import inquirer from "inquirer";
 import { detectEcrRegistry, detectAwsSsoProfiles, fixAwsSso, fixEcr } from "./setup/aws.js";
-import { confirm } from "./ui/index.js";
 
 const KEY_PORTS = {
   5432: "Postgres",
@@ -22,7 +22,7 @@ const KEY_PORTS = {
 
 function header(title) {
   console.log(chalk.bold.cyan(`\n ${title}`));
-  console.log(chalk.
+  console.log(chalk.dim(" " + "─".repeat(40)));
 }
 
 async function checkPort(port) {
@@ -34,6 +34,25 @@ async function checkPort(port) {
   });
 }
 
+/**
+ * Ensure Homebrew is available (macOS). Installs if missing.
+ * Returns true if brew is usable after the call.
+ */
+async function ensureBrew() {
+  try { await execa("brew", ["--version"]); return true; } catch {}
+  console.log(chalk.cyan(" ▶ Installing Homebrew…"));
+  try {
+    await execa("bash", ["-c", 'NONINTERACTIVE=1 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"'], {
+      stdio: "inherit", timeout: 300_000,
+    });
+    const brewPaths = ["/opt/homebrew/bin/brew", "/usr/local/bin/brew"];
+    for (const bp of brewPaths) {
+      if (fs.existsSync(bp)) { process.env.PATH = path.dirname(bp) + ":" + process.env.PATH; break; }
+    }
+    return true;
+  } catch { return false; }
+}
+
 async function cmdVersion(cmd, args = ["--version"]) {
   try {
     const { stdout } = await execa(cmd, args, { reject: false, timeout: 5000 });
@@ -98,19 +117,21 @@ export async function runDoctor(opts = {}, registry = null) {
   let failed = 0;
 
   const fixes = []; // collect fix actions to run at the end
+  const fixFns = new Set(); // deduplicate by function reference
 
   const ok = (name, detail) => {
-    console.log(chalk.green(" ✓ ") + name + (detail ? chalk.
+    console.log(chalk.green(" ✓ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     passed++;
   };
-  const warn = (name, detail) => {
-    console.log(chalk.yellow(" ⚠ ") + name + (detail ? chalk.
+  const warn = (name, detail, fixFn) => {
+    console.log(chalk.yellow(" ⚠ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     warned++;
+    if (fixFn && !fixFns.has(fixFn)) { fixes.push({ name, fn: fixFn }); fixFns.add(fixFn); }
   };
   const fail = (name, detail, fixFn) => {
-    console.log(chalk.red(" ✗ ") + name + (detail ? chalk.
+    console.log(chalk.red(" ✗ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     failed++;
-    if (fixFn) fixes.push({ name, fn: fixFn });
+    if (fixFn && !fixFns.has(fixFn)) { fixes.push({ name, fn: fixFn }); fixFns.add(fixFn); }
   };
 
   // ── Prerequisites ──────────────────────────────────
@@ -160,7 +181,7 @@ export async function runDoctor(opts = {}, registry = null) {
       return;
     }
     // macOS / Windows: wait for daemon to become ready
-    console.log(chalk.
+    console.log(chalk.dim(" Waiting for Docker daemon to start…"));
     for (let i = 0; i < 30; i++) {
       await new Promise((r) => setTimeout(r, 2000));
       try {
@@ -215,7 +236,7 @@ export async function runDoctor(opts = {}, registry = null) {
      return;
    }
    // macOS / Windows: wait for daemon after install
-    console.log(chalk.
+    console.log(chalk.dim(" Waiting for Docker daemon to start…"));
     for (let i = 0; i < 30; i++) {
       await new Promise((r) => setTimeout(r, 2000));
       try {
@@ -235,7 +256,20 @@ export async function runDoctor(opts = {}, registry = null) {
   // Git
   const gitVer = await cmdVersion("git");
   if (gitVer) ok("Git available", gitVer);
-  else fail("Git not found", "install git")
+  else fail("Git not found", "install git", async () => {
+    if (process.platform === "darwin") {
+      if (!(await ensureBrew())) throw new Error("Homebrew required");
+      console.log(chalk.cyan(" ▶ brew install git"));
+      await execa("brew", ["install", "git"], { stdio: "inherit", timeout: 300_000 });
+    } else if (process.platform === "win32") {
+      if (!hasWinget) throw new Error("winget required");
+      console.log(chalk.cyan(" ▶ winget install Git.Git"));
+      await execa("winget", ["install", "Git.Git", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
+    } else {
+      console.log(chalk.cyan(" ▶ sudo apt-get install -y git"));
+      await execa("sudo", ["apt-get", "install", "-y", "git"], { stdio: "inherit", timeout: 300_000 });
+    }
+  });
 
   // Node.js version
   const nodeVer = process.versions.node;
@@ -246,28 +280,121 @@ export async function runDoctor(opts = {}, registry = null) {
   // Claude CLI (bundled as a dependency)
   const claudeVer = await cmdVersion("claude");
   if (claudeVer) ok("Claude CLI", claudeVer);
-  else fail("Claude CLI not found", "
+  else fail("Claude CLI not found", "included as a dependency", async () => {
+    console.log(chalk.cyan(" ▶ npm install"));
+    await execa("npm", ["install"], { stdio: "inherit", timeout: 300_000 });
+  });
 
   // AWS CLI (optional)
   const awsVer = await cmdVersion("aws");
   if (awsVer) ok("AWS CLI", awsVer);
-  else warn("AWS CLI not found", "
+  else warn("AWS CLI not found", "needed for ECR login", async () => {
+    if (process.platform === "darwin") {
+      if (!(await ensureBrew())) throw new Error("Homebrew required");
+      console.log(chalk.cyan(" ▶ brew install awscli"));
+      await execa("brew", ["install", "awscli"], { stdio: "inherit", timeout: 300_000 });
+    } else if (process.platform === "win32") {
+      if (!hasWinget) throw new Error("winget required");
+      console.log(chalk.cyan(" ▶ winget install Amazon.AWSCLI"));
+      await execa("winget", ["install", "Amazon.AWSCLI", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
+    } else {
+      console.log(chalk.cyan(" ▶ curl + unzip install"));
+      await execa("sh", ["-c", 'curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o /tmp/awscliv2.zip && unzip -qo /tmp/awscliv2.zip -d /tmp && sudo /tmp/aws/install'], {
+        stdio: "inherit", timeout: 300_000,
+      });
+    }
+  });
 
-  //
+  // 1Password CLI (optional — needed for secret sync)
+  const opVer = await cmdVersion("op");
+  if (opVer) ok("1Password CLI (op)", opVer);
+  else warn("1Password CLI (op) not installed", "needed for secret sync", async () => {
+    if (process.platform === "darwin") {
+      if (!(await ensureBrew())) throw new Error("Homebrew required");
+      console.log(chalk.cyan(" ▶ brew install --cask 1password-cli"));
+      await execa("brew", ["install", "--cask", "1password-cli"], { stdio: "inherit", timeout: 300_000 });
+    } else if (process.platform === "win32") {
+      if (!hasWinget) throw new Error("winget required");
+      console.log(chalk.cyan(" ▶ winget install AgileBits.1Password.CLI"));
+      await execa("winget", ["install", "AgileBits.1Password.CLI", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
+    } else {
+      console.log(chalk.dim(" Install manually: https://developer.1password.com/docs/cli/get-started/#install"));
+    }
+  });
+
+  // GitHub CLI
+  const ghVer = await cmdVersion("gh");
+  if (ghVer) ok("GitHub CLI (gh)", ghVer);
+  else warn("GitHub CLI (gh) not installed", "needed for auth", async () => {
+    if (process.platform === "darwin") {
+      if (!(await ensureBrew())) throw new Error("Homebrew required");
+      console.log(chalk.cyan(" ▶ brew install gh"));
+      await execa("brew", ["install", "gh"], { stdio: "inherit", timeout: 300_000 });
+    } else if (process.platform === "win32") {
+      if (!hasWinget) throw new Error("winget required");
+      console.log(chalk.cyan(" ▶ winget install GitHub.cli"));
+      await execa("winget", ["install", "GitHub.cli", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
+    } else {
+      console.log(chalk.dim(" Install: https://cli.github.com/"));
+    }
+  });
+
+  // ~/.netrc GitHub credentials (required for private repo access)
   const netrcPath = path.join(os.homedir(), ".netrc");
+  const netrcFixFn = async () => {
+    // Install gh if missing
+    let hasGh = false;
+    try { await execa("gh", ["--version"]); hasGh = true; } catch {}
+    if (!hasGh) {
+      if (process.platform === "darwin") {
+        if (!(await ensureBrew())) throw new Error("Homebrew required to install gh");
+        console.log(chalk.cyan(" ▶ brew install gh"));
+        await execa("brew", ["install", "gh"], { stdio: "inherit", timeout: 300_000 });
+        hasGh = true;
+      } else if (process.platform === "win32") {
+        console.log(chalk.cyan(" ▶ winget install GitHub.cli"));
+        await execa("winget", ["install", "GitHub.cli", "--accept-source-agreements", "--accept-package-agreements"], { stdio: "inherit", timeout: 300_000 });
+        hasGh = true;
+      }
+    }
+    // Authenticate via gh
+    console.log(chalk.cyan("\n ▶ gh auth login -p https -h github.com -w"));
+    await execa("gh", ["auth", "login", "-p", "https", "-h", "github.com", "-w"], {
+      stdio: "inherit", timeout: 120_000,
+    });
+    console.log(chalk.cyan(" ▶ gh auth setup-git"));
+    await execa("gh", ["auth", "setup-git"], { stdio: "inherit", timeout: 10_000 }).catch(() => {});
+    // Extract token and write to .netrc for tools that need it directly
+    try {
+      const { stdout: ghToken } = await execa("gh", ["auth", "token"], { timeout: 5000 });
+      const { stdout: ghUser } = await execa("gh", ["api", "/user", "--jq", ".login"], { timeout: 10000 });
+      if (ghToken?.trim() && ghUser?.trim()) {
+        const entry = `machine github.com\nlogin ${ghUser.trim()}\npassword ${ghToken.trim()}\n`;
+        if (fs.existsSync(netrcPath)) {
+          const content = fs.readFileSync(netrcPath, "utf8");
+          if (!content.includes("github.com")) {
+            fs.appendFileSync(netrcPath, "\n" + entry);
+          }
+        } else {
+          fs.writeFileSync(netrcPath, entry, { mode: 0o600 });
+        }
+        console.log(chalk.green(" ✓ ~/.netrc updated with GitHub credentials"));
+      }
+    } catch {}
+  };
   if (fs.existsSync(netrcPath)) {
     try {
       const content = fs.readFileSync(netrcPath, "utf8");
       if (!content.includes("github.com")) {
-
+        fail("~/.netrc exists but no github.com entry", "needed for private repos", netrcFixFn);
       } else {
         const token = readNetrcToken(content, "github.com");
         if (!token) {
-
+          fail("~/.netrc has github.com but no password/token", "add token", netrcFixFn);
        } else {
           const userRes = await ghApiGet("/user", token);
           if (userRes.status !== 200) {
-            fail("~/.netrc GitHub token invalid or expired", "regenerate at github.com/settings/tokens");
+            fail("~/.netrc GitHub token invalid or expired", "regenerate at github.com/settings/tokens", netrcFixFn);
           } else {
             const login = userRes.body.login || "authenticated";
             ok("~/.netrc GitHub credentials", `authenticated as ${login}`);
@@ -283,10 +410,10 @@ export async function runDoctor(opts = {}, registry = null) {
         }
       }
     } catch {
-
+      fail("~/.netrc not readable", "check file permissions", netrcFixFn);
     }
   } else {
-
+    fail("~/.netrc not found", "needed for private repo access", netrcFixFn);
   }
 
   // ~/.fops.json config (optional)
@@ -324,7 +451,7 @@ export async function runDoctor(opts = {}, registry = null) {
       }
     }
   } else {
-    warn("~/.aws/config not found", "
+    warn("~/.aws/config not found", "needed for ECR", fixAwsSso);
   }
 
   // Validate ECR access if project references ECR images
@@ -332,7 +459,7 @@ export async function runDoctor(opts = {}, registry = null) {
   if (ecrInfo) {
     const ecrUrl = `${ecrInfo.accountId}.dkr.ecr.${ecrInfo.region}.amazonaws.com`;
     if (!awsSessionValid) {
-      fail(`ECR registry ${ecrUrl}`, "fix AWS session first"
+      fail(`ECR registry ${ecrUrl}`, "fix AWS session first");
     } else {
       // Check we can get an ECR login password (same call the actual login uses)
       const ssoProfiles = detectAwsSsoProfiles();
@@ -514,6 +641,64 @@ export async function runDoctor(opts = {}, registry = null) {
     }
   }
 
+  // ── Logs ──────────────────────────────────────────────
+  if (dir && dockerVer) {
+    header("Logs");
+
+    try {
+      const { stdout: logOut } = await execa("docker", [
+        "compose", "logs", "--tail", "50", "--no-color",
+      ], { cwd: dir, reject: false, timeout: 30000 });
+
+      if (logOut?.trim()) {
+        const serviceIssues = {};
+        const ERROR_RE = /\b(ERROR|FATAL|PANIC|CRITICAL)\b/;
+        const CRASH_RE = /\b(OOM|OutOfMemory|out of memory|segmentation fault|segfault)\b/i;
+        const CONN_RE = /\b(ECONNREFUSED|ETIMEDOUT|connection refused)\b/i;
+
+        for (const line of logOut.split("\n")) {
+          const sep = line.indexOf(" | ");
+          if (sep === -1) continue;
+          const service = line.slice(0, sep).trim();
+          const msg = line.slice(sep + 3);
+
+          let level = null;
+          if (CRASH_RE.test(msg)) level = "crash";
+          else if (ERROR_RE.test(msg)) level = "error";
+          else if (CONN_RE.test(msg)) level = "conn";
+          else continue;
+
+          if (!serviceIssues[service]) serviceIssues[service] = { errors: 0, crashes: 0, conn: 0, last: "" };
+          const entry = serviceIssues[service];
+          if (level === "crash") entry.crashes++;
+          else if (level === "conn") entry.conn++;
+          else entry.errors++;
+          entry.last = msg.trim();
+        }
+
+        const services = Object.keys(serviceIssues);
+        if (services.length === 0) {
+          ok("No errors in recent logs");
+        } else {
+          for (const svc of services) {
+            const { errors, crashes, conn, last } = serviceIssues[svc];
+            const parts = [];
+            if (crashes) parts.push(`${crashes} crash`);
+            if (errors) parts.push(`${errors} error${errors > 1 ? "s" : ""}`);
+            if (conn) parts.push(`${conn} conn issue${conn > 1 ? "s" : ""}`);
+            const sample = last.length > 120 ? last.slice(0, 120) + "…" : last;
+            if (crashes) fail(svc, `${parts.join(", ")} — ${sample}`);
+            else warn(svc, `${parts.join(", ")} — ${sample}`);
+          }
+        }
+      } else {
+        ok("No log output", "services may not be running");
+      }
+    } catch {
+      warn("Could not fetch logs", "Docker Compose error");
+    }
+  }
+
   // ── Images ──────────────────────────────────────────
   if (dir && dockerVer) {
     header("Images");
@@ -579,15 +764,19 @@ export async function runDoctor(opts = {}, registry = null) {
   }
 
   // ── Summary ────────────────────────────────────────
-  console.log(chalk.
+  console.log(chalk.dim("\n " + "─".repeat(40)));
   const parts = [];
   if (passed) parts.push(chalk.green(`${passed} passed`));
   if (warned) parts.push(chalk.yellow(`${warned} warnings`));
   if (failed) parts.push(chalk.red(`${failed} failed`));
-  console.log(" " + parts.join(chalk.
+  console.log(" " + parts.join(chalk.dim(" · ")));
 
-  if (
-
+  if (fixes.length > 0) {
+    let shouldFix = opts.fix;
+    if (!shouldFix) {
+      const { ans } = await inquirer.prompt([{ type: "confirm", name: "ans", message: `Fix ${fixes.length} issue(s) automatically?`, default: true }]);
+      shouldFix = ans;
+    }
     if (shouldFix) {
       console.log("");
       for (const fix of fixes) {
@@ -599,8 +788,8 @@ export async function runDoctor(opts = {}, registry = null) {
        console.log(chalk.red(` ✗ Fix failed: ${err.message}\n`));
      }
    }
-    console.log(chalk.
-  } else {
+    console.log(chalk.dim(" Run fops doctor again to verify.\n"));
+  } else if (failed > 0) {
     console.log("");
     process.exit(1);
   }