@meshxdata/fops 0.0.3 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/doctor.js CHANGED
@@ -22,7 +22,7 @@ const KEY_PORTS = {
 
 function header(title) {
   console.log(chalk.bold.cyan(`\n ${title}`));
-  console.log(chalk.gray(" " + "─".repeat(40)));
+  console.log(chalk.dim(" " + "─".repeat(40)));
 }
 
 async function checkPort(port) {
@@ -100,15 +100,15 @@ export async function runDoctor(opts = {}, registry = null) {
   const fixes = []; // collect fix actions to run at the end
 
   const ok = (name, detail) => {
-    console.log(chalk.green(" ✓ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
+    console.log(chalk.green(" ✓ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     passed++;
   };
   const warn = (name, detail) => {
-    console.log(chalk.yellow(" ⚠ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
+    console.log(chalk.yellow(" ⚠ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     warned++;
   };
   const fail = (name, detail, fixFn) => {
-    console.log(chalk.red(" ✗ ") + name + (detail ? chalk.gray(` — ${detail}`) : ""));
+    console.log(chalk.red(" ✗ ") + name + (detail ? chalk.dim(` — ${detail}`) : ""));
     failed++;
     if (fixFn) fixes.push({ name, fn: fixFn });
   };
@@ -160,7 +160,7 @@ export async function runDoctor(opts = {}, registry = null) {
       return;
     }
     // macOS / Windows: wait for daemon to become ready
-    console.log(chalk.gray(" Waiting for Docker daemon to start…"));
+    console.log(chalk.dim(" Waiting for Docker daemon to start…"));
     for (let i = 0; i < 30; i++) {
       await new Promise((r) => setTimeout(r, 2000));
       try {
@@ -215,7 +215,7 @@ export async function runDoctor(opts = {}, registry = null) {
       return;
     }
     // macOS / Windows: wait for daemon after install
-    console.log(chalk.gray(" Waiting for Docker daemon to start…"));
+    console.log(chalk.dim(" Waiting for Docker daemon to start…"));
     for (let i = 0; i < 30; i++) {
       await new Promise((r) => setTimeout(r, 2000));
       try {
@@ -514,6 +514,64 @@ export async function runDoctor(opts = {}, registry = null) {
     }
   }
 
+  // ── Logs ──────────────────────────────────────────────
+  if (dir && dockerVer) {
+    header("Logs");
+
+    try {
+      const { stdout: logOut } = await execa("docker", [
+        "compose", "logs", "--tail", "50", "--no-color",
+      ], { cwd: dir, reject: false, timeout: 30000 });
+
+      if (logOut?.trim()) {
+        const serviceIssues = {};
+        const ERROR_RE = /\b(ERROR|FATAL|PANIC|CRITICAL)\b/;
+        const CRASH_RE = /\b(OOM|OutOfMemory|out of memory|segmentation fault|segfault)\b/i;
+        const CONN_RE = /\b(ECONNREFUSED|ETIMEDOUT|connection refused)\b/i;
+
+        for (const line of logOut.split("\n")) {
+          const sep = line.indexOf(" | ");
+          if (sep === -1) continue;
+          const service = line.slice(0, sep).trim();
+          const msg = line.slice(sep + 3);
+
+          let level = null;
+          if (CRASH_RE.test(msg)) level = "crash";
+          else if (ERROR_RE.test(msg)) level = "error";
+          else if (CONN_RE.test(msg)) level = "conn";
+          else continue;
+
+          if (!serviceIssues[service]) serviceIssues[service] = { errors: 0, crashes: 0, conn: 0, last: "" };
+          const entry = serviceIssues[service];
+          if (level === "crash") entry.crashes++;
+          else if (level === "conn") entry.conn++;
+          else entry.errors++;
+          entry.last = msg.trim();
+        }
+
+        const services = Object.keys(serviceIssues);
+        if (services.length === 0) {
+          ok("No errors in recent logs");
+        } else {
+          for (const svc of services) {
+            const { errors, crashes, conn, last } = serviceIssues[svc];
+            const parts = [];
+            if (crashes) parts.push(`${crashes} crash`);
+            if (errors) parts.push(`${errors} error${errors > 1 ? "s" : ""}`);
+            if (conn) parts.push(`${conn} conn issue${conn > 1 ? "s" : ""}`);
+            const sample = last.length > 120 ? last.slice(0, 120) + "…" : last;
+            if (crashes) fail(svc, `${parts.join(", ")} — ${sample}`);
+            else warn(svc, `${parts.join(", ")} — ${sample}`);
+          }
+        }
+      } else {
+        ok("No log output", "services may not be running");
+      }
+    } catch {
+      warn("Could not fetch logs", "Docker Compose error");
+    }
+  }
+
   // ── Images ──────────────────────────────────────────
   if (dir && dockerVer) {
     header("Images");
@@ -579,12 +637,12 @@ export async function runDoctor(opts = {}, registry = null) {
   }
 
   // ── Summary ────────────────────────────────────────
-  console.log(chalk.gray("\n " + "─".repeat(40)));
+  console.log(chalk.dim("\n " + "─".repeat(40)));
   const parts = [];
   if (passed) parts.push(chalk.green(`${passed} passed`));
   if (warned) parts.push(chalk.yellow(`${warned} warnings`));
   if (failed) parts.push(chalk.red(`${failed} failed`));
-  console.log(" " + parts.join(chalk.gray(" · ")));
+  console.log(" " + parts.join(chalk.dim(" · ")));
 
   if (failed > 0 && fixes.length > 0) {
     const shouldFix = opts.fix || await confirm(`\n Fix ${fixes.length} issue(s) automatically?`, true);
@@ -599,7 +657,7 @@ export async function runDoctor(opts = {}, registry = null) {
           console.log(chalk.red(` ✗ Fix failed: ${err.message}\n`));
         }
       }
-      console.log(chalk.gray(" Run fops doctor again to verify.\n"));
+      console.log(chalk.dim(" Run fops doctor again to verify.\n"));
     } else {
       console.log("");
       process.exit(1);
@@ -0,0 +1,197 @@
+import fs from "node:fs";
+import path from "node:path";
+import chalk from "chalk";
+import { execa } from "execa";
+import inquirer from "inquirer";
+
+/**
+ * Canonical feature flags — the complete set known to the platform.
+ * Label is a human-readable short description for the toggle UI.
+ */
+const KNOWN_FLAGS = {
+  MX_FF_STORAGE_EXPLORER_ENABLED: "Storage Explorer",
+  MX_FF_USER_MANAGEMENT_ENABLED: "User Management",
+  MX_FF_SETTINGS_PAGE_ENABLED: "Settings Page",
+  MX_FF_ENCRYPTION_STATUS_DISPLAY: "Encryption Status",
+  MX_FF_USER_PAT_ENABLED: "User PAT (Personal Access Tokens)",
+  MX_FF_SENTRY_ENABLED: "Sentry Error Tracking",
+  MX_FF_EXPLORER_ENABLED: "Data Explorer",
+  MX_FF_NEW_PROFILE_ENABLED: "New Profile Page",
+};
+
+/**
+ * Parse docker-compose.yaml for all MX_FF_* entries.
+ * Returns a map: flagName → { value, services: Set<string>, lines: [{ lineNum, original }] }
+ */
+function parseComposeFlags(composePath) {
+  const content = fs.readFileSync(composePath, "utf8");
+  const lines = content.split("\n");
+  const flags = {};
+
+  // Track which service block we're in
+  let currentService = null;
+
+  for (let i = 0; i < lines.length; i++) {
+    const line = lines[i];
+
+    // Service definition: exactly 2-space indent, ends with colon only
+    const svcMatch = line.match(/^  ([a-z][\w-]+):\s*$/);
+    if (svcMatch) {
+      currentService = svcMatch[1];
+    }
+
+    // Match MX_FF_* in both YAML map and list formats
+    // Map format: MX_FF_NAME: "value"
+    const mapMatch = line.match(/\b(MX_FF_\w+)\s*:\s*"?(true|false)"?/);
+    // List format: - MX_FF_NAME=value
+    const listMatch = !mapMatch && line.match(/[-]\s*(MX_FF_\w+)\s*=\s*(true|false)/);
+
+    const match = mapMatch || listMatch;
+    if (match) {
+      const name = match[1];
+      const value = match[2] === "true";
+
+      if (!flags[name]) {
+        flags[name] = { value, services: new Set(), lines: [] };
+      }
+      flags[name].lines.push({ lineNum: i, original: line });
+      if (currentService) flags[name].services.add(currentService);
+      // If any occurrence is true, treat the flag as enabled
+      if (value) flags[name].value = true;
+    }
+  }
+
+  return flags;
+}
+
+/**
+ * Update docker-compose.yaml by flipping flag values on specific lines.
+ */
+function updateComposeFlags(composePath, changes) {
+  const content = fs.readFileSync(composePath, "utf8");
+  const lines = content.split("\n");
+
+  for (const { lineNum, newValue } of changes) {
+    const line = lines[lineNum];
+    lines[lineNum] = line
+      .replace(/(MX_FF_\w+\s*:\s*)"?(true|false)"?/, `$1"${newValue}"`)
+      .replace(/(MX_FF_\w+=)(true|false)/, `$1${newValue}`);
+  }
+
+  fs.writeFileSync(composePath, lines.join("\n"));
+}
+
+/**
+ * Interactive feature flag configuration.
+ * Reads flags from compose, presents toggle UI, applies changes, restarts services.
+ */
+export async function runFeatureFlags(root) {
+  const composePath = path.join(root, "docker-compose.yaml");
+  if (!fs.existsSync(composePath)) {
+    console.log(chalk.red(" No docker-compose.yaml found."));
+    return;
+  }
+
+  console.log(chalk.bold.cyan("\n Feature Flags\n"));
+
+  // Parse current state from compose
+  const composeFlags = parseComposeFlags(composePath);
+
+  // Build the full flag list: compose flags + canonical flags not yet in compose
+  const allFlags = {};
+  for (const [name, info] of Object.entries(composeFlags)) {
+    allFlags[name] = { ...info, inCompose: true };
+  }
+  for (const name of Object.keys(KNOWN_FLAGS)) {
+    if (!allFlags[name]) {
+      allFlags[name] = { value: false, services: new Set(), lines: [], inCompose: false };
+    }
+  }
+
+  const flagNames = Object.keys(allFlags).sort();
+
+  // Show current state
+  for (const name of flagNames) {
+    const flag = allFlags[name];
+    const label = KNOWN_FLAGS[name] || name;
+    const services = flag.services.size > 0 ? chalk.dim(` (${[...flag.services].join(", ")})`) : "";
+    if (flag.value) {
+      console.log(chalk.green(` ✓ ${label}`) + services);
+    } else {
+      console.log(chalk.dim(` · ${label}`) + services);
+    }
+  }
+  console.log("");
+
+  // Checkbox prompt
+  const choices = flagNames.map((name) => ({
+    name: KNOWN_FLAGS[name] || name,
+    value: name,
+    checked: allFlags[name].value,
+  }));
+
+  const { enabled } = await inquirer.prompt([{
+    type: "checkbox",
+    name: "enabled",
+    message: "Toggle feature flags:",
+    choices,
+  }]);
+
+  // Calculate changes
+  const changes = [];
+  const affectedServices = new Set();
+
+  for (const name of flagNames) {
+    const flag = allFlags[name];
+    const newValue = enabled.includes(name);
+
+    if (newValue !== flag.value) {
+      if (flag.inCompose) {
+        for (const line of flag.lines) {
+          changes.push({ lineNum: line.lineNum, newValue: String(newValue) });
+        }
+        for (const svc of flag.services) affectedServices.add(svc);
+      } else if (newValue) {
+        console.log(chalk.yellow(` ⚠ ${KNOWN_FLAGS[name] || name} not in docker-compose.yaml — add it to service environments to take effect`));
+      }
+    }
+  }
+
+  if (changes.length === 0) {
+    console.log(chalk.dim("\n No changes.\n"));
+    return;
+  }
+
+  // Apply changes to compose file
+  updateComposeFlags(composePath, changes);
+  console.log(chalk.green(`\n ✓ Updated ${changes.length} flag value(s) in docker-compose.yaml`));
+
+  if (affectedServices.size === 0) {
+    console.log("");
+    return;
+  }
+
+  // Restart affected services
+  const serviceList = [...affectedServices];
+  console.log(chalk.dim(` Affected: ${serviceList.join(", ")}`));
+
+  const { restart } = await inquirer.prompt([{
+    type: "confirm",
+    name: "restart",
+    message: `Restart ${serviceList.length} service(s)?`,
+    default: true,
+  }]);
+
+  if (restart) {
+    console.log(chalk.cyan(`\n ▶ docker compose up -d ${serviceList.join(" ")}\n`));
+    await execa("docker", ["compose", "up", "-d", ...serviceList], {
+      cwd: root,
+      stdio: "inherit",
+      reject: false,
+      timeout: 120_000,
+    });
+    console.log(chalk.green("\n ✓ Services restarted.\n"));
+  } else {
+    console.log(chalk.dim("\n Changes saved. Restart manually: docker compose up -d\n"));
+  }
+}
@@ -42,5 +42,19 @@ export function createPluginApi(pluginId, registry) {
         search: source.search,
       });
     },
+
+    registerAutoRunPattern(pattern) {
+      registry.autoRunPatterns.push({ pluginId, pattern });
+    },
+
+    registerAgent(agent) {
+      registry.agents.push({
+        pluginId,
+        name: agent.name,
+        description: agent.description || "",
+        systemPrompt: agent.systemPrompt,
+        contextMode: agent.contextMode || "full",
+      });
+    },
   };
 }
@@ -0,0 +1,36 @@
+import http from "node:http";
+
+/**
+ * Built-in Stack API plugin.
+ * Registers a doctor check (health ping) and auto-run patterns for curl commands.
+ */
+export function register(api) {
+  // Doctor check — ping GET /health on localhost:3090
+  api.registerDoctorCheck({
+    name: "Stack API",
+    fn: async (ok, warn) => {
+      try {
+        const body = await new Promise((resolve, reject) => {
+          const req = http.get("http://localhost:3090/health", { timeout: 3000 }, (res) => {
+            let data = "";
+            res.on("data", (chunk) => { data += chunk; });
+            res.on("end", () => {
+              if (res.statusCode === 200) resolve(data);
+              else reject(new Error(`HTTP ${res.statusCode}`));
+            });
+          });
+          req.on("error", reject);
+          req.on("timeout", () => { req.destroy(); reject(new Error("timeout")); });
+        });
+        ok("Stack API", `healthy — ${body.trim().slice(0, 60)}`);
+      } catch {
+        warn("Stack API", "not reachable on localhost:3090");
+      }
+    },
+  });
+
+  // Auto-run: GET curl commands to the stack API execute without confirmation
+  api.registerAutoRunPattern("curl http://localhost:3090/");
+  api.registerAutoRunPattern("curl -s http://localhost:3090/");
+  api.registerAutoRunPattern("curl --silent http://localhost:3090/");
+}
@@ -1,10 +1,50 @@
 import fs from "node:fs";
 import os from "node:os";
 import path from "node:path";
+import { fileURLToPath } from "node:url";
 import { createRegistry } from "./registry.js";
 import { validateManifest } from "./manifest.js";
 import { discoverPlugins } from "./discovery.js";
 import { createPluginApi } from "./api.js";
+import { loadBuiltinAgents } from "../agent/agents.js";
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
+
+/**
+ * Ensure ~/.fops/plugins/node_modules symlinks to the CLI's node_modules.
+ * This lets global plugins resolve bare imports (chalk, execa, inquirer, etc.)
+ * via Node's upward directory walk.
+ */
+function ensurePluginNodeModules() {
+  const cliNodeModules = path.resolve(__dirname, "../../node_modules");
+  const pluginsDir = path.join(os.homedir(), ".fops", "plugins");
+  const link = path.join(pluginsDir, "node_modules");
+
+  if (!fs.existsSync(cliNodeModules)) return;
+  if (!fs.existsSync(pluginsDir)) return;
+
+  try {
+    const stat = fs.lstatSync(link);
+    // Already a symlink — check it points to the right place
+    if (stat.isSymbolicLink()) {
+      const target = fs.readlinkSync(link);
+      if (path.resolve(pluginsDir, target) === cliNodeModules) return;
+      // Stale symlink — remove and recreate
+      fs.unlinkSync(link);
+    } else {
+      // Not a symlink (somehow a real dir) — leave it alone
+      return;
+    }
+  } catch {
+    // Doesn't exist — create it
+  }
+
+  try {
+    fs.symlinkSync(cliNodeModules, link, "junction");
+  } catch {
+    // Non-fatal — plugins that use only node: builtins still work
+  }
+}
 
 /**
  * Parse SKILL.md frontmatter (lightweight, same logic as skills.js).
@@ -37,12 +77,39 @@ function isPluginEnabled(pluginId) {
   return true;
 }
 
+/**
+ * Load built-in plugins from ./builtins/ directory.
+ * Each module must export a register(api) function.
+ */
+async function loadBuiltinPlugins(registry) {
+  const builtinsDir = path.join(__dirname, "builtins");
+  if (!fs.existsSync(builtinsDir)) return;
+
+  const entries = fs.readdirSync(builtinsDir).filter((f) => f.endsWith(".js"));
+  for (const file of entries) {
+    try {
+      const mod = await import(path.join(builtinsDir, file));
+      const plugin = mod.default || mod;
+      if (typeof plugin.register === "function") {
+        const pluginId = `builtin:${path.basename(file, ".js")}`;
+        const api = createPluginApi(pluginId, registry);
+        await plugin.register(api);
+      }
+    } catch (err) {
+      console.error(` Built-in plugin "${file}" failed to load: ${err.message}`);
+    }
+  }
+}
+
 /**
  * Load and activate all discovered plugins.
  * Returns a populated PluginRegistry.
  */
 export async function loadPlugins() {
+  ensurePluginNodeModules();
   const registry = createRegistry();
+  loadBuiltinAgents(registry);
+  await loadBuiltinPlugins(registry);
   const candidates = discoverPlugins();
 
   for (const candidate of candidates) {
@@ -11,5 +11,7 @@ export function createRegistry() {
     hooks: [], // { pluginId, event, handler, priority }
     skills: [], // { pluginId, name, description, content }
     knowledgeSources: [], // { pluginId, name, description, search }
+    autoRunPatterns: [], // { pluginId, pattern: string } — commands matching these prefixes auto-execute
+    agents: [], // { pluginId, name, description, systemPrompt, contextMode }
   };
 }
package/src/project.js CHANGED
@@ -49,12 +49,31 @@ export function rootDir(cwd = process.cwd()) {
   return null;
 }
 
+/**
+ * Check whether the Foundation project is fully initialised.
+ * Returns null when ready, or a short reason string when not.
+ */
+export function checkInitState(root) {
+  if (!root) return "no project root";
+  if (!fs.existsSync(path.join(root, ".env"))) return "missing .env";
+  // Check that at least one submodule dir has content
+  const markers = ["foundation-backend", "foundation-frontend", "foundation-storage-engine"];
+  const empty = markers.filter((d) => {
+    const dir = path.join(root, d);
+    if (!fs.existsSync(dir)) return true;
+    try { return fs.readdirSync(dir).length === 0; } catch { return true; }
+  });
+  if (empty.length === markers.length) return "submodules not cloned";
+  return null;
+}
+
 export function requireRoot(program) {
   const r = rootDir();
   if (!r) {
     console.error(
-      chalk.red("Not a Foundation project (no docker-compose + Makefile). Run from foundation-compose or set FOUNDATION_ROOT.")
+      chalk.red("Not a Foundation project (no docker-compose + Makefile).")
     );
+    console.error(chalk.dim(" Run `fops init` to set up, or set FOUNDATION_ROOT."));
     program.error({ exitCode: 1 });
   }
   return r;
package/src/setup/aws.js CHANGED
@@ -32,8 +32,8 @@ export function saveFopsConfig(config) {
  */
 export async function promptAwsSsoConfig() {
   console.log(chalk.cyan("\n AWS SSO Configuration\n"));
-  console.log(chalk.gray(" We'll set up an AWS CLI profile for ECR image pulls."));
-  console.log(chalk.gray(" You can find these values in your AWS SSO portal.\n"));
+  console.log(chalk.dim(" We'll set up an AWS CLI profile for ECR image pulls."));
+  console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));
 
   const answers = await inquirer.prompt([
     {
@@ -143,7 +143,7 @@ export function detectAwsSsoProfiles() {
  */
 function ask(question, defaultVal) {
   const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
-  const suffix = defaultVal ? chalk.gray(` (${defaultVal})`) : "";
+  const suffix = defaultVal ? chalk.dim(` (${defaultVal})`) : "";
   return new Promise((resolve) => {
     rl.question(` ${question}${suffix}: `, (answer) => {
       rl.close();
@@ -164,7 +164,7 @@ export async function ensureSsoConfig() {
   if (/sso_start_url\s*=/.test(content)) return; // already configured
 
   console.log(chalk.cyan("\n AWS SSO is not configured. Let's set it up.\n"));
-  console.log(chalk.gray(" You can find these values in your AWS SSO portal.\n"));
+  console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));
 
   const sessionName = await ask("SSO session name", "meshx");
   const startUrl = await ask("SSO start URL");
@@ -212,7 +212,7 @@ export async function fixAwsSso() {
   }
 
   const profile = profiles[0];
-  console.log(chalk.gray(` Using AWS profile: ${profile.name}`));
+  console.log(chalk.dim(` Using AWS profile: ${profile.name}`));
   console.log(chalk.cyan(` ▶ aws sso login --profile ${profile.name}`));
 
   // Open /dev/tty directly so SSO login gets a real terminal even when
@@ -358,8 +358,8 @@ export async function checkEcrRepos(dir, awsConfig) {
   const missing = neededRepos.filter((r) => !existingRepos.includes(r));
   if (missing.length > 0) {
     console.log(chalk.yellow("\n⚠ These ECR repos are referenced but don't exist (will need local build):"));
-    for (const r of missing) console.log(chalk.gray(` ✗ ${r}`));
-    console.log(chalk.gray(" These services will be built from source instead.\n"));
+    for (const r of missing) console.log(chalk.dim(` ✗ ${r}`));
+    console.log(chalk.dim(" These services will be built from source instead.\n"));
   } else {
     console.log(chalk.green("All required ECR repos exist."));
   }
@@ -24,7 +24,7 @@ export function runSetup(dir, opts = {}) {
       fs.copyFileSync(envExample, envPath);
       console.log(chalk.green("Created .env from .env.example. Edit .env with your settings."));
     } else if (fs.existsSync(envPath)) {
-      console.log(chalk.gray(".env already exists."));
+      console.log(chalk.dim(".env already exists."));
     }
   }
   if (netrcCheck) {
@@ -33,25 +33,26 @@ export function runSetup(dir, opts = {}) {
     const hasGitHub = hasNetrc && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
     if (!hasGitHub) {
       console.log(chalk.yellow("⚠ GitHub: ensure ~/.netrc has credentials for github.com (needed for submodules)."));
-      console.log(chalk.gray(" See README: Configure GitHub Authentication"));
+      console.log(chalk.dim(" See README: Configure GitHub Authentication"));
     }
   }
   if (submodules) {
     console.log(chalk.blue(`Initializing git submodules (checking out ${CLONE_BRANCH})...`));
     try {
-      await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
+      await execa("git", ["submodule", "update", "--init", "--force", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
       // Check out the target branch on each submodule
       await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
       console.log(chalk.green(`Submodules initialized — on ${CLONE_BRANCH} (falling back to main).`));
     } catch {
-      console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to check out ${CLONE_BRANCH} individually...`));
+      console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to recover individually...`));
       try {
-        await execa("git", ["submodule", "init"], { cwd: dir, stdio: "inherit" });
+        await execa("git", ["submodule", "absorbgitdirs"], { cwd: dir, stdio: "inherit" });
+        await execa("git", ["submodule", "update", "--init", "--force", "--recursive"], { cwd: dir, stdio: "inherit" });
        await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
        console.log(chalk.green("Submodules recovered."));
       } catch {
         console.log(chalk.yellow("Some submodules still failed. Fix manually with:"));
-        console.log(chalk.gray(` cd ${dir} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'`));
+        console.log(chalk.dim(` cd ${dir} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'`));
       }
     }
   }
@@ -101,7 +102,7 @@ export function runSetup(dir, opts = {}) {
     else fs.writeFileSync(awsConfigPath, ssoConfig, { mode: 0o600 });
     console.log(chalk.green(` Created AWS profile '${awsConfig.profileName}' in ~/.aws/config`));
   } else {
-    console.log(chalk.gray(" Skipping AWS setup. Private ECR images won't be available."));
+    console.log(chalk.dim(" Skipping AWS setup. Private ECR images won't be available."));
     console.log(chalk.blue("Downloading public container images..."));
     try {
       await make(dir, "download");
@@ -157,8 +158,8 @@ export function runSetup(dir, opts = {}) {
       await make(dir, "download");
     } catch {
       console.log(chalk.yellow("\n⚠ Some images failed to download. Public images are fine."));
-      console.log(chalk.gray(` For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
-      console.log(chalk.gray(" Then re-run: fops init --download\n"));
+      console.log(chalk.dim(` For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
+      console.log(chalk.dim(" Then re-run: fops init --download\n"));
     }
   }
   console.log(chalk.green("Setup complete. Run: fops up"));