@meshxdata/fops 0.0.1 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/README.md +62 -40
  2. package/package.json +4 -3
  3. package/src/agent/agent.js +161 -68
  4. package/src/agent/agents.js +224 -0
  5. package/src/agent/context.js +287 -96
  6. package/src/agent/index.js +1 -0
  7. package/src/agent/llm.js +134 -20
  8. package/src/auth/coda.js +128 -0
  9. package/src/auth/index.js +1 -0
  10. package/src/auth/login.js +13 -13
  11. package/src/auth/oauth.js +4 -4
  12. package/src/commands/index.js +94 -21
  13. package/src/config.js +2 -2
  14. package/src/doctor.js +208 -22
  15. package/src/feature-flags.js +197 -0
  16. package/src/plugins/api.js +23 -0
  17. package/src/plugins/builtins/stack-api.js +36 -0
  18. package/src/plugins/index.js +1 -0
  19. package/src/plugins/knowledge.js +124 -0
  20. package/src/plugins/loader.js +67 -0
  21. package/src/plugins/registry.js +3 -0
  22. package/src/project.js +20 -1
  23. package/src/setup/aws.js +7 -7
  24. package/src/setup/setup.js +18 -12
  25. package/src/setup/wizard.js +86 -15
  26. package/src/shell.js +2 -2
  27. package/src/skills/foundation/SKILL.md +200 -66
  28. package/src/ui/confirm.js +3 -2
  29. package/src/ui/input.js +31 -34
  30. package/src/ui/spinner.js +39 -13
  31. package/src/ui/streaming.js +2 -2
  32. package/STRUCTURE.md +0 -43
  33. package/src/agent/agent.test.js +0 -233
  34. package/src/agent/context.test.js +0 -81
  35. package/src/agent/llm.test.js +0 -139
  36. package/src/auth/keychain.test.js +0 -185
  37. package/src/auth/login.test.js +0 -192
  38. package/src/auth/oauth.test.js +0 -118
  39. package/src/auth/resolve.test.js +0 -153
  40. package/src/config.test.js +0 -70
  41. package/src/doctor.test.js +0 -134
  42. package/src/plugins/api.test.js +0 -95
  43. package/src/plugins/discovery.test.js +0 -92
  44. package/src/plugins/hooks.test.js +0 -118
  45. package/src/plugins/manifest.test.js +0 -106
  46. package/src/plugins/registry.test.js +0 -43
  47. package/src/plugins/skills.test.js +0 -173
  48. package/src/project.test.js +0 -196
  49. package/src/setup/aws.test.js +0 -280
  50. package/src/shell.test.js +0 -72
  51. package/src/ui/banner.test.js +0 -97
  52. package/src/ui/spinner.test.js +0 -29
package/src/plugins/knowledge.js ADDED
@@ -0,0 +1,124 @@
+ /**
+  * RAG knowledge orchestrator.
+  * Fans out search queries to all registered knowledge sources,
+  * merges results, enforces token budgets, and caches.
+  */
+
+ const MAX_TOTAL_BYTES = 16_384; // ~4000 tokens
+ const MAX_PER_SOURCE_BYTES = 8192;
+ const MAX_PER_RESULT_BYTES = 4096;
+ const SOURCE_TIMEOUT_MS = 5000;
+ const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
+ const CACHE_MAX_ENTRIES = 50;
+
+ /** Simple LRU cache keyed by query string. */
+ const cache = new Map();
+
+ function cacheGet(key) {
+   const entry = cache.get(key);
+   if (!entry) return null;
+   if (Date.now() - entry.ts > CACHE_TTL_MS) {
+     cache.delete(key);
+     return null;
+   }
+   // Move to end (most-recently used)
+   cache.delete(key);
+   cache.set(key, entry);
+   return entry.value;
+ }
+
+ function cacheSet(key, value) {
+   // Evict oldest if over capacity
+   if (cache.size >= CACHE_MAX_ENTRIES) {
+     const oldest = cache.keys().next().value;
+     cache.delete(oldest);
+   }
+   cache.set(key, { value, ts: Date.now() });
+ }
+
+ function truncate(str, maxBytes) {
+   if (str.length <= maxBytes) return str;
+   return str.slice(0, maxBytes - 3) + "...";
+ }
+
+ /**
+  * Search all registered knowledge sources and return formatted context.
+  * @param {object} registry — plugin registry
+  * @param {string} query — user's message / search query
+  * @returns {string|null} formatted knowledge block or null if no results
+  */
+ export async function searchKnowledge(registry, query) {
+   const sources = registry?.knowledgeSources;
+   if (!sources?.length || !query?.trim()) return null;
+
+   const normalizedQuery = query.trim().toLowerCase();
+   const cached = cacheGet(normalizedQuery);
+   if (cached !== null) return cached;
+
+   // Fan out to all sources in parallel with individual timeouts
+   const sourceResults = await Promise.all(
+     sources.map(async (source) => {
+       try {
+         const results = await Promise.race([
+           source.search(query),
+           new Promise((_, reject) =>
+             setTimeout(() => reject(new Error("timeout")), SOURCE_TIMEOUT_MS),
+           ),
+         ]);
+         if (!Array.isArray(results)) return [];
+         return results.map((r) => ({ ...r, _source: source.name }));
+       } catch {
+         // Skip failing sources silently
+         return [];
+       }
+     }),
+   );
+
+   // Flatten and sort by score (descending), then by source order
+   const allResults = sourceResults.flat();
+   if (!allResults.length) {
+     cacheSet(normalizedQuery, null);
+     return null;
+   }
+
+   allResults.sort((a, b) => (b.score ?? 0) - (a.score ?? 0));
+
+   // Enforce per-source budget
+   const sourceBytes = {};
+   const budgeted = allResults.filter((r) => {
+     const src = r._source;
+     const used = sourceBytes[src] || 0;
+     const content = truncate(r.content || "", MAX_PER_RESULT_BYTES);
+     const size = content.length;
+     if (used + size > MAX_PER_SOURCE_BYTES) return false;
+     sourceBytes[src] = used + size;
+     r._truncatedContent = content;
+     return true;
+   });
+
+   // Enforce total budget and format
+   let totalBytes = 0;
+   const sections = [];
+
+   for (const r of budgeted) {
+     const title = r.title || "Untitled";
+     const source = r._source || "unknown";
+     const url = r.url ? ` (${r.url})` : "";
+     const header = `### [${source}] ${title}${url}`;
+     const content = r._truncatedContent || "";
+     const section = `${header}\n${content}`;
+
+     if (totalBytes + section.length > MAX_TOTAL_BYTES) break;
+     totalBytes += section.length;
+     sections.push(section);
+   }
+
+   if (!sections.length) {
+     cacheSet(normalizedQuery, null);
+     return null;
+   }
+
+   const result = "## Knowledge Base\n\n" + sections.join("\n\n");
+   cacheSet(normalizedQuery, result);
+   return result;
+ }
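
For context, a minimal sketch of how a source plugs into `searchKnowledge`: the `registry` shape follows the `knowledgeSources` entry added in registry.js further down, while the `docs-search` source and its canned result are purely hypothetical illustrations.

```js
import { searchKnowledge } from "./knowledge.js";

// Hypothetical source: any object with a `name` and an async `search(query)`
// resolving to [{ title, url, content, score }] fits the orchestrator.
const docsSource = {
  pluginId: "example-plugin",        // illustrative id, not taken from the diff
  name: "docs-search",
  description: "Searches local markdown docs",
  async search(query) {
    // A real source would query an index or API; this returns one canned hit.
    return [{ title: "Getting started", url: "https://example.invalid/docs", content: `Results for ${query}`, score: 0.9 }];
  },
};

const registry = { knowledgeSources: [docsSource] };
const block = await searchKnowledge(registry, "how do I start the stack?");
// `block` is a "## Knowledge Base" markdown string, or null when nothing matched.
console.log(block);
```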
package/src/plugins/loader.js CHANGED
@@ -1,10 +1,50 @@
  import fs from "node:fs";
  import os from "node:os";
  import path from "node:path";
+ import { fileURLToPath } from "node:url";
  import { createRegistry } from "./registry.js";
  import { validateManifest } from "./manifest.js";
  import { discoverPlugins } from "./discovery.js";
  import { createPluginApi } from "./api.js";
+ import { loadBuiltinAgents } from "../agent/agents.js";
+
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
+
+ /**
+  * Ensure ~/.fops/plugins/node_modules symlinks to the CLI's node_modules.
+  * This lets global plugins resolve bare imports (chalk, execa, inquirer, etc.)
+  * via Node's upward directory walk.
+  */
+ function ensurePluginNodeModules() {
+   const cliNodeModules = path.resolve(__dirname, "../../node_modules");
+   const pluginsDir = path.join(os.homedir(), ".fops", "plugins");
+   const link = path.join(pluginsDir, "node_modules");
+
+   if (!fs.existsSync(cliNodeModules)) return;
+   if (!fs.existsSync(pluginsDir)) return;
+
+   try {
+     const stat = fs.lstatSync(link);
+     // Already a symlink — check it points to the right place
+     if (stat.isSymbolicLink()) {
+       const target = fs.readlinkSync(link);
+       if (path.resolve(pluginsDir, target) === cliNodeModules) return;
+       // Stale symlink — remove and recreate
+       fs.unlinkSync(link);
+     } else {
+       // Not a symlink (somehow a real dir) — leave it alone
+       return;
+     }
+   } catch {
+     // Doesn't exist — create it
+   }
+
+   try {
+     fs.symlinkSync(cliNodeModules, link, "junction");
+   } catch {
+     // Non-fatal — plugins that use only node: builtins still work
+   }
+ }

  /**
   * Parse SKILL.md frontmatter (lightweight, same logic as skills.js).
@@ -37,12 +77,39 @@ function isPluginEnabled(pluginId) {
    return true;
  }

+ /**
+  * Load built-in plugins from ./builtins/ directory.
+  * Each module must export a register(api) function.
+  */
+ async function loadBuiltinPlugins(registry) {
+   const builtinsDir = path.join(__dirname, "builtins");
+   if (!fs.existsSync(builtinsDir)) return;
+
+   const entries = fs.readdirSync(builtinsDir).filter((f) => f.endsWith(".js"));
+   for (const file of entries) {
+     try {
+       const mod = await import(path.join(builtinsDir, file));
+       const plugin = mod.default || mod;
+       if (typeof plugin.register === "function") {
+         const pluginId = `builtin:${path.basename(file, ".js")}`;
+         const api = createPluginApi(pluginId, registry);
+         await plugin.register(api);
+       }
+     } catch (err) {
+       console.error(` Built-in plugin "${file}" failed to load: ${err.message}`);
+     }
+   }
+ }
+
  /**
   * Load and activate all discovered plugins.
   * Returns a populated PluginRegistry.
   */
  export async function loadPlugins() {
+   ensurePluginNodeModules();
    const registry = createRegistry();
+   loadBuiltinAgents(registry);
+   await loadBuiltinPlugins(registry);
    const candidates = discoverPlugins();

    for (const candidate of candidates) {
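
For reference, a builtin plugin module satisfying the contract `loadBuiltinPlugins` expects (a default export with a `register(api)` function) might look like the sketch below. The registration helper on `api` is an assumption, since `createPluginApi` itself is not shown in this diff.

```js
// Hypothetical ./builtins/example.js — file name and api methods are illustrative.
export default {
  async register(api) {
    // Assumption: the plugin api exposes helpers that push entries into the
    // registry arrays shown in registry.js (knowledgeSources, doctorChecks, ...).
    api.addKnowledgeSource?.({
      name: "example-source",
      description: "Demo knowledge source",
      search: async (query) => [],   // no-op source, returns no results
    });
  },
};
```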
package/src/plugins/registry.js CHANGED
@@ -10,5 +10,8 @@ export function createRegistry() {
    doctorChecks: [], // { pluginId, name, fn }
    hooks: [], // { pluginId, event, handler, priority }
    skills: [], // { pluginId, name, description, content }
+   knowledgeSources: [], // { pluginId, name, description, search }
+   autoRunPatterns: [], // { pluginId, pattern: string } — commands matching these prefixes auto-execute
+   agents: [], // { pluginId, name, description, systemPrompt, contextMode }
  };
  }
package/src/project.js CHANGED
@@ -49,12 +49,31 @@ export function rootDir(cwd = process.cwd()) {
    return null;
  }

+ /**
+  * Check whether the Foundation project is fully initialised.
+  * Returns null when ready, or a short reason string when not.
+  */
+ export function checkInitState(root) {
+   if (!root) return "no project root";
+   if (!fs.existsSync(path.join(root, ".env"))) return "missing .env";
+   // Check that at least one submodule dir has content
+   const markers = ["foundation-backend", "foundation-frontend", "foundation-storage-engine"];
+   const empty = markers.filter((d) => {
+     const dir = path.join(root, d);
+     if (!fs.existsSync(dir)) return true;
+     try { return fs.readdirSync(dir).length === 0; } catch { return true; }
+   });
+   if (empty.length === markers.length) return "submodules not cloned";
+   return null;
+ }
+
  export function requireRoot(program) {
    const r = rootDir();
    if (!r) {
      console.error(
-       chalk.red("Not a Foundation project (no docker-compose + Makefile). Run from foundation-compose or set FOUNDATION_ROOT.")
+       chalk.red("Not a Foundation project (no docker-compose + Makefile).")
      );
+     console.error(chalk.dim(" Run `fops init` to set up, or set FOUNDATION_ROOT."));
      program.error({ exitCode: 1 });
    }
    return r;
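
A short sketch of how the new `checkInitState` helper could be consumed alongside `rootDir`; only the two exports come from project.js, the calling code is illustrative.

```js
import { rootDir, checkInitState } from "./project.js";

const root = rootDir();
const notReady = checkInitState(root);   // null when ready, otherwise a reason string
if (notReady) {
  console.error(`Project not initialised (${notReady}). Try running: fops init`);
}
```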
package/src/setup/aws.js CHANGED
@@ -32,8 +32,8 @@ export function saveFopsConfig(config) {
   */
  export async function promptAwsSsoConfig() {
    console.log(chalk.cyan("\n AWS SSO Configuration\n"));
-   console.log(chalk.gray(" We'll set up an AWS CLI profile for ECR image pulls."));
-   console.log(chalk.gray(" You can find these values in your AWS SSO portal.\n"));
+   console.log(chalk.dim(" We'll set up an AWS CLI profile for ECR image pulls."));
+   console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));

    const answers = await inquirer.prompt([
      {
@@ -143,7 +143,7 @@ export function detectAwsSsoProfiles() {
   */
  function ask(question, defaultVal) {
    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
-   const suffix = defaultVal ? chalk.gray(` (${defaultVal})`) : "";
+   const suffix = defaultVal ? chalk.dim(` (${defaultVal})`) : "";
    return new Promise((resolve) => {
      rl.question(` ${question}${suffix}: `, (answer) => {
        rl.close();
@@ -164,7 +164,7 @@ export async function ensureSsoConfig() {
    if (/sso_start_url\s*=/.test(content)) return; // already configured

    console.log(chalk.cyan("\n AWS SSO is not configured. Let's set it up.\n"));
-   console.log(chalk.gray(" You can find these values in your AWS SSO portal.\n"));
+   console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));

    const sessionName = await ask("SSO session name", "meshx");
    const startUrl = await ask("SSO start URL");
@@ -212,7 +212,7 @@ export async function fixAwsSso() {
    }

    const profile = profiles[0];
-   console.log(chalk.gray(` Using AWS profile: ${profile.name}`));
+   console.log(chalk.dim(` Using AWS profile: ${profile.name}`));
    console.log(chalk.cyan(` ▶ aws sso login --profile ${profile.name}`));

    // Open /dev/tty directly so SSO login gets a real terminal even when
@@ -358,8 +358,8 @@ export async function checkEcrRepos(dir, awsConfig) {
    const missing = neededRepos.filter((r) => !existingRepos.includes(r));
    if (missing.length > 0) {
      console.log(chalk.yellow("\n⚠ These ECR repos are referenced but don't exist (will need local build):"));
-     for (const r of missing) console.log(chalk.gray(` ✗ ${r}`));
-     console.log(chalk.gray(" These services will be built from source instead.\n"));
+     for (const r of missing) console.log(chalk.dim(` ✗ ${r}`));
+     console.log(chalk.dim(" These services will be built from source instead.\n"));
    } else {
      console.log(chalk.green("All required ECR repos exist."));
    }
package/src/setup/setup.js CHANGED
@@ -7,6 +7,9 @@ import inquirer from "inquirer";
  import { make } from "../shell.js";
  import { readFopsConfig, saveFopsConfig, promptAwsSsoConfig, detectEcrRegistry, checkEcrRepos } from "./aws.js";

+ // TODO: change back to "main" once stack/api is merged
+ export const CLONE_BRANCH = "stack/api";
+
  export function runSetup(dir, opts = {}) {
    const submodules = opts.submodules !== false;
    const createEnv = opts.env !== false;
@@ -21,7 +24,7 @@ export function runSetup(dir, opts = {}) {
      fs.copyFileSync(envExample, envPath);
      console.log(chalk.green("Created .env from .env.example. Edit .env with your settings."));
    } else if (fs.existsSync(envPath)) {
-     console.log(chalk.gray(".env already exists."));
+     console.log(chalk.dim(".env already exists."));
    }
  }
  if (netrcCheck) {
@@ -30,23 +33,26 @@ export function runSetup(dir, opts = {}) {
    const hasGitHub = hasNetrc && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
    if (!hasGitHub) {
      console.log(chalk.yellow("⚠ GitHub: ensure ~/.netrc has credentials for github.com (needed for submodules)."));
-     console.log(chalk.gray(" See README: Configure GitHub Authentication"));
+     console.log(chalk.dim(" See README: Configure GitHub Authentication"));
    }
  }
  if (submodules) {
-   console.log(chalk.blue("Initializing git submodules (checking out main)..."));
+   console.log(chalk.blue(`Initializing git submodules (checking out ${CLONE_BRANCH})...`));
    try {
-     await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
-     console.log(chalk.green("Submodules initialized all on main."));
+     await execa("git", ["submodule", "update", "--init", "--force", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
+     // Check out the target branch on each submodule
+     await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
+     console.log(chalk.green(`Submodules initialized — on ${CLONE_BRANCH} (falling back to main).`));
    } catch {
-     console.log(chalk.yellow("⚠ Some submodules had issues. Attempting to check out main individually..."));
+     console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to recover individually...`));
      try {
-       await execa("git", ["submodule", "init"], { cwd: dir, stdio: "inherit" });
-       await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: dir, stdio: "inherit" });
+       await execa("git", ["submodule", "absorbgitdirs"], { cwd: dir, stdio: "inherit" });
+       await execa("git", ["submodule", "update", "--init", "--force", "--recursive"], { cwd: dir, stdio: "inherit" });
+       await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
        console.log(chalk.green("Submodules recovered."));
      } catch {
        console.log(chalk.yellow("Some submodules still failed. Fix manually with:"));
-       console.log(chalk.gray(` cd ${dir} && git submodule foreach 'git checkout main && git pull'`));
+       console.log(chalk.dim(` cd ${dir} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'`));
      }
    }
  }
@@ -96,7 +102,7 @@ export function runSetup(dir, opts = {}) {
    else fs.writeFileSync(awsConfigPath, ssoConfig, { mode: 0o600 });
    console.log(chalk.green(` Created AWS profile '${awsConfig.profileName}' in ~/.aws/config`));
  } else {
-   console.log(chalk.gray(" Skipping AWS setup. Private ECR images won't be available."));
+   console.log(chalk.dim(" Skipping AWS setup. Private ECR images won't be available."));
    console.log(chalk.blue("Downloading public container images..."));
    try {
      await make(dir, "download");
@@ -152,8 +158,8 @@ export function runSetup(dir, opts = {}) {
      await make(dir, "download");
    } catch {
      console.log(chalk.yellow("\n⚠ Some images failed to download. Public images are fine."));
-     console.log(chalk.gray(` For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
-     console.log(chalk.gray(" Then re-run: fops init --download\n"));
+     console.log(chalk.dim(` For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
+     console.log(chalk.dim(" Then re-run: fops init --download\n"));
    }
  }
  console.log(chalk.green("Setup complete. Run: fops up"));
package/src/setup/wizard.js CHANGED
@@ -5,7 +5,9 @@ import chalk from "chalk";
  import { execa } from "execa";
  import inquirer from "inquirer";
  import { isFoundationRoot, findComposeRootUp } from "../project.js";
- import { runSetup } from "./setup.js";
+ import { discoverPlugins } from "../plugins/discovery.js";
+ import { validateManifest } from "../plugins/manifest.js";
+ import { runSetup, CLONE_BRANCH } from "./setup.js";

  export async function runInitWizard() {
    const cwd = process.cwd();
@@ -13,10 +15,10 @@ export async function runInitWizard() {
    let projectRoot = null;
    if (envRoot && fs.existsSync(envRoot) && isFoundationRoot(envRoot)) {
      projectRoot = path.resolve(envRoot);
-     console.log(chalk.gray(`Using FOUNDATION_ROOT: ${projectRoot}\n`));
+     console.log(chalk.dim(`Using FOUNDATION_ROOT: ${projectRoot}\n`));
    } else if (isFoundationRoot(cwd)) {
      projectRoot = cwd;
-     console.log(chalk.gray("Using current directory as project root.\n"));
+     console.log(chalk.dim("Using current directory as project root.\n"));
    } else {
      const foundUp = findComposeRootUp(cwd);
      if (foundUp && foundUp !== cwd) {
@@ -28,19 +30,33 @@ export async function runInitWizard() {
      projectRoot = foundUp;
    }
    if (!projectRoot) {
-     let hasGit = false, hasDocker = false, hasAws = false;
+     let hasGit = false, hasDocker = false, hasAws = false, hasClaude = false;
      try { await execa("git", ["--version"]); hasGit = true; } catch {}
      try { await execa("docker", ["info"], { timeout: 5000 }); hasDocker = true; } catch {}
      try { await execa("aws", ["--version"]); hasAws = true; } catch {}
+     try { await execa("claude", ["--version"]); hasClaude = true; } catch {}
      console.log(chalk.cyan(" Prerequisites\n"));
      console.log(hasGit ? chalk.green(" ✓ Git") : chalk.red(" ✗ Git — install git first"));
      console.log(hasDocker ? chalk.green(" ✓ Docker") : chalk.red(" ✗ Docker — install and start Docker Desktop"));
+     console.log(hasClaude ? chalk.green(" ✓ Claude CLI") : chalk.red(" ✗ Claude CLI — run: npm install (included as a dependency)"));
      console.log(hasAws ? chalk.green(" ✓ AWS CLI") : chalk.yellow(" ⚠ AWS CLI — install for ECR image pulls (brew install awscli)"));
      const netrcPath = path.join(os.homedir(), ".netrc");
      const hasNetrc = fs.existsSync(netrcPath) && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
      console.log(hasNetrc ? chalk.green(" ✓ GitHub credentials (~/.netrc)") : chalk.yellow(" ⚠ GitHub credentials — add to ~/.netrc (needed for private submodules)"));
+     // Cursor IDE (only when cursor plugin is installed)
+     const cursorPluginDir = path.join(os.homedir(), ".fops", "plugins", "cursor");
+     if (fs.existsSync(cursorPluginDir)) {
+       let cursorVer = null;
+       try {
+         const { stdout } = await execa("cursor", ["--version"]);
+         cursorVer = (stdout || "").split("\n")[0].trim();
+       } catch {}
+       console.log(cursorVer
+         ? chalk.green(" ✓ Cursor IDE") + chalk.dim(` — ${cursorVer}`)
+         : chalk.yellow(" ⚠ Cursor IDE — install from cursor.com, then: Cmd+Shift+P → 'Install cursor command'"));
+     }
      console.log("");
-     if (!hasGit || !hasDocker) {
+     if (!hasGit || !hasDocker || !hasClaude) {
        console.log(chalk.red("Fix the missing prerequisites above, then run fops init again.\n"));
        process.exit(1);
      }
@@ -55,8 +71,9 @@ export async function runInitWizard() {
    const { repoUrl } = await inquirer.prompt([
      { type: "input", name: "repoUrl", message: "Repository URL:", default: "https://github.com/meshxdata/foundation-compose.git", validate: (v) => (v?.trim() ? true : "Repository URL is required.") },
    ]);
+   const repoName = repoUrl.trim().replace(/\.git$/, "").split("/").pop() || "foundation-compose";
    const { targetDir } = await inquirer.prompt([
-     { type: "input", name: "targetDir", message: "Clone into:", default: cwd },
+     { type: "input", name: "targetDir", message: "Clone into:", default: path.join(cwd, repoName) },
    ]);
    const resolved = path.resolve(targetDir.trim());
    if (fs.existsSync(resolved)) {
@@ -64,9 +81,9 @@ export async function runInitWizard() {
      if (isFoundationRoot(resolved)) { console.log(chalk.green(" Looks like a Foundation project — using it.\n")); projectRoot = resolved; }
      else { console.log(chalk.red(" Not a Foundation project. Remove it or choose a different path.\n")); process.exit(1); }
    } else {
-     console.log(chalk.blue("\nCloning (this may take a minute)...\n"));
+     console.log(chalk.blue(`\nCloning (branch: ${CLONE_BRANCH}, this may take a minute)...\n`));
      try {
-       await execa("git", ["clone", repoUrl.trim(), resolved], { stdio: "inherit" });
+       await execa("git", ["clone", "-b", CLONE_BRANCH, repoUrl.trim(), resolved], { stdio: "inherit" });
      } catch (err) {
        console.log(chalk.red("\n Clone failed. Check the URL and your credentials.\n"));
        if (!hasNetrc) {
@@ -75,19 +92,21 @@ export async function runInitWizard() {
      }
      process.exit(1);
    }
-   console.log(chalk.blue("\nInitializing submodules (checking out main)...\n"));
+   console.log(chalk.blue(`\nInitializing submodules (checking out ${CLONE_BRANCH})...\n`));
    try {
-     await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: resolved, stdio: "inherit" });
-     console.log(chalk.green("\n Cloned successfully all submodules on main.\n"));
+     await execa("git", ["submodule", "update", "--init", "--force", "--remote", "--recursive"], { cwd: resolved, stdio: "inherit" });
+     await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: resolved, stdio: "inherit" });
+     console.log(chalk.green(`\n Cloned successfully — submodules on ${CLONE_BRANCH} (falling back to main).\n`));
    } catch {
-     console.log(chalk.yellow("\n ⚠ Some submodules had issues. Attempting to check out main individually...\n"));
+     console.log(chalk.yellow(`\n ⚠ Some submodules had issues. Attempting to recover...\n`));
      try {
-       await execa("git", ["submodule", "init"], { cwd: resolved, stdio: "inherit" });
-       await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: resolved, stdio: "inherit" });
+       await execa("git", ["submodule", "absorbgitdirs"], { cwd: resolved, stdio: "inherit" });
+       await execa("git", ["submodule", "update", "--init", "--force", "--recursive"], { cwd: resolved, stdio: "inherit" });
+       await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: resolved, stdio: "inherit" });
        console.log(chalk.green(" Submodules recovered.\n"));
      } catch {
        console.log(chalk.yellow(" Some submodules still failed. Fix manually with:"));
-       console.log(chalk.gray(` cd ${resolved} && git submodule foreach 'git checkout main && git pull'\n`));
+       console.log(chalk.dim(` cd ${resolved} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'\n`));
      }
    }
    projectRoot = resolved;
@@ -114,6 +133,58 @@ export async function runInitWizard() {
    { type: "confirm", name: "env", message: "Create .env from .env.example (if missing)?", default: true },
    { type: "confirm", name: "download", message: "Download container images now (make download)?", default: false },
  ]);
+
+ // ── Plugin selection ───────────────────────────────
+ const candidates = discoverPlugins();
+ const plugins = candidates
+   .map((c) => {
+     const manifest = validateManifest(c.path);
+     if (!manifest) return null;
+     return { id: manifest.id, name: manifest.name, description: manifest.description || "", path: c.path };
+   })
+   .filter(Boolean);
+
+ if (plugins.length > 0) {
+   console.log(chalk.cyan("\n Plugins\n"));
+   console.log(chalk.dim(" Select which plugins to enable:\n"));
+
+   // Read existing config to preserve current enabled state
+   const fopsConfigPath = path.join(os.homedir(), ".fops.json");
+   let fopsConfig = {};
+   try {
+     if (fs.existsSync(fopsConfigPath)) {
+       fopsConfig = JSON.parse(fs.readFileSync(fopsConfigPath, "utf8"));
+     }
+   } catch {}
+
+   const currentEntries = fopsConfig?.plugins?.entries || {};
+   const choices = plugins.map((p) => {
+     const isEnabled = currentEntries[p.id]?.enabled !== false;
+     return {
+       name: `${p.name}${p.description ? chalk.dim(` — ${p.description}`) : ""}`,
+       value: p.id,
+       checked: isEnabled,
+     };
+   });
+
+   const { enabledPlugins } = await inquirer.prompt([{
+     type: "checkbox",
+     name: "enabledPlugins",
+     message: "Plugins:",
+     choices,
+   }]);
+
+   // Save enabled/disabled state
+   if (!fopsConfig.plugins) fopsConfig.plugins = {};
+   if (!fopsConfig.plugins.entries) fopsConfig.plugins.entries = {};
+   for (const p of plugins) {
+     if (!fopsConfig.plugins.entries[p.id]) fopsConfig.plugins.entries[p.id] = {};
+     fopsConfig.plugins.entries[p.id].enabled = enabledPlugins.includes(p.id);
+   }
+   fs.writeFileSync(fopsConfigPath, JSON.stringify(fopsConfig, null, 2) + "\n");
+   console.log(chalk.green(` ✓ ${enabledPlugins.length}/${plugins.length} plugin(s) enabled`));
+ }
+
  console.log("");
  await runSetup(projectRoot, { submodules, env, download, netrcCheck: true });
  }
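
Based on the wizard code above, the persisted `~/.fops.json` carries one `enabled` flag per plugin id under `plugins.entries`; the ids in this sample are placeholders, not values from the package.

```json
{
  "plugins": {
    "entries": {
      "some-plugin-id": { "enabled": true },
      "another-plugin-id": { "enabled": false }
    }
  }
}
```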
package/src/shell.js CHANGED
@@ -1,9 +1,9 @@
  import { execa } from "execa";

  export async function make(root, target, args = []) {
-   return execa("make", [target, ...args], { cwd: root, stdio: "inherit" });
+   return execa("make", [target, ...args], { cwd: root, stdio: "inherit", reject: false });
  }

  export async function dockerCompose(root, args) {
-   return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit" });
+   return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit", reject: false });
  }
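
Worth noting: with execa's `reject: false`, a failing `make` or `docker compose` no longer throws; the promise resolves with a result object that callers must inspect. A minimal sketch of what a call site would look like (the `failed`/`exitCode` check reflects standard execa behaviour, not code from this package):

```js
import { make } from "./shell.js";

const root = process.cwd();                    // illustrative project root
const result = await make(root, "download");
if (result.failed) {                           // execa sets failed/exitCode instead of throwing
  console.warn(`make download exited with code ${result.exitCode}`);
}
```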