@meshxdata/fops 0.0.3 → 0.0.5
- package/package.json +2 -1
- package/src/agent/agent.js +139 -38
- package/src/agent/agents.js +224 -0
- package/src/agent/context.js +146 -12
- package/src/agent/index.js +1 -0
- package/src/agent/llm.js +84 -13
- package/src/auth/coda.js +10 -10
- package/src/auth/login.js +13 -13
- package/src/auth/oauth.js +4 -4
- package/src/commands/index.js +121 -15
- package/src/config.js +2 -2
- package/src/doctor.js +215 -26
- package/src/feature-flags.js +197 -0
- package/src/plugins/api.js +14 -0
- package/src/plugins/builtins/stack-api.js +36 -0
- package/src/plugins/loader.js +67 -0
- package/src/plugins/registry.js +2 -0
- package/src/project.js +20 -1
- package/src/setup/aws.js +58 -45
- package/src/setup/setup.js +10 -9
- package/src/setup/wizard.js +195 -15
- package/src/ui/confirm.js +3 -2
- package/src/ui/input.js +2 -2
- package/src/ui/spinner.js +4 -4
- package/src/ui/streaming.js +2 -2
package/src/feature-flags.js
ADDED
@@ -0,0 +1,197 @@
+import fs from "node:fs";
+import path from "node:path";
+import chalk from "chalk";
+import { execa } from "execa";
+import inquirer from "inquirer";
+
+/**
+ * Canonical feature flags — the complete set known to the platform.
+ * Label is a human-readable short description for the toggle UI.
+ */
+const KNOWN_FLAGS = {
+  MX_FF_STORAGE_EXPLORER_ENABLED: "Storage Explorer",
+  MX_FF_USER_MANAGEMENT_ENABLED: "User Management",
+  MX_FF_SETTINGS_PAGE_ENABLED: "Settings Page",
+  MX_FF_ENCRYPTION_STATUS_DISPLAY: "Encryption Status",
+  MX_FF_USER_PAT_ENABLED: "User PAT (Personal Access Tokens)",
+  MX_FF_SENTRY_ENABLED: "Sentry Error Tracking",
+  MX_FF_EXPLORER_ENABLED: "Data Explorer",
+  MX_FF_NEW_PROFILE_ENABLED: "New Profile Page",
+};
+
+/**
+ * Parse docker-compose.yaml for all MX_FF_* entries.
+ * Returns a map: flagName → { value, services: Set<string>, lines: [{ lineNum, original }] }
+ */
+function parseComposeFlags(composePath) {
+  const content = fs.readFileSync(composePath, "utf8");
+  const lines = content.split("\n");
+  const flags = {};
+
+  // Track which service block we're in
+  let currentService = null;
+
+  for (let i = 0; i < lines.length; i++) {
+    const line = lines[i];
+
+    // Service definition: exactly 2-space indent, ends with colon only
+    const svcMatch = line.match(/^  ([a-z][\w-]+):\s*$/);
+    if (svcMatch) {
+      currentService = svcMatch[1];
+    }
+
+    // Match MX_FF_* in both YAML map and list formats
+    // Map format: MX_FF_NAME: "value"
+    const mapMatch = line.match(/\b(MX_FF_\w+)\s*:\s*"?(true|false)"?/);
+    // List format: - MX_FF_NAME=value
+    const listMatch = !mapMatch && line.match(/[-]\s*(MX_FF_\w+)\s*=\s*(true|false)/);
+
+    const match = mapMatch || listMatch;
+    if (match) {
+      const name = match[1];
+      const value = match[2] === "true";
+
+      if (!flags[name]) {
+        flags[name] = { value, services: new Set(), lines: [] };
+      }
+      flags[name].lines.push({ lineNum: i, original: line });
+      if (currentService) flags[name].services.add(currentService);
+      // If any occurrence is true, treat the flag as enabled
+      if (value) flags[name].value = true;
+    }
+  }
+
+  return flags;
+}
+
+/**
+ * Update docker-compose.yaml by flipping flag values on specific lines.
+ */
+function updateComposeFlags(composePath, changes) {
+  const content = fs.readFileSync(composePath, "utf8");
+  const lines = content.split("\n");
+
+  for (const { lineNum, newValue } of changes) {
+    const line = lines[lineNum];
+    lines[lineNum] = line
+      .replace(/(MX_FF_\w+\s*:\s*)"?(true|false)"?/, `$1"${newValue}"`)
+      .replace(/(MX_FF_\w+=)(true|false)/, `$1${newValue}`);
+  }
+
+  fs.writeFileSync(composePath, lines.join("\n"));
+}
+
+/**
+ * Interactive feature flag configuration.
+ * Reads flags from compose, presents toggle UI, applies changes, restarts services.
+ */
+export async function runFeatureFlags(root) {
+  const composePath = path.join(root, "docker-compose.yaml");
+  if (!fs.existsSync(composePath)) {
+    console.log(chalk.red(" No docker-compose.yaml found."));
+    return;
+  }
+
+  console.log(chalk.bold.cyan("\n Feature Flags\n"));
+
+  // Parse current state from compose
+  const composeFlags = parseComposeFlags(composePath);
+
+  // Build the full flag list: compose flags + canonical flags not yet in compose
+  const allFlags = {};
+  for (const [name, info] of Object.entries(composeFlags)) {
+    allFlags[name] = { ...info, inCompose: true };
+  }
+  for (const name of Object.keys(KNOWN_FLAGS)) {
+    if (!allFlags[name]) {
+      allFlags[name] = { value: false, services: new Set(), lines: [], inCompose: false };
+    }
+  }
+
+  const flagNames = Object.keys(allFlags).sort();
+
+  // Show current state
+  for (const name of flagNames) {
+    const flag = allFlags[name];
+    const label = KNOWN_FLAGS[name] || name;
+    const services = flag.services.size > 0 ? chalk.dim(` (${[...flag.services].join(", ")})`) : "";
+    if (flag.value) {
+      console.log(chalk.green(` ✓ ${label}`) + services);
+    } else {
+      console.log(chalk.dim(` · ${label}`) + services);
+    }
+  }
+  console.log("");
+
+  // Checkbox prompt
+  const choices = flagNames.map((name) => ({
+    name: KNOWN_FLAGS[name] || name,
+    value: name,
+    checked: allFlags[name].value,
+  }));
+
+  const { enabled } = await inquirer.prompt([{
+    type: "checkbox",
+    name: "enabled",
+    message: "Toggle feature flags:",
+    choices,
+  }]);
+
+  // Calculate changes
+  const changes = [];
+  const affectedServices = new Set();
+
+  for (const name of flagNames) {
+    const flag = allFlags[name];
+    const newValue = enabled.includes(name);
+
+    if (newValue !== flag.value) {
+      if (flag.inCompose) {
+        for (const line of flag.lines) {
+          changes.push({ lineNum: line.lineNum, newValue: String(newValue) });
+        }
+        for (const svc of flag.services) affectedServices.add(svc);
+      } else if (newValue) {
+        console.log(chalk.yellow(` ⚠ ${KNOWN_FLAGS[name] || name} not in docker-compose.yaml — add it to service environments to take effect`));
+      }
+    }
+  }
+
+  if (changes.length === 0) {
+    console.log(chalk.dim("\n No changes.\n"));
+    return;
+  }
+
+  // Apply changes to compose file
+  updateComposeFlags(composePath, changes);
+  console.log(chalk.green(`\n ✓ Updated ${changes.length} flag value(s) in docker-compose.yaml`));
+
+  if (affectedServices.size === 0) {
+    console.log("");
+    return;
+  }
+
+  // Restart affected services
+  const serviceList = [...affectedServices];
+  console.log(chalk.dim(` Affected: ${serviceList.join(", ")}`));
+
+  const { restart } = await inquirer.prompt([{
+    type: "confirm",
+    name: "restart",
+    message: `Restart ${serviceList.length} service(s)?`,
+    default: true,
+  }]);
+
+  if (restart) {
+    console.log(chalk.cyan(`\n ▶ docker compose up -d ${serviceList.join(" ")}\n`));
+    await execa("docker", ["compose", "up", "-d", ...serviceList], {
+      cwd: root,
+      stdio: "inherit",
+      reject: false,
+      timeout: 120_000,
+    });
+    console.log(chalk.green("\n ✓ Services restarted.\n"));
+  } else {
+    console.log(chalk.dim("\n Changes saved. Restart manually: docker compose up -d\n"));
+  }
+}
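A note on input format: parseComposeFlags() accepts both docker-compose environment styles, the YAML map form and the list form. The snippet below is an illustrative sketch only (the compose lines are invented; the flag names come from KNOWN_FLAGS above) and shows what the two regexes capture.

// sketch: exercises the same two regexes used by parseComposeFlags()
const mapLine = '      MX_FF_EXPLORER_ENABLED: "true"'; // map style
const listLine = "      - MX_FF_SENTRY_ENABLED=false";  // list style
const mapMatch = mapLine.match(/\b(MX_FF_\w+)\s*:\s*"?(true|false)"?/);
const listMatch = listLine.match(/[-]\s*(MX_FF_\w+)\s*=\s*(true|false)/);
console.log(mapMatch[1], mapMatch[2]);   // MX_FF_EXPLORER_ENABLED true
console.log(listMatch[1], listMatch[2]); // MX_FF_SENTRY_ENABLED false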
package/src/plugins/api.js
CHANGED
@@ -42,5 +42,19 @@ export function createPluginApi(pluginId, registry) {
         search: source.search,
       });
     },
+
+    registerAutoRunPattern(pattern) {
+      registry.autoRunPatterns.push({ pluginId, pattern });
+    },
+
+    registerAgent(agent) {
+      registry.agents.push({
+        pluginId,
+        name: agent.name,
+        description: agent.description || "",
+        systemPrompt: agent.systemPrompt,
+        contextMode: agent.contextMode || "full",
+      });
+    },
   };
 }
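For context, these new API methods are exposed to any module that follows the register(api) convention used by the loader (see loader.js below). A minimal sketch of a plugin calling them — the pattern string and agent fields are invented for illustration, not taken from the package:

// hypothetical-plugin.js — illustrative values only
export function register(api) {
  // Per the registry comment, commands matching this prefix would auto-execute.
  api.registerAutoRunPattern("docker compose ps");
  // Stored in registry.agents; description and contextMode fall back to "" and "full".
  api.registerAgent({
    name: "compose-helper",                         // hypothetical
    systemPrompt: "Explain docker-compose output.", // hypothetical
  });
}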
package/src/plugins/builtins/stack-api.js
ADDED
@@ -0,0 +1,36 @@
+import http from "node:http";
+
+/**
+ * Built-in Stack API plugin.
+ * Registers a doctor check (health ping) and auto-run patterns for curl commands.
+ */
+export function register(api) {
+  // Doctor check — ping GET /health on localhost:3090
+  api.registerDoctorCheck({
+    name: "Stack API",
+    fn: async (ok, warn) => {
+      try {
+        const body = await new Promise((resolve, reject) => {
+          const req = http.get("http://localhost:3090/health", { timeout: 3000 }, (res) => {
+            let data = "";
+            res.on("data", (chunk) => { data += chunk; });
+            res.on("end", () => {
+              if (res.statusCode === 200) resolve(data);
+              else reject(new Error(`HTTP ${res.statusCode}`));
+            });
+          });
+          req.on("error", reject);
+          req.on("timeout", () => { req.destroy(); reject(new Error("timeout")); });
+        });
+        ok("Stack API", `healthy — ${body.trim().slice(0, 60)}`);
+      } catch {
+        warn("Stack API", "not reachable on localhost:3090");
+      }
+    },
+  });
+
+  // Auto-run: GET curl commands to the stack API execute without confirmation
+  api.registerAutoRunPattern("curl http://localhost:3090/");
+  api.registerAutoRunPattern("curl -s http://localhost:3090/");
+  api.registerAutoRunPattern("curl --silent http://localhost:3090/");
+}
package/src/plugins/loader.js
CHANGED
@@ -1,10 +1,50 @@
 import fs from "node:fs";
 import os from "node:os";
 import path from "node:path";
+import { fileURLToPath } from "node:url";
 import { createRegistry } from "./registry.js";
 import { validateManifest } from "./manifest.js";
 import { discoverPlugins } from "./discovery.js";
 import { createPluginApi } from "./api.js";
+import { loadBuiltinAgents } from "../agent/agents.js";
+
+const __dirname = path.dirname(fileURLToPath(import.meta.url));
+
+/**
+ * Ensure ~/.fops/plugins/node_modules symlinks to the CLI's node_modules.
+ * This lets global plugins resolve bare imports (chalk, execa, inquirer, etc.)
+ * via Node's upward directory walk.
+ */
+function ensurePluginNodeModules() {
+  const cliNodeModules = path.resolve(__dirname, "../../node_modules");
+  const pluginsDir = path.join(os.homedir(), ".fops", "plugins");
+  const link = path.join(pluginsDir, "node_modules");
+
+  if (!fs.existsSync(cliNodeModules)) return;
+  if (!fs.existsSync(pluginsDir)) return;
+
+  try {
+    const stat = fs.lstatSync(link);
+    // Already a symlink — check it points to the right place
+    if (stat.isSymbolicLink()) {
+      const target = fs.readlinkSync(link);
+      if (path.resolve(pluginsDir, target) === cliNodeModules) return;
+      // Stale symlink — remove and recreate
+      fs.unlinkSync(link);
+    } else {
+      // Not a symlink (somehow a real dir) — leave it alone
+      return;
+    }
+  } catch {
+    // Doesn't exist — create it
+  }
+
+  try {
+    fs.symlinkSync(cliNodeModules, link, "junction");
+  } catch {
+    // Non-fatal — plugins that use only node: builtins still work
+  }
+}
 
 /**
  * Parse SKILL.md frontmatter (lightweight, same logic as skills.js).
@@ -37,12 +77,39 @@ function isPluginEnabled(pluginId) {
   return true;
 }
 
+/**
+ * Load built-in plugins from ./builtins/ directory.
+ * Each module must export a register(api) function.
+ */
+async function loadBuiltinPlugins(registry) {
+  const builtinsDir = path.join(__dirname, "builtins");
+  if (!fs.existsSync(builtinsDir)) return;
+
+  const entries = fs.readdirSync(builtinsDir).filter((f) => f.endsWith(".js"));
+  for (const file of entries) {
+    try {
+      const mod = await import(path.join(builtinsDir, file));
+      const plugin = mod.default || mod;
+      if (typeof plugin.register === "function") {
+        const pluginId = `builtin:${path.basename(file, ".js")}`;
+        const api = createPluginApi(pluginId, registry);
+        await plugin.register(api);
+      }
+    } catch (err) {
+      console.error(` Built-in plugin "${file}" failed to load: ${err.message}`);
+    }
+  }
+}
+
 /**
  * Load and activate all discovered plugins.
  * Returns a populated PluginRegistry.
 */
 export async function loadPlugins() {
+  ensurePluginNodeModules();
   const registry = createRegistry();
+  loadBuiltinAgents(registry);
+  await loadBuiltinPlugins(registry);
   const candidates = discoverPlugins();
 
   for (const candidate of candidates) {
package/src/plugins/registry.js
CHANGED
@@ -11,5 +11,7 @@ export function createRegistry() {
     hooks: [], // { pluginId, event, handler, priority }
     skills: [], // { pluginId, name, description, content }
     knowledgeSources: [], // { pluginId, name, description, search }
+    autoRunPatterns: [], // { pluginId, pattern: string } — commands matching these prefixes auto-execute
+    agents: [], // { pluginId, name, description, systemPrompt, contextMode }
   };
 }
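The code that consumes autoRunPatterns is not included in this diff; the inline comment only states that matching command prefixes auto-execute. A hypothetical prefix check consistent with that comment:

// Hypothetical helper — not part of this package; shows one way the
// autoRunPatterns entries could be interpreted as command prefixes.
function isAutoRun(registry, command) {
  return registry.autoRunPatterns.some(({ pattern }) => command.startsWith(pattern));
}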
package/src/project.js
CHANGED
@@ -49,12 +49,31 @@ export function rootDir(cwd = process.cwd()) {
   return null;
 }
 
+/**
+ * Check whether the Foundation project is fully initialised.
+ * Returns null when ready, or a short reason string when not.
+ */
+export function checkInitState(root) {
+  if (!root) return "no project root";
+  if (!fs.existsSync(path.join(root, ".env"))) return "missing .env";
+  // Check that at least one submodule dir has content
+  const markers = ["foundation-backend", "foundation-frontend", "foundation-storage-engine"];
+  const empty = markers.filter((d) => {
+    const dir = path.join(root, d);
+    if (!fs.existsSync(dir)) return true;
+    try { return fs.readdirSync(dir).length === 0; } catch { return true; }
+  });
+  if (empty.length === markers.length) return "submodules not cloned";
+  return null;
+}
+
 export function requireRoot(program) {
   const r = rootDir();
   if (!r) {
     console.error(
-      chalk.red("Not a Foundation project (no docker-compose + Makefile).
+      chalk.red("Not a Foundation project (no docker-compose + Makefile).")
     );
+    console.error(chalk.dim(" Run `fops init` to set up, or set FOUNDATION_ROOT."));
     program.error({ exitCode: 1 });
   }
   return r;
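A short usage sketch for the new checkInitState() helper — the actual call sites (for example in commands/index.js or doctor.js) are not shown in full in this diff:

// Illustrative usage; assumes this module is imported as ./project.js
import { rootDir, checkInitState } from "./project.js";

const root = rootDir();
const notReady = checkInitState(root);
if (notReady) {
  // e.g. "missing .env" or "submodules not cloned"
  console.error(`Project not initialised: ${notReady}`);
}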
package/src/setup/aws.js
CHANGED
@@ -1,7 +1,6 @@
 import fs from "node:fs";
 import os from "node:os";
 import path from "node:path";
-import readline from "node:readline";
 import chalk from "chalk";
 import { execa } from "execa";
 import inquirer from "inquirer";
@@ -32,8 +31,8 @@ export function saveFopsConfig(config) {
  */
 export async function promptAwsSsoConfig() {
   console.log(chalk.cyan("\n AWS SSO Configuration\n"));
-  console.log(chalk.
-  console.log(chalk.
+  console.log(chalk.dim(" We'll set up an AWS CLI profile for ECR image pulls."));
+  console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));
 
   const answers = await inquirer.prompt([
     {
@@ -138,20 +137,6 @@ export function detectAwsSsoProfiles() {
   return profiles;
 }
 
-/**
- * Simple readline prompt helper.
- */
-function ask(question, defaultVal) {
-  const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
-  const suffix = defaultVal ? chalk.gray(` (${defaultVal})`) : "";
-  return new Promise((resolve) => {
-    rl.question(` ${question}${suffix}: `, (answer) => {
-      rl.close();
-      resolve(answer.trim() || defaultVal || "");
-    });
-  });
-}
-
 /**
  * Check if ~/.aws/config has an sso-session block with sso_start_url.
  * If not, prompt user for the values and write them.
@@ -164,40 +149,37 @@ export async function ensureSsoConfig() {
   if (/sso_start_url\s*=/.test(content)) return; // already configured
 
   console.log(chalk.cyan("\n AWS SSO is not configured. Let's set it up.\n"));
-  console.log(chalk.
-
-  const
-
-
-
-
-
-
-
-    throw new Error("SSO start URL and account ID are required");
-  }
+  console.log(chalk.dim(" You can find these values in your AWS SSO portal.\n"));
+
+  const answers = await inquirer.prompt([
+    { type: "input", name: "sessionName", message: "SSO session name:", default: "me-central-1" },
+    { type: "input", name: "startUrl", message: "SSO start URL:", validate: (v) => v?.trim() ? true : "Required." },
+    { type: "input", name: "ssoRegion", message: "SSO region:", default: "us-east-1" },
+    { type: "input", name: "accountId", message: "AWS account ID:", validate: (v) => /^\d{12}$/.test(v?.trim()) ? true : "Must be 12 digits." },
+    { type: "input", name: "roleName", message: "SSO role name:", default: "AdministratorAccess" },
+    { type: "input", name: "profileName", message: "Profile name:", default: "dev" },
+    { type: "input", name: "region", message: "Default region:", default: (a) => a.ssoRegion },
+  ]);
 
   // Ensure ~/.aws directory exists
   const awsDir = path.join(os.homedir(), ".aws");
   if (!fs.existsSync(awsDir)) fs.mkdirSync(awsDir, { mode: 0o700 });
 
-  const block = `
-
-
-sso_region = ${ssoRegion}
+  const block = `[sso-session ${answers.sessionName.trim()}]
+sso_start_url = ${answers.startUrl.trim()}
+sso_region = ${answers.ssoRegion.trim()}
 sso_registration_scopes = sso:account:access
 
-[profile ${profileName}]
-sso_session = ${sessionName}
-sso_account_id = ${accountId}
-sso_role_name = ${roleName}
-region = ${region}
+[profile ${answers.profileName.trim()}]
+sso_session = ${answers.sessionName.trim()}
+sso_account_id = ${answers.accountId.trim()}
+sso_role_name = ${answers.roleName.trim()}
+region = ${answers.region.trim()}
 output = json
 `;
 
-  fs.
-  console.log(chalk.green(`\n ✓ Written to ~/.aws/config (profile: ${profileName})`));
+  fs.writeFileSync(configPath, block);
+  console.log(chalk.green(`\n ✓ Written to ~/.aws/config (profile: ${answers.profileName.trim()})`));
 }
 
 /**
@@ -212,7 +194,7 @@ export async function fixAwsSso() {
   }
 
   const profile = profiles[0];
-  console.log(chalk.
+  console.log(chalk.dim(` Using AWS profile: ${profile.name}`));
   console.log(chalk.cyan(` ▶ aws sso login --profile ${profile.name}`));
 
   // Open /dev/tty directly so SSO login gets a real terminal even when
@@ -220,13 +202,44 @@ export async function fixAwsSso() {
   let ttyFd;
   try { ttyFd = fs.openSync("/dev/tty", "r"); } catch { ttyFd = null; }
 
-  await execa("aws", ["sso", "login", "--profile", profile.name], {
+  const { exitCode } = await execa("aws", ["sso", "login", "--profile", profile.name], {
     stdio: [ttyFd ?? "inherit", "inherit", "inherit"],
     reject: false,
     timeout: 120_000,
   });
 
   if (ttyFd !== null) fs.closeSync(ttyFd);
+
+  if (exitCode !== 0) {
+    // SSO login failed — likely bad config. Remove it and re-run setup.
+    console.log(chalk.yellow(" SSO login failed — re-running setup with new values...\n"));
+    const configPath = path.join(os.homedir(), ".aws", "config");
+    if (fs.existsSync(configPath)) fs.unlinkSync(configPath);
+    await ensureSsoConfig();
+
+    const retryProfiles = detectAwsSsoProfiles();
+    if (retryProfiles.length === 0) {
+      throw new Error("No SSO profiles found after config — check ~/.aws/config");
+    }
+
+    const retryProfile = retryProfiles[0];
+    console.log(chalk.cyan(` ▶ aws sso login --profile ${retryProfile.name}`));
+
+    let retryTtyFd;
+    try { retryTtyFd = fs.openSync("/dev/tty", "r"); } catch { retryTtyFd = null; }
+
+    const { exitCode: retryCode } = await execa("aws", ["sso", "login", "--profile", retryProfile.name], {
+      stdio: [retryTtyFd ?? "inherit", "inherit", "inherit"],
+      reject: false,
+      timeout: 120_000,
+    });
+
+    if (retryTtyFd !== null) fs.closeSync(retryTtyFd);
+
+    if (retryCode !== 0) {
+      throw new Error("SSO login failed. Check your SSO start URL and region in ~/.aws/config");
+    }
+  }
 }
 
 /**
@@ -358,8 +371,8 @@ export async function checkEcrRepos(dir, awsConfig) {
   const missing = neededRepos.filter((r) => !existingRepos.includes(r));
   if (missing.length > 0) {
     console.log(chalk.yellow("\n⚠ These ECR repos are referenced but don't exist (will need local build):"));
-    for (const r of missing) console.log(chalk.
-    console.log(chalk.
+    for (const r of missing) console.log(chalk.dim(` ✗ ${r}`));
+    console.log(chalk.dim(" These services will be built from source instead.\n"));
   } else {
     console.log(chalk.green("All required ECR repos exist."));
   }
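For reference, with the prompt defaults shown above (session "me-central-1", SSO region "us-east-1", role "AdministratorAccess", profile "dev") and placeholder values for the two required answers, ensureSsoConfig() writes a block of this shape to ~/.aws/config:

# sso_start_url and sso_account_id below are placeholders, not real values
[sso-session me-central-1]
sso_start_url = https://example.awsapps.com/start
sso_region = us-east-1
sso_registration_scopes = sso:account:access

[profile dev]
sso_session = me-central-1
sso_account_id = 123456789012
sso_role_name = AdministratorAccess
region = us-east-1
output = json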
package/src/setup/setup.js
CHANGED
@@ -24,7 +24,7 @@ export function runSetup(dir, opts = {}) {
       fs.copyFileSync(envExample, envPath);
       console.log(chalk.green("Created .env from .env.example. Edit .env with your settings."));
     } else if (fs.existsSync(envPath)) {
-      console.log(chalk.
+      console.log(chalk.dim(".env already exists."));
     }
   }
   if (netrcCheck) {
@@ -33,25 +33,26 @@ export function runSetup(dir, opts = {}) {
     const hasGitHub = hasNetrc && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
     if (!hasGitHub) {
       console.log(chalk.yellow("⚠ GitHub: ensure ~/.netrc has credentials for github.com (needed for submodules)."));
-      console.log(chalk.
+      console.log(chalk.dim(" See README: Configure GitHub Authentication"));
     }
   }
   if (submodules) {
     console.log(chalk.blue(`Initializing git submodules (checking out ${CLONE_BRANCH})...`));
     try {
-      await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
+      await execa("git", ["submodule", "update", "--init", "--force", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
       // Check out the target branch on each submodule
       await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
       console.log(chalk.green(`Submodules initialized — on ${CLONE_BRANCH} (falling back to main).`));
     } catch {
-      console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to
+      console.log(chalk.yellow(`⚠ Some submodules had issues. Attempting to recover individually...`));
      try {
-        await execa("git", ["submodule", "
+        await execa("git", ["submodule", "absorbgitdirs"], { cwd: dir, stdio: "inherit" });
+        await execa("git", ["submodule", "update", "--init", "--force", "--recursive"], { cwd: dir, stdio: "inherit" });
        await execa("git", ["submodule", "foreach", `git fetch origin && git checkout origin/${CLONE_BRANCH} 2>/dev/null || git checkout origin/main`], { cwd: dir, stdio: "inherit" });
        console.log(chalk.green("Submodules recovered."));
      } catch {
        console.log(chalk.yellow("Some submodules still failed. Fix manually with:"));
-        console.log(chalk.
+        console.log(chalk.dim(` cd ${dir} && git submodule foreach 'git checkout ${CLONE_BRANCH} || git checkout main && git pull'`));
      }
    }
  }
@@ -101,7 +102,7 @@ export function runSetup(dir, opts = {}) {
     else fs.writeFileSync(awsConfigPath, ssoConfig, { mode: 0o600 });
     console.log(chalk.green(` Created AWS profile '${awsConfig.profileName}' in ~/.aws/config`));
   } else {
-    console.log(chalk.
+    console.log(chalk.dim(" Skipping AWS setup. Private ECR images won't be available."));
     console.log(chalk.blue("Downloading public container images..."));
     try {
       await make(dir, "download");
@@ -157,8 +158,8 @@ export function runSetup(dir, opts = {}) {
       await make(dir, "download");
     } catch {
       console.log(chalk.yellow("\n⚠ Some images failed to download. Public images are fine."));
-      console.log(chalk.
-      console.log(chalk.
+      console.log(chalk.dim(` For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
+      console.log(chalk.dim(" Then re-run: fops init --download\n"));
     }
   }
   console.log(chalk.green("Setup complete. Run: fops up"));