@meshxdata/fops 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +98 -0
- package/STRUCTURE.md +43 -0
- package/foundation.mjs +16 -0
- package/package.json +52 -0
- package/src/agent/agent.js +367 -0
- package/src/agent/agent.test.js +233 -0
- package/src/agent/context.js +143 -0
- package/src/agent/context.test.js +81 -0
- package/src/agent/index.js +2 -0
- package/src/agent/llm.js +127 -0
- package/src/agent/llm.test.js +139 -0
- package/src/auth/index.js +4 -0
- package/src/auth/keychain.js +58 -0
- package/src/auth/keychain.test.js +185 -0
- package/src/auth/login.js +421 -0
- package/src/auth/login.test.js +192 -0
- package/src/auth/oauth.js +203 -0
- package/src/auth/oauth.test.js +118 -0
- package/src/auth/resolve.js +78 -0
- package/src/auth/resolve.test.js +153 -0
- package/src/commands/index.js +268 -0
- package/src/config.js +24 -0
- package/src/config.test.js +70 -0
- package/src/doctor.js +487 -0
- package/src/doctor.test.js +134 -0
- package/src/plugins/api.js +37 -0
- package/src/plugins/api.test.js +95 -0
- package/src/plugins/discovery.js +78 -0
- package/src/plugins/discovery.test.js +92 -0
- package/src/plugins/hooks.js +13 -0
- package/src/plugins/hooks.test.js +118 -0
- package/src/plugins/index.js +3 -0
- package/src/plugins/loader.js +110 -0
- package/src/plugins/manifest.js +26 -0
- package/src/plugins/manifest.test.js +106 -0
- package/src/plugins/registry.js +14 -0
- package/src/plugins/registry.test.js +43 -0
- package/src/plugins/skills.js +126 -0
- package/src/plugins/skills.test.js +173 -0
- package/src/project.js +61 -0
- package/src/project.test.js +196 -0
- package/src/setup/aws.js +369 -0
- package/src/setup/aws.test.js +280 -0
- package/src/setup/index.js +3 -0
- package/src/setup/setup.js +161 -0
- package/src/setup/wizard.js +119 -0
- package/src/shell.js +9 -0
- package/src/shell.test.js +72 -0
- package/src/skills/foundation/SKILL.md +107 -0
- package/src/ui/banner.js +56 -0
- package/src/ui/banner.test.js +97 -0
- package/src/ui/confirm.js +97 -0
- package/src/ui/index.js +5 -0
- package/src/ui/input.js +199 -0
- package/src/ui/spinner.js +170 -0
- package/src/ui/spinner.test.js +29 -0
- package/src/ui/streaming.js +106 -0
@@ -0,0 +1,161 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import chalk from "chalk";
import { execa } from "execa";
import inquirer from "inquirer";
import { make } from "../shell.js";
import { readFopsConfig, saveFopsConfig, promptAwsSsoConfig, detectEcrRegistry, checkEcrRepos } from "./aws.js";

export function runSetup(dir, opts = {}) {
  const submodules = opts.submodules !== false;
  const createEnv = opts.env !== false;
  const download = opts.download === true;
  const netrcCheck = opts.netrcCheck !== false;

  return (async () => {
    if (createEnv) {
      const envPath = path.join(dir, ".env");
      const envExample = path.join(dir, ".env.example");
      if (!fs.existsSync(envPath) && fs.existsSync(envExample)) {
        fs.copyFileSync(envExample, envPath);
        console.log(chalk.green("Created .env from .env.example. Edit .env with your settings."));
      } else if (fs.existsSync(envPath)) {
        console.log(chalk.gray(".env already exists."));
      }
    }
    if (netrcCheck) {
      const netrcPath = path.join(os.homedir(), ".netrc");
      const hasNetrc = fs.existsSync(netrcPath);
      const hasGitHub = hasNetrc && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
      if (!hasGitHub) {
        console.log(chalk.yellow("⚠ GitHub: ensure ~/.netrc has credentials for github.com (needed for submodules)."));
        console.log(chalk.gray("  See README: Configure GitHub Authentication"));
      }
    }
    if (submodules) {
      console.log(chalk.blue("Initializing git submodules (checking out main)..."));
      try {
        await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: dir, stdio: "inherit" });
        console.log(chalk.green("Submodules initialized — all on main."));
      } catch {
        console.log(chalk.yellow("⚠ Some submodules had issues. Attempting to check out main individually..."));
        try {
          await execa("git", ["submodule", "init"], { cwd: dir, stdio: "inherit" });
          await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: dir, stdio: "inherit" });
          console.log(chalk.green("Submodules recovered."));
        } catch {
          console.log(chalk.yellow("Some submodules still failed. Fix manually with:"));
          console.log(chalk.gray(`  cd ${dir} && git submodule foreach 'git checkout main && git pull'`));
        }
      }
    }
    if (download) {
      // Load saved config or prompt for AWS SSO details
      let config = readFopsConfig();
      let awsConfig = config.aws;

      const awsConfigPath = path.join(os.homedir(), ".aws", "config");
      const profileName = awsConfig?.profileName || "dev";
      const hasProfile = fs.existsSync(awsConfigPath) &&
        fs.readFileSync(awsConfigPath, "utf8").includes(`[profile ${profileName}]`);

      if (!hasProfile) {
        // Check if docker-compose references ECR to auto-detect some values
        const ecrInfo = detectEcrRegistry(dir);

        const { setupAws } = await inquirer.prompt([{
          type: "confirm",
          name: "setupAws",
          message: ecrInfo
            ? `ECR images detected (account ${ecrInfo.accountId}, region ${ecrInfo.region}). Set up AWS SSO profile?`
            : "No AWS profile found. Set up AWS SSO for ECR image pulls?",
          default: true,
        }]);

        if (setupAws) {
          awsConfig = await promptAwsSsoConfig();

          // Save config for future runs
          config.aws = awsConfig;
          saveFopsConfig(config);

          // Write AWS profile
          const ssoConfig = [
            "", `[profile ${awsConfig.profileName}]`,
            `sso_start_url = ${awsConfig.ssoStartUrl}`,
            `sso_region = ${awsConfig.ssoRegion}`,
            `sso_account_id = ${awsConfig.accountId}`,
            `sso_role_name = ${awsConfig.roleName}`,
            `region = ${awsConfig.region}`,
            "output = json", "",
          ].join("\n");
          const awsDir = path.join(os.homedir(), ".aws");
          if (!fs.existsSync(awsDir)) fs.mkdirSync(awsDir, { mode: 0o700 });
          if (fs.existsSync(awsConfigPath)) fs.appendFileSync(awsConfigPath, ssoConfig);
          else fs.writeFileSync(awsConfigPath, ssoConfig, { mode: 0o600 });
          console.log(chalk.green(`  Created AWS profile '${awsConfig.profileName}' in ~/.aws/config`));
        } else {
          console.log(chalk.gray("  Skipping AWS setup. Private ECR images won't be available."));
          console.log(chalk.blue("Downloading public container images..."));
          try {
            await make(dir, "download");
          } catch {
            console.log(chalk.yellow("\n⚠ Some images failed to download."));
          }
          console.log(chalk.green("Setup complete. Run: fops up"));
          return;
        }
      }

      if (!awsConfig) {
        awsConfig = config.aws || { profileName: "dev" };
      }

      let ecrAuthed = false;
      try {
        await execa("aws", ["sts", "get-caller-identity", "--profile", awsConfig.profileName], { timeout: 5000 });
        ecrAuthed = true;
      } catch {
        console.log(chalk.yellow("\n⚠ AWS SSO session expired or not logged in."));
        console.log(chalk.blue(`  Running: aws sso login --profile ${awsConfig.profileName}\n`));
        try {
          await execa("aws", ["sso", "login", "--profile", awsConfig.profileName], { stdio: "inherit" });
          ecrAuthed = true;
        } catch {
          console.log(chalk.yellow(`  AWS SSO login failed. You can retry later with: aws sso login --profile ${awsConfig.profileName}`));
        }
      }
      if (ecrAuthed) {
        // Detect ECR registry from compose file or config
        const ecrInfo = detectEcrRegistry(dir);
        const ecrRegion = ecrInfo?.region || awsConfig.region;
        const ecrAccountId = ecrInfo?.accountId || awsConfig.accountId;
        const ecrUrl = `${ecrAccountId}.dkr.ecr.${ecrRegion}.amazonaws.com`;

        try {
          console.log(chalk.blue("Logging into ECR..."));
          const { stdout: ecrPassword } = await execa("aws", [
            "ecr", "get-login-password", "--region", ecrRegion, "--profile", awsConfig.profileName,
          ]);
          await execa("docker", [
            "login", "--username", "AWS", "--password-stdin", ecrUrl,
          ], { input: ecrPassword });
          console.log(chalk.green("ECR login successful."));
          await checkEcrRepos(dir, awsConfig);
        } catch {
          console.log(chalk.yellow("ECR login failed. Image pull may fail for private images."));
        }
      }
      console.log(chalk.blue("Downloading container images..."));
      try {
        await make(dir, "download");
      } catch {
        console.log(chalk.yellow("\n⚠ Some images failed to download. Public images are fine."));
        console.log(chalk.gray(`  For private ECR images, ensure: aws sso login --profile ${awsConfig.profileName}`));
        console.log(chalk.gray("  Then re-run: fops init --download\n"));
      }
    }
    console.log(chalk.green("Setup complete. Run: fops up"));
  })();
}
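One detail of `runSetup` that is easy to miss in the diff: `submodules`, `env`, and `netrcCheck` are opt-out (`!== false`), while `download` is opt-in (`=== true`). A minimal usage sketch; the wiring and flag names are hypothetical and not taken from `package/src/commands/index.js`:

```js
// Hypothetical caller sketch, not part of the published package.
import { runSetup } from "./setup.js";

// `fops init` with no flags: init submodules, create .env, check ~/.netrc,
// but skip the container-image download.
await runSetup(process.cwd());

// Assumed "--download --no-submodules" style invocation: pull images via
// `make download`, still create .env and run the netrc check.
await runSetup(process.cwd(), { download: true, submodules: false });
```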
package/src/setup/wizard.js
ADDED
@@ -0,0 +1,119 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import chalk from "chalk";
import { execa } from "execa";
import inquirer from "inquirer";
import { isFoundationRoot, findComposeRootUp } from "../project.js";
import { runSetup } from "./setup.js";

export async function runInitWizard() {
  const cwd = process.cwd();
  const envRoot = process.env.FOUNDATION_ROOT;
  let projectRoot = null;
  if (envRoot && fs.existsSync(envRoot) && isFoundationRoot(envRoot)) {
    projectRoot = path.resolve(envRoot);
    console.log(chalk.gray(`Using FOUNDATION_ROOT: ${projectRoot}\n`));
  } else if (isFoundationRoot(cwd)) {
    projectRoot = cwd;
    console.log(chalk.gray("Using current directory as project root.\n"));
  } else {
    const foundUp = findComposeRootUp(cwd);
    if (foundUp && foundUp !== cwd) {
      const { useFound } = await inquirer.prompt([
        { type: "confirm", name: "useFound", message: `Found Foundation project at:\n  ${foundUp}\n  Use it instead of the current directory?`, default: false },
      ]);
      if (useFound) projectRoot = foundUp;
    } else if (foundUp) {
      projectRoot = foundUp;
    }
    if (!projectRoot) {
      let hasGit = false, hasDocker = false, hasAws = false;
      try { await execa("git", ["--version"]); hasGit = true; } catch {}
      try { await execa("docker", ["info"], { timeout: 5000 }); hasDocker = true; } catch {}
      try { await execa("aws", ["--version"]); hasAws = true; } catch {}
      console.log(chalk.cyan(" Prerequisites\n"));
      console.log(hasGit ? chalk.green("  ✓ Git") : chalk.red("  ✗ Git — install git first"));
      console.log(hasDocker ? chalk.green("  ✓ Docker") : chalk.red("  ✗ Docker — install and start Docker Desktop"));
      console.log(hasAws ? chalk.green("  ✓ AWS CLI") : chalk.yellow("  ⚠ AWS CLI — install for ECR image pulls (brew install awscli)"));
      const netrcPath = path.join(os.homedir(), ".netrc");
      const hasNetrc = fs.existsSync(netrcPath) && fs.readFileSync(netrcPath, "utf8").includes("machine github.com");
      console.log(hasNetrc ? chalk.green("  ✓ GitHub credentials (~/.netrc)") : chalk.yellow("  ⚠ GitHub credentials — add to ~/.netrc (needed for private submodules)"));
      console.log("");
      if (!hasGit || !hasDocker) {
        console.log(chalk.red("Fix the missing prerequisites above, then run fops init again.\n"));
        process.exit(1);
      }
      const choices = [
        { name: "Clone foundation-compose into this directory", value: "clone" },
        { name: "Enter path to an existing foundation-compose directory", value: "path" },
        { name: "Cancel", value: "cancel" },
      ];
      const { action } = await inquirer.prompt([{ type: "list", name: "action", message: "No Foundation project found. What do you want to do?", choices }]);
      if (action === "cancel") process.exit(0);
      if (action === "clone") {
        const { repoUrl } = await inquirer.prompt([
          { type: "input", name: "repoUrl", message: "Repository URL:", default: "https://github.com/meshxdata/foundation-compose.git", validate: (v) => (v?.trim() ? true : "Repository URL is required.") },
        ]);
        const { targetDir } = await inquirer.prompt([
          { type: "input", name: "targetDir", message: "Clone into:", default: cwd },
        ]);
        const resolved = path.resolve(targetDir.trim());
        if (fs.existsSync(resolved)) {
          console.log(chalk.yellow(`\n  ${resolved} already exists.`));
          if (isFoundationRoot(resolved)) { console.log(chalk.green("  Looks like a Foundation project — using it.\n")); projectRoot = resolved; }
          else { console.log(chalk.red("  Not a Foundation project. Remove it or choose a different path.\n")); process.exit(1); }
        } else {
          console.log(chalk.blue("\nCloning (this may take a minute)...\n"));
          try {
            await execa("git", ["clone", repoUrl.trim(), resolved], { stdio: "inherit" });
          } catch (err) {
            console.log(chalk.red("\n  Clone failed. Check the URL and your credentials.\n"));
            if (!hasNetrc) {
              console.log(chalk.yellow("  Hint: Add GitHub credentials to ~/.netrc:\n"));
              console.log("  machine github.com\n  login <your-username>\n  password <your-token>\n");
            }
            process.exit(1);
          }
          console.log(chalk.blue("\nInitializing submodules (checking out main)...\n"));
          try {
            await execa("git", ["submodule", "update", "--init", "--remote", "--recursive"], { cwd: resolved, stdio: "inherit" });
            console.log(chalk.green("\n  Cloned successfully — all submodules on main.\n"));
          } catch {
            console.log(chalk.yellow("\n  ⚠ Some submodules had issues. Attempting to check out main individually...\n"));
            try {
              await execa("git", ["submodule", "init"], { cwd: resolved, stdio: "inherit" });
              await execa("git", ["submodule", "foreach", "git fetch origin main && git checkout origin/main"], { cwd: resolved, stdio: "inherit" });
              console.log(chalk.green("  Submodules recovered.\n"));
            } catch {
              console.log(chalk.yellow("  Some submodules still failed. Fix manually with:"));
              console.log(chalk.gray(`  cd ${resolved} && git submodule foreach 'git checkout main && git pull'\n`));
            }
          }
          projectRoot = resolved;
        }
      }
      if (action === "path") {
        const { dir } = await inquirer.prompt([
          {
            type: "input", name: "dir", message: "Path to foundation-compose directory:",
            validate: (value) => {
              const resolved = path.resolve(value.trim());
              if (!fs.existsSync(resolved)) return "Directory does not exist.";
              if (!isFoundationRoot(resolved)) return "Directory has no docker-compose.yaml + Makefile.";
              return true;
            },
          },
        ]);
        projectRoot = path.resolve(dir.trim());
      }
    }
  }
  const { submodules, env, download } = await inquirer.prompt([
    { type: "confirm", name: "submodules", message: "Initialize and update git submodules?", default: true },
    { type: "confirm", name: "env", message: "Create .env from .env.example (if missing)?", default: true },
    { type: "confirm", name: "download", message: "Download container images now (make download)?", default: false },
  ]);
  console.log("");
  await runSetup(projectRoot, { submodules, env, download, netrcCheck: true });
}
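The wizard delegates project detection to `isFoundationRoot` from `../project.js`. Going only by the validation message above ("Directory has no docker-compose.yaml + Makefile."), the check amounts to something like the following sketch; the published `package/src/project.js` may look for more than this, and the compose filename variants are an assumption:

```js
// Sketch only: inferred from the wizard's validation message, not the
// actual implementation in package/src/project.js.
import fs from "node:fs";
import path from "node:path";

function looksLikeFoundationRoot(dir) {
  const hasCompose = ["docker-compose.yaml", "docker-compose.yml"]
    .some((name) => fs.existsSync(path.join(dir, name)));
  const hasMakefile = fs.existsSync(path.join(dir, "Makefile"));
  return hasCompose && hasMakefile;
}

console.log(looksLikeFoundationRoot(process.cwd()));
```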
package/src/shell.js
ADDED
@@ -0,0 +1,9 @@
import { execa } from "execa";

export async function make(root, target, args = []) {
  return execa("make", [target, ...args], { cwd: root, stdio: "inherit" });
}

export async function dockerCompose(root, args) {
  return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit" });
}
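Both helpers stream output straight to the terminal via `stdio: "inherit"` and return the `execa` promise, so callers simply await them. A usage sketch (the directory path is a placeholder):

```js
import { make, dockerCompose } from "./shell.js";

const root = "/path/to/foundation-compose"; // placeholder path

// Runs `make download` in the project root, streaming output to the terminal.
await make(root, "download");

// Runs `docker compose ps --format json` in the same directory.
await dockerCompose(root, ["ps", "--format", "json"]);
```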
package/src/shell.test.js
ADDED
@@ -0,0 +1,72 @@
import { describe, it, expect, vi } from "vitest";

vi.mock("execa", () => ({
  execa: vi.fn(() => Promise.resolve({ stdout: "", exitCode: 0 })),
}));

const { execa } = await import("execa");
const { make, dockerCompose } = await import("./shell.js");

describe("shell", () => {
  describe("make", () => {
    it("calls execa with make, target, and cwd", async () => {
      await make("/project", "up");
      expect(execa).toHaveBeenCalledWith("make", ["up"], { cwd: "/project", stdio: "inherit" });
    });

    it("passes extra args", async () => {
      await make("/project", "logs", ["-f"]);
      expect(execa).toHaveBeenCalledWith("make", ["logs", "-f"], { cwd: "/project", stdio: "inherit" });
    });

    it("uses empty args by default", async () => {
      await make("/root", "build");
      expect(execa).toHaveBeenCalledWith("make", ["build"], { cwd: "/root", stdio: "inherit" });
    });

    it("propagates execa rejection", async () => {
      execa.mockRejectedValueOnce(new Error("make failed"));
      await expect(make("/project", "bad-target")).rejects.toThrow("make failed");
    });

    it("passes multiple args correctly", async () => {
      await make("/project", "deploy", ["--env=prod", "--verbose", "--dry-run"]);
      expect(execa).toHaveBeenCalledWith(
        "make",
        ["deploy", "--env=prod", "--verbose", "--dry-run"],
        { cwd: "/project", stdio: "inherit" }
      );
    });
  });

  describe("dockerCompose", () => {
    it("calls execa with docker compose and args", async () => {
      await dockerCompose("/project", ["ps", "--format", "json"]);
      expect(execa).toHaveBeenCalledWith("docker", ["compose", "ps", "--format", "json"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });

    it("prefixes args with compose subcommand", async () => {
      await dockerCompose("/project", ["up", "-d"]);
      expect(execa).toHaveBeenCalledWith("docker", ["compose", "up", "-d"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });

    it("propagates execa rejection", async () => {
      execa.mockRejectedValueOnce(new Error("docker failed"));
      await expect(dockerCompose("/project", ["up"])).rejects.toThrow("docker failed");
    });

    it("handles empty args array", async () => {
      await dockerCompose("/project", []);
      expect(execa).toHaveBeenCalledWith("docker", ["compose"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });
  });
});
package/src/skills/foundation/SKILL.md
ADDED
@@ -0,0 +1,107 @@
---
name: foundation-stack
description: Managing the Foundation data mesh stack with fops
---
## Foundation Stack Management

### Lifecycle

Start the full stack:
```bash
fops up
```

Stop all services:
```bash
fops down
```

Stop and remove all volumes (clean slate):
```bash
fops down --clean
```

Check what's running:
```bash
fops status
```

### Debugging

Run full environment diagnostics:
```bash
fops doctor
```

Auto-fix detected issues:
```bash
fops doctor --fix
```

Tail logs for all services:
```bash
fops logs
```

Tail logs for a specific service:
```bash
fops logs backend
fops logs frontend
fops logs postgres
```

### Suggesting Commands

Always suggest **2–3 commands** in separate fenced blocks so the user can choose. Pair the primary action with a useful follow-up:

- Restart → then logs: `fops restart kafka` + `fops logs kafka`
- Debug → then fix: `fops doctor` + `fops doctor --fix`
- Status → then logs: `fops status` + `fops logs`
- Setup → then verify: `fops init` + `fops doctor`

Never suggest only 1 command when a follow-up would be useful.

### Stale Images

If an image is more than 7 days old, it's likely stale — dependencies may have changed. When you see errors about missing packages, commands not found, or broken virtualenvs, **always check image ages first** and suggest a rebuild:
```bash
docker compose build --pull
```
```bash
fops doctor
```

### Common Issues

**Containers stuck in "unhealthy"**: Check logs for the specific service, then restart. Often caused by a dependency not being ready yet — `fops down` then `fops up` usually resolves it.

**Port conflicts**: Run `fops doctor` to see which ports are in use. Kill the conflicting process or change the port mapping in `.env`.

**ECR auth expired**: Run `fops login` or `fops doctor --fix` to re-authenticate with AWS ECR.

**Missing .env**: Run `fops setup` to regenerate from `.env.example`.

**Submodules out of date**: Run `fops setup` to re-init and pull submodules.

### Services & Ports

| Service        | Port  | Purpose                 |
|----------------|-------|-------------------------|
| Backend        | 9001  | Core API server         |
| Frontend       | 3002  | Web UI                  |
| Storage Engine | 9002  | Object storage layer    |
| Trino          | 8081  | Distributed SQL engine  |
| OPA            | 8181  | Policy engine           |
| Kafka          | 9092  | Event streaming         |
| Postgres       | 5432  | Metadata database       |
| Hive Metastore | 9083  | Table metadata catalog  |
| Vault          | 18201 | Secrets management      |

### First-Time Setup

```bash
npm install -g @meshxdata/fops
fops init
fops up
fops doctor
```
package/src/ui/banner.js
ADDED
@@ -0,0 +1,56 @@
import { PKG } from "../config.js";

// Fun AI/OPS quotes
export const QUOTES = [
  // Mr Robot vibes
  "Control is an illusion. But root access is real.",
  "The world is a zero-day. We're just living in it.",
  "I don't debug. I interrogate code until it confesses.",
  "Every container is a prison. Some are just nicer than others.",
  "sudo rm -rf doubt",
  "Trust no process. Verify all containers.",
  "The logs never lie. But they do omit.",
  "Every daemon was once an angel.",
  "First rule of production: you don't push on Friday.",
  // Classic ops humor
  "It works on my container™",
  "Have you tried turning the pod off and on again?",
  "99.9% uptime means 8.7 hours of chaos per year",
  "YAML: Yet Another Markup Landmine",
  "There's no place like 127.0.0.1",
  "Docker: because 'it works on my machine' wasn't enough",
  "The cloud is just someone else's computer having a bad day",
  "sudo make me a sandwich",
  "I don't always test my code, but when I do, I do it in production",
  "Containers: VMs but make it fashion",
  "Keep calm and kubectl apply -f",
  "In a world of microservices, be a monolith of calm",
  "Terraform: Infrastructure as YOLO",
  "The best time to fix prod was yesterday. The second best is now.",
  "May your builds be green and your deploys be boring",
  "Observability: watching things break, but fancier",
];

export function getRandomQuote() {
  return QUOTES[Math.floor(Math.random() * QUOTES.length)];
}

// Banner ASCII art
export const BANNER = `
███████╗ ██████╗ ██████╗ ███████╗
██╔════╝██╔═══██╗██╔══██╗██╔════╝
█████╗  ██║   ██║██████╔╝███████╗
██╔══╝  ██║   ██║██╔═══╝ ╚════██║
██║     ╚██████╔╝██║     ███████║
╚═╝      ╚═════╝ ╚═╝     ╚══════╝
`;

/**
 * Render the Foundation banner (static, no ink)
 */
export function renderBanner() {
  console.log("\x1b[36m" + BANNER + "\x1b[0m");
  console.log("  \x1b[1mFoundation OPS CLI\x1b[0m");
  console.log("  \x1b[90mv" + PKG.version + " • /exit to quit • ↑↓ history\x1b[0m");
  console.log("\n  \x1b[3;33m\"" + getRandomQuote() + "\"\x1b[0m\n");
}
package/src/ui/banner.test.js
ADDED
@@ -0,0 +1,97 @@
import { describe, it, expect, vi } from "vitest";
import { QUOTES, getRandomQuote, BANNER, renderBanner } from "./banner.js";

describe("ui/banner", () => {
  describe("QUOTES", () => {
    it("is a non-empty array of strings", () => {
      expect(Array.isArray(QUOTES)).toBe(true);
      expect(QUOTES.length).toBeGreaterThan(10);
      for (const q of QUOTES) {
        expect(typeof q).toBe("string");
        expect(q.length).toBeGreaterThan(0);
      }
    });

    it("has no duplicate quotes", () => {
      const unique = new Set(QUOTES);
      expect(unique.size).toBe(QUOTES.length);
    });

    it("includes Mr Robot themed quotes", () => {
      const hasMrRobot = QUOTES.some((q) => q.includes("zero-day") || q.includes("daemon") || q.includes("root access"));
      expect(hasMrRobot).toBe(true);
    });

    it("includes ops humor quotes", () => {
      const hasOps = QUOTES.some((q) => q.includes("container") || q.includes("production") || q.includes("Docker"));
      expect(hasOps).toBe(true);
    });
  });

  describe("getRandomQuote", () => {
    it("returns a string from QUOTES", () => {
      const quote = getRandomQuote();
      expect(typeof quote).toBe("string");
      expect(QUOTES).toContain(quote);
    });

    it("returns a value on every call", () => {
      for (let i = 0; i < 20; i++) {
        const quote = getRandomQuote();
        expect(typeof quote).toBe("string");
        expect(quote.length).toBeGreaterThan(0);
      }
    });
  });

  describe("BANNER", () => {
    it("contains ASCII art block characters", () => {
      expect(BANNER).toContain("█");
      expect(BANNER).toContain("╗");
      expect(BANNER).toContain("╚");
      expect(BANNER).toContain("═");
    });

    it("is multi-line", () => {
      const lines = BANNER.split("\n").filter((l) => l.trim());
      expect(lines.length).toBeGreaterThanOrEqual(5);
    });

    it("has substantial length", () => {
      expect(BANNER.length).toBeGreaterThan(100);
    });
  });

  describe("renderBanner", () => {
    it("prints banner to stdout", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      expect(spy).toHaveBeenCalled();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toContain("Foundation OPS CLI");
    });

    it("displays version", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toMatch(/v\d+\.\d+/);
    });

    it("displays exit instructions", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toContain("/exit");
    });

    it("displays a quote", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      // At least one quote should be present
      const hasQuote = QUOTES.some((q) => output.includes(q));
      expect(hasQuote).toBe(true);
    });
  });
});