@meshxdata/fops 0.0.1 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of @meshxdata/fops might be problematic.
- package/README.md +62 -40
- package/package.json +4 -3
- package/src/agent/agent.js +161 -68
- package/src/agent/agents.js +224 -0
- package/src/agent/context.js +287 -96
- package/src/agent/index.js +1 -0
- package/src/agent/llm.js +134 -20
- package/src/auth/coda.js +128 -0
- package/src/auth/index.js +1 -0
- package/src/auth/login.js +13 -13
- package/src/auth/oauth.js +4 -4
- package/src/commands/index.js +94 -21
- package/src/config.js +2 -2
- package/src/doctor.js +208 -22
- package/src/feature-flags.js +197 -0
- package/src/plugins/api.js +23 -0
- package/src/plugins/builtins/stack-api.js +36 -0
- package/src/plugins/index.js +1 -0
- package/src/plugins/knowledge.js +124 -0
- package/src/plugins/loader.js +67 -0
- package/src/plugins/registry.js +3 -0
- package/src/project.js +20 -1
- package/src/setup/aws.js +7 -7
- package/src/setup/setup.js +18 -12
- package/src/setup/wizard.js +86 -15
- package/src/shell.js +2 -2
- package/src/skills/foundation/SKILL.md +200 -66
- package/src/ui/confirm.js +3 -2
- package/src/ui/input.js +31 -34
- package/src/ui/spinner.js +39 -13
- package/src/ui/streaming.js +2 -2
- package/STRUCTURE.md +0 -43
- package/src/agent/agent.test.js +0 -233
- package/src/agent/context.test.js +0 -81
- package/src/agent/llm.test.js +0 -139
- package/src/auth/keychain.test.js +0 -185
- package/src/auth/login.test.js +0 -192
- package/src/auth/oauth.test.js +0 -118
- package/src/auth/resolve.test.js +0 -153
- package/src/config.test.js +0 -70
- package/src/doctor.test.js +0 -134
- package/src/plugins/api.test.js +0 -95
- package/src/plugins/discovery.test.js +0 -92
- package/src/plugins/hooks.test.js +0 -118
- package/src/plugins/manifest.test.js +0 -106
- package/src/plugins/registry.test.js +0 -43
- package/src/plugins/skills.test.js +0 -173
- package/src/project.test.js +0 -196
- package/src/setup/aws.test.js +0 -280
- package/src/shell.test.js +0 -72
- package/src/ui/banner.test.js +0 -97
- package/src/ui/spinner.test.js +0 -29
package/src/project.test.js
DELETED
@@ -1,196 +0,0 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { hasComposeInDir, isFoundationRoot, findComposeRootUp, rootDir, requireRoot } from "./project.js";

function makeTmpDir() {
  return fs.mkdtempSync(path.join(os.tmpdir(), "fops-test-"));
}

describe("project", () => {
  let tmpDir;

  beforeEach(() => {
    tmpDir = makeTmpDir();
    delete process.env.FOUNDATION_ROOT;
  });

  afterEach(() => {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  });

  describe("hasComposeInDir", () => {
    it("returns false for empty dir", () => {
      expect(hasComposeInDir(tmpDir)).toBe(false);
    });

    it("returns true when docker-compose.yaml exists", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "version: '3'\n");
      expect(hasComposeInDir(tmpDir)).toBe(true);
    });

    it("returns true when docker-compose.yml exists", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yml"), "version: '3'\n");
      expect(hasComposeInDir(tmpDir)).toBe(true);
    });

    it("returns false when only unrelated files exist", () => {
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      fs.writeFileSync(path.join(tmpDir, "README.md"), "");
      expect(hasComposeInDir(tmpDir)).toBe(false);
    });

    it("returns true when both .yaml and .yml exist", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yml"), "");
      expect(hasComposeInDir(tmpDir)).toBe(true);
    });
  });

  describe("isFoundationRoot", () => {
    it("returns false with only docker-compose", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      expect(isFoundationRoot(tmpDir)).toBe(false);
    });

    it("returns false with only Makefile", () => {
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(isFoundationRoot(tmpDir)).toBe(false);
    });

    it("returns true with both docker-compose.yaml and Makefile", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(isFoundationRoot(tmpDir)).toBe(true);
    });

    it("returns true with docker-compose.yml and Makefile", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(isFoundationRoot(tmpDir)).toBe(true);
    });

    it("returns false for empty dir", () => {
      expect(isFoundationRoot(tmpDir)).toBe(false);
    });
  });

  describe("findComposeRootUp", () => {
    it("returns null when nothing found", () => {
      expect(findComposeRootUp(tmpDir)).toBe(null);
    });

    it("finds root in current dir", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(findComposeRootUp(tmpDir)).toBe(path.resolve(tmpDir));
    });

    it("finds root in parent dir", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      const child = path.join(tmpDir, "subdir");
      fs.mkdirSync(child);
      expect(findComposeRootUp(child)).toBe(path.resolve(tmpDir));
    });

    it("finds root in grandparent dir", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      const child = path.join(tmpDir, "a", "b");
      fs.mkdirSync(child, { recursive: true });
      expect(findComposeRootUp(child)).toBe(path.resolve(tmpDir));
    });

    it("resolves relative paths", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      // Uses path.resolve internally
      expect(findComposeRootUp(tmpDir + "/./")).toBe(path.resolve(tmpDir));
    });
  });

  describe("rootDir", () => {
    it("returns null for empty dir tree", () => {
      const emptyDir = makeTmpDir();
      try {
        expect(rootDir(emptyDir)).toBe(null);
      } finally {
        fs.rmSync(emptyDir, { recursive: true, force: true });
      }
    });

    it("uses FOUNDATION_ROOT env var when set", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      process.env.FOUNDATION_ROOT = tmpDir;
      expect(rootDir("/nonexistent")).toBe(path.resolve(tmpDir));
    });

    it("ignores FOUNDATION_ROOT when path does not exist", () => {
      process.env.FOUNDATION_ROOT = "/nonexistent/path/that/does/not/exist";
      // Should fall through to cwd-based detection
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(rootDir(tmpDir)).toBe(path.resolve(tmpDir));
    });

    it("detects root in cwd", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      expect(rootDir(tmpDir)).toBe(path.resolve(tmpDir));
    });

    it("detects root one level down", () => {
      const child = path.join(tmpDir, "foundation-compose");
      fs.mkdirSync(child);
      fs.writeFileSync(path.join(child, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(child, "Makefile"), "");
      expect(rootDir(tmpDir)).toBe(path.resolve(child));
    });

    it("prefers cwd match over child dir", () => {
      // If cwd itself is a root, don't look into children
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      const child = path.join(tmpDir, "nested");
      fs.mkdirSync(child);
      fs.writeFileSync(path.join(child, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(child, "Makefile"), "");
      expect(rootDir(tmpDir)).toBe(path.resolve(tmpDir));
    });

    it("skips files when scanning one level down", () => {
      // Only directories should be checked, not files
      fs.writeFileSync(path.join(tmpDir, "some-file.txt"), "");
      expect(rootDir(tmpDir)).toBe(null);
    });
  });

  describe("requireRoot", () => {
    it("returns root when project exists", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "");
      fs.writeFileSync(path.join(tmpDir, "Makefile"), "");
      process.env.FOUNDATION_ROOT = tmpDir;
      const mockProgram = { error: vi.fn() };
      const result = requireRoot(mockProgram);
      expect(result).toBe(path.resolve(tmpDir));
      expect(mockProgram.error).not.toHaveBeenCalled();
    });

    it("calls program.error when no project found", () => {
      process.env.FOUNDATION_ROOT = "";
      delete process.env.FOUNDATION_ROOT;
      const spy = vi.spyOn(console, "error").mockImplementation(() => {});
      const mockProgram = { error: vi.fn() };
      // rootDir will search from cwd which is the operator-cli dir (a real project)
      // So we need to make sure FOUNDATION_ROOT doesn't point to anything
      // and cwd doesn't have the files. We mock rootDir behavior indirectly.
      // For this test, just verify the function signature works.
      requireRoot(mockProgram);
      // If real project found, no error. If not, error called.
      // Either way, function returns something or calls error.
    });
  });
});
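Note: the deleted tests above pin down the root-detection contract of package/src/project.js (a directory is a foundation root when it holds docker-compose.yaml or docker-compose.yml plus a Makefile; FOUNDATION_ROOT overrides detection only when the path exists; otherwise detection searches upward and then one directory level down). The sketch below is a minimal implementation consistent with those tests; it is an assumption reconstructed from the test expectations, not the package's published source, and requireRoot is omitted.

// Hypothetical sketch inferred from the deleted tests; not the published @meshxdata/fops source.
import fs from "node:fs";
import path from "node:path";

export function hasComposeInDir(dir) {
  // Either compose filename counts (the tests accept .yaml or .yml).
  return ["docker-compose.yaml", "docker-compose.yml"].some((f) =>
    fs.existsSync(path.join(dir, f))
  );
}

export function isFoundationRoot(dir) {
  // The tests require a compose file AND a Makefile in the same directory.
  return hasComposeInDir(dir) && fs.existsSync(path.join(dir, "Makefile"));
}

export function findComposeRootUp(startDir) {
  // Walk upward until a foundation root is found; resolve relative input first.
  let dir = path.resolve(startDir);
  while (true) {
    if (isFoundationRoot(dir)) return dir;
    const parent = path.dirname(dir);
    if (parent === dir) return null; // reached the filesystem root
    dir = parent;
  }
}

export function rootDir(cwd = process.cwd()) {
  // FOUNDATION_ROOT wins when it points at an existing path.
  const envRoot = process.env.FOUNDATION_ROOT;
  if (envRoot && fs.existsSync(envRoot)) return path.resolve(envRoot);
  // Otherwise search upward from cwd, then one level down into child directories only.
  const up = findComposeRootUp(cwd);
  if (up) return up;
  if (!fs.existsSync(cwd)) return null;
  for (const entry of fs.readdirSync(cwd, { withFileTypes: true })) {
    const candidate = path.join(cwd, entry.name);
    if (entry.isDirectory() && isFoundationRoot(candidate)) return path.resolve(candidate);
  }
  return null;
}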
package/src/setup/aws.test.js
DELETED
@@ -1,280 +0,0 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";

describe("setup/aws", () => {
  let tmpDir;

  beforeEach(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "fops-aws-"));
  });

  afterEach(() => {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  });

  describe("readFopsConfig / saveFopsConfig", () => {
    it("readFopsConfig returns empty object when file does not exist", () => {
      const configPath = path.join(tmpDir, ".fops.json");
      expect(fs.existsSync(configPath)).toBe(false);
      let config = {};
      try {
        if (fs.existsSync(configPath)) {
          config = JSON.parse(fs.readFileSync(configPath, "utf8"));
        }
      } catch {}
      expect(config).toEqual({});
    });

    it("saveFopsConfig writes valid JSON", () => {
      const configPath = path.join(tmpDir, ".fops.json");
      const data = { aws: { profile: "dev" } };
      fs.writeFileSync(configPath, JSON.stringify(data, null, 2) + "\n", { mode: 0o600 });
      const read = JSON.parse(fs.readFileSync(configPath, "utf8"));
      expect(read).toEqual(data);
    });

    it("readFopsConfig handles corrupted JSON gracefully", () => {
      const configPath = path.join(tmpDir, ".fops.json");
      fs.writeFileSync(configPath, "not valid json{");
      let config = {};
      try {
        if (fs.existsSync(configPath)) {
          config = JSON.parse(fs.readFileSync(configPath, "utf8"));
        }
      } catch {}
      expect(config).toEqual({});
    });

    it("round-trips nested config", () => {
      const configPath = path.join(tmpDir, ".fops.json");
      const data = {
        aws: { profile: "dev", region: "us-east-1" },
        plugins: { entries: { "my-plugin": { enabled: true, config: { key: "val" } } } },
      };
      fs.writeFileSync(configPath, JSON.stringify(data, null, 2) + "\n");
      const read = JSON.parse(fs.readFileSync(configPath, "utf8"));
      expect(read).toEqual(data);
    });
  });

  describe("detectEcrRegistry", () => {
    let detectEcrRegistry;

    beforeEach(async () => {
      ({ detectEcrRegistry } = await import("./aws.js"));
    });

    it("returns null when no docker-compose.yaml", () => {
      expect(detectEcrRegistry(tmpDir)).toBe(null);
    });

    it("returns null when no ECR URL in compose", () => {
      fs.writeFileSync(path.join(tmpDir, "docker-compose.yaml"), "version: '3'\nservices:\n web:\n image: nginx\n");
      expect(detectEcrRegistry(tmpDir)).toBe(null);
    });

    it("extracts ECR info from docker-compose.yaml", () => {
      fs.writeFileSync(
        path.join(tmpDir, "docker-compose.yaml"),
        `services:\n app:\n image: 123456789012.dkr.ecr.us-east-1.amazonaws.com/my-app:latest\n`
      );
      const result = detectEcrRegistry(tmpDir);
      expect(result).toEqual({ accountId: "123456789012", region: "us-east-1" });
    });

    it("extracts different regions", () => {
      fs.writeFileSync(
        path.join(tmpDir, "docker-compose.yaml"),
        `services:\n app:\n image: 987654321098.dkr.ecr.eu-west-1.amazonaws.com/app:v1\n`
      );
      const result = detectEcrRegistry(tmpDir);
      expect(result).toEqual({ accountId: "987654321098", region: "eu-west-1" });
    });

    it("handles multiple ECR references (returns first match)", () => {
      fs.writeFileSync(
        path.join(tmpDir, "docker-compose.yaml"),
        [
          "services:",
          " app1:",
          " image: 111111111111.dkr.ecr.us-east-1.amazonaws.com/app1:latest",
          " app2:",
          " image: 222222222222.dkr.ecr.eu-west-1.amazonaws.com/app2:latest",
        ].join("\n")
      );
      const result = detectEcrRegistry(tmpDir);
      expect(result.accountId).toBe("111111111111");
    });

    it("returns null for compose with no image directives", () => {
      fs.writeFileSync(
        path.join(tmpDir, "docker-compose.yaml"),
        "services:\n app:\n build: .\n"
      );
      expect(detectEcrRegistry(tmpDir)).toBe(null);
    });
  });

  describe("detectAwsSsoProfiles", () => {
    let detectAwsSsoProfiles;

    beforeEach(async () => {
      ({ detectAwsSsoProfiles } = await import("./aws.js"));
    });

    it("returns an array", () => {
      const result = detectAwsSsoProfiles();
      expect(Array.isArray(result)).toBe(true);
    });
  });

  describe("detectAwsSsoProfiles parsing logic", () => {
    it("parses SSO profiles from aws config content", () => {
      const configContent = `[profile dev]
sso_session = meshx
sso_account_id = 123456789012
sso_role_name = AdministratorAccess
region = us-east-1

[sso-session meshx]
sso_start_url = https://meshx.awsapps.com/start
sso_region = us-east-1

[profile staging]
sso_session = meshx
sso_account_id = 987654321098
sso_role_name = ReadOnly
region = us-west-2
`;
      // Inline the parsing logic (same as detectAwsSsoProfiles)
      const content = configContent;
      const profiles = [];
      let currentProfile = null;
      let currentAttrs = {};
      for (const line of content.split("\n")) {
        const profileMatch = line.match(/^\[profile\s+(.+?)\]/);
        if (profileMatch) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = profileMatch[1];
          currentAttrs = {};
          continue;
        }
        if (line.startsWith("[")) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = null;
          currentAttrs = {};
          continue;
        }
        const kv = line.match(/^\s*(\S+)\s*=\s*(.+)/);
        if (kv && currentProfile) {
          currentAttrs[kv[1]] = kv[2].trim();
        }
      }
      if (currentProfile && currentAttrs.sso_session) {
        profiles.push({ name: currentProfile, ...currentAttrs });
      }

      expect(profiles).toHaveLength(2);
      expect(profiles[0].name).toBe("dev");
      expect(profiles[0].sso_session).toBe("meshx");
      expect(profiles[0].sso_account_id).toBe("123456789012");
      expect(profiles[0].region).toBe("us-east-1");
      expect(profiles[1].name).toBe("staging");
      expect(profiles[1].sso_account_id).toBe("987654321098");
      expect(profiles[1].region).toBe("us-west-2");
    });

    it("skips profiles without sso_session", () => {
      const content = `[profile default]
region = us-east-1
output = json

[profile sso-dev]
sso_session = meshx
region = us-east-1
`;
      const profiles = [];
      let currentProfile = null;
      let currentAttrs = {};
      for (const line of content.split("\n")) {
        const profileMatch = line.match(/^\[profile\s+(.+?)\]/);
        if (profileMatch) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = profileMatch[1];
          currentAttrs = {};
          continue;
        }
        if (line.startsWith("[")) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = null;
          currentAttrs = {};
          continue;
        }
        const kv = line.match(/^\s*(\S+)\s*=\s*(.+)/);
        if (kv && currentProfile) {
          currentAttrs[kv[1]] = kv[2].trim();
        }
      }
      if (currentProfile && currentAttrs.sso_session) {
        profiles.push({ name: currentProfile, ...currentAttrs });
      }

      expect(profiles).toHaveLength(1);
      expect(profiles[0].name).toBe("sso-dev");
    });

    it("handles empty config", () => {
      const profiles = [];
      // No profiles to parse
      expect(profiles).toHaveLength(0);
    });

    it("handles profile at end of file (no trailing section)", () => {
      const content = `[profile last]
sso_session = test
region = us-east-1`;
      const profiles = [];
      let currentProfile = null;
      let currentAttrs = {};
      for (const line of content.split("\n")) {
        const profileMatch = line.match(/^\[profile\s+(.+?)\]/);
        if (profileMatch) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = profileMatch[1];
          currentAttrs = {};
          continue;
        }
        if (line.startsWith("[")) {
          if (currentProfile && currentAttrs.sso_session) {
            profiles.push({ name: currentProfile, ...currentAttrs });
          }
          currentProfile = null;
          currentAttrs = {};
          continue;
        }
        const kv = line.match(/^\s*(\S+)\s*=\s*(.+)/);
        if (kv && currentProfile) {
          currentAttrs[kv[1]] = kv[2].trim();
        }
      }
      if (currentProfile && currentAttrs.sso_session) {
        profiles.push({ name: currentProfile, ...currentAttrs });
      }

      expect(profiles).toHaveLength(1);
      expect(profiles[0].name).toBe("last");
    });
  });
});
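Note: the detectEcrRegistry tests above fix its return shape: { accountId, region } taken from the first <account-id>.dkr.ecr.<region>.amazonaws.com reference found in the project's compose file, or null when there is no compose file or no ECR reference. A minimal detector consistent with those expectations is sketched below; it is an assumption inferred from the tests, not the package's actual code.

// Hypothetical sketch inferred from the deleted tests; not the published @meshxdata/fops source.
import fs from "node:fs";
import path from "node:path";

export function detectEcrRegistry(dir) {
  for (const name of ["docker-compose.yaml", "docker-compose.yml"]) {
    const file = path.join(dir, name);
    if (!fs.existsSync(file)) continue;
    const text = fs.readFileSync(file, "utf8");
    // First <account-id>.dkr.ecr.<region>.amazonaws.com reference wins.
    const m = text.match(/(\d{12})\.dkr\.ecr\.([a-z0-9-]+)\.amazonaws\.com/);
    if (m) return { accountId: m[1], region: m[2] };
  }
  return null;
}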
package/src/shell.test.js
DELETED
@@ -1,72 +0,0 @@
import { describe, it, expect, vi } from "vitest";

vi.mock("execa", () => ({
  execa: vi.fn(() => Promise.resolve({ stdout: "", exitCode: 0 })),
}));

const { execa } = await import("execa");
const { make, dockerCompose } = await import("./shell.js");

describe("shell", () => {
  describe("make", () => {
    it("calls execa with make, target, and cwd", async () => {
      await make("/project", "up");
      expect(execa).toHaveBeenCalledWith("make", ["up"], { cwd: "/project", stdio: "inherit" });
    });

    it("passes extra args", async () => {
      await make("/project", "logs", ["-f"]);
      expect(execa).toHaveBeenCalledWith("make", ["logs", "-f"], { cwd: "/project", stdio: "inherit" });
    });

    it("uses empty args by default", async () => {
      await make("/root", "build");
      expect(execa).toHaveBeenCalledWith("make", ["build"], { cwd: "/root", stdio: "inherit" });
    });

    it("propagates execa rejection", async () => {
      execa.mockRejectedValueOnce(new Error("make failed"));
      await expect(make("/project", "bad-target")).rejects.toThrow("make failed");
    });

    it("passes multiple args correctly", async () => {
      await make("/project", "deploy", ["--env=prod", "--verbose", "--dry-run"]);
      expect(execa).toHaveBeenCalledWith(
        "make",
        ["deploy", "--env=prod", "--verbose", "--dry-run"],
        { cwd: "/project", stdio: "inherit" }
      );
    });
  });

  describe("dockerCompose", () => {
    it("calls execa with docker compose and args", async () => {
      await dockerCompose("/project", ["ps", "--format", "json"]);
      expect(execa).toHaveBeenCalledWith("docker", ["compose", "ps", "--format", "json"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });

    it("prefixes args with compose subcommand", async () => {
      await dockerCompose("/project", ["up", "-d"]);
      expect(execa).toHaveBeenCalledWith("docker", ["compose", "up", "-d"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });

    it("propagates execa rejection", async () => {
      execa.mockRejectedValueOnce(new Error("docker failed"));
      await expect(dockerCompose("/project", ["up"])).rejects.toThrow("docker failed");
    });

    it("handles empty args array", async () => {
      await dockerCompose("/project", []);
      expect(execa).toHaveBeenCalledWith("docker", ["compose"], {
        cwd: "/project",
        stdio: "inherit",
      });
    });
  });
});
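Note: the expectations above fully specify the call shapes of the execa wrappers in package/src/shell.js: make(root, target, args = []) runs "make <target> ...args" and dockerCompose(root, args) runs "docker compose ...args", both in the project root with stdio inherited. A sketch matching those shapes, assumed rather than copied from the published source:

// Hypothetical sketch matching the deleted tests' expectations; not the published @meshxdata/fops source.
import { execa } from "execa";

export function make(root, target, args = []) {
  // make <target> [...args], run in the project root, streaming output to the terminal.
  return execa("make", [target, ...args], { cwd: root, stdio: "inherit" });
}

export function dockerCompose(root, args) {
  // docker compose [...args], run in the project root.
  return execa("docker", ["compose", ...args], { cwd: root, stdio: "inherit" });
}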
package/src/ui/banner.test.js
DELETED
@@ -1,97 +0,0 @@
import { describe, it, expect, vi } from "vitest";
import { QUOTES, getRandomQuote, BANNER, renderBanner } from "./banner.js";

describe("ui/banner", () => {
  describe("QUOTES", () => {
    it("is a non-empty array of strings", () => {
      expect(Array.isArray(QUOTES)).toBe(true);
      expect(QUOTES.length).toBeGreaterThan(10);
      for (const q of QUOTES) {
        expect(typeof q).toBe("string");
        expect(q.length).toBeGreaterThan(0);
      }
    });

    it("has no duplicate quotes", () => {
      const unique = new Set(QUOTES);
      expect(unique.size).toBe(QUOTES.length);
    });

    it("includes Mr Robot themed quotes", () => {
      const hasMrRobot = QUOTES.some((q) => q.includes("zero-day") || q.includes("daemon") || q.includes("root access"));
      expect(hasMrRobot).toBe(true);
    });

    it("includes ops humor quotes", () => {
      const hasOps = QUOTES.some((q) => q.includes("container") || q.includes("production") || q.includes("Docker"));
      expect(hasOps).toBe(true);
    });
  });

  describe("getRandomQuote", () => {
    it("returns a string from QUOTES", () => {
      const quote = getRandomQuote();
      expect(typeof quote).toBe("string");
      expect(QUOTES).toContain(quote);
    });

    it("returns a value on every call", () => {
      for (let i = 0; i < 20; i++) {
        const quote = getRandomQuote();
        expect(typeof quote).toBe("string");
        expect(quote.length).toBeGreaterThan(0);
      }
    });
  });

  describe("BANNER", () => {
    it("contains ASCII art block characters", () => {
      expect(BANNER).toContain("█");
      expect(BANNER).toContain("╗");
      expect(BANNER).toContain("╚");
      expect(BANNER).toContain("═");
    });

    it("is multi-line", () => {
      const lines = BANNER.split("\n").filter((l) => l.trim());
      expect(lines.length).toBeGreaterThanOrEqual(5);
    });

    it("has substantial length", () => {
      expect(BANNER.length).toBeGreaterThan(100);
    });
  });

  describe("renderBanner", () => {
    it("prints banner to stdout", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      expect(spy).toHaveBeenCalled();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toContain("Foundation OPS CLI");
    });

    it("displays version", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toMatch(/v\d+\.\d+/);
    });

    it("displays exit instructions", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      expect(output).toContain("/exit");
    });

    it("displays a quote", () => {
      const spy = vi.spyOn(console, "log").mockImplementation(() => {});
      renderBanner();
      const output = spy.mock.calls.map((c) => c[0]).join("\n");
      // At least one quote should be present
      const hasQuote = QUOTES.some((q) => output.includes(q));
      expect(hasQuote).toBe(true);
    });
  });
});
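Note: the banner tests only constrain exported shapes (a deduplicated QUOTES string array, a multi-line box-drawing BANNER, getRandomQuote returning an element of QUOTES, and renderBanner logging the banner, a version, "/exit", and a quote). The real QUOTES list and banner art live in the package's banner.js; the snippet below only illustrates a getRandomQuote consistent with the tests, with placeholder entries that are not the package's quotes.

// Hypothetical sketch; placeholder quotes, not the package's banner.js contents.
export const QUOTES = [
  "Friends don't let friends debug in production.", // placeholder
  "Every container dreams of being stateless.",     // placeholder
];

export function getRandomQuote() {
  // Uniform random pick; any element of QUOTES satisfies the deleted tests.
  return QUOTES[Math.floor(Math.random() * QUOTES.length)];
}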
package/src/ui/spinner.test.js
DELETED
@@ -1,29 +0,0 @@
import { describe, it, expect } from "vitest";
import { VERBS } from "./spinner.js";

describe("ui/spinner", () => {
  describe("VERBS", () => {
    it("is a non-empty array of strings", () => {
      expect(Array.isArray(VERBS)).toBe(true);
      expect(VERBS.length).toBeGreaterThan(5);
      for (const v of VERBS) {
        expect(typeof v).toBe("string");
        expect(v.length).toBeGreaterThan(0);
      }
    });

    it("has no duplicates", () => {
      const unique = new Set(VERBS);
      expect(unique.size).toBe(VERBS.length);
    });

    it("contains hacker-themed verbs", () => {
      const hasHacker = VERBS.some((v) =>
        v.toLowerCase().includes("hack") ||
        v.toLowerCase().includes("decrypt") ||
        v.toLowerCase().includes("crack")
      );
      expect(hasHacker).toBe(true);
    });
  });
});