@usezombie/zombiectl 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +76 -0
- package/bin/zombiectl.js +11 -0
- package/bun.lock +29 -0
- package/package.json +28 -0
- package/scripts/run-tests.mjs +38 -0
- package/src/cli.js +275 -0
- package/src/commands/admin.js +39 -0
- package/src/commands/agent.js +98 -0
- package/src/commands/agent_harness.js +43 -0
- package/src/commands/agent_improvement_report.js +42 -0
- package/src/commands/agent_profile.js +39 -0
- package/src/commands/agent_proposals.js +158 -0
- package/src/commands/agent_scores.js +44 -0
- package/src/commands/core-ops.js +108 -0
- package/src/commands/core.js +537 -0
- package/src/commands/harness.js +35 -0
- package/src/commands/harness_activate.js +53 -0
- package/src/commands/harness_active.js +32 -0
- package/src/commands/harness_compile.js +40 -0
- package/src/commands/harness_source.js +72 -0
- package/src/commands/run_preview.js +212 -0
- package/src/commands/run_preview_walk.js +1 -0
- package/src/commands/runs.js +35 -0
- package/src/commands/spec_init.js +287 -0
- package/src/commands/workspace_billing.js +26 -0
- package/src/constants/error-codes.js +1 -0
- package/src/lib/agent-loop.js +106 -0
- package/src/lib/analytics.js +114 -0
- package/src/lib/api-paths.js +2 -0
- package/src/lib/browser.js +96 -0
- package/src/lib/http.js +149 -0
- package/src/lib/sse-parser.js +50 -0
- package/src/lib/state.js +67 -0
- package/src/lib/tool-executors.js +110 -0
- package/src/lib/walk-dir.js +41 -0
- package/src/program/args.js +95 -0
- package/src/program/auth-guard.js +12 -0
- package/src/program/auth-token.js +44 -0
- package/src/program/banner.js +46 -0
- package/src/program/command-registry.js +17 -0
- package/src/program/http-client.js +38 -0
- package/src/program/io.js +83 -0
- package/src/program/routes.js +20 -0
- package/src/program/suggest.js +76 -0
- package/src/program/validate.js +24 -0
- package/src/ui-progress.js +59 -0
- package/src/ui-theme.js +62 -0
- package/test/admin_config.unit.test.js +25 -0
- package/test/agent-loop.unit.test.js +497 -0
- package/test/agent_harness.unit.test.js +52 -0
- package/test/agent_improvement_report.unit.test.js +74 -0
- package/test/agent_profile.unit.test.js +156 -0
- package/test/agent_proposals.unit.test.js +167 -0
- package/test/agent_scores.unit.test.js +220 -0
- package/test/analytics.unit.test.js +41 -0
- package/test/args.unit.test.js +69 -0
- package/test/auth-guard.test.js +33 -0
- package/test/auth-token.unit.test.js +112 -0
- package/test/banner.unit.test.js +442 -0
- package/test/browser.unit.test.js +16 -0
- package/test/cli-analytics.unit.test.js +296 -0
- package/test/did-you-mean.integration.test.js +76 -0
- package/test/doctor-json.test.js +81 -0
- package/test/error-codes.unit.test.js +7 -0
- package/test/harness-command.unit.test.js +180 -0
- package/test/harness-compile.test.js +81 -0
- package/test/harness-lifecycle.integration.test.js +339 -0
- package/test/harness-source-put.test.js +72 -0
- package/test/harness_activate.unit.test.js +48 -0
- package/test/harness_active.unit.test.js +53 -0
- package/test/harness_compile.unit.test.js +54 -0
- package/test/harness_source.unit.test.js +59 -0
- package/test/help.test.js +276 -0
- package/test/helpers-fs.js +32 -0
- package/test/helpers.js +31 -0
- package/test/io.unit.test.js +57 -0
- package/test/login.unit.test.js +115 -0
- package/test/logout.unit.test.js +65 -0
- package/test/parse.test.js +16 -0
- package/test/run-preview.edge.test.js +422 -0
- package/test/run-preview.integration.test.js +135 -0
- package/test/run-preview.security.test.js +246 -0
- package/test/run-preview.unit.test.js +131 -0
- package/test/run.unit.test.js +149 -0
- package/test/runs-cancel.unit.test.js +288 -0
- package/test/runs-list.unit.test.js +105 -0
- package/test/skill-secret.unit.test.js +94 -0
- package/test/spec-init.edge.test.js +232 -0
- package/test/spec-init.integration.test.js +128 -0
- package/test/spec-init.security.test.js +285 -0
- package/test/spec-init.unit.test.js +160 -0
- package/test/specs-sync.unit.test.js +164 -0
- package/test/sse-parser.unit.test.js +54 -0
- package/test/state.unit.test.js +34 -0
- package/test/streamfetch.unit.test.js +211 -0
- package/test/suggest.test.js +75 -0
- package/test/tool-executors.unit.test.js +165 -0
- package/test/validate.test.js +81 -0
- package/test/workspace-add.test.js +106 -0
- package/test/workspace.unit.test.js +230 -0
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { levenshteinDistance, suggestCommand } from "../src/program/suggest.js";
|
|
3
|
+
|
|
4
|
+
// Unit tests for the plain Levenshtein edit-distance helper.
// Note: plain Levenshtein has no transposition operation (that would be
// Damerau-Levenshtein), so swapping two adjacent characters costs 2 edits.
describe("levenshteinDistance", () => {
  test("identical strings return 0", () => {
    expect(levenshteinDistance("login", "login")).toBe(0);
  });

  test("single character substitution costs 1", () => {
    // "login" -> "logan": one substitution (i -> a).
    expect(levenshteinDistance("login", "logan")).toBe(1);
  });

  // Previously titled "single character difference", which was misleading:
  // "loign" is an adjacent transposition of "login" and costs 2 edits
  // (one deletion + one insertion) under plain Levenshtein.
  test("adjacent transposition costs 2 edits", () => {
    expect(levenshteinDistance("login", "loign")).toBe(2);
  });

  test("empty string vs non-empty", () => {
    // Distance from "" is the length of the other string (all insertions).
    expect(levenshteinDistance("", "abc")).toBe(3);
  });

  test("both empty", () => {
    expect(levenshteinDistance("", "")).toBe(0);
  });

  test("completely different strings", () => {
    // Equal-length disjoint strings: one substitution per position.
    expect(levenshteinDistance("abc", "xyz")).toBe(3);
  });
});
|
|
25
|
+
|
|
26
|
+
// Tests for the "did you mean" command suggester. The contract exercised here:
// suggestions exclude exact matches (distance 0), are sorted by ascending edit
// distance, and return [] when nothing is within the suggestion threshold.
describe("suggestCommand", () => {
  test("exact match returns empty for distant input", () => {
    // "doctor" is far enough from all other commands to return only itself-related matches
    const result = suggestCommand("doctor");
    // exact match has distance 0, so it should be excluded
    expect(result).not.toContain("doctor");
  });

  test("close typo 'loign' suggests 'login'", () => {
    const result = suggestCommand("loign");
    expect(result).toContain("login");
  });

  test("close typo 'logut' suggests 'logout'", () => {
    const result = suggestCommand("logut");
    expect(result).toContain("logout");
  });

  test("multi-word: 'workspace ad' suggests 'workspace add'", () => {
    // Suggester apparently handles multi-word command paths, not just
    // top-level names — TODO confirm against src/program/suggest.js.
    const result = suggestCommand("workspace ad");
    expect(result).toContain("workspace add");
  });

  test("no match for very different input", () => {
    // Input beyond the distance threshold for every known command.
    const result = suggestCommand("zzzzzzzzzzzzzzzzz");
    expect(result).toEqual([]);
  });

  test("multiple suggestions sorted by distance", () => {
    const result = suggestCommand("run");
    // "run" is an exact top-level match (distance 0, excluded)
    // but "runs list" and "run status" may be close
    // All results should be sorted by ascending distance
    for (let i = 1; i < result.length; i++) {
      const dPrev = levenshteinDistance("run", result[i - 1]);
      const dCurr = levenshteinDistance("run", result[i]);
      expect(dCurr).toBeGreaterThanOrEqual(dPrev);
    }
  });

  test("'harnes' suggests 'harness'", () => {
    const result = suggestCommand("harnes");
    expect(result).toContain("harness");
  });

  test("'doctr' suggests 'doctor'", () => {
    const result = suggestCommand("doctr");
    expect(result).toContain("doctor");
  });
});
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
|
|
2
|
+
import { mkdirSync, writeFileSync, rmSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { validatePath, executeTool } from "../src/lib/tool-executors.js";
|
|
5
|
+
|
|
6
|
+
// Create a unique throwaway directory next to this test file.
// Defect fixed: Date.now() alone can collide when two directories are
// created within the same millisecond (e.g. consecutive beforeEach hooks),
// which would make one test's afterEach rmSync delete another test's
// fixtures. A random suffix keeps each call unique.
// NOTE: import.meta.dir is a Bun-specific property (node has import.meta.dirname).
function makeTmp() {
  const unique = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
  const dir = join(import.meta.dir, ".tmp-tool-exec-" + unique);
  mkdirSync(dir, { recursive: true });
  return dir;
}
|
|
11
|
+
|
|
12
|
+
// Tests for the repo-root path sandbox used by the agent tool executors.
// Contract: { resolved } on success, { error } when the path escapes the root.
// NOTE(review): the expected "/repo/..." strings assume POSIX path semantics —
// these assertions would need adjusting on Windows; confirm CI platform.
describe("validatePath", () => {
  test("allows path within repo root", () => {
    const result = validatePath("src/main.js", "/repo");
    expect(result.resolved).toBe("/repo/src/main.js");
    expect(result.error).toBeUndefined();
  });

  test("allows repo root itself", () => {
    // "." must resolve to the root without a trailing separator.
    const result = validatePath(".", "/repo");
    expect(result.resolved).toBe("/repo");
    expect(result.error).toBeUndefined();
  });

  test("rejects path traversal with ../", () => {
    const result = validatePath("../../.ssh/id_rsa", "/repo");
    expect(result.error).toBe("path outside repo root");
    // On failure no resolved path may leak out.
    expect(result.resolved).toBeUndefined();
  });

  test("rejects absolute path outside repo", () => {
    const result = validatePath("/etc/passwd", "/repo");
    expect(result.error).toBe("path outside repo root");
  });
});
|
|
36
|
+
|
|
37
|
+
// read_file tool: returns file contents on success, or an "error: ..." string
// (never a throw) so the agent loop can feed failures back to the model.
describe("executeTool — read_file", () => {
  let tmp;
  // Fresh sandbox directory per test; removed even if the test failed.
  beforeEach(() => { tmp = makeTmp(); });
  afterEach(() => { rmSync(tmp, { recursive: true, force: true }); });

  test("reads file content", () => {
    writeFileSync(join(tmp, "hello.txt"), "world");
    const result = executeTool("read_file", { path: "hello.txt" }, tmp);
    expect(result).toBe("world");
  });

  test("returns error for missing file", () => {
    const result = executeTool("read_file", { path: "nope.txt" }, tmp);
    expect(result).toContain("error:");
    expect(result).toContain("not found");
  });

  test("rejects path traversal", () => {
    // Sandbox violation must surface as a string error, not file contents.
    const result = executeTool("read_file", { path: "../../.ssh/id_rsa" }, tmp);
    expect(result).toContain("error: path outside repo root");
  });
});
|
|
59
|
+
|
|
60
|
+
// list_dir tool: lists entries (directories suffixed with "/"), hides .git,
// and enforces the same repo-root sandbox as read_file.
describe("executeTool — list_dir", () => {
  let tmp;
  beforeEach(() => { tmp = makeTmp(); });
  afterEach(() => { rmSync(tmp, { recursive: true, force: true }); });

  test("lists directory entries", () => {
    mkdirSync(join(tmp, "subdir"));
    writeFileSync(join(tmp, "file.txt"), "");
    const result = executeTool("list_dir", { path: "." }, tmp);
    expect(result).toContain("file.txt");
    // Directories are rendered with a trailing slash.
    expect(result).toContain("subdir/");
  });

  test("filters out .git", () => {
    mkdirSync(join(tmp, ".git"));
    writeFileSync(join(tmp, "file.txt"), "");
    const result = executeTool("list_dir", { path: "." }, tmp);
    expect(result).not.toContain(".git");
  });

  test("rejects path traversal", () => {
    const result = executeTool("list_dir", { path: "../.." }, tmp);
    expect(result).toContain("error: path outside repo root");
  });
});
|
|
85
|
+
|
|
86
|
+
// glob tool: pattern matching over the sandbox. Semantics pinned here:
// "*" stays within one directory level, "**" crosses levels (including the
// root level), "?" matches exactly one character, dotfiles are matchable,
// .git is always excluded, and zero matches yields the literal "(no matches)".
describe("executeTool — glob", () => {
  let tmp;
  beforeEach(() => { tmp = makeTmp(); });
  afterEach(() => { rmSync(tmp, { recursive: true, force: true }); });

  test("matches files by pattern", () => {
    mkdirSync(join(tmp, "src"), { recursive: true });
    writeFileSync(join(tmp, "src", "main.js"), "");
    writeFileSync(join(tmp, "src", "util.js"), "");
    const result = executeTool("glob", { pattern: "src/*.js" }, tmp);
    expect(result).toContain("src/main.js");
    expect(result).toContain("src/util.js");
  });

  test("returns no matches message", () => {
    // Sentinel string instead of an empty result, so the agent gets feedback.
    const result = executeTool("glob", { pattern: "*.nonexistent" }, tmp);
    expect(result).toBe("(no matches)");
  });

  test("** matches deeply nested files", () => {
    mkdirSync(join(tmp, "a", "b", "c"), { recursive: true });
    writeFileSync(join(tmp, "a", "b", "c", "deep.js"), "");
    writeFileSync(join(tmp, "a", "top.js"), "");
    const result = executeTool("glob", { pattern: "**/*.js" }, tmp);
    expect(result).toContain("a/b/c/deep.js");
    expect(result).toContain("a/top.js");
  });

  test("** matches root-level files", () => {
    // "**/" must also match zero directories (a common globbing edge case).
    mkdirSync(join(tmp, "src"), { recursive: true });
    writeFileSync(join(tmp, "jest.config.js"), "");
    writeFileSync(join(tmp, "src", "nested.js"), "");
    const result = executeTool("glob", { pattern: "**/*.js" }, tmp);
    expect(result).toContain("jest.config.js");
    expect(result).toContain("src/nested.js");
  });

  test("? matches single character", () => {
    writeFileSync(join(tmp, "a.js"), "");
    writeFileSync(join(tmp, "ab.js"), "");
    const result = executeTool("glob", { pattern: "?.js" }, tmp);
    expect(result).toContain("a.js");
    expect(result).not.toContain("ab.js");
  });

  test("excludes .git directory", () => {
    mkdirSync(join(tmp, ".git", "objects"), { recursive: true });
    mkdirSync(join(tmp, "src"), { recursive: true });
    writeFileSync(join(tmp, ".git", "config"), "");
    writeFileSync(join(tmp, "src", "real.js"), "");
    const result = executeTool("glob", { pattern: "**/*.js" }, tmp);
    expect(result).toContain("src/real.js");
    expect(result).not.toContain(".git");
    expect(result).not.toContain("config");
  });

  test("* does not cross directory boundaries", () => {
    mkdirSync(join(tmp, "src"), { recursive: true });
    writeFileSync(join(tmp, "src", "nested.js"), "");
    writeFileSync(join(tmp, "root.js"), "");
    const result = executeTool("glob", { pattern: "*.js" }, tmp);
    expect(result).toContain("root.js");
    expect(result).not.toContain("src/nested.js");
  });

  test("matches dotfiles in pattern", () => {
    // Unlike shell default globbing, an explicit leading "." matches dotfiles.
    writeFileSync(join(tmp, ".env"), "");
    writeFileSync(join(tmp, ".gitignore"), "");
    const result = executeTool("glob", { pattern: ".*" }, tmp);
    expect(result).toContain(".env");
    expect(result).toContain(".gitignore");
  });
});
|
|
159
|
+
|
|
160
|
+
// Unknown tool names produce a string error rather than throwing — note that
// even plausible-sounding tools (write_file) are rejected when unregistered.
describe("executeTool — unknown tool", () => {
  test("returns error for unknown tool name", () => {
    const result = executeTool("write_file", { path: "x" }, "/tmp");
    expect(result).toContain('error: unknown tool "write_file"');
  });
});
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { isValidId, validateRequiredId } from "../src/program/validate.js";
|
|
3
|
+
|
|
4
|
+
// Tests for the generic ID validator. Implied contract (inferred from the
// cases below — confirm against src/program/validate.js): 4–128 characters,
// letters/digits/underscore/dash only; accepts UUIDs in either case as well
// as prefixed IDs like "ws_..."; rejects null/undefined and whitespace/dots.
describe("isValidId", () => {
  test("valid UUID passes", () => {
    expect(isValidId("550e8400-e29b-41d4-a716-446655440000")).toBe(true);
  });

  test("uppercase UUID passes", () => {
    // Validation is case-insensitive for hex digits.
    expect(isValidId("550E8400-E29B-41D4-A716-446655440000")).toBe(true);
  });

  test("empty string fails", () => {
    expect(isValidId("")).toBe(false);
  });

  test("null fails", () => {
    // Non-string inputs must not throw, only return false.
    expect(isValidId(null)).toBe(false);
  });

  test("undefined fails", () => {
    expect(isValidId(undefined)).toBe(false);
  });

  test("too short fails (3 chars)", () => {
    expect(isValidId("abc")).toBe(false);
  });

  test("valid non-UUID ID (alphanumeric) passes", () => {
    expect(isValidId("ws_123456789abc")).toBe(true);
  });

  test("valid non-UUID with dashes passes", () => {
    expect(isValidId("my-workspace-id")).toBe(true);
  });

  test("special characters (spaces) fail", () => {
    expect(isValidId("has spaces")).toBe(false);
  });

  test("special characters (dots) fail", () => {
    expect(isValidId("has.dots")).toBe(false);
  });

  test("4-char minimum passes", () => {
    // Lower length boundary: exactly 4 characters is accepted.
    expect(isValidId("abcd")).toBe(true);
  });

  test("128-char maximum passes", () => {
    // Upper length boundary: exactly 128 characters is accepted.
    expect(isValidId("a".repeat(128))).toBe(true);
  });

  test("129-char string fails", () => {
    expect(isValidId("a".repeat(129))).toBe(false);
  });
});
|
|
57
|
+
|
|
58
|
+
// validateRequiredId wraps isValidId with a user-facing result object:
// { ok: true } on success, { ok: false, message } naming the offending
// parameter and mentioning the expected format on failure.
describe("validateRequiredId", () => {
  test("valid UUID returns ok", () => {
    const result = validateRequiredId("550e8400-e29b-41d4-a716-446655440000", "workspace_id");
    expect(result.ok).toBe(true);
  });

  test("empty string returns error with name", () => {
    // The error message must include the parameter name for usable CLI output.
    const result = validateRequiredId("", "workspace_id");
    expect(result.ok).toBe(false);
    expect(result.message).toContain("workspace_id");
  });

  test("invalid format returns helpful message", () => {
    const result = validateRequiredId("!@#", "run_id");
    expect(result.ok).toBe(false);
    expect(result.message).toContain("run_id");
    // Message should hint at the accepted format (mentions "UUID").
    expect(result.message).toContain("UUID");
  });

  test("valid non-UUID ID returns ok", () => {
    // Prefixed backend IDs (ws_...) are also accepted, not just UUIDs.
    const result = validateRequiredId("ws_123456789abc", "workspace_id");
    expect(result.ok).toBe(true);
  });
});
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { test } from "bun:test";
|
|
2
|
+
import assert from "node:assert/strict";
|
|
3
|
+
import fs from "node:fs/promises";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import path from "node:path";
|
|
6
|
+
import { Writable } from "node:stream";
|
|
7
|
+
import { runCli } from "../src/cli.js";
|
|
8
|
+
import { loadWorkspaces } from "../src/lib/state.js";
|
|
9
|
+
|
|
10
|
+
/**
 * Create a Writable that captures everything written to it in memory.
 *
 * @returns {{ stream: import("node:stream").Writable, read: () => string }}
 *   `stream` to hand to the CLI as stdout/stderr, `read()` to get the
 *   accumulated output as one string.
 */
function bufferStream() {
  const chunks = [];
  const stream = new Writable({
    write(chunk, _encoding, callback) {
      // Buffers and strings alike are stringified and accumulated.
      chunks.push(String(chunk));
      callback();
    },
  });
  return {
    stream,
    read: () => chunks.join(""),
  };
}
|
|
22
|
+
|
|
23
|
+
/**
 * Run `fn(dir)` with ZOMBIE_STATE_DIR pointing at a fresh temp directory.
 *
 * The previous value of the env var is restored afterwards (including the
 * "was unset" case), and the temp directory is removed, even if `fn` throws.
 *
 * @param {(dir: string) => Promise<any>} fn - callback receiving the temp dir
 * @returns {Promise<any>} whatever `fn` resolves to
 */
async function withStateDir(fn) {
  const previous = process.env.ZOMBIE_STATE_DIR;
  const stateDir = await fs.mkdtemp(path.join(os.tmpdir(), "zombiectl-state-"));
  process.env.ZOMBIE_STATE_DIR = stateDir;
  try {
    return await fn(stateDir);
  } finally {
    // Restore exactly: deleting reproduces "unset" rather than "empty string".
    if (previous === undefined) {
      delete process.env.ZOMBIE_STATE_DIR;
    } else {
      process.env.ZOMBIE_STATE_DIR = previous;
    }
    await fs.rm(stateDir, { recursive: true, force: true });
  }
}
|
|
35
|
+
|
|
36
|
+
// Failure atomicity: a 5xx from the workspace-create API must leave local
// state untouched — no workspace entry, no current workspace selected.
test("workspace add does not persist local state when API create fails", async () => {
  await withStateDir(async () => {
    const out = bufferStream();
    const err = bufferStream();

    // Stub fetch: assert the CLI hits the create endpoint, then fail with 500.
    const fetchImpl = async (url, options) => {
      assert.equal(url, "http://localhost:3000/v1/workspaces");
      assert.equal(options.method, "POST");
      return {
        ok: false,
        status: 500,
        statusText: "Internal Server Error",
        text: async () => JSON.stringify({
          error: { code: "INTERNAL_ERROR", message: "Failed to create workspace" },
          request_id: "req_abc123",
        }),
      };
    };

    // BROWSER=false prevents the CLI from trying to open the install URL.
    const code = await runCli(["workspace", "add", "https://github.com/acme/repo"], {
      env: { ...process.env, ZOMBIE_TOKEN: "header.payload.sig", BROWSER: "false" },
      stdout: out.stream,
      stderr: err.stream,
      fetchImpl,
    });

    // Exit 1, with the API error code and request id surfaced on stderr.
    assert.equal(code, 1);
    assert.match(err.read(), /INTERNAL_ERROR/);
    assert.match(err.read(), /request_id: req_abc123/);

    // Local state must still be pristine.
    const workspaces = await loadWorkspaces();
    assert.equal(workspaces.current_workspace_id, null);
    assert.deepEqual(workspaces.items, []);
  });
});
|
|
71
|
+
|
|
72
|
+
// Success path: the backend-assigned workspace_id is persisted locally,
// becomes the current workspace, and --json mode echoes the install URL.
test("workspace add persists backend workspace_id and emits install url in json mode", async () => {
  await withStateDir(async () => {
    const out = bufferStream();
    const err = bufferStream();

    // Stub fetch returning a successful 201 create response.
    const fetchImpl = async () => ({
      ok: true,
      status: 201,
      text: async () => JSON.stringify({
        workspace_id: "ws_123456789abc",
        repo_url: "https://github.com/acme/repo",
        default_branch: "main",
        install_url: "https://github.com/apps/usezombie/installations/new?state=ws_123456789abc",
        request_id: "req_123",
      }),
    });

    const code = await runCli(["--json", "workspace", "add", "https://github.com/acme/repo"], {
      env: { ...process.env, ZOMBIE_TOKEN: "header.payload.sig" },
      stdout: out.stream,
      stderr: err.stream,
      fetchImpl,
    });

    assert.equal(code, 0);
    // In --json mode stdout must be machine-parseable JSON.
    const parsed = JSON.parse(out.read());
    assert.equal(parsed.workspace_id, "ws_123456789abc");
    assert.match(parsed.install_url, /github\.com\/apps\/usezombie\/installations\/new\?state=ws_123456789abc/);

    // The backend id (not a locally generated one) is persisted and selected.
    const workspaces = await loadWorkspaces();
    assert.equal(workspaces.current_workspace_id, "ws_123456789abc");
    assert.equal(workspaces.items.length, 1);
    assert.equal(workspaces.items[0].workspace_id, "ws_123456789abc");
  });
});
|
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { makeNoop, makeBufferStream, ui, WS_ID } from "./helpers.js";
|
|
3
|
+
import { createCoreHandlers } from "../src/commands/core.js";
|
|
4
|
+
|
|
5
|
+
// Second workspace-id fixture; the primary WS_ID comes from ./helpers.js.
const WS_ID_2 = "0195b4ba-8d3a-7f13-8abc-000000000099";
|
|
6
|
+
|
|
7
|
+
/**
 * Build a default dependency bag for createCoreHandlers.
 *
 * Every collaborator is a no-op stub; tests pass `overrides` to capture
 * calls or inject canned responses. Later keys win via spread.
 *
 * @param {object} [overrides] - per-test replacements for individual deps
 * @returns {object} dependency object accepted by createCoreHandlers
 */
function makeDeps(overrides = {}) {
  // Minimal flag parser: "--key value" pairs become options (a flag with no
  // following value becomes `true`); everything else is a positional.
  const parseFlags = (tokens) => {
    const options = {};
    const positionals = [];
    let i = 0;
    while (i < tokens.length) {
      const token = tokens[i];
      if (!token.startsWith("--")) {
        positionals.push(token);
        i += 1;
        continue;
      }
      const key = token.slice(2);
      const value = tokens[i + 1];
      if (value && !value.startsWith("--")) {
        options[key] = value;
        i += 2;
      } else {
        options[key] = true;
        i += 1;
      }
    }
    return { options, positionals };
  };

  const defaults = {
    clearCredentials: async () => {},
    createSpinner: () => ({ start() {}, succeed() {}, fail() {} }),
    newIdempotencyKey: () => "idem_test",
    openUrl: async () => false,
    parseFlags,
    printJson: (_s, v) => {},
    printKeyValue: () => {},
    printTable: () => {},
    request: async () => ({}),
    saveCredentials: async () => {},
    saveWorkspaces: async () => {},
    ui,
    writeLine: (stream, line = "") => stream.write(`${line}\n`),
    apiHeaders: () => ({}),
  };

  return { ...defaults, ...overrides };
}
|
|
38
|
+
|
|
39
|
+
// Tests for the `workspace` subcommand dispatcher (list / remove /
// upgrade-scale). Exit-code convention pinned here: 0 success, 2 usage error;
// API failures propagate as thrown errors.
describe("commandWorkspace", () => {
  test("list shows table", async () => {
    let tableRows = null;
    const deps = makeDeps({
      printTable: (_s, _cols, rows) => { tableRows = rows; },
    });
    const items = [
      { workspace_id: WS_ID, repo_url: "https://github.com/acme/repo" },
      { workspace_id: WS_ID_2, repo_url: "https://github.com/acme/other" },
    ];
    const ctx = { stdout: makeNoop(), stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["list"]);
    expect(code).toBe(0);
    expect(tableRows.length).toBe(2);
    // The currently selected workspace is marked with "*".
    expect(tableRows[0].active).toBe("*");
    expect(tableRows[1].active).toBe("");
  });

  test("list empty state", async () => {
    const out = makeBufferStream();
    const deps = makeDeps({
      printTable: () => {},
    });
    const ctx = { stdout: out.stream, stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: null, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["list"]);
    expect(code).toBe(0);
    // Empty list prints a friendly message instead of an empty table.
    expect(out.read()).toContain("no workspaces");
  });

  test("remove by ID", async () => {
    let savedWs = null;
    const deps = makeDeps({
      saveWorkspaces: async (ws) => { savedWs = ws; },
    });
    const items = [{ workspace_id: WS_ID, repo_url: "https://github.com/acme/repo" }];
    const ctx = { stdout: makeNoop(), stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [...items] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["remove", WS_ID]);
    expect(code).toBe(0);
    expect(savedWs.items.length).toBe(0);
  });

  test("remove updates current workspace", async () => {
    let savedWs = null;
    const deps = makeDeps({
      saveWorkspaces: async (ws) => { savedWs = ws; },
    });
    const items = [
      { workspace_id: WS_ID, repo_url: "https://github.com/acme/repo" },
      { workspace_id: WS_ID_2, repo_url: "https://github.com/acme/other" },
    ];
    const ctx = { stdout: makeNoop(), stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [...items] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["remove", WS_ID]);
    expect(code).toBe(0);
    // Removing the active workspace promotes the remaining one to active.
    expect(savedWs.current_workspace_id).toBe(WS_ID_2);
  });

  test("remove without id returns error", async () => {
    const err = makeBufferStream();
    const deps = makeDeps();
    const ctx = { stdout: makeNoop(), stderr: err.stream, jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: null, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["remove"]);
    // Usage errors exit 2 with a hint on stderr.
    expect(code).toBe(2);
    expect(err.read()).toContain("workspace remove requires");
  });

  test("upgrade-scale requires workspace id", async () => {
    const err = makeBufferStream();
    const deps = makeDeps();
    const ctx = { stdout: makeNoop(), stderr: err.stream, jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: null, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["upgrade-scale"]);
    expect(code).toBe(2);
    expect(err.read()).toContain("workspace upgrade-scale requires --workspace-id");
  });

  test("upgrade-scale requires subscription id", async () => {
    const err = makeBufferStream();
    const deps = makeDeps();
    const ctx = { stdout: makeNoop(), stderr: err.stream, jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID]);
    expect(code).toBe(2);
    expect(err.read()).toContain("workspace upgrade-scale requires --subscription-id");
  });

  test("upgrade-scale calls billing endpoint", async () => {
    const out = makeBufferStream();
    let called = null;
    const deps = makeDeps({
      // Capture the request so we can assert on path and body below.
      request: async (_ctx, reqPath, options) => {
        called = { reqPath, options };
        return { plan_tier: "scale", billing_status: "active", subscription_id: "sub_scale_123" };
      },
    });
    const ctx = { stdout: out.stream, stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID, "--subscription-id", "sub_scale_123"]);
    expect(code).toBe(0);
    expect(called.reqPath).toContain(`/v1/workspaces/${WS_ID}/billing/scale`);
    expect(JSON.parse(called.options.body).subscription_id).toBe("sub_scale_123");
    expect(out.read()).toContain("workspace upgraded to scale");
  });

  test("upgrade-scale with subscription_id as second positional (both positional)", async () => {
    const out = makeBufferStream();
    let called = null;
    const deps = makeDeps({
      request: async (_ctx, reqPath, options) => {
        called = { reqPath, options };
        return { plan_tier: "scale", billing_status: "active", subscription_id: "sub_pos_456" };
      },
    });
    const ctx = { stdout: out.stream, stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    // Both args given positionally: <workspace_id> <subscription_id>.
    const code = await core.commandWorkspace(["upgrade-scale", WS_ID, "sub_pos_456"]);
    expect(code).toBe(0);
    expect(called.reqPath).toContain(`/v1/workspaces/${WS_ID}/billing/scale`);
    expect(JSON.parse(called.options.body).subscription_id).toBe("sub_pos_456");
    const output = out.read();
    expect(output).toContain("workspace upgraded to scale");
    expect(output).toContain("subscription_id: sub_pos_456");
  });

  test("upgrade-scale with --workspace-id flag and bare positional requires --subscription-id", async () => {
    const err = makeBufferStream();
    const deps = makeDeps();
    const ctx = { stdout: makeNoop(), stderr: err.stream, jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    // Mixing flag + bare positional is NOT accepted for the subscription id.
    const code = await core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID, "sub_pos_456"]);
    expect(code).toBe(2);
    expect(err.read()).toContain("requires --subscription-id");
  });

  test("upgrade-scale with null subscription_id in response omits subscription_id line", async () => {
    const out = makeBufferStream();
    const deps = makeDeps({
      request: async () => ({ plan_tier: "scale", billing_status: "active", subscription_id: null }),
    });
    const ctx = { stdout: out.stream, stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID, "--subscription-id", "sub_input_789"]);
    expect(code).toBe(0);
    const output = out.read();
    expect(output).toContain("workspace upgraded to scale");
    expect(output).toContain("plan_tier: scale");
    expect(output).toContain("billing_status: active");
    // A null subscription_id in the response suppresses the line entirely.
    expect(output).not.toContain("subscription_id:");
  });

  test("upgrade-scale in JSON mode prints JSON output", async () => {
    const apiResponse = { plan_tier: "scale", billing_status: "active", subscription_id: "sub_json_001" };
    let jsonOutput = null;
    const deps = makeDeps({
      request: async () => apiResponse,
      printJson: (_s, v) => { jsonOutput = v; },
    });
    const ctx = { stdout: makeNoop(), stderr: makeNoop(), jsonMode: true, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    const code = await core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID, "--subscription-id", "sub_json_001"]);
    expect(code).toBe(0);
    // The API payload is passed through verbatim in --json mode.
    expect(jsonOutput).toEqual(apiResponse);
  });

  test("upgrade-scale when API request throws propagates error", async () => {
    const deps = makeDeps({
      request: async () => { throw new Error("network failure"); },
    });
    const ctx = { stdout: makeNoop(), stderr: makeNoop(), jsonMode: false, env: {} };
    const workspaces = { current_workspace_id: WS_ID, items: [] };
    const core = createCoreHandlers(ctx, workspaces, deps);
    // Network-level failures are not caught here; callers handle them.
    await expect(
      core.commandWorkspace(["upgrade-scale", "--workspace-id", WS_ID, "--subscription-id", "sub_err_999"]),
    ).rejects.toThrow("network failure");
  });
});
|