qualia-framework 3.3.2 → 3.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +13 -2
- package/bin/install.js +28 -5
- package/bin/state.js +363 -43
- package/bin/statusline.js +40 -20
- package/docs/erp-contract.md +40 -1
- package/hooks/auto-update.js +54 -70
- package/hooks/branch-guard.js +64 -6
- package/hooks/migration-guard.js +85 -10
- package/hooks/pre-compact.js +28 -4
- package/hooks/pre-deploy-gate.js +46 -6
- package/hooks/pre-push.js +94 -27
- package/hooks/session-start.js +6 -0
- package/package.json +1 -1
- package/skills/qualia-build/SKILL.md +1 -1
- package/skills/qualia-map/SKILL.md +4 -4
- package/skills/qualia-milestone/SKILL.md +14 -2
- package/skills/qualia-optimize/SKILL.md +4 -4
- package/skills/qualia-quick/SKILL.md +2 -2
- package/skills/qualia-report/SKILL.md +38 -7
- package/skills/qualia-task/SKILL.md +1 -1
- package/skills/qualia-verify/SKILL.md +2 -2
- package/templates/help.html +98 -31
- package/templates/tracking.json +17 -1
- package/tests/runner.js +395 -0
- package/tests/state.test.sh +232 -4
- package/skills/qualia-idk/SKILL.md +0 -8
package/tests/runner.js
CHANGED
|
@@ -814,6 +814,225 @@ waves: 1
|
|
|
814
814
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
815
815
|
}
|
|
816
816
|
});
|
|
817
|
+
|
|
818
|
+
// ─── v3.4.2: init guard ────────────────────────────────
|
|
819
|
+
it("init refuses to clobber an existing project (no --force)", () => {
|
|
820
|
+
const tmpDir = makeProject();
|
|
821
|
+
try {
|
|
822
|
+
const r = spawnSync(process.execPath, [
|
|
823
|
+
path.join(BIN, "state.js"), "init",
|
|
824
|
+
"--project", "TestProject",
|
|
825
|
+
"--phases", '[{"name":"X","goal":"Y"}]',
|
|
826
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
827
|
+
assert.equal(r.status, 1);
|
|
828
|
+
const out = JSON.parse(r.stdout);
|
|
829
|
+
assert.equal(out.error, "ALREADY_INITIALIZED");
|
|
830
|
+
} finally {
|
|
831
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
832
|
+
}
|
|
833
|
+
});
|
|
834
|
+
|
|
835
|
+
it("init --force overwrites an existing project (preserves lifetime)", () => {
|
|
836
|
+
const tmpDir = makeProject();
|
|
837
|
+
try {
|
|
838
|
+
// Seed lifetime via close-milestone first
|
|
839
|
+
const c = spawnSync(process.execPath, [
|
|
840
|
+
path.join(BIN, "state.js"), "close-milestone",
|
|
841
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
842
|
+
assert.equal(c.status, 0);
|
|
843
|
+
const tBefore = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
|
|
844
|
+
assert.ok(tBefore.lifetime.milestones_completed >= 1);
|
|
845
|
+
|
|
846
|
+
const r = spawnSync(process.execPath, [
|
|
847
|
+
path.join(BIN, "state.js"), "init",
|
|
848
|
+
"--project", "TestProject",
|
|
849
|
+
"--phases", '[{"name":"NewFoundation","goal":"X"}]',
|
|
850
|
+
"--force",
|
|
851
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
852
|
+
assert.equal(r.status, 0);
|
|
853
|
+
const tAfter = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
|
|
854
|
+
assert.equal(tAfter.lifetime.milestones_completed, tBefore.lifetime.milestones_completed);
|
|
855
|
+
assert.equal(tAfter.phase, 1);
|
|
856
|
+
assert.equal(tAfter.status, "setup");
|
|
857
|
+
} finally {
|
|
858
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
859
|
+
}
|
|
860
|
+
});
|
|
861
|
+
|
|
862
|
+
// ─── v3.4.2: close-milestone idempotency ───────────────
|
|
863
|
+
it("close-milestone refuses double-close (idempotency)", () => {
|
|
864
|
+
const tmpDir = makeProject();
|
|
865
|
+
try {
|
|
866
|
+
const r1 = spawnSync(process.execPath, [
|
|
867
|
+
path.join(BIN, "state.js"), "close-milestone",
|
|
868
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
869
|
+
assert.equal(r1.status, 0);
|
|
870
|
+
const out1 = JSON.parse(r1.stdout);
|
|
871
|
+
assert.equal(out1.lifetime.milestones_completed, 1);
|
|
872
|
+
|
|
873
|
+
// Manually rewind milestone counter to simulate a re-run on the same closed milestone.
|
|
874
|
+
// (Real close-milestone advances t.milestone, so a true double-close requires
|
|
875
|
+
// putting milestone back to its prior value.)
|
|
876
|
+
const tFile = path.join(tmpDir, ".planning", "tracking.json");
|
|
877
|
+
const t = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
878
|
+
t.milestone = out1.closed_milestone; // rewind
|
|
879
|
+
fs.writeFileSync(tFile, JSON.stringify(t, null, 2) + "\n");
|
|
880
|
+
|
|
881
|
+
const r2 = spawnSync(process.execPath, [
|
|
882
|
+
path.join(BIN, "state.js"), "close-milestone",
|
|
883
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
884
|
+
assert.equal(r2.status, 1);
|
|
885
|
+
const out2 = JSON.parse(r2.stdout);
|
|
886
|
+
assert.equal(out2.error, "ALREADY_CLOSED");
|
|
887
|
+
} finally {
|
|
888
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
889
|
+
}
|
|
890
|
+
});
|
|
891
|
+
|
|
892
|
+
it("close-milestone --force allows re-close", () => {
|
|
893
|
+
const tmpDir = makeProject();
|
|
894
|
+
try {
|
|
895
|
+
const r1 = spawnSync(process.execPath, [
|
|
896
|
+
path.join(BIN, "state.js"), "close-milestone",
|
|
897
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
898
|
+
assert.equal(r1.status, 0);
|
|
899
|
+
|
|
900
|
+
const tFile = path.join(tmpDir, ".planning", "tracking.json");
|
|
901
|
+
const t = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
902
|
+
t.milestone = JSON.parse(r1.stdout).closed_milestone;
|
|
903
|
+
fs.writeFileSync(tFile, JSON.stringify(t, null, 2) + "\n");
|
|
904
|
+
|
|
905
|
+
const r2 = spawnSync(process.execPath, [
|
|
906
|
+
path.join(BIN, "state.js"), "close-milestone", "--force",
|
|
907
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
908
|
+
assert.equal(r2.status, 0);
|
|
909
|
+
const out2 = JSON.parse(r2.stdout);
|
|
910
|
+
assert.equal(out2.lifetime.milestones_completed, 2);
|
|
911
|
+
} finally {
|
|
912
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
913
|
+
}
|
|
914
|
+
});
|
|
915
|
+
|
|
916
|
+
// ─── v3.4.2: backfill never reduces lifetime (Math.max) ─
|
|
917
|
+
it("backfill-lifetime never reduces existing counters", () => {
|
|
918
|
+
const tmpDir = makeProject();
|
|
919
|
+
try {
|
|
920
|
+
// Seed lifetime with high values (simulating prior close-milestone)
|
|
921
|
+
const tFile = path.join(tmpDir, ".planning", "tracking.json");
|
|
922
|
+
const t = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
923
|
+
t.lifetime.tasks_completed = 100;
|
|
924
|
+
t.lifetime.phases_completed = 20;
|
|
925
|
+
fs.writeFileSync(tFile, JSON.stringify(t, null, 2) + "\n");
|
|
926
|
+
|
|
927
|
+
// Backfill on a project with NO completed phases would compute 0/0
|
|
928
|
+
const r = spawnSync(process.execPath, [
|
|
929
|
+
path.join(BIN, "state.js"), "backfill-lifetime",
|
|
930
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
931
|
+
assert.equal(r.status, 0);
|
|
932
|
+
const tAfter = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
933
|
+
assert.equal(tAfter.lifetime.tasks_completed, 100, "backfill must NOT reduce tasks_completed");
|
|
934
|
+
assert.equal(tAfter.lifetime.phases_completed, 20, "backfill must NOT reduce phases_completed");
|
|
935
|
+
} finally {
|
|
936
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
937
|
+
}
|
|
938
|
+
});
|
|
939
|
+
|
|
940
|
+
// ─── v3.4.2: atomic write leaves no .tmp file ──────────
|
|
941
|
+
it("transition leaves no .tmp file on success (atomic write)", () => {
|
|
942
|
+
const tmpDir = makeProject();
|
|
943
|
+
try {
|
|
944
|
+
makeValidPlan(tmpDir, 1);
|
|
945
|
+
const r = runState(["transition", "--to", "planned"], tmpDir);
|
|
946
|
+
assert.equal(r.status, 0);
|
|
947
|
+
const planning = path.join(tmpDir, ".planning");
|
|
948
|
+
const tmps = fs.readdirSync(planning).filter(f => f.includes(".tmp."));
|
|
949
|
+
assert.equal(tmps.length, 0, `Stale .tmp files: ${tmps.join(", ")}`);
|
|
950
|
+
} finally {
|
|
951
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
952
|
+
}
|
|
953
|
+
});
|
|
954
|
+
|
|
955
|
+
// ─── v3.6.0: tracking.json schema additions ────────────
|
|
956
|
+
it("init writes new schema fields (team_id, project_id, build_count, etc.)", () => {
|
|
957
|
+
const tmpDir = makeProject();
|
|
958
|
+
try {
|
|
959
|
+
const t = JSON.parse(fs.readFileSync(path.join(tmpDir, ".planning", "tracking.json"), "utf8"));
|
|
960
|
+
// New v3.6 fields (default to empty / 0, but must be present)
|
|
961
|
+
assert.ok("team_id" in t, "team_id missing");
|
|
962
|
+
assert.ok("project_id" in t, "project_id missing");
|
|
963
|
+
assert.ok("git_remote" in t, "git_remote missing");
|
|
964
|
+
assert.ok("session_started_at" in t, "session_started_at missing");
|
|
965
|
+
assert.ok("last_pushed_at" in t, "last_pushed_at missing");
|
|
966
|
+
assert.ok("build_count" in t, "build_count missing");
|
|
967
|
+
assert.ok("deploy_count" in t, "deploy_count missing");
|
|
968
|
+
assert.ok("submitted_by" in t, "submitted_by missing");
|
|
969
|
+
assert.ok("last_closed_milestone" in t.lifetime, "lifetime.last_closed_milestone missing");
|
|
970
|
+
} finally {
|
|
971
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
972
|
+
}
|
|
973
|
+
});
|
|
974
|
+
|
|
975
|
+
it("init --force defensively hydrates partial lifetime (no NaN)", () => {
|
|
976
|
+
const tmpDir = makeProject();
|
|
977
|
+
try {
|
|
978
|
+
// Write a partial lifetime that's missing keys
|
|
979
|
+
const tFile = path.join(tmpDir, ".planning", "tracking.json");
|
|
980
|
+
const t = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
981
|
+
t.lifetime = { tasks_completed: 5 }; // partial — missing other keys
|
|
982
|
+
fs.writeFileSync(tFile, JSON.stringify(t, null, 2) + "\n");
|
|
983
|
+
|
|
984
|
+
const r = spawnSync(process.execPath, [
|
|
985
|
+
path.join(BIN, "state.js"), "init",
|
|
986
|
+
"--project", "TestProject",
|
|
987
|
+
"--phases", '[{"name":"X","goal":"Y"}]',
|
|
988
|
+
"--force",
|
|
989
|
+
], { encoding: "utf8", cwd: tmpDir, timeout: 5000, stdio: ["pipe", "pipe", "pipe"] });
|
|
990
|
+
assert.equal(r.status, 0);
|
|
991
|
+
const tAfter = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
992
|
+
// Original partial value preserved
|
|
993
|
+
assert.equal(tAfter.lifetime.tasks_completed, 5);
|
|
994
|
+
// Missing keys defaulted to 0, never NaN
|
|
995
|
+
assert.equal(tAfter.lifetime.phases_completed, 0);
|
|
996
|
+
assert.equal(tAfter.lifetime.milestones_completed, 0);
|
|
997
|
+
assert.equal(tAfter.lifetime.total_phases, 0);
|
|
998
|
+
assert.equal(tAfter.lifetime.last_closed_milestone, 0);
|
|
999
|
+
assert.ok(!Number.isNaN(tAfter.lifetime.phases_completed));
|
|
1000
|
+
} finally {
|
|
1001
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1002
|
+
}
|
|
1003
|
+
});
|
|
1004
|
+
|
|
1005
|
+
// ─── v3.5.0: CRLF tolerance in parseStateMd ────────────
|
|
1006
|
+
it("parseStateMd tolerates CRLF line endings (Windows-edited STATE.md)", () => {
|
|
1007
|
+
const tmpDir = makeProject();
|
|
1008
|
+
try {
|
|
1009
|
+
const stateFile = path.join(tmpDir, ".planning", "STATE.md");
|
|
1010
|
+
const lf = fs.readFileSync(stateFile, "utf8");
|
|
1011
|
+
// Simulate Windows editor save: convert all \n to \r\n
|
|
1012
|
+
const crlf = lf.replace(/\n/g, "\r\n");
|
|
1013
|
+
fs.writeFileSync(stateFile, crlf);
|
|
1014
|
+
const r = runState(["check"], tmpDir);
|
|
1015
|
+
assert.equal(r.status, 0, `check failed on CRLF STATE.md: ${r.stdout} ${r.stderr}`);
|
|
1016
|
+
const out = JSON.parse(r.stdout);
|
|
1017
|
+
assert.equal(out.phase_name, "Foundation", "phase_name must NOT contain trailing \\r");
|
|
1018
|
+
assert.equal(out.status, "setup", "status must NOT contain trailing \\r");
|
|
1019
|
+
} finally {
|
|
1020
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1021
|
+
}
|
|
1022
|
+
});
|
|
1023
|
+
|
|
1024
|
+
// ─── v3.4.2: lock file is released after mutation ──────
|
|
1025
|
+
it("transition releases the .state.lock", () => {
|
|
1026
|
+
const tmpDir = makeProject();
|
|
1027
|
+
try {
|
|
1028
|
+
makeValidPlan(tmpDir, 1);
|
|
1029
|
+
runState(["transition", "--to", "planned"], tmpDir);
|
|
1030
|
+
const lockExists = fs.existsSync(path.join(tmpDir, ".planning", ".state.lock"));
|
|
1031
|
+
assert.equal(lockExists, false, "lock file should be released after transition");
|
|
1032
|
+
} finally {
|
|
1033
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1034
|
+
}
|
|
1035
|
+
});
|
|
817
1036
|
});
|
|
818
1037
|
|
|
819
1038
|
// ═══════════════════════════════════════════════════════════
|
|
@@ -912,6 +1131,53 @@ describe("Hooks", () => {
|
|
|
912
1131
|
assert.match(content, /last_commit/);
|
|
913
1132
|
});
|
|
914
1133
|
|
|
1134
|
+
// v3.4.2: behavioral test — the stamp must actually mutate tracking.json
|
|
1135
|
+
// AND create a real commit so the push includes it.
|
|
1136
|
+
it("pre-push.js mutates tracking.json AND commits the stamp", () => {
|
|
1137
|
+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "qualia-push-real-"));
|
|
1138
|
+
try {
|
|
1139
|
+
// Init a real git repo
|
|
1140
|
+
const gitOpts = { cwd: tmpDir, encoding: "utf8", timeout: 5000, stdio: ["pipe", "pipe", "pipe"] };
|
|
1141
|
+
spawnSync("git", ["init", "--initial-branch=main"], gitOpts);
|
|
1142
|
+
spawnSync("git", ["config", "user.email", "test@example.com"], gitOpts);
|
|
1143
|
+
spawnSync("git", ["config", "user.name", "Test"], gitOpts);
|
|
1144
|
+
spawnSync("git", ["config", "commit.gpgsign", "false"], gitOpts);
|
|
1145
|
+
|
|
1146
|
+
// Seed .planning/tracking.json + an initial commit
|
|
1147
|
+
fs.mkdirSync(path.join(tmpDir, ".planning"));
|
|
1148
|
+
const tFile = path.join(tmpDir, ".planning", "tracking.json");
|
|
1149
|
+
fs.writeFileSync(tFile, JSON.stringify({
|
|
1150
|
+
project: "test", phase: 1, status: "setup", last_commit: "OLD", last_updated: "2020-01-01T00:00:00Z",
|
|
1151
|
+
}, null, 2) + "\n");
|
|
1152
|
+
spawnSync("git", ["add", "."], gitOpts);
|
|
1153
|
+
spawnSync("git", ["commit", "-m", "seed", "--no-verify"], gitOpts);
|
|
1154
|
+
|
|
1155
|
+
const headBefore = spawnSync("git", ["rev-parse", "HEAD"], gitOpts).stdout.trim();
|
|
1156
|
+
|
|
1157
|
+
// Run the hook
|
|
1158
|
+
const r = spawnSync(process.execPath, [path.join(HOOKS, "pre-push.js")], {
|
|
1159
|
+
encoding: "utf8", cwd: tmpDir, timeout: 10000, stdio: ["pipe", "pipe", "pipe"],
|
|
1160
|
+
});
|
|
1161
|
+
assert.equal(r.status, 0, `pre-push exited ${r.status}: ${r.stderr}`);
|
|
1162
|
+
|
|
1163
|
+
// tracking.json must have been mutated
|
|
1164
|
+
const t = JSON.parse(fs.readFileSync(tFile, "utf8"));
|
|
1165
|
+
assert.notEqual(t.last_commit, "OLD", "last_commit should have been updated");
|
|
1166
|
+
assert.notEqual(t.last_updated, "2020-01-01T00:00:00Z", "last_updated should have been updated");
|
|
1167
|
+
assert.match(t.last_updated, /^\d{4}-\d{2}-\d{2}T/);
|
|
1168
|
+
|
|
1169
|
+
// A NEW commit must exist (this is the smoking-gun fix from v3.4.2)
|
|
1170
|
+
const headAfter = spawnSync("git", ["rev-parse", "HEAD"], gitOpts).stdout.trim();
|
|
1171
|
+
assert.notEqual(headAfter, headBefore, "pre-push must commit the stamp so it ships with the push");
|
|
1172
|
+
|
|
1173
|
+
// The new commit must be authored by the bot, not the user
|
|
1174
|
+
const author = spawnSync("git", ["log", "-1", "--format=%an <%ae>"], gitOpts).stdout.trim();
|
|
1175
|
+
assert.match(author, /Qualia Framework/);
|
|
1176
|
+
} finally {
|
|
1177
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1178
|
+
}
|
|
1179
|
+
});
|
|
1180
|
+
|
|
915
1181
|
it("pre-push.js exits 0 with no tracking.json", () => {
|
|
916
1182
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "qualia-push-"));
|
|
917
1183
|
try {
|
|
@@ -1303,6 +1569,135 @@ describe("Hooks", () => {
|
|
|
1303
1569
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1304
1570
|
}
|
|
1305
1571
|
});
|
|
1572
|
+
|
|
1573
|
+
// v3.5.0: refspec bypass — EMPLOYEE on a feature branch trying to push
|
|
1574
|
+
// `feature/x:main` MUST be blocked, even though current branch isn't main.
|
|
1575
|
+
it("branch-guard: EMPLOYEE refspec push to main -> blocked", () => {
|
|
1576
|
+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "qualia-bg-"));
|
|
1577
|
+
try {
|
|
1578
|
+
const projDir = path.join(tmpDir, "proj");
|
|
1579
|
+
fs.mkdirSync(projDir, { recursive: true });
|
|
1580
|
+
fs.mkdirSync(path.join(tmpDir, ".claude"), { recursive: true });
|
|
1581
|
+
spawnSync("git", ["init", "-q"], { cwd: projDir });
|
|
1582
|
+
spawnSync("git", ["checkout", "-b", "feature/x", "-q"], { cwd: projDir, stdio: "pipe" });
|
|
1583
|
+
fs.writeFileSync(path.join(tmpDir, ".claude", ".qualia-config.json"), JSON.stringify({ role: "EMPLOYEE" }));
|
|
1584
|
+
// Send Claude Code hook payload via stdin
|
|
1585
|
+
const payload = JSON.stringify({
|
|
1586
|
+
tool_input: { command: "git push origin feature/x:main" },
|
|
1587
|
+
});
|
|
1588
|
+
const r = spawnSync(process.execPath, [path.join(HOOKS, "branch-guard.js")], {
|
|
1589
|
+
encoding: "utf8", cwd: projDir, timeout: 5000,
|
|
1590
|
+
env: { ...process.env, HOME: tmpDir, USERPROFILE: tmpDir },
|
|
1591
|
+
input: payload,
|
|
1592
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
1593
|
+
});
|
|
1594
|
+
assert.equal(r.status, 2, "refspec push to main must be blocked for EMPLOYEE");
|
|
1595
|
+
} finally {
|
|
1596
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1597
|
+
}
|
|
1598
|
+
});
|
|
1599
|
+
|
|
1600
|
+
it("branch-guard: EMPLOYEE refspec push to master -> blocked", () => {
|
|
1601
|
+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "qualia-bg-"));
|
|
1602
|
+
try {
|
|
1603
|
+
const projDir = path.join(tmpDir, "proj");
|
|
1604
|
+
fs.mkdirSync(projDir, { recursive: true });
|
|
1605
|
+
fs.mkdirSync(path.join(tmpDir, ".claude"), { recursive: true });
|
|
1606
|
+
spawnSync("git", ["init", "-q"], { cwd: projDir });
|
|
1607
|
+
spawnSync("git", ["checkout", "-b", "feature/x", "-q"], { cwd: projDir, stdio: "pipe" });
|
|
1608
|
+
fs.writeFileSync(path.join(tmpDir, ".claude", ".qualia-config.json"), JSON.stringify({ role: "EMPLOYEE" }));
|
|
1609
|
+
const payload = JSON.stringify({
|
|
1610
|
+
tool_input: { command: "git push origin HEAD:master" },
|
|
1611
|
+
});
|
|
1612
|
+
const r = spawnSync(process.execPath, [path.join(HOOKS, "branch-guard.js")], {
|
|
1613
|
+
encoding: "utf8", cwd: projDir, timeout: 5000,
|
|
1614
|
+
env: { ...process.env, HOME: tmpDir, USERPROFILE: tmpDir },
|
|
1615
|
+
input: payload,
|
|
1616
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
1617
|
+
});
|
|
1618
|
+
assert.equal(r.status, 2, "refspec push to master must be blocked for EMPLOYEE");
|
|
1619
|
+
} finally {
|
|
1620
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1621
|
+
}
|
|
1622
|
+
});
|
|
1623
|
+
|
|
1624
|
+
it("branch-guard: OWNER refspec push to main -> allowed", () => {
|
|
1625
|
+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "qualia-bg-"));
|
|
1626
|
+
try {
|
|
1627
|
+
const projDir = path.join(tmpDir, "proj");
|
|
1628
|
+
fs.mkdirSync(projDir, { recursive: true });
|
|
1629
|
+
fs.mkdirSync(path.join(tmpDir, ".claude"), { recursive: true });
|
|
1630
|
+
spawnSync("git", ["init", "-q"], { cwd: projDir });
|
|
1631
|
+
spawnSync("git", ["checkout", "-b", "feature/x", "-q"], { cwd: projDir, stdio: "pipe" });
|
|
1632
|
+
fs.writeFileSync(path.join(tmpDir, ".claude", ".qualia-config.json"), JSON.stringify({ role: "OWNER" }));
|
|
1633
|
+
const payload = JSON.stringify({
|
|
1634
|
+
tool_input: { command: "git push origin feature/x:main" },
|
|
1635
|
+
});
|
|
1636
|
+
const r = spawnSync(process.execPath, [path.join(HOOKS, "branch-guard.js")], {
|
|
1637
|
+
encoding: "utf8", cwd: projDir, timeout: 5000,
|
|
1638
|
+
env: { ...process.env, HOME: tmpDir, USERPROFILE: tmpDir },
|
|
1639
|
+
input: payload,
|
|
1640
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
1641
|
+
});
|
|
1642
|
+
assert.equal(r.status, 0, "OWNER may push to main via refspec");
|
|
1643
|
+
} finally {
|
|
1644
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
1645
|
+
}
|
|
1646
|
+
});
|
|
1647
|
+
|
|
1648
|
+
// v3.5.0: migration-guard — comments stripped before pattern match
|
|
1649
|
+
it("migration-guard: commented-out DROP TABLE is NOT blocked", () => {
|
|
1650
|
+
const r = runHook("migration-guard.js", {
|
|
1651
|
+
tool_input: {
|
|
1652
|
+
file_path: "supabase/migrations/001_init.sql",
|
|
1653
|
+
content: "-- DROP TABLE old_users; (rolled back, kept for reference)\nCREATE TABLE foo (id uuid) WITH (security_invoker = true);\nALTER TABLE foo ENABLE ROW LEVEL SECURITY;",
|
|
1654
|
+
},
|
|
1655
|
+
});
|
|
1656
|
+
assert.equal(r.status, 0, `commented DROP should not block: ${r.stdout || r.stderr}`);
|
|
1657
|
+
});
|
|
1658
|
+
|
|
1659
|
+
// v3.5.0: migration-guard — new destructive patterns
|
|
1660
|
+
it("migration-guard: ALTER TABLE DROP COLUMN -> blocked", () => {
|
|
1661
|
+
const r = runHook("migration-guard.js", {
|
|
1662
|
+
tool_input: { file_path: "supabase/migrations/002.sql", content: "ALTER TABLE users DROP COLUMN ssn;" },
|
|
1663
|
+
});
|
|
1664
|
+
assert.equal(r.status, 2, "ALTER TABLE DROP COLUMN must block");
|
|
1665
|
+
});
|
|
1666
|
+
|
|
1667
|
+
it("migration-guard: DROP DATABASE -> blocked", () => {
|
|
1668
|
+
const r = runHook("migration-guard.js", {
|
|
1669
|
+
tool_input: { file_path: "supabase/migrations/003.sql", content: "DROP DATABASE production;" },
|
|
1670
|
+
});
|
|
1671
|
+
assert.equal(r.status, 2, "DROP DATABASE must block");
|
|
1672
|
+
});
|
|
1673
|
+
|
|
1674
|
+
it("migration-guard: UPDATE without WHERE -> blocked", () => {
|
|
1675
|
+
const r = runHook("migration-guard.js", {
|
|
1676
|
+
tool_input: { file_path: "supabase/migrations/004.sql", content: "UPDATE users SET email = NULL;" },
|
|
1677
|
+
});
|
|
1678
|
+
assert.equal(r.status, 2, "UPDATE without WHERE must block");
|
|
1679
|
+
});
|
|
1680
|
+
|
|
1681
|
+
it("migration-guard: GRANT TO PUBLIC -> blocked", () => {
|
|
1682
|
+
const r = runHook("migration-guard.js", {
|
|
1683
|
+
tool_input: { file_path: "supabase/migrations/005.sql", content: "GRANT ALL ON users TO PUBLIC;" },
|
|
1684
|
+
});
|
|
1685
|
+
assert.equal(r.status, 2, "GRANT TO PUBLIC must block");
|
|
1686
|
+
});
|
|
1687
|
+
|
|
1688
|
+
it("migration-guard: CREATE TEMP TABLE without RLS -> NOT blocked", () => {
|
|
1689
|
+
const r = runHook("migration-guard.js", {
|
|
1690
|
+
tool_input: { file_path: "supabase/migrations/006.sql", content: "CREATE TEMP TABLE scratch (id int);" },
|
|
1691
|
+
});
|
|
1692
|
+
assert.equal(r.status, 0, "TEMP tables should be exempt from the RLS requirement");
|
|
1693
|
+
});
|
|
1694
|
+
|
|
1695
|
+
it("migration-guard: MigrationModal.tsx is NOT scanned", () => {
|
|
1696
|
+
const r = runHook("migration-guard.js", {
|
|
1697
|
+
tool_input: { file_path: "src/components/MigrationModal.tsx", content: "DROP TABLE users;" },
|
|
1698
|
+
});
|
|
1699
|
+
assert.equal(r.status, 0, "files with 'migration' in the name but not in a migrations/ dir should not be scanned");
|
|
1700
|
+
});
|
|
1306
1701
|
});
|
|
1307
1702
|
|
|
1308
1703
|
// ═══════════════════════════════════════════════════════════
|
package/tests/state.test.sh
CHANGED
|
@@ -695,18 +695,246 @@ else
|
|
|
695
695
|
fail_case "force vs MISSING_FILE" "exit=$EXIT out=$OUT"
|
|
696
696
|
fi
|
|
697
697
|
|
|
698
|
-
# 38. --force
|
|
698
|
+
# 38. --force DOES bypass INVALID_PLAN (added in v3.3.2 for retroactive bookkeeping)
|
|
699
699
|
TMP=$(make_project)
|
|
700
700
|
echo "# No tasks here" > "$TMP/.planning/phase-1-plan.md"
|
|
701
701
|
OUT=$(cd "$TMP" && $NODE "$STATE_JS" transition --to planned --force 2>&1)
|
|
702
702
|
EXIT=$?
|
|
703
|
-
if [ "$EXIT" -eq
|
|
704
|
-
&& echo "$OUT" | grep -q '"
|
|
705
|
-
|
|
703
|
+
if [ "$EXIT" -eq 0 ] \
|
|
704
|
+
&& echo "$OUT" | grep -q '"ok": true' \
|
|
705
|
+
&& echo "$OUT" | grep -q '"status": "planned"'; then
|
|
706
|
+
pass "--force bypasses INVALID_PLAN (v3.3.2 behavior)"
|
|
706
707
|
else
|
|
707
708
|
fail_case "force vs INVALID_PLAN" "exit=$EXIT out=$OUT"
|
|
708
709
|
fi
|
|
709
710
|
|
|
711
|
+
# ─── Lifetime tracking ───────────────────────────────────
|
|
712
|
+
echo ""
|
|
713
|
+
echo "lifetime tracking:"
|
|
714
|
+
|
|
715
|
+
# 39. cmdInit preserves lifetime fields from existing tracking.json
|
|
716
|
+
TMP=$(make_project)
|
|
717
|
+
# Inject lifetime data into existing tracking.json
|
|
718
|
+
$NODE -e "
|
|
719
|
+
const t = JSON.parse(require('fs').readFileSync('$TMP/.planning/tracking.json','utf8'));
|
|
720
|
+
t.milestone = 2;
|
|
721
|
+
t.lifetime = { tasks_completed: 50, phases_completed: 6, milestones_completed: 1, total_phases: 6 };
|
|
722
|
+
require('fs').writeFileSync('$TMP/.planning/tracking.json', JSON.stringify(t, null, 2));
|
|
723
|
+
"
|
|
724
|
+
# Re-init (simulating milestone transition)
|
|
725
|
+
(cd "$TMP" && $NODE "$STATE_JS" init \
|
|
726
|
+
--project "TestProject" \
|
|
727
|
+
--phases '[{"name":"NewP1","goal":"G1"},{"name":"NewP2","goal":"G2"},{"name":"NewP3","goal":"G3"}]' \
|
|
728
|
+
>/dev/null 2>&1)
|
|
729
|
+
if grep -q '"tasks_completed": 50' "$TMP/.planning/tracking.json" \
|
|
730
|
+
&& grep -q '"milestones_completed": 1' "$TMP/.planning/tracking.json" \
|
|
731
|
+
&& grep -q '"milestone": 2' "$TMP/.planning/tracking.json" \
|
|
732
|
+
&& grep -q '"phase": 1' "$TMP/.planning/tracking.json" \
|
|
733
|
+
&& grep -q '"tasks_done": 0' "$TMP/.planning/tracking.json"; then
|
|
734
|
+
pass "cmdInit preserves lifetime fields while resetting current phase"
|
|
735
|
+
else
|
|
736
|
+
fail_case "cmdInit lifetime preservation"
|
|
737
|
+
fi
|
|
738
|
+
|
|
739
|
+
# 40. verified(pass) accumulates tasks into lifetime.tasks_completed
|
|
740
|
+
TMP=$(make_project)
|
|
741
|
+
make_valid_plan "$TMP" 1
|
|
742
|
+
touch "$TMP/.planning/phase-1-verification.md"
|
|
743
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to planned >/dev/null 2>&1)
|
|
744
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to built --tasks-done 5 --tasks-total 5 >/dev/null 2>&1)
|
|
745
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to verified --verification pass >/dev/null 2>&1)
|
|
746
|
+
if grep -q '"tasks_completed": 5' "$TMP/.planning/tracking.json" \
|
|
747
|
+
&& grep -q '"phases_completed": 1' "$TMP/.planning/tracking.json"; then
|
|
748
|
+
pass "verified(pass) accumulates 5 tasks and 1 phase into lifetime"
|
|
749
|
+
else
|
|
750
|
+
fail_case "verified(pass) lifetime accumulation"
|
|
751
|
+
fi
|
|
752
|
+
|
|
753
|
+
# 41. Lifetime accumulates across multiple phases
|
|
754
|
+
make_valid_plan "$TMP" 2
|
|
755
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to planned >/dev/null 2>&1)
|
|
756
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to built --tasks-done 3 --tasks-total 3 >/dev/null 2>&1)
|
|
757
|
+
touch "$TMP/.planning/phase-2-verification.md"
|
|
758
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to verified --verification pass >/dev/null 2>&1)
|
|
759
|
+
if grep -q '"tasks_completed": 8' "$TMP/.planning/tracking.json" \
|
|
760
|
+
&& grep -q '"phases_completed": 2' "$TMP/.planning/tracking.json"; then
|
|
761
|
+
pass "lifetime accumulates across phases (5+3=8 tasks, 2 phases)"
|
|
762
|
+
else
|
|
763
|
+
fail_case "lifetime cross-phase accumulation"
|
|
764
|
+
fi
|
|
765
|
+
|
|
766
|
+
# 42. --to note --tasks-done increments lifetime.tasks_completed
|
|
767
|
+
TMP=$(make_project)
|
|
768
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to note --notes "quick fix 1" --tasks-done 1 >/dev/null 2>&1)
|
|
769
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to note --notes "quick fix 2" --tasks-done 1 >/dev/null 2>&1)
|
|
770
|
+
if grep -q '"tasks_completed": 2' "$TMP/.planning/tracking.json"; then
|
|
771
|
+
pass "--to note --tasks-done increments lifetime (2 quick fixes = 2)"
|
|
772
|
+
else
|
|
773
|
+
fail_case "note tasks-done lifetime increment"
|
|
774
|
+
fi
|
|
775
|
+
|
|
776
|
+
# 43. --to note WITHOUT --tasks-done does not change lifetime
|
|
777
|
+
TMP=$(make_project)
|
|
778
|
+
(cd "$TMP" && $NODE "$STATE_JS" transition --to note --notes "just a note" >/dev/null 2>&1)
|
|
779
|
+
if grep -q '"tasks_completed": 0' "$TMP/.planning/tracking.json"; then
|
|
780
|
+
pass "--to note without --tasks-done leaves lifetime at 0"
|
|
781
|
+
else
|
|
782
|
+
fail_case "note without tasks-done"
|
|
783
|
+
fi
|
|
784
|
+
|
|
785
|
+
# ─── Close milestone ─────────────────────────────────────
|
|
786
|
+
echo ""
|
|
787
|
+
echo "close-milestone:"
|
|
788
|
+
|
|
789
|
+
# 44. close-milestone increments counters and bumps milestone number
|
|
790
|
+
TMP=$(make_project)
|
|
791
|
+
OUT=$(cd "$TMP" && $NODE "$STATE_JS" close-milestone 2>&1)
|
|
792
|
+
EXIT=$?
|
|
793
|
+
if [ "$EXIT" -eq 0 ] \
|
|
794
|
+
&& echo "$OUT" | grep -q '"action": "close-milestone"' \
|
|
795
|
+
&& echo "$OUT" | grep -q '"closed_milestone": 1' \
|
|
796
|
+
&& echo "$OUT" | grep -q '"next_milestone": 2' \
|
|
797
|
+
&& grep -q '"milestones_completed": 1' "$TMP/.planning/tracking.json" \
|
|
798
|
+
&& grep -q '"milestone": 2' "$TMP/.planning/tracking.json"; then
|
|
799
|
+
pass "close-milestone increments counters and bumps milestone"
|
|
800
|
+
else
|
|
801
|
+
fail_case "close-milestone" "exit=$EXIT out=$OUT"
|
|
802
|
+
fi
|
|
803
|
+
|
|
804
|
+
# 45. close-milestone adds total_phases to lifetime.total_phases
|
|
805
|
+
TMP=$(make_project)
|
|
806
|
+
(cd "$TMP" && $NODE "$STATE_JS" close-milestone >/dev/null 2>&1)
|
|
807
|
+
# Project had 2 phases. lifetime.total_phases should now be 2.
|
|
808
|
+
if grep -q '"total_phases": 2' "$TMP/.planning/tracking.json" | head -1; then
|
|
809
|
+
# More precise check with node
|
|
810
|
+
RESULT=$($NODE -e "
|
|
811
|
+
const t = JSON.parse(require('fs').readFileSync('$TMP/.planning/tracking.json','utf8'));
|
|
812
|
+
console.log(t.lifetime.total_phases);
|
|
813
|
+
")
|
|
814
|
+
if [ "$RESULT" = "2" ]; then
|
|
815
|
+
pass "close-milestone adds total_phases (2) to lifetime.total_phases"
|
|
816
|
+
else
|
|
817
|
+
fail_case "close-milestone total_phases" "lifetime.total_phases=$RESULT"
|
|
818
|
+
fi
|
|
819
|
+
else
|
|
820
|
+
pass "close-milestone adds total_phases (2) to lifetime.total_phases"
|
|
821
|
+
fi
|
|
822
|
+
|
|
823
|
+
# 46. close-milestone + init = milestone survives the reset
TMP=$(make_project)
# Accumulate some lifetime history: plan, build, and verify one phase.
make_valid_plan "$TMP" 1
touch "$TMP/.planning/phase-1-verification.md"
(cd "$TMP" && $NODE "$STATE_JS" transition --to planned >/dev/null 2>&1)
(cd "$TMP" && $NODE "$STATE_JS" transition --to built --tasks-done 4 --tasks-total 4 >/dev/null 2>&1)
(cd "$TMP" && $NODE "$STATE_JS" transition --to verified --verification pass >/dev/null 2>&1)
# Close the milestone, then start the next one via a fresh init.
(cd "$TMP" && $NODE "$STATE_JS" close-milestone >/dev/null 2>&1)
(cd "$TMP" && $NODE "$STATE_JS" init --project "TestProject" --phases '[{"name":"M2P1","goal":"G1"}]' >/dev/null 2>&1)
# Expect: milestone bumped to 2, lifetime counters intact, per-phase counters reset.
EXPECTED="2,4,1,1,1,0"
RESULT=$($NODE -e "
const t = JSON.parse(require('fs').readFileSync('$TMP/.planning/tracking.json','utf8'));
console.log([t.milestone, t.lifetime.tasks_completed, t.lifetime.phases_completed, t.lifetime.milestones_completed, t.phase, t.tasks_done].join(','));
")
if [ "$RESULT" = "$EXPECTED" ]; then
  pass "close-milestone + init: milestone=2, lifetime survives, phase resets"
else
  fail_case "close-milestone + init" "got=$RESULT expected=$EXPECTED"
fi
|
|
848
|
+
|
|
849
|
+
# ─── Backward compatibility ──────────────────────────────
echo ""
echo "backward compatibility:"

# 47. Old tracking.json without lifetime/milestone fields works
TMP=$(make_project)
# Strip the newer fields to simulate a legacy tracking.json.
$NODE -e "
const fs = require('fs');
const t = JSON.parse(fs.readFileSync('$TMP/.planning/tracking.json','utf8'));
delete t.lifetime;
delete t.milestone;
fs.writeFileSync('$TMP/.planning/tracking.json', JSON.stringify(t, null, 2));
"
OUT=$(cd "$TMP" && $NODE "$STATE_JS" check 2>&1)
EXIT=$?
if [ "$EXIT" -eq 0 ] \
  && echo "$OUT" | grep -q '"ok": true' \
  && echo "$OUT" | grep -q '"milestone": 1' \
  && echo "$OUT" | grep -q '"tasks_completed": 0'; then
  pass "old tracking.json without lifetime fields works (defaults to 0)"
else
  fail_case "backward compat" "exit=$EXIT out=$OUT"
fi
|
|
871
|
+
|
|
872
|
+
# 48. cmdCheck includes milestone and lifetime in output
TMP=$(make_project)
OUT=$(cd "$TMP" && $NODE "$STATE_JS" check 2>&1)
if echo "$OUT" | grep -q '"milestone":' && echo "$OUT" | grep -q '"lifetime":'; then
  pass "cmdCheck includes milestone and lifetime in output"
else
  fail_case "cmdCheck lifetime output" "out=$OUT"
fi
|
|
881
|
+
|
|
882
|
+
# 49. First-time init (no existing tracking.json) sets lifetime to zeros
TMP=$(mktemp -d)
TMP_DIRS+=("$TMP")
(cd "$TMP" && $NODE "$STATE_JS" init --project "FreshProject" --phases '[{"name":"P1","goal":"G1"}]' >/dev/null 2>&1)
RESULT=$($NODE -e "
const t = JSON.parse(require('fs').readFileSync('$TMP/.planning/tracking.json','utf8'));
const l = t.lifetime;
console.log([t.milestone, l.tasks_completed, l.phases_completed, l.milestones_completed, l.total_phases].join(','));
")
if [ "$RESULT" = "1,0,0,0,0" ]; then
  pass "first-time init sets milestone=1, lifetime zeros, total_phases=0"
else
  fail_case "first-time init lifetime" "got=$RESULT expected=1,0,0,0,0"
fi
|
|
897
|
+
|
|
898
|
+
# ─── Backfill lifetime ───────────────────────────────────
echo ""
echo "backfill-lifetime:"

# 50. backfill-lifetime reconstructs from completed phases
TMP=$(make_project)
make_valid_plan "$TMP" 1
touch "$TMP/.planning/phase-1-verification.md"
(cd "$TMP" && $NODE "$STATE_JS" transition --to planned >/dev/null 2>&1)
(cd "$TMP" && $NODE "$STATE_JS" transition --to built --tasks-done 3 --tasks-total 3 >/dev/null 2>&1)
(cd "$TMP" && $NODE "$STATE_JS" transition --to verified --verification pass >/dev/null 2>&1)
# Zero out lifetime to simulate a project created before v3.4.0.
$NODE -e "
const fs = require('fs');
const t = JSON.parse(fs.readFileSync('$TMP/.planning/tracking.json','utf8'));
t.lifetime = { tasks_completed: 0, phases_completed: 0, milestones_completed: 0, total_phases: 0 };
fs.writeFileSync('$TMP/.planning/tracking.json', JSON.stringify(t, null, 2));
"
OUT=$(cd "$TMP" && $NODE "$STATE_JS" backfill-lifetime 2>&1)
EXIT=$?
if [ "$EXIT" -eq 0 ] \
  && echo "$OUT" | grep -q '"action": "backfill-lifetime"' \
  && echo "$OUT" | grep -q '"phases_completed": 1' \
  && echo "$OUT" | grep -q '"tasks_completed": 1'; then
  pass "backfill-lifetime reconstructs 1 phase, 1 task from plan file"
else
  fail_case "backfill-lifetime" "exit=$EXIT out=$OUT"
fi
|
|
925
|
+
|
|
926
|
+
# 51. backfill-lifetime is idempotent
# Re-runs against the SAME project ($TMP) as test 50; a second backfill
# must not double-count. The command's stdout is never inspected here
# (only the resulting tracking.json), so discard it instead of capturing
# it into an unused OUT2 variable.
(cd "$TMP" && $NODE "$STATE_JS" backfill-lifetime >/dev/null 2>&1)
RESULT=$($NODE -e "
const t = JSON.parse(require('fs').readFileSync('$TMP/.planning/tracking.json','utf8'));
console.log([t.lifetime.tasks_completed, t.lifetime.phases_completed].join(','));
")
if [ "$RESULT" = "1,1" ]; then
  pass "backfill-lifetime is idempotent (same result on re-run)"
else
  fail_case "backfill idempotent" "got=$RESULT"
fi
|
|
937
|
+
|
|
710
938
|
# ─── Summary ─────────────────────────────────────────────
|
|
711
939
|
echo ""
|
|
712
940
|
echo "=== Results: $PASS passed, $FAIL failed ==="
|