cclaw-cli 6.8.0 → 6.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/design.js +1 -1
- package/dist/artifact-linter/shared.js +2 -1
- package/dist/artifact-linter/tdd.d.ts +11 -0
- package/dist/artifact-linter/tdd.js +174 -7
- package/dist/content/harness-doc.js +1 -1
- package/dist/content/hooks.js +5 -1
- package/dist/content/iron-laws.js +6 -2
- package/dist/content/node-hooks.js +15 -1308
- package/dist/content/skills-elicitation.js +2 -2
- package/dist/content/stages/brainstorm.js +2 -2
- package/dist/content/stages/design.js +2 -2
- package/dist/content/stages/scope.js +2 -2
- package/dist/content/stages/tdd.js +1 -0
- package/dist/content/subagents.js +11 -1
- package/dist/delegation.d.ts +10 -3
- package/dist/delegation.js +13 -4
- package/dist/early-loop.js +15 -1
- package/dist/gate-evidence.js +15 -23
- package/dist/harness-adapters.js +4 -2
- package/dist/install.js +37 -221
- package/dist/internal/detect-supply-chain-changes.d.ts +6 -0
- package/dist/internal/detect-supply-chain-changes.js +138 -0
- package/dist/internal/flow-state-repair.d.ts +7 -0
- package/dist/internal/flow-state-repair.js +57 -18
- package/dist/run-persistence.d.ts +2 -0
- package/dist/run-persistence.js +62 -3
- package/dist/runtime/run-hook.mjs +44 -8729
- package/package.json +1 -1
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
// Git-backed detection of supply-chain-sensitive changes: GitHub workflow
// files, .cursor configuration, and dependency sections of package.json.
import { execFile } from "node:child_process";
import { promisify } from "node:util";

// Promise-flavored execFile so git invocations compose with async/await.
const execFileAsync = promisify(execFile);

// Path matchers — each hits both at the repo root and anywhere below it.
const WORKFLOW_PATH = /(^|\/)\.github\/workflows\//u;
const CURSOR_CONFIG_PATH = /(^|\/)\.cursor\//u;
const PACKAGE_JSON_PATH = /(^|\/)package\.json$/u;

// package.json sections whose edits count as supply-chain changes.
const SUPPLY_CHAIN_DEP_KEYS = [
  "dependencies",
  "devDependencies",
  "peerDependencies",
  "optionalDependencies"
];
|
|
13
|
+
/**
 * Resolve the parent of HEAD as the diff base for change detection.
 *
 * @param {string} projectRoot - Directory in which to run git.
 * @returns {Promise<string|null>} The `HEAD~1` commit hash, or null when it
 *   cannot be resolved (not a git repo, single-commit repo, git missing, ...).
 */
async function resolveDiffBase(projectRoot) {
  try {
    const gitArgs = ["rev-parse", "HEAD~1"];
    const { stdout } = await execFileAsync("git", gitArgs, { cwd: projectRoot });
    const commit = stdout.trim();
    return commit === "" ? null : commit;
  }
  catch {
    // Any git failure degrades to "no base"; callers treat that as a no-op.
    return null;
  }
}
|
|
25
|
+
/**
 * Read one file's contents as of a given git revision.
 *
 * @param {string} projectRoot - Directory in which to run git.
 * @param {string} rev - Revision (commit hash or ref) to read from.
 * @param {string} filePath - Repo-relative path of the file.
 * @returns {Promise<string|null>} The file text, or null when `git show`
 *   fails (file absent at that revision, bad rev, not a repo, ...).
 */
async function readFileAtRev(projectRoot, rev, filePath) {
  const objectSpec = `${rev}:${filePath}`;
  try {
    const result = await execFileAsync("git", ["show", objectSpec], {
      cwd: projectRoot,
      // Generous cap: manifests and lockfiles can be large.
      maxBuffer: 32 * 1024 * 1024
    });
    return result.stdout;
  }
  catch {
    return null;
  }
}
|
|
37
|
+
/**
 * Compare two dependency maps (package name → version range) for any
 * difference in key set or values. A missing map counts as an empty one.
 *
 * @param {Record<string, string>|undefined} before - Map at the diff base.
 * @param {Record<string, string>|undefined} after - Map at HEAD.
 * @returns {boolean} True when the maps are not identical.
 */
function dependencyMapsDiffer(before, after) {
  const left = before ?? {};
  const right = after ?? {};
  const leftKeys = Object.keys(left);
  if (leftKeys.length !== Object.keys(right).length) {
    return true;
  }
  // Same size, so equality holds iff every left entry appears unchanged on
  // the right.
  return leftKeys.some((name) => !Object.hasOwn(right, name) || left[name] !== right[name]);
}
|
|
52
|
+
/**
 * Decide whether a package.json changed in a supply-chain-relevant way
 * between the diff base and HEAD.
 *
 * Conservative by design: when either side is missing or is not valid JSON,
 * the file is reported as changed.
 *
 * @param {string} projectRoot - Directory in which to run git.
 * @param {string} base - Diff-base revision.
 * @param {string} filePath - Repo-relative path of the package.json.
 * @returns {Promise<boolean>} True when any SUPPLY_CHAIN_DEP_KEYS section differs.
 */
async function packageJsonHasDependencyDiff(projectRoot, base, filePath) {
  const beforeRaw = await readFileAtRev(projectRoot, base, filePath);
  const afterRaw = await readFileAtRev(projectRoot, "HEAD", filePath);
  if (beforeRaw === null || afterRaw === null) {
    return true;
  }
  // Parse both sides; the first parse failure short-circuits to "changed".
  const roots = [];
  for (const raw of [beforeRaw, afterRaw]) {
    try {
      roots.push(JSON.parse(raw));
    }
    catch {
      return true;
    }
  }
  // Non-object roots degrade to {} so the per-key lookups below are safe.
  const asRoot = (value) => (value !== null && typeof value === "object" ? value : {});
  const [beforeRoot, afterRoot] = roots.map(asRoot);
  // Extract one dependency section, or undefined when absent/non-object.
  const section = (root, key) =>
    (root[key] !== null && typeof root[key] === "object" ? root[key] : undefined);
  return SUPPLY_CHAIN_DEP_KEYS.some((key) =>
    dependencyMapsDiffer(section(beforeRoot, key), section(afterRoot, key)));
}
|
|
91
|
+
/**
 * Inspect the HEAD~1..HEAD diff for supply-chain-sensitive changes:
 * GitHub workflow files, .cursor configuration, and dependency sections of
 * any package.json.
 *
 * @param {string} projectRoot - Git repository root to inspect.
 * @returns {Promise<{triggered: boolean, changedFiles: string[], reasons: string[]}>}
 *   `triggered` is true when at least one file matched; an unresolvable diff
 *   base or a git failure yields an untriggered, empty result.
 */
export async function detectSupplyChainChanges(projectRoot) {
  const untriggered = { triggered: false, changedFiles: [], reasons: [] };
  const base = await resolveDiffBase(projectRoot);
  if (!base) {
    return untriggered;
  }
  let changedPaths;
  try {
    const { stdout } = await execFileAsync("git", ["diff", "--name-only", `${base}..HEAD`], {
      cwd: projectRoot
    });
    changedPaths = stdout
      .split(/\r?\n/gu)
      .map((entry) => entry.trim())
      .filter((entry) => entry !== "");
  }
  catch {
    return untriggered;
  }
  const matchedFiles = [];
  const reasons = [];
  // Match order matters: workflow and .cursor hits win over the
  // package.json rule for the same path.
  for (const filePath of changedPaths) {
    if (WORKFLOW_PATH.test(filePath)) {
      matchedFiles.push(filePath);
      reasons.push(`.github/workflows changed: ${filePath}`);
    }
    else if (CURSOR_CONFIG_PATH.test(filePath)) {
      matchedFiles.push(filePath);
      reasons.push(`.cursor config changed: ${filePath}`);
    }
    else if (PACKAGE_JSON_PATH.test(filePath)) {
      // Dependency-aware check: formatting-only package.json edits don't count.
      if (await packageJsonHasDependencyDiff(projectRoot, base, filePath)) {
        matchedFiles.push(filePath);
        reasons.push(`${filePath} dependencies/devDependencies/peerDependencies/optionalDependencies changed`);
      }
    }
  }
  return {
    triggered: matchedFiles.length > 0,
    changedFiles: matchedFiles,
    reasons
  };
}
|
|
@@ -7,6 +7,13 @@ export interface FlowStateRepairArgs {
|
|
|
7
7
|
reason: string;
|
|
8
8
|
json: boolean;
|
|
9
9
|
quiet: boolean;
|
|
10
|
+
/**
|
|
11
|
+
* v6.9.0 — when true, normalize `state/early-loop.json` to the canonical
|
|
12
|
+
* shape derived from `early-loop-log.jsonl`. Lets operators recover from
|
|
13
|
+
* legacy hand-written `early-loop.json` files that drifted from the
|
|
14
|
+
* source-of-truth log.
|
|
15
|
+
*/
|
|
16
|
+
earlyLoop: boolean;
|
|
10
17
|
}
|
|
11
18
|
export declare function parseFlowStateRepairArgs(tokens: string[]): FlowStateRepairArgs;
|
|
12
19
|
export declare function runFlowStateRepair(projectRoot: string, args: FlowStateRepairArgs, io: InternalIo): Promise<number>;
|
|
@@ -1,10 +1,15 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
1
2
|
import path from "node:path";
|
|
2
3
|
import { RUNTIME_ROOT } from "../constants.js";
|
|
4
|
+
import { clampEarlyLoopStatusForWrite, computeEarlyLoopStatus, isEarlyLoopStage } from "../early-loop.js";
|
|
5
|
+
import { writeFileSafe } from "../fs-utils.js";
|
|
3
6
|
import { repairFlowStateGuard } from "../run-persistence.js";
|
|
7
|
+
import { readFlowState } from "../runs.js";
|
|
4
8
|
export function parseFlowStateRepairArgs(tokens) {
|
|
5
9
|
let reason;
|
|
6
10
|
let json = false;
|
|
7
11
|
let quiet = false;
|
|
12
|
+
let earlyLoop = false;
|
|
8
13
|
for (let i = 0; i < tokens.length; i += 1) {
|
|
9
14
|
const token = tokens[i];
|
|
10
15
|
const nextToken = tokens[i + 1];
|
|
@@ -16,6 +21,10 @@ export function parseFlowStateRepairArgs(tokens) {
|
|
|
16
21
|
quiet = true;
|
|
17
22
|
continue;
|
|
18
23
|
}
|
|
24
|
+
if (token === "--early-loop") {
|
|
25
|
+
earlyLoop = true;
|
|
26
|
+
continue;
|
|
27
|
+
}
|
|
19
28
|
if (token === "--reason") {
|
|
20
29
|
if (!nextToken || nextToken.startsWith("--")) {
|
|
21
30
|
throw new Error("--reason requires a short slug value.");
|
|
@@ -33,33 +42,63 @@ export function parseFlowStateRepairArgs(tokens) {
|
|
|
33
42
|
if (!reason || reason.length === 0) {
|
|
34
43
|
throw new Error("internal flow-state-repair requires --reason=<slug> (e.g. --reason=manual_edit_recovery).");
|
|
35
44
|
}
|
|
36
|
-
return { reason, json, quiet };
|
|
45
|
+
return { reason, json, quiet, earlyLoop };
|
|
46
|
+
}
|
|
47
|
+
/**
 * Rebuild `state/early-loop.json` from the early-loop JSONL log for the
 * currently active run.
 *
 * @param {string} projectRoot - Project root containing the runtime state dir.
 * @param {{stderr: {write: Function}}} io - Stream sink for operator messages.
 * @returns {Promise<object>} `{ performed: true, ... }` on success, or
 *   `{ performed: false, skipped: <reason> }` when repair is not applicable.
 */
async function repairEarlyLoopFile(projectRoot, io) {
  const flow = await readFlowState(projectRoot).catch(() => null);
  if (!flow) {
    return { performed: false, skipped: "flow-state-unreadable" };
  }
  const stage = flow.currentStage;
  if (!isEarlyLoopStage(stage)) {
    return { performed: false, skipped: `current-stage-${stage}-not-early-loop` };
  }
  // NOTE(review): assumes flow.activeRunId is always a string — confirm the
  // flow-state coercion layer guarantees this.
  const runId = flow.activeRunId.trim();
  if (runId === "") {
    io.stderr.write("cclaw internal flow-state-repair --early-loop: active run has no runId; cannot derive canonical early-loop.json.\n");
    return { performed: false, skipped: "missing-active-runId" };
  }
  const stateDirectory = path.join(projectRoot, RUNTIME_ROOT, "state");
  const status = await computeEarlyLoopStatus(
    stage,
    runId,
    path.join(stateDirectory, "early-loop-log.jsonl")
  );
  const { status: canonical } = clampEarlyLoopStatusForWrite(status);
  await writeFileSafe(
    path.join(stateDirectory, "early-loop.json"),
    `${JSON.stringify(canonical, null, 2)}\n`
  );
  return {
    performed: true,
    stage,
    runId,
    iteration: canonical.iteration,
    openConcernCount: canonical.openConcerns.length
  };
}
|
|
38
76
|
/**
 * CLI entry for `internal flow-state-repair`: recomputes the flow-state
 * write-guard sidecar and, when `--early-loop` was passed, regenerates
 * `state/early-loop.json` as well.
 *
 * @param {string} projectRoot - Project root to repair.
 * @param {FlowStateRepairArgs} args - Parsed CLI flags.
 * @param {InternalIo} io - stdout/stderr sinks.
 * @returns {Promise<number>} Process exit code (always 0 here).
 */
export async function runFlowStateRepair(projectRoot, args, io) {
  const result = await repairFlowStateGuard(projectRoot, args.reason);
  const toPosixRelative = (absPath) =>
    path.relative(projectRoot, absPath).replace(/\\/gu, "/");
  const logRel = toPosixRelative(result.repairLogPath);
  const guardRel = toPosixRelative(result.guardPath);
  const earlyLoopOutcome = args.earlyLoop
    ? await repairEarlyLoopFile(projectRoot, io)
    : null;
  // presumably keeps the otherwise-unused node:fs/promises import referenced
  // for lint — TODO confirm, then drop both together.
  void fs;
  // Property order matters: it is the key order serialized to stdout below.
  const payload = {
    ok: true,
    command: "flow-state-repair",
    reason: args.reason,
    sidecar: result.sidecar,
    guardPath: guardRel,
    repairLogPath: logRel,
    completedStageMetaBackfilled: result.completedStageMetaBackfilled,
    earlyLoop: earlyLoopOutcome,
    runtimeRoot: RUNTIME_ROOT
  };
  if (args.json) {
    io.stdout.write(`${JSON.stringify(payload)}\n`);
  }
  else if (!args.quiet) {
    io.stdout.write(`${JSON.stringify(payload, null, 2)}\n`);
  }
  return 0;
}
|
|
@@ -92,6 +92,8 @@ export interface FlowStateRepairResult {
|
|
|
92
92
|
sidecar: FlowStateGuardSidecar;
|
|
93
93
|
repairLogPath: string;
|
|
94
94
|
guardPath: string;
|
|
95
|
+
/** Stages that were retro-backfilled into completedStageMeta during repair. */
|
|
96
|
+
completedStageMetaBackfilled: FlowStage[];
|
|
95
97
|
}
|
|
96
98
|
/**
|
|
97
99
|
* Recompute the write-guard sidecar from the current on-disk flow-state
|
package/dist/run-persistence.js
CHANGED
|
@@ -700,6 +700,43 @@ export async function writeFlowState(projectRoot, state, options = {}) {
|
|
|
700
700
|
export async function writeFlowStateGuarded(projectRoot, state, options = {}) {
|
|
701
701
|
await writeFlowState(projectRoot, state, options);
|
|
702
702
|
}
|
|
703
|
+
/**
|
|
704
|
+
* v6.9.0 — backfill missing `completedStageMeta` rows for any stage that
|
|
705
|
+
* already lives in `completedStages` but has no audit timestamp. Uses the
|
|
706
|
+
* stage's artifact mtime when available, otherwise the current time. This
|
|
707
|
+
* runs as part of `flow-state-repair` so legacy v6.8 flow-state.json files
|
|
708
|
+
* get their meta carried forward without a destructive rewrite.
|
|
709
|
+
*/
|
|
710
|
+
/**
 * v6.9.0 — backfill missing `completedStageMeta` rows for stages already in
 * `completedStages` that lack an audit timestamp. Each backfilled row uses
 * the stage artifact's mtime when resolvable, otherwise the current time, so
 * legacy v6.8 flow-state.json files gain meta without a destructive rewrite.
 *
 * Fix: the dynamic `import("./artifact-paths.js")` is loop-invariant but was
 * re-awaited for every stage; it is now resolved lazily once and memoized
 * (import failures are still caught per-stage, preserving the fallback).
 *
 * @param {string} projectRoot - Project root used to resolve stage artifacts.
 * @param {object} state - Coerced flow state; never mutated.
 * @returns {Promise<{state: object, backfilled: string[]}>} The original
 *   state object when nothing was backfilled, otherwise a shallow copy with
 *   the updated `completedStageMeta`, plus the list of backfilled stages.
 */
async function backfillCompletedStageMeta(projectRoot, state) {
  const meta = { ...(state.completedStageMeta ?? {}) };
  const backfilled = [];
  // Memoized resolver; stays null until a stage actually needs backfilling.
  let resolveArtifactPath = null;
  for (const stage of state.completedStages) {
    const existing = meta[stage];
    if (existing && typeof existing.completedAt === "string" && existing.completedAt.length > 0) {
      continue; // already has a usable timestamp
    }
    let completedAt = new Date().toISOString();
    try {
      resolveArtifactPath ??= (await import("./artifact-paths.js")).resolveArtifactPath;
      const resolved = await resolveArtifactPath(stage, {
        projectRoot,
        track: state.track,
        intent: "read"
      });
      const stat = await fs.stat(resolved.absPath);
      completedAt = new Date(stat.mtimeMs).toISOString();
    }
    catch {
      // artifact missing or unreadable — fall back to "now" so the meta row
      // is at least consistently populated; operators can re-edit if needed.
    }
    meta[stage] = { completedAt };
    backfilled.push(stage);
  }
  if (backfilled.length === 0) {
    return { state, backfilled };
  }
  return { state: { ...state, completedStageMeta: meta }, backfilled };
}
|
|
703
740
|
/**
|
|
704
741
|
* Recompute the write-guard sidecar from the current on-disk flow-state
|
|
705
742
|
* contents and append an audit entry to `.cclaw/.flow-state-repair.log`.
|
|
@@ -720,12 +757,26 @@ export async function repairFlowStateGuard(projectRoot, reason) {
|
|
|
720
757
|
throw new Error(`flow-state-repair: ${FLOW_STATE_REL_PATH} does not exist; nothing to repair.`);
|
|
721
758
|
}
|
|
722
759
|
return withDirectoryLock(flowStateLockPath(projectRoot), async () => {
|
|
723
|
-
|
|
760
|
+
let raw = await fs.readFile(statePath, "utf8");
|
|
724
761
|
let runId = "unknown-run";
|
|
762
|
+
let backfilledStages = [];
|
|
725
763
|
try {
|
|
726
764
|
const parsed = JSON.parse(raw);
|
|
727
765
|
const coerced = coerceFlowState(parsed).state;
|
|
728
766
|
runId = coerced.activeRunId;
|
|
767
|
+
const { state: nextState, backfilled } = await backfillCompletedStageMeta(projectRoot, coerced);
|
|
768
|
+
backfilledStages = backfilled;
|
|
769
|
+
if (backfilled.length > 0) {
|
|
770
|
+
// Persist the migrated state inside the same lock window so the
|
|
771
|
+
// sha sidecar below covers the post-migration bytes, not the
|
|
772
|
+
// pre-migration ones.
|
|
773
|
+
await writeFlowState(projectRoot, nextState, {
|
|
774
|
+
allowReset: true,
|
|
775
|
+
skipLock: true,
|
|
776
|
+
writerSubsystem: "flow-state-repair-backfill"
|
|
777
|
+
});
|
|
778
|
+
raw = await fs.readFile(statePath, "utf8");
|
|
779
|
+
}
|
|
729
780
|
}
|
|
730
781
|
catch {
|
|
731
782
|
// parsing failure falls back to "unknown-run"; repair intentionally
|
|
@@ -743,9 +794,17 @@ export async function repairFlowStateGuard(projectRoot, reason) {
|
|
|
743
794
|
await writeFileSafe(guardPath, `${JSON.stringify(sidecar, null, 2)}\n`, { mode: 0o600 });
|
|
744
795
|
const logPath = repairLogPath(projectRoot);
|
|
745
796
|
await ensureDir(path.dirname(logPath));
|
|
746
|
-
const
|
|
797
|
+
const backfillNote = backfilledStages.length > 0
|
|
798
|
+
? ` backfilledCompletedStageMeta=${backfilledStages.join(",")}`
|
|
799
|
+
: "";
|
|
800
|
+
const logLine = `${sidecar.writtenAt} reason=${trimmed} runId=${sidecar.runId} sha256=${sidecar.sha256}${backfillNote}\n`;
|
|
747
801
|
await fs.appendFile(logPath, logLine, "utf8");
|
|
748
|
-
return {
|
|
802
|
+
return {
|
|
803
|
+
sidecar,
|
|
804
|
+
repairLogPath: logPath,
|
|
805
|
+
guardPath,
|
|
806
|
+
completedStageMetaBackfilled: backfilledStages
|
|
807
|
+
};
|
|
749
808
|
});
|
|
750
809
|
}
|
|
751
810
|
export function flowStateGuardSidecarPathFor(projectRoot) {
|