cclaw-cli 7.0.6 → 7.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/plan.js +187 -1
- package/dist/config.d.ts +4 -1
- package/dist/config.js +44 -5
- package/dist/content/core-agents.js +1 -0
- package/dist/content/hooks.d.ts +1 -0
- package/dist/content/hooks.js +116 -0
- package/dist/content/skills.js +2 -2
- package/dist/content/stage-schema.js +1 -0
- package/dist/content/stages/plan.js +3 -0
- package/dist/content/stages/tdd.js +4 -4
- package/dist/content/start-command.js +2 -2
- package/dist/install.js +3 -1
- package/dist/internal/advance-stage.js +6 -2
- package/dist/internal/slice-commit.d.ts +7 -0
- package/dist/internal/slice-commit.js +296 -0
- package/dist/internal/wave-status.d.ts +1 -1
- package/dist/internal/wave-status.js +99 -2
- package/dist/tdd-verification-evidence.js +101 -10
- package/dist/types.d.ts +12 -0
- package/package.json +1 -1
|
@@ -8,6 +8,14 @@ import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImpleme
|
|
|
8
8
|
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
|
|
9
9
|
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
10
10
|
const TASK_ID_PATTERN = /\bT-\d{3}[a-z]?(?:\.\d{1,3})?\b/giu;
|
|
11
|
+
const PLAN_LANE_WHITELIST = new Set([
|
|
12
|
+
"production",
|
|
13
|
+
"test",
|
|
14
|
+
"docs",
|
|
15
|
+
"infra",
|
|
16
|
+
"scaffold",
|
|
17
|
+
"migration"
|
|
18
|
+
]);
|
|
11
19
|
/**
|
|
12
20
|
* Extract every distinct T-NNN[a-z]?(.NNN)? id from a markdown body.
|
|
13
21
|
*
|
|
@@ -36,6 +44,102 @@ function extractParallelExecManagedBody(planMarkdown) {
|
|
|
36
44
|
}
|
|
37
45
|
return planMarkdown.slice(startIdx + PARALLEL_EXEC_MANAGED_START.length, endIdx);
|
|
38
46
|
}
|
|
47
|
+
function normalizePathToken(raw) {
|
|
48
|
+
return raw.trim().replace(/^`|`$/gu, "").replace(/^\.\/+/u, "");
|
|
49
|
+
}
|
|
50
|
+
function parsePipeRow(trimmedLine) {
|
|
51
|
+
const inner = trimmedLine.replace(/^\|/u, "").replace(/\|\s*$/u, "");
|
|
52
|
+
return inner.split("|").map((cell) => cell.trim());
|
|
53
|
+
}
|
|
54
|
+
function headerIndexByName(cells) {
|
|
55
|
+
const map = new Map();
|
|
56
|
+
for (let i = 0; i < cells.length; i += 1) {
|
|
57
|
+
const key = cells[i].toLowerCase().replace(/[^a-z0-9]/gu, "");
|
|
58
|
+
if (key.length > 0 && !map.has(key)) {
|
|
59
|
+
map.set(key, i);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
return map;
|
|
63
|
+
}
|
|
64
|
+
function parseParallelWaveTableMetadata(planMarkdown) {
|
|
65
|
+
const body = extractParallelExecManagedBody(planMarkdown);
|
|
66
|
+
if (body.trim().length === 0)
|
|
67
|
+
return [];
|
|
68
|
+
const lines = body.split(/\r?\n/u);
|
|
69
|
+
const out = [];
|
|
70
|
+
let current = null;
|
|
71
|
+
let headerIdx = null;
|
|
72
|
+
const flush = () => {
|
|
73
|
+
if (current)
|
|
74
|
+
out.push(current);
|
|
75
|
+
};
|
|
76
|
+
for (const rawLine of lines) {
|
|
77
|
+
const trimmed = rawLine.trim();
|
|
78
|
+
const waveMatch = /^###\s+Wave\s+(?:W-)?(\d+)\b/iu.exec(trimmed);
|
|
79
|
+
if (waveMatch) {
|
|
80
|
+
flush();
|
|
81
|
+
current = {
|
|
82
|
+
waveId: `W-${waveMatch[1].padStart(2, "0")}`,
|
|
83
|
+
rows: [],
|
|
84
|
+
notes: []
|
|
85
|
+
};
|
|
86
|
+
headerIdx = null;
|
|
87
|
+
continue;
|
|
88
|
+
}
|
|
89
|
+
if (!current)
|
|
90
|
+
continue;
|
|
91
|
+
current.notes.push(trimmed);
|
|
92
|
+
if (!trimmed.startsWith("|"))
|
|
93
|
+
continue;
|
|
94
|
+
const cells = parsePipeRow(trimmed);
|
|
95
|
+
if (cells.length === 0)
|
|
96
|
+
continue;
|
|
97
|
+
const first = cells[0].toLowerCase();
|
|
98
|
+
const isHeader = first === "sliceid" || first === "slice id";
|
|
99
|
+
if (isHeader) {
|
|
100
|
+
headerIdx = headerIndexByName(cells);
|
|
101
|
+
continue;
|
|
102
|
+
}
|
|
103
|
+
if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
|
|
104
|
+
continue;
|
|
105
|
+
}
|
|
106
|
+
const sliceCell = cells[0];
|
|
107
|
+
if (!/^S-\d+$/iu.test(sliceCell))
|
|
108
|
+
continue;
|
|
109
|
+
const idx = headerIdx ?? new Map();
|
|
110
|
+
const unitIdx = idx.get("unit") ?? idx.get("taskid") ?? 1;
|
|
111
|
+
const pathsIdx = idx.get("claimedpaths");
|
|
112
|
+
const parallelizableIdx = idx.get("parallelizable");
|
|
113
|
+
const laneIdx = idx.get("lane");
|
|
114
|
+
const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
|
|
115
|
+
const claimedPaths = rawPaths.length === 0
|
|
116
|
+
? []
|
|
117
|
+
: rawPaths
|
|
118
|
+
.split(",")
|
|
119
|
+
.map((p) => normalizePathToken(p))
|
|
120
|
+
.filter((p) => p.length > 0);
|
|
121
|
+
const rawParallel = parallelizableIdx !== undefined ? (cells[parallelizableIdx] ?? "").toLowerCase() : "";
|
|
122
|
+
let parallelizable = null;
|
|
123
|
+
if (rawParallel === "true" || rawParallel === "yes")
|
|
124
|
+
parallelizable = true;
|
|
125
|
+
if (rawParallel === "false" || rawParallel === "no")
|
|
126
|
+
parallelizable = false;
|
|
127
|
+
const laneRaw = laneIdx !== undefined ? (cells[laneIdx] ?? "").trim().toLowerCase() : "";
|
|
128
|
+
current.rows.push({
|
|
129
|
+
sliceId: sliceCell.toUpperCase(),
|
|
130
|
+
unit: (cells[unitIdx] ?? "").trim(),
|
|
131
|
+
claimedPaths,
|
|
132
|
+
parallelizable,
|
|
133
|
+
lane: laneRaw.length > 0 ? laneRaw : null
|
|
134
|
+
});
|
|
135
|
+
}
|
|
136
|
+
flush();
|
|
137
|
+
return out;
|
|
138
|
+
}
|
|
139
|
+
function waveHasSequentialModeHint(wave) {
|
|
140
|
+
const noteText = wave.notes.join("\n").toLowerCase();
|
|
141
|
+
return /mode\s*:\s*sequential/iu.test(noteText) || /\bsequential\b/iu.test(noteText) || /\bserial\b/iu.test(noteText);
|
|
142
|
+
}
|
|
39
143
|
export async function lintPlanStage(ctx) {
|
|
40
144
|
const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride } = ctx;
|
|
41
145
|
evaluateInvestigationTrace(ctx, "Implementation Units");
|
|
@@ -308,7 +412,8 @@ export async function lintPlanStage(ctx) {
|
|
|
308
412
|
: "Parallel-ready units detected or plan is single-unit."
|
|
309
413
|
});
|
|
310
414
|
}
|
|
311
|
-
// plan_parallel_exec_full_coverage
|
|
415
|
+
// plan_parallel_exec_full_coverage + atomic wave metadata checks.
|
|
416
|
+
// Every T-NNN task listed in the
|
|
312
417
|
// plan's Task List must be assigned to a slice inside the
|
|
313
418
|
// <!-- parallel-exec-managed-start --> block. Without this, TDD
|
|
314
419
|
// cannot fan out work the plan never authored as waves; the previous
|
|
@@ -355,5 +460,86 @@ export async function lintPlanStage(ctx) {
|
|
|
355
460
|
? `Parallel Execution Plan covers all ${authoredTaskIds.size} authored task id(s); ${deferredIds.size} task id(s) are explicitly deferred.`
|
|
356
461
|
: `Uncovered task id(s) — author waves for: ${uncovered.slice(0, 25).join(", ")}${uncovered.length > 25 ? `, … (${uncovered.length - 25} more)` : ""}. Either add slices for them inside <!-- parallel-exec-managed-start --> or move them under \`## Deferred Tasks\` with a reason.`
|
|
357
462
|
});
|
|
463
|
+
const waveMeta = parseParallelWaveTableMetadata(raw);
|
|
464
|
+
const pathConflicts = [];
|
|
465
|
+
for (const wave of waveMeta) {
|
|
466
|
+
const rows = wave.rows;
|
|
467
|
+
for (let i = 0; i < rows.length; i += 1) {
|
|
468
|
+
for (let j = i + 1; j < rows.length; j += 1) {
|
|
469
|
+
const left = rows[i];
|
|
470
|
+
const right = rows[j];
|
|
471
|
+
const rightPathSet = new Set(right.claimedPaths);
|
|
472
|
+
const overlap = left.claimedPaths.filter((p) => rightPathSet.has(p));
|
|
473
|
+
if (overlap.length === 0)
|
|
474
|
+
continue;
|
|
475
|
+
pathConflicts.push(`${wave.waveId} ${left.sliceId}<->${right.sliceId} overlap: ${overlap.join(", ")}`);
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
}
|
|
479
|
+
findings.push({
|
|
480
|
+
section: "plan_wave_paths_disjoint",
|
|
481
|
+
required: taskListPresent,
|
|
482
|
+
rule: "Slices within the same wave must keep `claimedPaths` disjoint so TDD can safely fan out parallel slice-builders.",
|
|
483
|
+
found: taskListPresent && blockPresent && pathConflicts.length === 0,
|
|
484
|
+
details: !taskListPresent
|
|
485
|
+
? "Task List section is empty or missing T-NNN ids; disjoint-path wave check skipped."
|
|
486
|
+
: !blockPresent
|
|
487
|
+
? "`<!-- parallel-exec-managed-start -->` block is missing or empty; cannot validate wave path disjointness."
|
|
488
|
+
: pathConflicts.length === 0
|
|
489
|
+
? "All parsed same-wave slice rows have disjoint claimedPaths."
|
|
490
|
+
: `Overlapping claimedPaths detected: ${pathConflicts.slice(0, 12).join(" | ")}${pathConflicts.length > 12 ? ` | … (${pathConflicts.length - 12} more)` : ""}.`
|
|
491
|
+
});
|
|
492
|
+
const invalidLanes = [];
|
|
493
|
+
for (const wave of waveMeta) {
|
|
494
|
+
for (const row of wave.rows) {
|
|
495
|
+
if (!row.lane)
|
|
496
|
+
continue;
|
|
497
|
+
if (!PLAN_LANE_WHITELIST.has(row.lane)) {
|
|
498
|
+
invalidLanes.push(`${wave.waveId}/${row.sliceId}:${row.lane}`);
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
findings.push({
|
|
503
|
+
section: "plan_lane_meaningful",
|
|
504
|
+
required: false,
|
|
505
|
+
rule: "When a lane is declared, it must be one of: production, test, docs, infra, scaffold, migration.",
|
|
506
|
+
found: invalidLanes.length === 0,
|
|
507
|
+
details: invalidLanes.length === 0
|
|
508
|
+
? "All declared lane values are either omitted or in the approved lane whitelist."
|
|
509
|
+
: `Invalid lane value(s): ${invalidLanes.join(", ")}. Remove lane or use a whitelisted value.`
|
|
510
|
+
});
|
|
511
|
+
const inconsistentParallelizable = [];
|
|
512
|
+
for (const wave of waveMeta) {
|
|
513
|
+
const hasSerialSlice = wave.rows.some((row) => row.parallelizable === false);
|
|
514
|
+
if (!hasSerialSlice)
|
|
515
|
+
continue;
|
|
516
|
+
if (!waveHasSequentialModeHint(wave)) {
|
|
517
|
+
const serialSlices = wave.rows
|
|
518
|
+
.filter((row) => row.parallelizable === false)
|
|
519
|
+
.map((row) => row.sliceId)
|
|
520
|
+
.join(", ");
|
|
521
|
+
inconsistentParallelizable.push(`${wave.waveId} [${serialSlices}]`);
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
findings.push({
|
|
525
|
+
section: "plan_parallelizable_consistency",
|
|
526
|
+
required: false,
|
|
527
|
+
rule: "Waves containing `parallelizable: false` slices should be explicitly marked sequential in wave notes/mode.",
|
|
528
|
+
found: inconsistentParallelizable.length === 0,
|
|
529
|
+
details: inconsistentParallelizable.length === 0
|
|
530
|
+
? "No serial slices were found outside a sequentially-labeled wave context."
|
|
531
|
+
: `Serial slice(s) found without sequential wave mode hints in: ${inconsistentParallelizable.join(", ")}. Add a wave mode/note indicating sequential execution.`
|
|
532
|
+
});
|
|
533
|
+
const mermaidBlocks = raw.match(/```mermaid[\s\S]*?```/giu) ?? [];
|
|
534
|
+
const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d+\b/iu.test(block));
|
|
535
|
+
findings.push({
|
|
536
|
+
section: "plan_parallel_exec_mermaid_present",
|
|
537
|
+
required: false,
|
|
538
|
+
rule: "Plan should include a mermaid flowchart/gantt for parallel waves and slice dependencies to make fanout shape visually reviewable.",
|
|
539
|
+
found: hasParallelExecMermaid,
|
|
540
|
+
details: hasParallelExecMermaid
|
|
541
|
+
? "Mermaid visualization for parallel execution waves is present."
|
|
542
|
+
: "No mermaid parallel-execution visualization found (advisory). Add a ` ```mermaid ` flowchart or gantt with W-* and S-* nodes."
|
|
543
|
+
});
|
|
358
544
|
}
|
|
359
545
|
}
|
package/dist/config.d.ts
CHANGED
|
@@ -1,4 +1,6 @@
|
|
|
1
|
-
import type { CclawConfig, FlowTrack, HarnessId, LanguageRulePack } from "./types.js";
|
|
1
|
+
import type { CclawConfig, FlowTrack, HarnessId, LanguageRulePack, TddCommitMode } from "./types.js";
|
|
2
|
+
export declare const TDD_COMMIT_MODES: readonly ["managed-per-slice", "agent-required", "checkpoint-only", "off"];
|
|
3
|
+
export declare const DEFAULT_TDD_COMMIT_MODE: TddCommitMode;
|
|
2
4
|
export declare const DEFAULT_TDD_TEST_PATH_PATTERNS: readonly string[];
|
|
3
5
|
export declare const DEFAULT_TDD_TEST_GLOBS: readonly string[];
|
|
4
6
|
export declare const DEFAULT_TDD_PRODUCTION_PATH_PATTERNS: readonly string[];
|
|
@@ -16,6 +18,7 @@ export declare class InvalidConfigError extends Error {
|
|
|
16
18
|
}
|
|
17
19
|
export declare function configPath(projectRoot: string): string;
|
|
18
20
|
export declare function createDefaultConfig(harnesses?: HarnessId[], _defaultTrack?: FlowTrack): CclawConfig;
|
|
21
|
+
export declare function resolveTddCommitMode(config: Pick<CclawConfig, "tdd"> | null | undefined): TddCommitMode;
|
|
19
22
|
export declare function detectLanguageRulePacks(_projectRoot: string): Promise<LanguageRulePack[]>;
|
|
20
23
|
export declare function readConfig(projectRoot: string, _options?: ReadConfigOptions): Promise<CclawConfig>;
|
|
21
24
|
export interface WriteConfigOptions {
|
package/dist/config.js
CHANGED
|
@@ -6,8 +6,16 @@ import { exists, writeFileSafe } from "./fs-utils.js";
|
|
|
6
6
|
import { HARNESS_IDS } from "./types.js";
|
|
7
7
|
const CONFIG_PATH = `${RUNTIME_ROOT}/config.yaml`;
|
|
8
8
|
const HARNESS_ID_SET = new Set(HARNESS_IDS);
|
|
9
|
-
const ALLOWED_CONFIG_KEYS = new Set(["version", "flowVersion", "harnesses"]);
|
|
9
|
+
const ALLOWED_CONFIG_KEYS = new Set(["version", "flowVersion", "harnesses", "tdd"]);
|
|
10
10
|
const SUPPORTED_HARNESSES_TEXT = HARNESS_IDS.join(", ");
|
|
11
|
+
export const TDD_COMMIT_MODES = [
|
|
12
|
+
"managed-per-slice",
|
|
13
|
+
"agent-required",
|
|
14
|
+
"checkpoint-only",
|
|
15
|
+
"off"
|
|
16
|
+
];
|
|
17
|
+
const TDD_COMMIT_MODE_SET = new Set(TDD_COMMIT_MODES);
|
|
18
|
+
export const DEFAULT_TDD_COMMIT_MODE = "managed-per-slice";
|
|
11
19
|
// Kept for runtime modules that use these defaults directly.
|
|
12
20
|
export const DEFAULT_TDD_TEST_PATH_PATTERNS = [
|
|
13
21
|
"**/*.test.*",
|
|
@@ -30,7 +38,9 @@ export class InvalidConfigError extends Error {
|
|
|
30
38
|
function configFixExample() {
|
|
31
39
|
return `harnesses:
|
|
32
40
|
- claude
|
|
33
|
-
- cursor
|
|
41
|
+
- cursor
|
|
42
|
+
tdd:
|
|
43
|
+
commitMode: managed-per-slice`;
|
|
34
44
|
}
|
|
35
45
|
function configValidationError(configFilePath, reason) {
|
|
36
46
|
return new InvalidConfigError(`Invalid cclaw config at ${configFilePath}: ${reason}\n` +
|
|
@@ -48,9 +58,19 @@ export function createDefaultConfig(harnesses = DEFAULT_HARNESSES, _defaultTrack
|
|
|
48
58
|
return {
|
|
49
59
|
version: CCLAW_VERSION,
|
|
50
60
|
flowVersion: FLOW_VERSION,
|
|
51
|
-
harnesses: [...new Set(harnesses)]
|
|
61
|
+
harnesses: [...new Set(harnesses)],
|
|
62
|
+
tdd: {
|
|
63
|
+
commitMode: DEFAULT_TDD_COMMIT_MODE
|
|
64
|
+
}
|
|
52
65
|
};
|
|
53
66
|
}
|
|
67
|
+
export function resolveTddCommitMode(config) {
|
|
68
|
+
const raw = config?.tdd?.commitMode;
|
|
69
|
+
if (typeof raw === "string" && TDD_COMMIT_MODE_SET.has(raw)) {
|
|
70
|
+
return raw;
|
|
71
|
+
}
|
|
72
|
+
return DEFAULT_TDD_COMMIT_MODE;
|
|
73
|
+
}
|
|
54
74
|
function assertOnlySupportedKeys(parsed, fullPath) {
|
|
55
75
|
const unknownKeys = Object.keys(parsed).filter((key) => !ALLOWED_CONFIG_KEYS.has(key));
|
|
56
76
|
if (unknownKeys.length === 0)
|
|
@@ -84,6 +104,10 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
84
104
|
!Array.isArray(parsed.harnesses)) {
|
|
85
105
|
throw configValidationError(fullPath, `"harnesses" must be an array`);
|
|
86
106
|
}
|
|
107
|
+
if (Object.prototype.hasOwnProperty.call(parsed, "tdd") &&
|
|
108
|
+
!isRecord(parsed.tdd)) {
|
|
109
|
+
throw configValidationError(fullPath, `"tdd" must be an object when provided`);
|
|
110
|
+
}
|
|
87
111
|
const rawHarnesses = Array.isArray(parsed.harnesses) ? parsed.harnesses : DEFAULT_HARNESSES;
|
|
88
112
|
const normalizedHarnesses = [];
|
|
89
113
|
for (const harness of rawHarnesses) {
|
|
@@ -103,17 +127,32 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
103
127
|
const flowVersion = typeof parsed.flowVersion === "string" && parsed.flowVersion.trim().length > 0
|
|
104
128
|
? parsed.flowVersion
|
|
105
129
|
: FLOW_VERSION;
|
|
130
|
+
const parsedTdd = isRecord(parsed.tdd) ? parsed.tdd : {};
|
|
131
|
+
const rawCommitMode = parsedTdd.commitMode;
|
|
132
|
+
if (rawCommitMode !== undefined &&
|
|
133
|
+
(typeof rawCommitMode !== "string" || !TDD_COMMIT_MODE_SET.has(rawCommitMode))) {
|
|
134
|
+
throw configValidationError(fullPath, `"tdd.commitMode" must be one of: ${TDD_COMMIT_MODES.join(", ")}`);
|
|
135
|
+
}
|
|
136
|
+
const commitMode = typeof rawCommitMode === "string"
|
|
137
|
+
? rawCommitMode
|
|
138
|
+
: DEFAULT_TDD_COMMIT_MODE;
|
|
106
139
|
return {
|
|
107
140
|
version,
|
|
108
141
|
flowVersion,
|
|
109
|
-
harnesses: normalizedHarnesses
|
|
142
|
+
harnesses: normalizedHarnesses,
|
|
143
|
+
tdd: {
|
|
144
|
+
commitMode
|
|
145
|
+
}
|
|
110
146
|
};
|
|
111
147
|
}
|
|
112
148
|
export async function writeConfig(projectRoot, config, _options = {}) {
|
|
113
149
|
const serialisable = {
|
|
114
150
|
version: config.version,
|
|
115
151
|
flowVersion: config.flowVersion,
|
|
116
|
-
harnesses: config.harnesses
|
|
152
|
+
harnesses: config.harnesses,
|
|
153
|
+
tdd: {
|
|
154
|
+
commitMode: resolveTddCommitMode(config)
|
|
155
|
+
}
|
|
117
156
|
};
|
|
118
157
|
await writeFileSafe(configPath(projectRoot), stringify(serialisable));
|
|
119
158
|
}
|
|
@@ -156,6 +156,7 @@ export function sliceBuilderProtocol() {
|
|
|
156
156
|
"### Invariants",
|
|
157
157
|
"- Produce failing RED evidence (or cite the delegated RED artifact) **before** production edits.",
|
|
158
158
|
"- Stay inside the slice contract: `claimedPaths`, acceptance mapping, and forbidden-change lists from the parent.",
|
|
159
|
+
"- When `tdd.commitMode=managed-per-slice`, do **not** hand-edit git state for slice files (no manual `git add/commit` on claimed paths). Let `.cclaw/hooks/slice-commit.mjs` own per-slice commits.",
|
|
159
160
|
"- After GREEN, refactor inline **or** record deferred refactor via the same `--refactor-outcome` mechanics the controller specifies.",
|
|
160
161
|
"- Own the prose slice summary at `<artifacts-dir>/tdd-slices/S-<id>.md` yourself.",
|
|
161
162
|
"",
|
package/dist/content/hooks.d.ts
CHANGED
|
@@ -2,6 +2,7 @@ export declare function startFlowScript(): string;
|
|
|
2
2
|
export declare function cancelRunScript(): string;
|
|
3
3
|
export declare function stageCompleteScript(): string;
|
|
4
4
|
export declare function delegationRecordScript(): string;
|
|
5
|
+
export declare function sliceCommitScript(): string;
|
|
5
6
|
export declare function runHookCmdScript(): string;
|
|
6
7
|
export { claudeHooksJsonWithObservation as claudeHooksJson } from "./observe.js";
|
|
7
8
|
export { cursorHooksJsonWithObservation as cursorHooksJson } from "./observe.js";
|
package/dist/content/hooks.js
CHANGED
|
@@ -200,6 +200,7 @@ export function stageCompleteScript() {
|
|
|
200
200
|
export function delegationRecordScript() {
|
|
201
201
|
return `#!/usr/bin/env node
|
|
202
202
|
import { createHash } from "node:crypto";
|
|
203
|
+
import { spawn } from "node:child_process";
|
|
203
204
|
import fs from "node:fs/promises";
|
|
204
205
|
import path from "node:path";
|
|
205
206
|
import process from "node:process";
|
|
@@ -1189,6 +1190,101 @@ async function runRepair(args, json) {
|
|
|
1189
1190
|
}
|
|
1190
1191
|
}
|
|
1191
1192
|
|
|
1193
|
+
async function runSliceCommitIfNeeded(root, row, runId) {
|
|
1194
|
+
if (
|
|
1195
|
+
row.stage !== "tdd" ||
|
|
1196
|
+
row.agent !== "slice-builder" ||
|
|
1197
|
+
row.status !== "completed" ||
|
|
1198
|
+
row.phase !== "doc"
|
|
1199
|
+
) {
|
|
1200
|
+
return { ok: true, skipped: true };
|
|
1201
|
+
}
|
|
1202
|
+
const sliceId = typeof row.sliceId === "string" ? row.sliceId.trim() : "";
|
|
1203
|
+
const spanId = typeof row.spanId === "string" ? row.spanId.trim() : "";
|
|
1204
|
+
if (sliceId.length === 0 || spanId.length === 0) {
|
|
1205
|
+
return { ok: true, skipped: true };
|
|
1206
|
+
}
|
|
1207
|
+
const helperPath = path.join(root, RUNTIME_ROOT, "hooks", "slice-commit.mjs");
|
|
1208
|
+
if (!(await exists(helperPath))) {
|
|
1209
|
+
return { ok: true, skipped: true };
|
|
1210
|
+
}
|
|
1211
|
+
const helperArgs = [
|
|
1212
|
+
helperPath,
|
|
1213
|
+
"--json",
|
|
1214
|
+
"--quiet",
|
|
1215
|
+
"--slice=" + sliceId,
|
|
1216
|
+
"--span-id=" + spanId,
|
|
1217
|
+
"--run-id=" + runId
|
|
1218
|
+
];
|
|
1219
|
+
if (typeof row.taskId === "string" && row.taskId.trim().length > 0) {
|
|
1220
|
+
helperArgs.push("--task-id=" + row.taskId.trim());
|
|
1221
|
+
}
|
|
1222
|
+
if (Array.isArray(row.claimedPaths) && row.claimedPaths.length > 0) {
|
|
1223
|
+
helperArgs.push("--claimed-paths=" + row.claimedPaths.join(","));
|
|
1224
|
+
}
|
|
1225
|
+
if (Array.isArray(row.evidenceRefs) && row.evidenceRefs.length > 0) {
|
|
1226
|
+
const title = String(row.evidenceRefs[0] || "").trim();
|
|
1227
|
+
if (title.length > 0) {
|
|
1228
|
+
helperArgs.push("--title=" + title.slice(0, 120));
|
|
1229
|
+
}
|
|
1230
|
+
}
|
|
1231
|
+
|
|
1232
|
+
return await new Promise((resolve) => {
|
|
1233
|
+
const child = spawn(process.execPath, helperArgs, {
|
|
1234
|
+
cwd: root,
|
|
1235
|
+
env: process.env,
|
|
1236
|
+
stdio: ["ignore", "pipe", "pipe"]
|
|
1237
|
+
});
|
|
1238
|
+
let out = "";
|
|
1239
|
+
let err = "";
|
|
1240
|
+
child.stdout.on("data", (chunk) => {
|
|
1241
|
+
out += String(chunk ?? "");
|
|
1242
|
+
});
|
|
1243
|
+
child.stderr.on("data", (chunk) => {
|
|
1244
|
+
err += String(chunk ?? "");
|
|
1245
|
+
});
|
|
1246
|
+
child.on("error", (error) => {
|
|
1247
|
+
resolve({
|
|
1248
|
+
ok: false,
|
|
1249
|
+
errorCode: "slice_commit_failed",
|
|
1250
|
+
details: {
|
|
1251
|
+
message: error instanceof Error ? error.message : String(error)
|
|
1252
|
+
}
|
|
1253
|
+
});
|
|
1254
|
+
});
|
|
1255
|
+
child.on("close", (code) => {
|
|
1256
|
+
let payload = null;
|
|
1257
|
+
const trimmed = out.trim();
|
|
1258
|
+
if (trimmed.length > 0) {
|
|
1259
|
+
try {
|
|
1260
|
+
payload = JSON.parse(trimmed);
|
|
1261
|
+
} catch {
|
|
1262
|
+
payload = null;
|
|
1263
|
+
}
|
|
1264
|
+
}
|
|
1265
|
+
if (code === 0) {
|
|
1266
|
+
resolve({ ok: true, payload });
|
|
1267
|
+
return;
|
|
1268
|
+
}
|
|
1269
|
+
const payloadCode =
|
|
1270
|
+
payload && typeof payload === "object" && typeof payload.errorCode === "string"
|
|
1271
|
+
? payload.errorCode
|
|
1272
|
+
: "slice_commit_failed";
|
|
1273
|
+
resolve({
|
|
1274
|
+
ok: false,
|
|
1275
|
+
errorCode: payloadCode,
|
|
1276
|
+
details:
|
|
1277
|
+
payload && typeof payload === "object"
|
|
1278
|
+
? payload
|
|
1279
|
+
: {
|
|
1280
|
+
stderr: err.trim(),
|
|
1281
|
+
stdout: out.trim()
|
|
1282
|
+
}
|
|
1283
|
+
});
|
|
1284
|
+
});
|
|
1285
|
+
});
|
|
1286
|
+
}
|
|
1287
|
+
|
|
1192
1288
|
async function main() {
|
|
1193
1289
|
const args = parseArgs(process.argv.slice(2));
|
|
1194
1290
|
const json = args.json !== undefined;
|
|
@@ -1573,6 +1669,23 @@ async function main() {
|
|
|
1573
1669
|
}
|
|
1574
1670
|
}
|
|
1575
1671
|
|
|
1672
|
+
const sliceCommitResult = await runSliceCommitIfNeeded(root, clean, runId);
|
|
1673
|
+
if (!sliceCommitResult.ok) {
|
|
1674
|
+
emitErrorJson(
|
|
1675
|
+
sliceCommitResult.errorCode || "slice_commit_failed",
|
|
1676
|
+
sliceCommitResult.details || {},
|
|
1677
|
+
json
|
|
1678
|
+
);
|
|
1679
|
+
return;
|
|
1680
|
+
}
|
|
1681
|
+
if (
|
|
1682
|
+
sliceCommitResult.payload &&
|
|
1683
|
+
typeof sliceCommitResult.payload === "object" &&
|
|
1684
|
+
typeof sliceCommitResult.payload.commitSha === "string"
|
|
1685
|
+
) {
|
|
1686
|
+
event.sliceCommitSha = sliceCommitResult.payload.commitSha;
|
|
1687
|
+
}
|
|
1688
|
+
|
|
1576
1689
|
await persistEntry(root, runId, clean, event);
|
|
1577
1690
|
|
|
1578
1691
|
process.stdout.write(JSON.stringify({ ok: true, event }, null, 2) + "\\n");
|
|
@@ -1581,6 +1694,9 @@ async function main() {
|
|
|
1581
1694
|
void main();
|
|
1582
1695
|
`;
|
|
1583
1696
|
}
|
|
1697
|
+
export function sliceCommitScript() {
|
|
1698
|
+
return internalHelperScript("slice-commit", "slice-commit", "Usage: node " + RUNTIME_ROOT + "/hooks/slice-commit.mjs --slice=<S-N> --span-id=<span-id> [--task-id=<T-id>] [--title=<text>] [--run-id=<run-id>] [--claimed-paths=<path1,path2,...>] [--claimed-path=<path> ...] [--json] [--quiet]");
|
|
1699
|
+
}
|
|
1584
1700
|
export function runHookCmdScript() {
|
|
1585
1701
|
return `: << 'CMDBLOCK'
|
|
1586
1702
|
@echo off
|
package/dist/content/skills.js
CHANGED
|
@@ -188,14 +188,14 @@ export function tddTopOfSkillBlock(stage) {
|
|
|
188
188
|
**Step 1 — Wave status (always first):**
|
|
189
189
|
\`node .cclaw/cli.mjs internal wave-status --json\`
|
|
190
190
|
|
|
191
|
-
The output names: \`waves[]\` (closed/open), \`nextDispatch.waveId\`, \`nextDispatch.mode\` (\`wave-fanout
|
|
191
|
+
The output names: \`waves[]\` (closed/open), \`nextDispatch.waveId\`, \`nextDispatch.mode\` (\`wave-fanout\`, \`single-slice\`, or \`blocked\`), \`nextDispatch.readyToDispatch\` (slice ids), and \`nextDispatch.pathConflicts\` (overlapping \`claimedPaths\` between members).
|
|
192
192
|
|
|
193
193
|
**Step 2 — Decide automatically (no user question when paths disjoint):**
|
|
194
194
|
|
|
195
195
|
| \`mode\` | \`pathConflicts\` | Action |
|
|
196
196
|
|------------------|-------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
|
|
197
197
|
| \`wave-fanout\` | \`[]\` | **Fan out the entire wave in one tool batch.** Emit one \`Task\` per ready slice in a single controller message. Do NOT ask the user. |
|
|
198
|
-
| \`
|
|
198
|
+
| \`blocked\` | non-empty | Issue exactly one AskQuestion (resolve overlap, split/serialize, or adjust claimedPaths), then re-run \`wave-status\`. |
|
|
199
199
|
| \`single-slice\` | — | One \`Task\` for the next ready slice. |
|
|
200
200
|
|
|
201
201
|
**Step 3 — Dispatch protocol per slice:** in the SAME controller message that issues the \`Task\` call:
|
|
@@ -52,6 +52,7 @@ export const PLAN = {
|
|
|
52
52
|
"Define validation points — mark where progress must be checked before continuing, with concrete command and expected evidence.",
|
|
53
53
|
"Define execution posture — record whether execution should be sequential, dependency-batched, parallel-safe, or blocked; include risk triggers and RED/GREEN/REFACTOR checkpoint/commit expectations when the repo workflow supports them. This fulfills the `plan_execution_posture_recorded` gate.",
|
|
54
54
|
"**Author the FULL Parallel Execution Plan.** Inside the `<!-- parallel-exec-managed-start -->` block, enumerate ALL waves W-02..W-N covering EVERY T-NNN task in `## Task List` — no `we'll author waves later`, `next batch only`, or open-ended Backlog handwave is acceptable. Each task gets a slice with `sliceId | taskId | dependsOn | claimedPaths | parallelizable | riskTier | lane`. Spike rows (`S-N`) and tasks marked `deferred` in an explicit `Deferred:` column may be omitted, but every other T-NNN must be claimed. This fulfills the `plan_parallel_exec_full_coverage` gate. The TDD stage downstream is a pure consumer of these waves — if the plan does not author them, TDD cannot fan out that work.",
|
|
55
|
+
"After authoring/refreshing the managed parallel-exec block, render a Mermaid `flowchart` or `gantt` covering waves (`W-*`) and slice dependencies (`S-*`) so parallelism and fan-in boundaries are visually auditable.",
|
|
55
56
|
"WAIT_FOR_CONFIRM — write plan artifact and explicitly pause. **STOP.** Do NOT proceed until user confirms. Then close the stage with `node .cclaw/hooks/stage-complete.mjs plan` and tell user to run `/cc`."
|
|
56
57
|
],
|
|
57
58
|
interactionProtocol: [
|
|
@@ -59,6 +60,7 @@ export const PLAN = {
|
|
|
59
60
|
"Split work into small vertical slices (target 2-5 minute tasks).",
|
|
60
61
|
"Publish explicit dependency batches with entry and exit checks for each batch.",
|
|
61
62
|
"Expose execution posture: sequential vs batch/parallel, stop conditions, and checkpoint cadence for the TDD handoff.",
|
|
63
|
+
"Keep same-wave `claimedPaths` disjoint; if overlap exists, split waves or serialize explicitly before handoff.",
|
|
62
64
|
"Attach exact verification command/manual step and expected evidence to every task.",
|
|
63
65
|
"Preserve locked scope boundaries: no silent scope reduction language in task rows.",
|
|
64
66
|
"Enforce WAIT_FOR_CONFIRM: present the plan summary with options (A) Approve / (B) Revise / (C) Reject.",
|
|
@@ -82,6 +84,7 @@ export const PLAN = {
|
|
|
82
84
|
{ id: "plan_acceptance_mapped", description: "Each task maps to a spec acceptance criterion." },
|
|
83
85
|
{ id: "plan_execution_posture_recorded", description: "Execution posture is recorded before implementation handoff." },
|
|
84
86
|
{ id: "plan_parallel_exec_full_coverage", description: "Every T-NNN task in `## Task List` (other than spikes/explicitly-deferred) is assigned to at least one slice inside the `<!-- parallel-exec-managed-start -->` block; TDD cannot fan out work that the plan never authored as waves." },
|
|
87
|
+
{ id: "plan_wave_paths_disjoint", description: "Within each authored wave, slice `claimedPaths` remain disjoint so `wave-fanout` can dispatch safely without overlap conflicts." },
|
|
85
88
|
{ id: "plan_wait_for_confirm", description: "Execution blocked until explicit user confirmation." }
|
|
86
89
|
],
|
|
87
90
|
requiredEvidence: [
|
|
@@ -52,7 +52,7 @@ export const TDD = {
|
|
|
52
52
|
"Controller never writes production code or per-slice prose — the delegated worker does. Record routing decisions; cite `wave-status` before redundant slice questions.",
|
|
53
53
|
"Discover existing tests and commands before RED; run a system-wide impact check (callbacks, state, interfaces, contracts) before GREEN.",
|
|
54
54
|
"RED must fail for the right reason; capture logs. GREEN must run the full relevant suite, not a narrow subset.",
|
|
55
|
-
"Before calling a slice done, run verification-before-completion (command + PASS/FAIL + commit
|
|
55
|
+
"Before calling a slice done, run verification-before-completion (command + PASS/FAIL + durable commit evidence: managed-per-slice git commits when `.git` is present, or explicit no-VCS attestation + hash).",
|
|
56
56
|
"Integration-overseer must complete with PASS/PASS_WITH_GAPS when fan-out closes a wave unless the controller emits `cclaw_integration_overseer_skipped` for a documented heuristic skip.",
|
|
57
57
|
"Investigation discipline + behavior anchor in this skill govern evidence: cite commands and paths, not pasted source dumps.",
|
|
58
58
|
],
|
|
@@ -72,7 +72,7 @@ export const TDD = {
|
|
|
72
72
|
{ id: "tdd_red_test_written", description: "Failing tests exist before implementation changes." },
|
|
73
73
|
{ id: "tdd_green_full_suite", description: "Full relevant suite passes in GREEN state." },
|
|
74
74
|
{ id: "tdd_refactor_completed", description: "Refactor pass completed with behavior preservation verified." },
|
|
75
|
-
{ id: "tdd_verified_before_complete", description: "Fresh verification evidence includes test command
|
|
75
|
+
{ id: "tdd_verified_before_complete", description: "Fresh verification evidence includes test command + explicit pass/fail status; when `tdd.commitMode=managed-per-slice` and `.git` exists, closed slices must be backed by real git commits, otherwise provide explicit no-VCS attestation + hash." },
|
|
76
76
|
{ id: "tdd_iron_law_acknowledged", description: "Iron Law acknowledgement is explicit (`Acknowledged: yes`) before implementation proceeds." },
|
|
77
77
|
{ id: "tdd_watched_red_observed", description: "Watched-RED Proof records at least one observed failing test with ISO timestamp evidence." },
|
|
78
78
|
{ id: "tdd_slice_cycle_complete", description: "Vertical Slice Cycle records RED, GREEN, and REFACTOR phases per active slice." },
|
|
@@ -89,7 +89,7 @@ export const TDD = {
|
|
|
89
89
|
"REFACTOR coverage: separate `phase=refactor|refactor-deferred` rows or `refactorOutcome` folded into GREEN as the hook documents.",
|
|
90
90
|
"`tdd-slices/S-<id>.md` kept current with the builder span; phase events remain the ground truth for lint auto-render blocks.",
|
|
91
91
|
"`event: slice-completed` umbrella rows tie RED/GREEN timestamps to the builder once that writer runs on the repo.",
|
|
92
|
-
"Fresh verification (command + PASS/FAIL + commit
|
|
92
|
+
"Fresh verification (command + PASS/FAIL + managed-per-slice commit proof from git log when `.git` exists, or no-VCS reason + hash); Iron Law acknowledgement; acceptance mapping + traceability IDs.",
|
|
93
93
|
],
|
|
94
94
|
inputs: ["approved plan slice", "spec acceptance criterion", "test harness configuration", "coding standards and constraints"],
|
|
95
95
|
requiredContext: ["plan artifact", "spec artifact", "existing test patterns", "affected contracts and state boundaries"],
|
|
@@ -135,7 +135,7 @@ export const TDD = {
|
|
|
135
135
|
{ section: "REFACTOR Notes", required: true, validationRule: "What changed, why, behavior preservation confirmed." },
|
|
136
136
|
{ section: "Traceability", required: true, validationRule: "Plan task ID and spec criterion linked." },
|
|
137
137
|
{ section: "Iron Law Acknowledgement", required: true, validationRule: "Must include `Acknowledged: yes` and list exceptions (or `None`)." },
|
|
138
|
-
{ section: "Verification Ladder", required: true, validationRule: "Per-slice verification tier (static, command, behavioral, human) with evidence captured for the highest tier reached this turn. Must include command + PASS/FAIL
|
|
138
|
+
{ section: "Verification Ladder", required: true, validationRule: "Per-slice verification tier (static, command, behavioral, human) with evidence captured for the highest tier reached this turn. Must include command + PASS/FAIL and durable commit evidence: managed-per-slice git commit proof when VCS is present, or explicit no-vcs reason plus content/artifact hash/config override." },
|
|
139
139
|
{ section: "TDD Blocker Taxonomy", required: false, validationRule: "When blocked, classify as NO_SOURCE_CONTEXT, NO_TEST_SURFACE, NO_IMPLEMENTABLE_SLICE, RED_NOT_EXPRESSIBLE, or NO_VCS_MODE; include blockedBecause, missingInputs, recommendedRoute, nextCommand, and resumeCriteria." }
|
|
140
140
|
]
|
|
141
141
|
},
|
|
@@ -119,7 +119,7 @@ If during any stage the agent discovers evidence that contradicts the initial Ph
|
|
|
119
119
|
|
|
120
120
|
**The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the entire wave in a SINGLE controller message: one harness \`Task(subagent_type=…, description="slice-builder S-<id>", prompt=<full slice context>)\` call per ready slice, **side by side in the same tool batch**. Each \`slice-builder\` span owns the full RED → GREEN → REFACTOR → DOC cycle for its slice and emits its own \`delegation-record --phase=red|green|refactor|refactor-deferred|doc\` rows. RED-before-GREEN is enforced per-slice by the linter.
|
|
121
121
|
|
|
122
|
-
When \`mode:
|
|
122
|
+
When \`mode: blocked\` with \`pathConflicts\`, surface exactly one AskQuestion that lets the user resolve the overlap (drop / split / serialize). When \`mode: single-slice\`, dispatch one \`Task\` for the next ready slice.
|
|
123
123
|
|
|
124
124
|
6. **Auto-advance after stage-complete:** when \`stage-complete\` returns \`ok\` with a new \`currentStage\`, immediately load the next stage skill and continue without waiting for the user to retype \`/cc\`. Announce \`Stage <prev> complete → entering <next>. Continuing.\` and proceed.
|
|
125
125
|
|
|
@@ -214,7 +214,7 @@ Progress the tracked flow only when one exists:
|
|
|
214
214
|
2. If missing, guide the user to run \`npx cclaw-cli init\` and stop.
|
|
215
215
|
3. If it is only a fresh init placeholder (\`completedStages: []\`, no passed gates, and no \`${RUNTIME_ROOT}/artifacts/00-idea.md\`), stop and ask for \`/cc <prompt>\` to start a tracked run. Do not silently create a brainstorm run.
|
|
216
216
|
4. Check gates for \`currentStage\`.
|
|
217
|
-
5. **TDD:** When \`currentStage\` is \`tdd\`, run \`wave-status --json\`, then reconcile the managed **Parallel Execution Plan** in \`05-plan.md\` with \`wave-plans/wave-NN.md\`. **The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the wave in a SINGLE controller message — one \`Task\` per ready slice, side by side. Each \`slice-builder\` span owns its full RED → GREEN → REFACTOR → DOC cycle. Mirror plan \`dependsOn\` ordering between waves.
|
|
217
|
+
5. **TDD:** When \`currentStage\` is \`tdd\`, run \`wave-status --json\`, then reconcile the managed **Parallel Execution Plan** in \`05-plan.md\` with \`wave-plans/wave-NN.md\`. **The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the wave in a SINGLE controller message — one \`Task\` per ready slice, side by side. If \`mode: blocked\`, resolve overlaps first. Each \`slice-builder\` span owns its full RED → GREEN → REFACTOR → DOC cycle. Mirror plan \`dependsOn\` ordering between waves.
|
|
218
218
|
6. **Wave resume:** Parallelize unfinished members; never restart completed lanes. Integration-overseer follows \`integrationCheckRequired\`; when skipped, emit \`cclaw_integration_overseer_skipped\` per the hook contract.
|
|
219
219
|
7. If incomplete → load current stage skill and execute.
|
|
220
220
|
8. If complete → advance to next stage and execute. **Auto-advance:** when \`stage-complete\` returns \`ok\`, immediately load the next stage skill and continue without waiting for the user to retype \`/cc\`.
|
package/dist/install.js
CHANGED
|
@@ -13,7 +13,7 @@ import { cancelCommandContract, cancelCommandSkillMarkdown } from "./content/can
|
|
|
13
13
|
import { subagentDrivenDevSkill, parallelAgentsSkill } from "./content/subagents.js";
|
|
14
14
|
import { sessionHooksSkillMarkdown } from "./content/session-hooks.js";
|
|
15
15
|
import { ironLawsSkillMarkdown } from "./content/iron-laws.js";
|
|
16
|
-
import { stageCompleteScript, startFlowScript, cancelRunScript, runHookCmdScript, delegationRecordScript, opencodePluginJs, claudeHooksJson, codexHooksJson, cursorHooksJson } from "./content/hooks.js";
|
|
16
|
+
import { stageCompleteScript, startFlowScript, cancelRunScript, runHookCmdScript, delegationRecordScript, sliceCommitScript, opencodePluginJs, claudeHooksJson, codexHooksJson, cursorHooksJson } from "./content/hooks.js";
|
|
17
17
|
import { nodeHookRuntimeScript } from "./content/node-hooks.js";
|
|
18
18
|
import { META_SKILL_NAME, usingCclawSkillMarkdown } from "./content/meta-skill.js";
|
|
19
19
|
import { ARTIFACT_TEMPLATES, CURSOR_GUIDELINES_RULE_MDC, CURSOR_WORKFLOW_RULE_MDC, RULEBOOK_MARKDOWN, buildRulesJson } from "./content/templates.js";
|
|
@@ -692,6 +692,7 @@ async function writeHooks(projectRoot, config) {
|
|
|
692
692
|
await writeFileSafe(path.join(hooksDir, "run-hook.mjs"), bundledHookRuntime ?? nodeHookRuntimeScript(hookRuntimeOptions));
|
|
693
693
|
await writeFileSafe(path.join(hooksDir, "run-hook.cmd"), runHookCmdScript());
|
|
694
694
|
await writeFileSafe(path.join(hooksDir, "delegation-record.mjs"), delegationRecordScript());
|
|
695
|
+
await writeFileSafe(path.join(hooksDir, "slice-commit.mjs"), sliceCommitScript());
|
|
695
696
|
const opencodePluginSource = opencodePluginJs();
|
|
696
697
|
await writeFileSafe(path.join(hooksDir, "opencode-plugin.mjs"), opencodePluginSource);
|
|
697
698
|
try {
|
|
@@ -701,6 +702,7 @@ async function writeHooks(projectRoot, config) {
|
|
|
701
702
|
"run-hook.mjs",
|
|
702
703
|
"run-hook.cmd",
|
|
703
704
|
"delegation-record.mjs",
|
|
705
|
+
"slice-commit.mjs",
|
|
704
706
|
"opencode-plugin.mjs",
|
|
705
707
|
"cancel-run.mjs"
|
|
706
708
|
]) {
|
|
@@ -18,6 +18,7 @@ import { DelegationTimestampError, DispatchCapError, DispatchClaimedPathProtecte
|
|
|
18
18
|
import { parsePlanSplitWavesArgs, runPlanSplitWaves } from "./plan-split-waves.js";
|
|
19
19
|
import { runWaveStatusCommand } from "./wave-status.js";
|
|
20
20
|
import { runCohesionContractCommand } from "./cohesion-contract-stub.js";
|
|
21
|
+
import { runSliceCommitCommand } from "./slice-commit.js";
|
|
21
22
|
/**
|
|
22
23
|
* Subcommands that mutate or consult flow-state.json via the CLI runtime.
|
|
23
24
|
* They all require the sha256 sidecar to match before continuing so a
|
|
@@ -35,7 +36,7 @@ const GUARD_ENFORCED_SUBCOMMANDS = new Set([
|
|
|
35
36
|
export async function runInternalCommand(projectRoot, argv, io) {
|
|
36
37
|
const [subcommand, ...tokens] = argv;
|
|
37
38
|
if (!subcommand) {
|
|
38
|
-
io.stderr.write("cclaw internal requires a subcommand: advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant | plan-split-waves | wave-status | cohesion-contract\n");
|
|
39
|
+
io.stderr.write("cclaw internal requires a subcommand: advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | slice-commit | flow-state-repair | waiver-grant | plan-split-waves | wave-status | cohesion-contract\n");
|
|
39
40
|
return 1;
|
|
40
41
|
}
|
|
41
42
|
try {
|
|
@@ -81,6 +82,9 @@ export async function runInternalCommand(projectRoot, argv, io) {
|
|
|
81
82
|
if (subcommand === "hook") {
|
|
82
83
|
return await runHookCommand(projectRoot, parseHookArgs(tokens), io);
|
|
83
84
|
}
|
|
85
|
+
if (subcommand === "slice-commit") {
|
|
86
|
+
return await runSliceCommitCommand(projectRoot, tokens, io);
|
|
87
|
+
}
|
|
84
88
|
if (subcommand === "flow-state-repair") {
|
|
85
89
|
return await runFlowStateRepair(projectRoot, parseFlowStateRepairArgs(tokens), io);
|
|
86
90
|
}
|
|
@@ -96,7 +100,7 @@ export async function runInternalCommand(projectRoot, argv, io) {
|
|
|
96
100
|
if (subcommand === "cohesion-contract") {
|
|
97
101
|
return await runCohesionContractCommand(projectRoot, tokens, io);
|
|
98
102
|
}
|
|
99
|
-
io.stderr.write(`Unknown internal subcommand: ${subcommand}. Expected advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant | plan-split-waves | wave-status | cohesion-contract\n`);
|
|
103
|
+
io.stderr.write(`Unknown internal subcommand: ${subcommand}. Expected advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | slice-commit | flow-state-repair | waiver-grant | plan-split-waves | wave-status | cohesion-contract\n`);
|
|
100
104
|
return 1;
|
|
101
105
|
}
|
|
102
106
|
catch (err) {
|
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { promisify } from "node:util";
|
|
4
|
+
import { readConfig, resolveTddCommitMode } from "../config.js";
|
|
5
|
+
import { readDelegationLedger } from "../delegation.js";
|
|
6
|
+
import { exists } from "../fs-utils.js";
|
|
7
|
+
const execFileAsync = promisify(execFile);
|
|
8
|
+
/**
 * Split a comma-separated string into trimmed, non-empty tokens.
 *
 * @param {string} raw - Comma-separated input (e.g. a CLI flag value).
 * @returns {string[]} Trimmed tokens with empty entries removed.
 */
function parseCsv(raw) {
    const tokens = [];
    for (const piece of raw.split(",")) {
        const trimmed = piece.trim();
        if (trimmed.length > 0) {
            tokens.push(trimmed);
        }
    }
    return tokens;
}
|
|
14
|
+
/**
 * Normalize a path-like token for comparison: convert backslashes to
 * forward slashes, drop a single leading "./", and strip trailing slashes.
 *
 * @param {string} value - Raw path token (possibly Windows-style).
 * @returns {string} Normalized path.
 */
function normalizePathLike(value) {
    let normalized = value.replace(/\\/gu, "/");
    if (normalized.startsWith("./")) {
        normalized = normalized.slice(2);
    }
    return normalized.replace(/\/+$/u, "");
}
|
|
19
|
+
/**
 * Parse CLI tokens for `internal slice-commit`.
 *
 * Supported flags: --slice, --span-id, --task-id, --title, --run-id,
 * --claimed-paths (CSV), --claimed-path (single value), --json, --quiet.
 * Value flags accept both `--flag=value` and `--flag value` forms.
 *
 * @param {string[]} tokens - Raw argv tokens after the subcommand.
 * @returns {{sliceId: string, spanId: string, taskId: (string|undefined),
 *   title: (string|undefined), runId: (string|undefined),
 *   claimedPaths: string[], json: boolean, quiet: boolean}}
 * @throws {Error} On an unknown flag, a missing flag value, or when the
 *   required --slice / --span-id flags are absent.
 */
function parseSliceCommitArgs(tokens) {
    let sliceId = "";
    let spanId = "";
    let taskId;
    let title;
    let runId;
    const claimedPaths = [];
    let json = false;
    let quiet = false;
    for (let index = 0; index < tokens.length; index += 1) {
        const current = tokens[index];
        const lookahead = tokens[index + 1];
        // Accept `--flag=value` inline or `--flag value` as two tokens;
        // a lookahead starting with "--" is treated as the next flag.
        const readValue = (flag) => {
            if (current.startsWith(`${flag}=`)) {
                return current.slice(flag.length + 1);
            }
            if (current === flag && lookahead && !lookahead.startsWith("--")) {
                index += 1;
                return lookahead;
            }
            throw new Error(`${flag} requires a value.`);
        };
        if (current === "--json") {
            json = true;
        }
        else if (current === "--quiet") {
            quiet = true;
        }
        else if (current === "--slice" || current.startsWith("--slice=")) {
            sliceId = readValue("--slice").trim();
        }
        else if (current === "--span-id" || current.startsWith("--span-id=")) {
            spanId = readValue("--span-id").trim();
        }
        else if (current === "--task-id" || current.startsWith("--task-id=")) {
            taskId = readValue("--task-id").trim();
        }
        else if (current === "--title" || current.startsWith("--title=")) {
            title = readValue("--title").trim();
        }
        else if (current === "--run-id" || current.startsWith("--run-id=")) {
            runId = readValue("--run-id").trim();
        }
        else if (current === "--claimed-paths" || current.startsWith("--claimed-paths=")) {
            claimedPaths.push(...parseCsv(readValue("--claimed-paths")));
        }
        else if (current === "--claimed-path" || current.startsWith("--claimed-path=")) {
            const single = readValue("--claimed-path").trim();
            if (single.length > 0) {
                claimedPaths.push(single);
            }
        }
        else {
            throw new Error(`Unknown flag for internal slice-commit: ${current}`);
        }
    }
    if (sliceId.length === 0) {
        throw new Error("internal slice-commit requires --slice=<S-N>.");
    }
    if (spanId.length === 0) {
        throw new Error("internal slice-commit requires --span-id=<span-id>.");
    }
    return {
        sliceId,
        spanId,
        taskId,
        title,
        runId,
        claimedPaths,
        json,
        quiet
    };
}
|
|
97
|
+
/**
 * Emit a result payload on stdout or stderr, honoring --json and --quiet.
 *
 * --quiet suppresses stdout output only; stderr output always prints.
 * In JSON mode the entire payload is serialized; otherwise `payload.message`
 * is printed when it is a string, falling back to serialized JSON.
 *
 * @param {{stdout: {write: Function}, stderr: {write: Function}}} io
 * @param {{json: boolean, quiet: boolean}} args - Parsed CLI flags.
 * @param {object} payload - Structured result; may carry a `message` string.
 * @param {"stdout"|"stderr"} [channel="stdout"] - Target stream.
 */
function output(io, args, payload, channel = "stdout") {
    const isStdout = channel === "stdout";
    if (isStdout && args.quiet) {
        return;
    }
    const writer = isStdout ? io.stdout : io.stderr;
    let line;
    if (args.json) {
        line = JSON.stringify(payload);
    }
    else if (typeof payload.message === "string") {
        line = payload.message;
    }
    else {
        line = JSON.stringify(payload);
    }
    writer.write(`${line}\n`);
}
|
|
110
|
+
/**
 * Extract changed paths from `git status --porcelain` (v1) output.
 *
 * Untracked entries (`??`) are taken verbatim after the status columns;
 * tracked entries additionally resolve renames (`old -> new` keeps the new
 * path) and strip surrounding quotes. Results are normalized via
 * normalizePathLike and deduplicated, preserving first-seen order.
 *
 * @param {string} raw - Raw porcelain status text.
 * @returns {string[]} Unique normalized paths with working-tree changes.
 */
function parsePorcelainPaths(raw) {
    const seen = new Set();
    for (const rawLine of raw.split(/\r?\n/gu)) {
        const line = rawLine.trimEnd();
        // porcelain line shape: XY<space><path>
        if (line.length < 4) {
            continue;
        }
        if (line.slice(0, 2) === "??") {
            const untracked = normalizePathLike(line.slice(3).trim());
            if (untracked.length > 0) {
                seen.add(untracked);
            }
            continue;
        }
        let candidate = line.slice(3).trim();
        const arrow = candidate.indexOf(" -> ");
        if (arrow >= 0) {
            // Rename entry: keep the destination path.
            candidate = candidate.slice(arrow + 4);
        }
        candidate = normalizePathLike(candidate.replace(/^"/u, "").replace(/"$/u, ""));
        if (candidate.length > 0) {
            seen.add(candidate);
        }
    }
    return [...seen];
}
|
|
135
|
+
/**
 * Decide whether a changed path falls under any claimed path.
 *
 * A claim matches on exact equality or when it is a directory prefix of
 * the changed path (claim "src/a" covers "src/a/b.js"). Both sides are
 * normalized via normalizePathLike before comparison.
 *
 * @param {string} changedPath - Path reported by git status.
 * @param {string[]} claimedPaths - Paths claimed by the slice.
 * @returns {boolean} True when the change is covered by some claim.
 */
function matchesClaimedPath(changedPath, claimedPaths) {
    const target = normalizePathLike(changedPath);
    for (const raw of claimedPaths) {
        const claim = normalizePathLike(raw);
        if (claim.length === 0) {
            continue;
        }
        if (target === claim || target.startsWith(`${claim}/`)) {
            return true;
        }
    }
    return false;
}
|
|
146
|
+
/**
 * Look up the most recent claimed paths for a slice/span from the
 * delegation ledger.
 *
 * Filters tdd-stage `slice-builder` entries matching the slice, span and
 * (when provided) run id, picks the newest by `ts` (falling back to
 * `startTs`), and returns its claimed paths normalized and deduplicated.
 *
 * @param {string} projectRoot - Repository root.
 * @param {{sliceId: string, spanId: string, runId?: string}} args
 * @returns {Promise<string[]>} Normalized claimed paths (possibly empty).
 */
async function resolveClaimedPathsFromLedger(projectRoot, args) {
    const ledger = await readDelegationLedger(projectRoot);
    const candidates = ledger.entries.filter((entry) => entry.stage === "tdd" &&
        entry.agent === "slice-builder" &&
        entry.sliceId === args.sliceId &&
        entry.spanId === args.spanId &&
        (!args.runId || entry.runId === args.runId) &&
        Array.isArray(entry.claimedPaths) &&
        entry.claimedPaths.length > 0);
    // Newest first: descending lexicographic ISO-timestamp order.
    candidates.sort((left, right) => {
        const leftTs = left.ts ?? left.startTs ?? "";
        const rightTs = right.ts ?? right.startTs ?? "";
        if (leftTs < rightTs) {
            return 1;
        }
        if (leftTs > rightTs) {
            return -1;
        }
        return 0;
    });
    const claimed = candidates[0]?.claimedPaths ?? [];
    const normalized = claimed.map((p) => normalizePathLike(p)).filter((p) => p.length > 0);
    return [...new Set(normalized)];
}
|
|
163
|
+
/**
 * `cclaw internal slice-commit` — create a managed per-slice git commit.
 *
 * Behavior:
 *  - Skips (exit 0) when `tdd.commitMode` is not `managed-per-slice`, when
 *    `.git` is absent, when the working tree is clean, or when no change
 *    falls inside the slice's claimed paths.
 *  - Fails (exit 2) when claimed paths cannot be resolved from flags or the
 *    delegation ledger, or when the working tree contains changes outside
 *    the claimed paths (path drift).
 *  - Otherwise stages the changed paths inside the claim and commits with a
 *    `<slice>/<task>: <title>` header plus span/run trailers, then reports
 *    the new commit SHA (exit 0). Unexpected git failures exit 1.
 *
 * @param {string} projectRoot - Repository root (git cwd).
 * @param {string[]} tokens - CLI tokens after the subcommand.
 * @param {{stdout: {write: Function}, stderr: {write: Function}}} io
 * @returns {Promise<number>} Process exit code.
 */
export async function runSliceCommitCommand(projectRoot, tokens, io) {
    let args;
    try {
        args = parseSliceCommitArgs(tokens);
    }
    catch (err) {
        io.stderr.write(`cclaw internal slice-commit: ${err instanceof Error ? err.message : String(err)}\n`);
        return 1;
    }
    const config = await readConfig(projectRoot).catch(() => null);
    const commitMode = resolveTddCommitMode(config);
    if (commitMode !== "managed-per-slice") {
        output(io, args, {
            ok: true,
            skipped: true,
            reason: "commit-mode-not-managed",
            commitMode,
            message: `slice-commit skipped: commitMode=${commitMode}`
        });
        return 0;
    }
    const gitPresent = await exists(path.join(projectRoot, ".git"));
    if (!gitPresent) {
        output(io, args, {
            ok: true,
            skipped: true,
            reason: "no-git",
            message: "slice-commit skipped: .git is missing"
        });
        return 0;
    }
    // Prefer explicitly passed claims; fall back to the delegation ledger.
    const claimedPaths = args.claimedPaths.length > 0
        ? [...new Set(args.claimedPaths.map((p) => normalizePathLike(p)).filter((p) => p.length > 0))]
        : await resolveClaimedPathsFromLedger(projectRoot, args);
    if (claimedPaths.length === 0) {
        output(io, args, {
            ok: false,
            errorCode: "slice_commit_claimed_paths_missing",
            details: {
                sliceId: args.sliceId,
                spanId: args.spanId
            },
            message: `slice_commit_claimed_paths_missing: no claimed paths for ${args.sliceId}/${args.spanId}`
        }, "stderr");
        return 2;
    }
    const { stdout: statusRaw } = await execFileAsync("git", ["status", "--porcelain", "-uall"], {
        cwd: projectRoot
    });
    const changedPaths = parsePorcelainPaths(statusRaw);
    if (changedPaths.length === 0) {
        output(io, args, {
            ok: true,
            skipped: true,
            reason: "no-changes",
            message: `slice-commit skipped: no working-tree changes for ${args.sliceId}`
        });
        return 0;
    }
    const pathDrift = changedPaths.filter((p) => !matchesClaimedPath(p, claimedPaths));
    if (pathDrift.length > 0) {
        output(io, args, {
            ok: false,
            errorCode: "slice_commit_path_drift",
            details: {
                sliceId: args.sliceId,
                spanId: args.spanId,
                claimedPaths,
                driftPaths: pathDrift
            },
            message: `slice_commit_path_drift: ${pathDrift.join(", ")}`
        }, "stderr");
        return 2;
    }
    const changedInClaim = changedPaths.filter((p) => matchesClaimedPath(p, claimedPaths));
    if (changedInClaim.length === 0) {
        output(io, args, {
            ok: true,
            skipped: true,
            reason: "claimed-paths-unchanged",
            message: `slice-commit skipped: no changes within claimed paths for ${args.sliceId}`
        });
        return 0;
    }
    try {
        // Stage the concrete changed paths rather than the raw claims: a
        // claimed path that matches no files (e.g. a planned-but-never-created
        // file) would make `git add` fail with "pathspec ... did not match any
        // files" and abort an otherwise valid commit. The drift check above
        // guarantees every working-tree change lies inside the claims, so the
        // staged content is identical either way.
        await execFileAsync("git", ["add", "--", ...changedInClaim], {
            cwd: projectRoot
        });
        const taskPart = args.taskId && args.taskId.length > 0 ? args.taskId : "task";
        const titlePart = args.title && args.title.length > 0 ? args.title : "slice update";
        const header = `${args.sliceId}/${taskPart}: ${titlePart}`;
        const body = [
            `span-id: ${args.spanId}`,
            `run-id: ${args.runId ?? "unknown"}`,
            "phase-cycle: red->green->refactor->doc"
        ].join("\n");
        await execFileAsync("git", ["commit", "-m", header, "-m", body], {
            cwd: projectRoot
        });
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        // Race-safe: another writer may have committed the same content.
        if (/nothing to commit/iu.test(message)) {
            output(io, args, {
                ok: true,
                skipped: true,
                reason: "nothing-to-commit",
                message: `slice-commit skipped: nothing to commit for ${args.sliceId}`
            });
            return 0;
        }
        output(io, args, {
            ok: false,
            errorCode: "slice_commit_failed",
            details: { message },
            message: `slice_commit_failed: ${message}`
        }, "stderr");
        return 1;
    }
    const { stdout: shaStdout } = await execFileAsync("git", ["rev-parse", "HEAD"], {
        cwd: projectRoot
    });
    const commitSha = shaStdout.trim();
    output(io, args, {
        ok: true,
        commitSha,
        sliceId: args.sliceId,
        spanId: args.spanId,
        claimedPaths,
        changedPaths: changedInClaim,
        message: `slice commit created for ${args.sliceId}: ${commitSha}`
    });
    return 0;
}
|
|
@@ -16,7 +16,7 @@ export interface WaveStatusNextDispatch {
|
|
|
16
16
|
waveId: string | null;
|
|
17
17
|
readyToDispatch: string[];
|
|
18
18
|
pathConflicts: string[];
|
|
19
|
-
mode: "single-slice" | "wave-fanout" | "none";
|
|
19
|
+
mode: "single-slice" | "wave-fanout" | "blocked" | "none";
|
|
20
20
|
}
|
|
21
21
|
export interface WaveStatusReport {
|
|
22
22
|
activeRunId: string;
|
|
@@ -4,6 +4,8 @@ import { RUNTIME_ROOT } from "../constants.js";
|
|
|
4
4
|
import { readDelegationEvents, readDelegationLedger } from "../delegation.js";
|
|
5
5
|
import { readFlowState } from "../runs.js";
|
|
6
6
|
import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "./plan-split-waves.js";
|
|
7
|
+
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
|
|
8
|
+
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
7
9
|
function parseArgs(tokens) {
|
|
8
10
|
const args = { format: "json" };
|
|
9
11
|
for (const token of tokens) {
|
|
@@ -33,6 +35,92 @@ function classifyWaveStatus(total, closedCount) {
|
|
|
33
35
|
return "closed";
|
|
34
36
|
return "partial";
|
|
35
37
|
}
|
|
38
|
+
/**
 * Split a markdown table row into trimmed cell values.
 *
 * Drops one leading `|` and the trailing `|` (plus trailing whitespace)
 * before splitting on the remaining pipes.
 *
 * @param {string} trimmedLine - A pre-trimmed line that starts with `|`.
 * @returns {string[]} Cell contents, trimmed.
 */
function parsePipeRow(trimmedLine) {
    let inner = trimmedLine;
    if (inner.startsWith("|")) {
        inner = inner.slice(1);
    }
    inner = inner.replace(/\|\s*$/u, "");
    return inner.split("|").map((cell) => cell.trim());
}
|
|
42
|
+
/**
 * Normalize a claimed-path cell token: trim, drop surrounding backticks,
 * and strip a leading "./" (including ".//" variants).
 *
 * @param {string} raw - Raw table-cell token.
 * @returns {string} Cleaned path token.
 */
function normalizePathToken(raw) {
    let token = raw.trim();
    token = token.replace(/^`/u, "").replace(/`$/u, "");
    return token.replace(/^\.\/+/u, "");
}
|
|
45
|
+
/**
 * Parse the managed "Parallel Execution Plan" block of 05-plan.md into a
 * map of waveId -> (sliceId -> claimedPaths[]).
 *
 * Recognizes `### Wave N` / `### Wave W-N` headings (wave ids are padded
 * to `W-NN`), reads each wave's markdown table, locates the "claimedPaths"
 * column from the header row, and collects comma-separated path tokens for
 * every `S-N` slice row. Returns an empty map when the managed markers are
 * absent or malformed.
 *
 * @param {string} planMarkdown - Full 05-plan.md contents.
 * @returns {Map<string, Map<string, string[]>>} Claims grouped by wave.
 */
function parseManagedWaveClaimedPaths(planMarkdown) {
    const byWave = new Map();
    const startIdx = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_START);
    const endIdx = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_END);
    if (startIdx < 0 || endIdx <= startIdx) {
        return byWave;
    }
    const managedBody = planMarkdown.slice(startIdx + PARALLEL_EXEC_MANAGED_START.length, endIdx);
    let waveId = null;
    let columnIndex = new Map();
    for (const rawLine of managedBody.split(/\r?\n/u)) {
        const line = rawLine.trim();
        const waveHeading = /^###\s+Wave\s+(?:W-)?(\d+)\b/iu.exec(line);
        if (waveHeading) {
            waveId = `W-${waveHeading[1].padStart(2, "0")}`;
            if (!byWave.has(waveId)) {
                byWave.set(waveId, new Map());
            }
            // A new wave resets the table header mapping.
            columnIndex = new Map();
            continue;
        }
        if (waveId === null || !line.startsWith("|")) {
            continue;
        }
        const cells = parsePipeRow(line);
        if (cells.length === 0) {
            continue;
        }
        const firstCell = cells[0].toLowerCase();
        if (firstCell === "sliceid" || firstCell === "slice id") {
            // Header row: map normalized column names to positions.
            columnIndex = new Map();
            cells.forEach((cell, position) => {
                const key = cell.toLowerCase().replace(/[^a-z0-9]/gu, "");
                if (key.length > 0 && !columnIndex.has(key)) {
                    columnIndex.set(key, position);
                }
            });
            continue;
        }
        if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
            // Markdown separator row (---, :---:, ...).
            continue;
        }
        const sliceId = cells[0].trim().toUpperCase();
        if (!/^S-\d+$/u.test(sliceId)) {
            continue;
        }
        const pathsColumn = columnIndex.get("claimedpaths");
        const rawPaths = pathsColumn === undefined ? "" : (cells[pathsColumn] ?? "");
        const claimedPaths = rawPaths.length === 0
            ? []
            : rawPaths
                .split(",")
                .map((token) => normalizePathToken(token))
                .filter((token) => token.length > 0);
        byWave.get(waveId).set(sliceId, claimedPaths);
    }
    return byWave;
}
|
|
100
|
+
/**
 * Find claimed-path collisions among slices ready to dispatch together.
 *
 * Two slices conflict when they claim an identical (exact-match) path.
 * Each collision is reported for both slices as `"<sliceId>:<path>"`.
 *
 * @param {string[]} readySlices - Slice ids ready in the current wave.
 * @param {Map<string, string[]>} bySlice - sliceId -> claimed paths.
 * @returns {string[]} Sorted, deduplicated conflict tokens.
 */
function detectPathConflicts(readySlices, bySlice) {
    const conflicts = new Set();
    const ordered = [...readySlices].sort();
    ordered.forEach((leftSlice, leftIdx) => {
        const leftPaths = bySlice.get(leftSlice) ?? [];
        if (leftPaths.length === 0) {
            return;
        }
        const leftSet = new Set(leftPaths);
        for (const rightSlice of ordered.slice(leftIdx + 1)) {
            for (const claimed of bySlice.get(rightSlice) ?? []) {
                if (leftSet.has(claimed)) {
                    conflicts.add(`${leftSlice}:${claimed}`);
                    conflicts.add(`${rightSlice}:${claimed}`);
                }
            }
        }
    });
    return [...conflicts].sort();
}
|
|
36
124
|
const TERMINAL_PHASES = new Set([
|
|
37
125
|
"refactor",
|
|
38
126
|
"refactor-deferred",
|
|
@@ -202,11 +290,20 @@ export async function runWaveStatus(projectRoot, options = {}) {
|
|
|
202
290
|
}
|
|
203
291
|
else {
|
|
204
292
|
const readyToDispatch = [...firstOpenWave.readyMembers].sort();
|
|
293
|
+
const claimedPathsByWave = parseManagedWaveClaimedPaths(planRaw);
|
|
294
|
+
const conflicts = detectPathConflicts(readyToDispatch, claimedPathsByWave.get(firstOpenWave.waveId) ?? new Map());
|
|
295
|
+
const mode = conflicts.length > 0
|
|
296
|
+
? "blocked"
|
|
297
|
+
: readyToDispatch.length > 1
|
|
298
|
+
? "wave-fanout"
|
|
299
|
+
: readyToDispatch.length === 1
|
|
300
|
+
? "single-slice"
|
|
301
|
+
: "none";
|
|
205
302
|
nextDispatch = {
|
|
206
303
|
waveId: firstOpenWave.waveId,
|
|
207
304
|
readyToDispatch,
|
|
208
|
-
pathConflicts:
|
|
209
|
-
mode
|
|
305
|
+
pathConflicts: conflicts,
|
|
306
|
+
mode
|
|
210
307
|
};
|
|
211
308
|
}
|
|
212
309
|
return {
|
|
@@ -1,13 +1,86 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
1
2
|
import path from "node:path";
|
|
3
|
+
import { promisify } from "node:util";
|
|
4
|
+
import { readConfig, resolveTddCommitMode } from "./config.js";
|
|
5
|
+
import { readDelegationLedger } from "./delegation.js";
|
|
2
6
|
import { exists } from "./fs-utils.js";
|
|
7
|
+
const execFileAsync = promisify(execFile);
|
|
3
8
|
export const TEST_COMMAND_HINT_PATTERN = /\b(?:npm test|npm run test(?::[\w:-]+)?|pnpm test|pnpm [\w:-]*test[\w:-]*|yarn test|yarn [\w:-]*test[\w:-]*|bun test|bun run test(?::[\w:-]+)?|vitest|jest|pytest|go test|cargo test|mvn test|gradle test|\.\/gradlew test|dotnet test)\b/iu;
|
|
4
9
|
export const SHA_WITH_LABEL_PATTERN = /\b(?:sha|commit)(?:\s*[:=]|\s+)\s*[0-9a-f]{7,40}\b/iu;
|
|
5
10
|
export const PASS_STATUS_PATTERN = /\b(?:pass|passed|green|ok)\b/iu;
|
|
6
11
|
export const NO_VCS_ATTESTATION_PATTERN = /\b(?:no[-_ ]?vcs|no git|not a git repo|vcs\s*[:=]\s*none)\b/iu;
|
|
7
12
|
export const NO_VCS_HASH_PATTERN = /\b(?:content|artifact)[-_ ]?hash\s*[:=]\s*(?:sha256:)?[0-9a-f]{16,64}\b|\bsha256\s*[:=]\s*[0-9a-f]{16,64}\b/iu;
|
|
13
|
+
/**
 * Escape regex metacharacters so `value` can be embedded literally inside a
 * dynamically built regular expression.
 *
 * @param {string} value - Literal text.
 * @returns {string} Regex-safe escaped text.
 */
function escapeRegex(value) {
    return value.replace(/[.*+?^${}()|[\]\\]/gu, (match) => `\\${match}`);
}
|
|
16
|
+
/**
 * Decide whether a span's ledger rows demonstrate REFACTOR coverage.
 *
 * Coverage exists when a completed `refactor`/`refactor-deferred` phase row
 * is present, or when a completed GREEN row folds in a `refactorOutcome`:
 * mode `inline` counts immediately; mode `deferred` needs a non-empty
 * rationale string or at least one non-empty evidence reference.
 *
 * @param {Array<object>} entries - Ledger rows for one span.
 * @returns {boolean} True when refactor coverage is demonstrated.
 */
function hasRefactorCoverage(entries) {
    const completedPhases = new Set();
    for (const entry of entries) {
        if (entry.status === "completed" && typeof entry.phase === "string") {
            completedPhases.add(entry.phase);
        }
    }
    if (completedPhases.has("refactor") || completedPhases.has("refactor-deferred")) {
        return true;
    }
    const folded = entries.find((entry) => entry.status === "completed" &&
        entry.phase === "green" &&
        entry.refactorOutcome &&
        (entry.refactorOutcome.mode === "inline" || entry.refactorOutcome.mode === "deferred"));
    const outcome = folded?.refactorOutcome;
    if (!outcome) {
        return false;
    }
    if (outcome.mode === "inline") {
        return true;
    }
    // Deferred refactor must be justified: rationale text or evidence refs.
    if (typeof outcome.rationale === "string" && outcome.rationale.trim().length > 0) {
        return true;
    }
    if (!Array.isArray(folded.evidenceRefs)) {
        return false;
    }
    return folded.evidenceRefs.some((ref) => typeof ref === "string" && ref.trim().length > 0);
}
|
|
38
|
+
/**
 * Determine which slices of a run are fully closed in the ledger.
 *
 * Groups completed tdd-stage rows by sliceId then spanId; a slice counts as
 * closed when at least one of its spans shows completed `red`, `green`, and
 * `doc` phases plus refactor coverage (see hasRefactorCoverage).
 *
 * @param {Array<object>} entries - Full delegation ledger rows.
 * @param {string} runId - Run to inspect.
 * @returns {string[]} Sorted closed slice ids.
 */
function collectClosedSlices(entries, runId) {
    const rowsBySliceSpan = new Map();
    for (const entry of entries) {
        const relevant = entry.runId === runId &&
            entry.stage === "tdd" &&
            entry.status === "completed" &&
            typeof entry.sliceId === "string" && entry.sliceId.length > 0 &&
            typeof entry.spanId === "string" && entry.spanId.length > 0;
        if (!relevant) {
            continue;
        }
        const spans = rowsBySliceSpan.get(entry.sliceId) ?? new Map();
        const rows = spans.get(entry.spanId) ?? [];
        rows.push(entry);
        spans.set(entry.spanId, rows);
        rowsBySliceSpan.set(entry.sliceId, spans);
    }
    const closed = new Set();
    for (const [sliceId, spans] of rowsBySliceSpan.entries()) {
        for (const rows of spans.values()) {
            const phases = new Set();
            for (const row of rows) {
                if (row.status === "completed" && typeof row.phase === "string") {
                    phases.add(row.phase);
                }
            }
            const coreCycleDone = phases.has("red") && phases.has("green") && phases.has("doc");
            if (coreCycleDone && hasRefactorCoverage(rows)) {
                closed.add(sliceId);
                // One qualifying span is enough to close the slice.
                break;
            }
        }
    }
    return [...closed].sort();
}
|
|
74
|
+
/**
 * Check whether a managed per-slice commit already exists in git history.
 *
 * Searches `git log` subjects and bodies for a line beginning with
 * `<sliceId>/` (the slice id is regex-escaped before use in --grep).
 *
 * @param {string} projectRoot - Repository root, used as the git cwd.
 * @param {string} sliceId - Slice identifier to search for.
 * @returns {Promise<boolean>} True when at least one matching commit exists.
 */
async function hasManagedCommitForSlice(projectRoot, sliceId) {
    const pattern = `^${escapeRegex(sliceId)}/`;
    const gitArgs = ["log", "--format=%s%n%b", "--grep", pattern];
    const result = await execFileAsync("git", gitArgs, { cwd: projectRoot });
    const output = result.stdout.trim();
    return output.length > 0;
}
|
|
8
79
|
export async function validateTddVerificationEvidence(projectRoot, evidence, options = {}) {
|
|
9
80
|
const normalized = evidence.trim();
|
|
10
|
-
const
|
|
81
|
+
const config = await readConfig(projectRoot).catch(() => null);
|
|
82
|
+
const commitMode = resolveTddCommitMode(config);
|
|
83
|
+
const mode = commitMode === "off" ? "disabled" : "auto";
|
|
11
84
|
const gitPresent = await exists(path.join(projectRoot, ".git"));
|
|
12
85
|
const issues = [];
|
|
13
86
|
if (options.requireCommand !== false && !TEST_COMMAND_HINT_PATTERN.test(normalized)) {
|
|
@@ -16,16 +89,34 @@ export async function validateTddVerificationEvidence(projectRoot, evidence, opt
|
|
|
16
89
|
if (options.requirePassStatus !== false && !PASS_STATUS_PATTERN.test(normalized)) {
|
|
17
90
|
issues.push("GREEN repair needed: include explicit success status (for example `PASS` or `GREEN`).");
|
|
18
91
|
}
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
92
|
+
if (mode !== "disabled" && commitMode === "managed-per-slice" && gitPresent) {
|
|
93
|
+
const ledger = await readDelegationLedger(projectRoot).catch(() => null);
|
|
94
|
+
if (ledger && typeof ledger.runId === "string" && ledger.runId.length > 0) {
|
|
95
|
+
const closedSlices = collectClosedSlices(ledger.entries, ledger.runId);
|
|
96
|
+
const missing = [];
|
|
97
|
+
for (const sliceId of closedSlices) {
|
|
98
|
+
const hasCommit = await hasManagedCommitForSlice(projectRoot, sliceId).catch(() => false);
|
|
99
|
+
if (!hasCommit) {
|
|
100
|
+
missing.push(sliceId);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
if (missing.length > 0) {
|
|
104
|
+
issues.push(`managed-per-slice commit check failed: missing git commit(s) for closed slice(s): ${missing.join(", ")}.`);
|
|
105
|
+
}
|
|
106
|
+
}
|
|
26
107
|
}
|
|
27
|
-
else if (mode === "auto"
|
|
28
|
-
|
|
108
|
+
else if (mode === "auto") {
|
|
109
|
+
const hasSha = SHA_WITH_LABEL_PATTERN.test(normalized);
|
|
110
|
+
const hasNoVcs = NO_VCS_ATTESTATION_PATTERN.test(normalized);
|
|
111
|
+
if (gitPresent && !hasSha) {
|
|
112
|
+
issues.push("must include a commit SHA token prefixed with `sha` or `commit` (for example `sha: abc1234`).");
|
|
113
|
+
}
|
|
114
|
+
else if (!gitPresent && !hasSha && !hasNoVcs) {
|
|
115
|
+
issues.push("must include either a commit SHA or an explicit no-VCS attestation (for example `no-vcs: project has no .git directory`).");
|
|
116
|
+
}
|
|
117
|
+
else if (!gitPresent && hasNoVcs && !NO_VCS_HASH_PATTERN.test(normalized)) {
|
|
118
|
+
issues.push("NO_VCS_MODE repair needed: include a content/artifact hash for no-VCS TDD evidence (for example `artifact-hash: sha256:<hash>`).");
|
|
119
|
+
}
|
|
29
120
|
}
|
|
30
121
|
return { ok: issues.length === 0, issues, mode, gitPresent };
|
|
31
122
|
}
|
package/dist/types.d.ts
CHANGED
|
@@ -161,10 +161,22 @@ export interface ReviewLoopConfig {
|
|
|
161
161
|
externalSecondOpinion?: ReviewLoopExternalSecondOpinionConfig;
|
|
162
162
|
}
|
|
163
163
|
export type VcsMode = "git-with-remote" | "git-local-only" | "none";
|
|
164
|
+
/** Commit ownership model identifiers for closed TDD slices; see TddConfig.commitMode. */
export type TddCommitMode = "managed-per-slice" | "agent-required" | "checkpoint-only" | "off";
/** TDD-related configuration; all fields optional so existing configs stay valid. */
export interface TddConfig {
    /**
     * Commit ownership model for closed TDD slices.
     * - managed-per-slice: cclaw-generated hook performs one commit per closed slice.
     * - agent-required: worker/controller must create the commit outside cclaw.
     * - checkpoint-only: coarse-grained checkpoints are allowed (no per-slice enforcement).
     * - off: skip commit-shape enforcement.
     */
    commitMode?: TddCommitMode;
}
|
|
164
175
|
/** Top-level cclaw project configuration shape. */
export interface CclawConfig {
    version: string;
    flowVersion: string;
    harnesses: HarnessId[];
    // Optional TDD settings (commit-mode enforcement); absent means defaults apply.
    tdd?: TddConfig;
}
|
|
169
181
|
export interface TransitionRule {
|
|
170
182
|
from: FlowStage;
|