cclaw-cli 6.9.0 → 6.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,7 +14,9 @@ import { parseAdvanceStageArgs, parseCancelRunArgs, parseHookArgs, parseRewindAr
14
14
  import { parseFlowStateRepairArgs, runFlowStateRepair } from "./flow-state-repair.js";
15
15
  import { parseWaiverGrantArgs, runWaiverGrant } from "./waiver-grant.js";
16
16
  import { FlowStateGuardMismatchError, verifyFlowStateGuard } from "../run-persistence.js";
17
- import { DelegationTimestampError, DispatchDuplicateError } from "../delegation.js";
17
+ import { DelegationTimestampError, DispatchCapError, DispatchDuplicateError, DispatchOverlapError } from "../delegation.js";
18
+ import { parseTddSliceRecordArgs, runTddSliceRecord } from "../tdd-slices.js";
19
+ import { parsePlanSplitWavesArgs, runPlanSplitWaves } from "./plan-split-waves.js";
18
20
  /**
19
21
  * Subcommands that mutate or consult flow-state.json via the CLI runtime.
20
22
  * They all require the sha256 sidecar to match before continuing so a
@@ -32,7 +34,7 @@ const GUARD_ENFORCED_SUBCOMMANDS = new Set([
32
34
  export async function runInternalCommand(projectRoot, argv, io) {
33
35
  const [subcommand, ...tokens] = argv;
34
36
  if (!subcommand) {
35
- io.stderr.write("cclaw internal requires a subcommand: advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant\n");
37
+ io.stderr.write("cclaw internal requires a subcommand: advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | tdd-slice-record | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant | plan-split-waves\n");
36
38
  return 1;
37
39
  }
38
40
  try {
@@ -84,7 +86,13 @@ export async function runInternalCommand(projectRoot, argv, io) {
84
86
  if (subcommand === "waiver-grant") {
85
87
  return await runWaiverGrant(projectRoot, parseWaiverGrantArgs(tokens), io);
86
88
  }
87
- io.stderr.write(`Unknown internal subcommand: ${subcommand}. Expected advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant\n`);
89
+ if (subcommand === "tdd-slice-record") {
90
+ return await runTddSliceRecord(projectRoot, parseTddSliceRecordArgs(tokens), io);
91
+ }
92
+ if (subcommand === "plan-split-waves") {
93
+ return await runPlanSplitWaves(projectRoot, parsePlanSplitWavesArgs(tokens), io);
94
+ }
95
+ io.stderr.write(`Unknown internal subcommand: ${subcommand}. Expected advance-stage | start-flow | cancel-run | rewind | verify-flow-state-diff | verify-current-state | envelope-validate | tdd-red-evidence | tdd-loop-status | tdd-slice-record | early-loop-status | compound-readiness | runtime-integrity | hook | flow-state-repair | waiver-grant | plan-split-waves\n`);
88
96
  return 1;
89
97
  }
90
98
  catch (err) {
@@ -100,6 +108,14 @@ export async function runInternalCommand(projectRoot, argv, io) {
100
108
  io.stderr.write(`error: dispatch_duplicate — ${err.message}\n`);
101
109
  return 2;
102
110
  }
111
+ if (err instanceof DispatchOverlapError) {
112
+ io.stderr.write(`error: dispatch_overlap — ${err.message}\n`);
113
+ return 2;
114
+ }
115
+ if (err instanceof DispatchCapError) {
116
+ io.stderr.write(`error: dispatch_cap — ${err.message}\n`);
117
+ return 2;
118
+ }
103
119
  io.stderr.write(`cclaw internal ${subcommand} failed: ${err instanceof Error ? err.message : String(err)}\n`);
104
120
  return 1;
105
121
  }
@@ -0,0 +1,66 @@
1
import type { Writable } from "node:stream";
/** Minimal stdout/stderr pair injected by the internal-command runtime. */
interface InternalIo {
    stdout: Writable;
    stderr: Writable;
}
/**
 * v6.10.0 (P3) — split a large `05-plan.md` Implementation Units section
 * into wave-NN.md sub-files so an executor can carry one wave at a time
 * without re-reading the whole plan.
 *
 * Threshold contract:
 * - total units < SMALL_PLAN_THRESHOLD → no-op, exit 0.
 * - total units >= SMALL_PLAN_THRESHOLD → split into waves of `--wave-size`
 *   (default 25).
 *
 * Files written:
 * - `<artifacts-dir>/wave-plans/wave-NN.md` per wave (1-indexed).
 * - In-place update to `05-plan.md` adding (or refreshing) a
 *   `## Wave Plans` section between
 *   `<!-- wave-split-managed-start -->` and `<!-- wave-split-managed-end -->`
 *   markers. Outside-marker content is preserved verbatim.
 *
 * `--dry-run` prints the plan but does not write. `--force` overwrites
 * existing wave files; without it, the command refuses to clobber.
 */
export interface PlanSplitWavesArgs {
    /** Units per wave file; defaults to PLAN_SPLIT_DEFAULT_WAVE_SIZE. */
    waveSize: number;
    /** Report the planned split without writing any files. */
    dryRun: boolean;
    /** Overwrite pre-existing wave files instead of refusing. */
    force: boolean;
    /** Emit a single-line JSON outcome instead of human-readable text. */
    json: boolean;
}
export declare const PLAN_SPLIT_DEFAULT_WAVE_SIZE = 25;
export declare const PLAN_SPLIT_SMALL_PLAN_THRESHOLD = 50;
export interface ParsedImplementationUnit {
    /** Unit identifier as written in the heading, e.g. `U-12`. */
    id: string;
    /**
     * The full markdown body of this unit, starting at the
     * `### Implementation Unit U-N` heading and ending right before the
     * next unit heading (or the next `## ` H2, or end of file).
     */
    body: string;
    /** Repo-relative path declarations from the optional `Files:` line. */
    paths: string[];
}
/**
 * Parse `## Implementation Units` section into individual unit blocks.
 * Recognizes the canonical heading shape in the TDD-velocity plan template
 * (`### Implementation Unit U-<n>`). Tolerant of `Files:` listed either
 * inline or as a `- **Files (...):**` bullet block.
 */
export declare function parseImplementationUnits(planMarkdown: string): ParsedImplementationUnit[];
/**
 * Pull repo-relative paths from a `Files:` line or the `Files (...)` bullet
 * block. Both shapes appear in the wild; the parser extracts after the colon
 * and splits on commas. Empty/whitespace items are dropped.
 */
export declare function extractPathsLine(unitBody: string): string[];
/** Parse CLI flags; throws on unknown flags or an invalid `--wave-size`. */
export declare function parsePlanSplitWavesArgs(tokens: string[]): PlanSplitWavesArgs;
/**
 * Replace any existing managed Wave Plans block with the new one, or append
 * it at the end of the file when no markers are present yet. The helper
 * never touches text outside the markers.
 */
export declare function upsertWavePlansSection(planMarkdown: string, managedBlock: string): string;
/** Command entry point; resolves the plan artifact and returns an exit code. */
export declare function runPlanSplitWaves(projectRoot: string, args: PlanSplitWavesArgs, io: InternalIo): Promise<number>;
export {};
@@ -0,0 +1,249 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { resolveArtifactPath } from "../artifact-paths.js";
4
+ import { exists, writeFileSafe } from "../fs-utils.js";
5
+ import { readFlowState } from "../runs.js";
6
// Default number of implementation units per wave file (`--wave-size` default).
export const PLAN_SPLIT_DEFAULT_WAVE_SIZE = 25;
// Plans with fewer total units than this are left untouched (no-op, exit 0).
export const PLAN_SPLIT_SMALL_PLAN_THRESHOLD = 50;
// Sub-directory, created next to the plan artifact, that receives wave-NN.md.
const WAVE_PLANS_DIR = "wave-plans";
// HTML-comment markers bounding the managed `## Wave Plans` block inside the
// plan; upsertWavePlansSection never touches text outside this pair.
const WAVE_MANAGED_START = "<!-- wave-split-managed-start -->";
const WAVE_MANAGED_END = "<!-- wave-split-managed-end -->";
11
/**
 * Parse the `## Implementation Units` section into individual unit blocks.
 * Recognizes the canonical heading shape in the TDD-velocity plan template
 * (`### Implementation Unit U-<n>`). Each returned block spans from its own
 * heading up to (but not including) the next unit heading, the next `## ` H2,
 * or the end of input — whichever comes first.
 */
export function parseImplementationUnits(planMarkdown) {
    const headingRegex = /(^|\n)###\s+Implementation Unit\s+(U-\d+)\b/gu;
    const headings = [];
    for (const hit of planMarkdown.matchAll(headingRegex)) {
        // Group 1 is "" at the start of input and "\n" elsewhere; adding its
        // length puts `start` exactly on the `###`.
        headings.push({
            id: hit[2],
            start: hit.index + hit[1].length,
            headingEnd: hit.index + hit[0].length
        });
    }
    return headings.map((heading, idx) => {
        const follower = headings[idx + 1];
        let endIndex = follower ? follower.start : planMarkdown.length;
        // If an H2 (`## ...`) shows up before the next unit, the section
        // ended early — cut the body there, keeping the newline before the H2.
        const between = planMarkdown.slice(heading.headingEnd, endIndex);
        const h2Break = /\n##\s+\S/u.exec(between);
        if (h2Break !== null) {
            endIndex = heading.headingEnd + h2Break.index + 1;
        }
        const body = planMarkdown.slice(heading.start, endIndex).replace(/\s+$/u, "");
        return { id: heading.id, body, paths: extractPathsLine(body) };
    });
}
49
/**
 * Pull repo-relative paths out of a unit body's `Files:` line or its
 * `- **Files (...):**` bullet variant. Only the first matching line with a
 * non-empty payload is used; the payload is comma-split, stripped of
 * backticks/asterisks, and blank items are dropped.
 */
export function extractPathsLine(unitBody) {
    const filesLine = /^[-*]?\s*\*?\*?Files\s*(?:\([^)]*\))?\s*:\*?\*?\s*(.*)$/iu;
    for (const candidate of unitBody.split(/\r?\n/u)) {
        const hit = filesLine.exec(candidate.trim());
        if (hit === null) {
            continue;
        }
        const payload = hit[1].trim();
        if (payload === "") {
            // "Files:" with nothing after the colon — keep scanning.
            continue;
        }
        const paths = [];
        for (const piece of payload.split(",")) {
            const cleaned = piece.replace(/[`*]/gu, "").trim();
            if (cleaned !== "") {
                paths.push(cleaned);
            }
        }
        return paths;
    }
    return [];
}
71
/**
 * Parse the CLI flag list for `plan-split-waves`.
 *
 * Supported flags: `--dry-run`, `--force`, `--json`, and `--wave-size`
 * (either `--wave-size N` or `--wave-size=N`). Repeated flags are allowed;
 * the last `--wave-size` wins.
 *
 * Throws on an unknown flag, a missing `--wave-size` value, or a value that
 * is not a positive integer.
 */
export function parsePlanSplitWavesArgs(tokens) {
    const parsed = {
        waveSize: PLAN_SPLIT_DEFAULT_WAVE_SIZE,
        dryRun: false,
        force: false,
        json: false
    };
    let index = 0;
    while (index < tokens.length) {
        const token = tokens[index];
        if (token === "--dry-run") {
            parsed.dryRun = true;
        }
        else if (token === "--force") {
            parsed.force = true;
        }
        else if (token === "--json") {
            parsed.json = true;
        }
        else if (token === "--wave-size" || token.startsWith("--wave-size=")) {
            let raw;
            if (token.startsWith("--wave-size=")) {
                raw = token.slice("--wave-size=".length);
            }
            else {
                // Space-separated form: the value is the next token, which
                // must exist and must not itself look like a flag.
                const value = tokens[index + 1];
                if (value === undefined || value.startsWith("--")) {
                    throw new Error("--wave-size requires an integer value.");
                }
                raw = value;
                index += 1;
            }
            const trimmed = raw.trim();
            if (!/^[0-9]+$/u.test(trimmed)) {
                throw new Error("--wave-size must be a positive integer.");
            }
            const size = Number(trimmed);
            if (size < 1) {
                throw new Error("--wave-size must be >= 1.");
            }
            parsed.waveSize = size;
        }
        else {
            throw new Error(`Unknown flag for internal plan-split-waves: ${token}`);
        }
        index += 1;
    }
    return parsed;
}
117
// Render a 1-based wave number as a two-digit string ("01", "02", … "10").
// Indices of three or more digits pass through unpadded.
function padWaveIndex(index) {
    return String(index).padStart(2, "0");
}
120
// Assemble the markdown body for one wave-NN.md file: a wave heading, a
// provenance line naming the source plan and the covered unit-id range, then
// the trimmed unit bodies separated by blank lines.
function buildWaveFileBody(waveIndex, units, sourceLabel) {
    const firstId = units[0].id;
    const lastId = units[units.length - 1].id;
    const heading = `# Wave ${padWaveIndex(waveIndex)}`;
    const provenance = `Source: ${sourceLabel} units ${firstId}..${lastId}`;
    const joinedBodies = units.map((unit) => unit.body.trim()).join("\n\n");
    return `${heading}\n\n${provenance}\n\n## Implementation Units\n\n${joinedBodies}\n`;
}
133
// Build the managed `## Wave Plans` index block (markers included) that
// upsertWavePlansSection splices into the plan: one bullet per wave file,
// in wave order.
function buildWavePlansSection(waveFiles) {
    const bullets = waveFiles.map((file, idx) => `- Wave ${padWaveIndex(idx + 1)}: \`${file}\``);
    return [
        WAVE_MANAGED_START,
        "## Wave Plans",
        "",
        ...bullets,
        "",
        WAVE_MANAGED_END
    ].join("\n");
}
145
/**
 * Splice `managedBlock` into the plan markdown. When a well-ordered pair of
 * managed markers already exists, the text between (and including) them is
 * replaced; otherwise the block is appended after the existing content. Text
 * outside the markers is never modified, and the result always ends with a
 * newline.
 */
export function upsertWavePlansSection(planMarkdown, managedBlock) {
    const openAt = planMarkdown.indexOf(WAVE_MANAGED_START);
    const closeAt = planMarkdown.indexOf(WAVE_MANAGED_END);
    const hasManagedBlock = openAt >= 0 && closeAt > openAt;
    if (!hasManagedBlock) {
        // No markers yet (or an unordered stray pair): append a fresh block
        // after the trailing-whitespace-trimmed content.
        return `${planMarkdown.replace(/\s+$/u, "")}\n\n${managedBlock}\n`;
    }
    const prefix = planMarkdown.slice(0, openAt);
    const suffix = planMarkdown.slice(closeAt + WAVE_MANAGED_END.length);
    const result = `${prefix}${managedBlock}${suffix}`;
    // Keep the file newline-terminated.
    return result.endsWith("\n") ? result : `${result}\n`;
}
162
/**
 * CLI entry point for `cclaw internal plan-split-waves`.
 *
 * Resolves the active plan artifact, parses its Implementation Units, and —
 * when the plan has at least PLAN_SPLIT_SMALL_PLAN_THRESHOLD units — writes
 * one wave-NN.md file per chunk of `args.waveSize` units, then refreshes the
 * managed `## Wave Plans` index section inside the plan itself.
 *
 * Returns a process exit code: 0 on success (including the small-plan
 * no-op), 1 when the plan artifact is missing or an existing wave file
 * blocks a run without `--force`.
 */
export async function runPlanSplitWaves(projectRoot, args, io) {
    // Flow state is best-effort context: a read failure just means there is
    // no track hint available for artifact resolution.
    const flow = await readFlowState(projectRoot).catch(() => null);
    const track = flow?.track;
    const planResolved = await resolveArtifactPath("plan", {
        projectRoot,
        track,
        intent: "read"
    });
    if (!(await exists(planResolved.absPath))) {
        io.stderr.write(`cclaw internal plan-split-waves: plan artifact not found at ${planResolved.relPath}.\n`);
        return 1;
    }
    const raw = await fs.readFile(planResolved.absPath, "utf8");
    const units = parseImplementationUnits(raw);
    // Small-plan contract: below the threshold the command is a successful
    // no-op and reports waveCount 0 / planUpdated false.
    if (units.length < PLAN_SPLIT_SMALL_PLAN_THRESHOLD) {
        const outcome = {
            ok: true,
            command: "plan-split-waves",
            totalUnits: units.length,
            waveCount: 0,
            waveSize: args.waveSize,
            smallPlanNoOp: true,
            dryRun: args.dryRun,
            waveFiles: [],
            planUpdated: false
        };
        if (args.json) {
            io.stdout.write(`${JSON.stringify(outcome)}\n`);
        }
        else {
            io.stdout.write(`plan is small (${units.length} unit(s), threshold ${PLAN_SPLIT_SMALL_PLAN_THRESHOLD}); no wave split needed.\n`);
        }
        return 0;
    }
    // Chunk units into waves of waveSize; the final wave may be shorter.
    const waves = [];
    for (let i = 0; i < units.length; i += args.waveSize) {
        waves.push(units.slice(i, i + args.waveSize));
    }
    const artifactsDir = path.dirname(planResolved.absPath);
    const wavePlansAbsDir = path.join(artifactsDir, WAVE_PLANS_DIR);
    const waveFileNames = waves.map((_, idx) => `${WAVE_PLANS_DIR}/wave-${padWaveIndex(idx + 1)}.md`);
    // Clobber guard: without --force, refuse if any target wave file already
    // exists. (Skipped on --dry-run, since nothing will be written anyway.)
    if (!args.dryRun && !args.force) {
        for (const fileName of waveFileNames) {
            const abs = path.join(artifactsDir, fileName);
            if (await exists(abs)) {
                io.stderr.write(`cclaw internal plan-split-waves: wave file already exists: ${path.relative(projectRoot, abs)}. Pass --force to overwrite.\n`);
                return 1;
            }
        }
    }
    if (!args.dryRun) {
        await fs.mkdir(wavePlansAbsDir, { recursive: true });
        for (let i = 0; i < waves.length; i += 1) {
            const fileName = waveFileNames[i];
            const body = buildWaveFileBody(i + 1, waves[i], planResolved.fileName);
            await writeFileSafe(path.join(artifactsDir, fileName), body);
        }
        // Refresh (or insert) the managed index inside the plan; the write is
        // skipped when the section is already byte-identical.
        const managed = buildWavePlansSection(waveFileNames);
        const updatedPlan = upsertWavePlansSection(raw, managed);
        if (updatedPlan !== raw) {
            await writeFileSafe(planResolved.absPath, updatedPlan);
        }
    }
    // NOTE(review): planUpdated reports !dryRun even when the managed section
    // was already current and no plan write actually occurred — confirm
    // consumers tolerate that approximation.
    const outcome = {
        ok: true,
        command: "plan-split-waves",
        totalUnits: units.length,
        waveCount: waves.length,
        waveSize: args.waveSize,
        smallPlanNoOp: false,
        dryRun: args.dryRun,
        waveFiles: waveFileNames,
        planUpdated: !args.dryRun
    };
    if (args.json) {
        io.stdout.write(`${JSON.stringify(outcome)}\n`);
    }
    else if (args.dryRun) {
        io.stdout.write(`dry run: would split ${units.length} unit(s) into ${waves.length} wave file(s) of size ${args.waveSize}:\n`);
        for (const fileName of waveFileNames) {
            io.stdout.write(` - ${fileName}\n`);
        }
    }
    else {
        io.stdout.write(`wrote ${waves.length} wave file(s) under ${path.relative(projectRoot, wavePlansAbsDir)} and refreshed Wave Plans section in ${planResolved.relPath}.\n`);
    }
    return 0;
}
@@ -0,0 +1,90 @@
1
/**
 * v6.10.0 — sidecar ledger that replaces the per-slice markdown tables in
 * `06-tdd.md` (Watched-RED Proof, Vertical Slice Cycle, RED/GREEN Evidence).
 *
 * The file lives next to the TDD artifact (`<artifacts-dir>/06-tdd-slices.jsonl`)
 * and is append-only — every CLI call writes a new row, and consumers fold
 * rows by `sliceId` taking the latest entry (by file order). Markdown tables
 * remain a legacy fallback when this sidecar is absent or empty.
 */
export interface TddSliceLedgerEntry {
    runId: string;
    sliceId: string;
    status: "red" | "green" | "refactor-deferred" | "refactor-done";
    testFile: string;
    testCommand: string;
    // RED-phase evidence (optional until that phase is recorded).
    redObservedAt?: string;
    redOutputRef?: string;
    // GREEN-phase evidence.
    greenAt?: string;
    greenOutputRef?: string;
    // REFACTOR-phase evidence.
    refactorAt?: string;
    refactorRationale?: string;
    claimedPaths: string[];
    acceptanceCriterionId?: string;
    planUnitId?: string;
    schemaVersion: 1;
}
export declare const TDD_SLICE_LEDGER_FILENAME = "06-tdd-slices.jsonl";
export declare const TDD_SLICE_LEDGER_SCHEMA_VERSION: 1;
export declare const TDD_SLICE_STATUSES: readonly ["red", "green", "refactor-deferred", "refactor-done"];
export type TddSliceStatus = (typeof TDD_SLICE_STATUSES)[number];
/**
 * Resolve `<artifacts-dir>/06-tdd-slices.jsonl`. Mirrors the convention used
 * by the rest of the runtime (see `artifact-paths.ts::searchRoots`): the
 * sidecar always lives under `.cclaw/artifacts/` regardless of the active
 * topic slug for the TDD artifact.
 */
export declare function tddSliceLedgerPath(projectRoot: string): string;
/** Runtime type guard applied when deserializing ledger rows. */
export declare function isTddSliceLedgerEntry(value: unknown): value is TddSliceLedgerEntry;
/** Read all rows; unparseable lines are reported via `corruptLines`. */
export declare function readTddSliceLedger(projectRoot: string): Promise<{
    entries: TddSliceLedgerEntry[];
    corruptLines: number[];
}>;
/**
 * Latest-row-wins fold by `sliceId`. Returns one entry per slice, ordered by
 * the index of its latest row. Mirrors the pattern used by
 * `computeActiveSubagents` for the delegation ledger.
 */
export declare function foldTddSliceLedger(entries: TddSliceLedgerEntry[]): TddSliceLedgerEntry[];
/**
 * Atomic append under a directory lock — reuses the same `withDirectoryLock`
 * primitive that `appendDelegation` uses so concurrent CLI invocations don't
 * tear a half-written JSON line.
 */
export declare function appendSliceEntry(projectRoot: string, entry: TddSliceLedgerEntry): Promise<void>;
/** Parsed flag set for `cclaw internal tdd-slice-record`. */
export interface TddSliceRecordArgs {
    sliceId: string;
    status: TddSliceStatus;
    testFile?: string;
    testCommand?: string;
    claimedPaths?: string[];
    redOutputRef?: string;
    greenOutputRef?: string;
    redObservedAt?: string;
    greenAt?: string;
    refactorAt?: string;
    refactorRationale?: string;
    acceptanceCriterionId?: string;
    planUnitId?: string;
    json: boolean;
}
export declare function parseTddSliceRecordArgs(tokens: string[]): TddSliceRecordArgs;
// NOTE(review): structural `write()` shape rather than stream.Writable —
// presumably so tests can pass plain-object fakes; confirm against callers.
interface TddSliceRecordIo {
    stdout: {
        write(chunk: string): boolean;
    };
    stderr: {
        write(chunk: string): boolean;
    };
}
/**
 * Consume parsed CLI flags, fold against the existing sidecar to inherit
 * fields recorded on earlier rows of the same slice, auto-stamp the
 * status-relevant timestamp when not provided, and append the new row.
 *
 * The CLI surface is intentionally lenient: only the very first call for a
 * slice (status=red) needs `--test-file`, `--command`, `--paths`. Subsequent
 * green/refactor calls inherit those values from the latest prior row.
 */
export declare function runTddSliceRecord(projectRoot: string, args: TddSliceRecordArgs, io: TddSliceRecordIo): Promise<number>;
export {};