cclaw-cli 6.13.0 → 6.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/shared.d.ts +15 -0
- package/dist/artifact-linter/tdd.d.ts +53 -10
- package/dist/artifact-linter/tdd.js +315 -92
- package/dist/artifact-linter.js +10 -2
- package/dist/content/hooks.js +119 -3
- package/dist/content/skills.js +15 -12
- package/dist/content/stages/tdd.js +8 -8
- package/dist/content/start-command.js +6 -3
- package/dist/delegation.d.ts +88 -0
- package/dist/delegation.js +171 -3
- package/dist/flow-state.d.ts +45 -0
- package/dist/flow-state.js +18 -0
- package/dist/install.js +115 -2
- package/dist/internal/plan-split-waves.d.ts +46 -0
- package/dist/internal/plan-split-waves.js +225 -6
- package/dist/run-persistence.js +14 -0
- package/package.json +1 -1
|
@@ -10,6 +10,215 @@ const WAVE_MANAGED_START = "<!-- wave-split-managed-start -->";
|
|
|
10
10
|
// Sentinel HTML comments that delimit tool-managed regions inside plan markdown.
const WAVE_MANAGED_END = "<!-- wave-split-managed-end -->";
// Markers wrapping the managed `## Parallel Execution Plan` block; the body
// between them is owned by the tool and parsed by the wave-plan helpers below.
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
13
|
+
/** Thrown when the same slice id appears twice within a single plan source. */
export class WavePlanDuplicateSliceError extends Error {
    /** @param {string} message - Human-readable description of the duplicate. */
    constructor(message) {
        super(message);
        this.name = WavePlanDuplicateSliceError.name;
    }
}
|
|
19
|
+
/** Thrown when two wave-plan sources bind one slice to different waves/units. */
export class WavePlanMergeConflictError extends Error {
    /** @param {string} message - Human-readable description of the conflict. */
    constructor(message) {
        super(message);
        this.name = WavePlanMergeConflictError.name;
    }
}
|
|
25
|
+
/**
 * Raw body between the parallel-execution managed markers (markers excluded).
 *
 * @param {string} planMarkdown - Full plan markdown document.
 * @returns {string|null} Trimmed text between the markers, or null when the
 *   start marker is missing or the end marker does not follow it.
 */
export function extractParallelExecutionManagedBody(planMarkdown) {
    const start = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_START);
    if (start < 0)
        return null;
    const end = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_END);
    // End marker must exist strictly after the start marker.
    if (end <= start)
        return null;
    return planMarkdown
        .slice(start + PARALLEL_EXEC_MANAGED_START.length, end)
        .trim();
}
|
|
35
|
+
/**
 * Normalize a members token (`U-7` or `S-7`, optionally wrapped in backticks,
 * quotes, brackets, or parens) into its paired unit/slice ids.
 *
 * @param {string} token - One comma-separated entry from a Members line.
 * @returns {{unitId: string, sliceId: string}|null} Paired ids sharing the
 *   same number, or null for anything that is not a bare U-N / S-N token.
 */
function tokenToSliceAndUnit(token) {
    // Strip decorative wrappers before matching.
    const bare = token.trim().replace(/^[`"'[\]()]+|[`"'[\]()]+$/gu, "");
    // U-N and S-N map to the same pair, so one alternation covers both.
    const match = /^[US]-(\d+)$/u.exec(bare);
    if (!match)
        return null;
    const n = match[1];
    return { unitId: `U-${n}`, sliceId: `S-${n}` };
}
|
|
49
|
+
/**
 * Extract the CSV payload after `Members:` on a single trimmed line.
 * Accepts markdown bold (`**Members:**`, colon inside the closing `**`) and
 * plain `Members:`, each optionally prefixed by a `-`/`*` list bullet.
 *
 * @param {string} trimmedLine - One already-trimmed line of markdown.
 * @returns {string|null} Trimmed text after the label, or null when the line
 *   is not a Members line.
 */
export function extractMembersListFromLine(trimmedLine) {
    // Bold form is tried first; order matches the original precedence.
    const patterns = [
        /^[-*]?\s*\*\*Members:\*\*\s*(.+)$/iu,
        /^[-*]?\s*Members\s*:\s*(.+)$/iu
    ];
    for (const pattern of patterns) {
        const match = pattern.exec(trimmedLine);
        if (match)
            return match[1].trim();
    }
    return null;
}
|
|
63
|
+
/**
 * Parse the managed `## Parallel Execution Plan` block for `### Wave N`
 * headings and their `Members:` lines. Malformed member tokens are skipped;
 * a slice id appearing twice anywhere in the block throws
 * {@link WavePlanDuplicateSliceError}.
 *
 * @param {string} planMarkdown - Full plan markdown document.
 * @returns {Array<{waveId: string, members: Array<{unitId: string, sliceId: string}>}>}
 *   Waves in document order; waves that end up with no members are dropped.
 */
export function parseParallelExecutionPlanWaves(planMarkdown) {
    const managed = extractParallelExecutionManagedBody(planMarkdown);
    if (!managed)
        return [];
    const waves = [];
    const seenSlices = new Set();
    let openWave = null;
    // Push the wave being built, but only when it collected members.
    const closeWave = () => {
        if (openWave !== null && openWave.members.length > 0)
            waves.push(openWave);
    };
    for (const rawLine of managed.split(/\r?\n/u)) {
        const line = rawLine.trim();
        const heading = /^###\s+Wave\s+(\d+)\s*$/iu.exec(line);
        if (heading) {
            closeWave();
            // Wave ids are zero-padded to two digits (W-01, W-02, ...).
            openWave = { waveId: `W-${heading[1].padStart(2, "0")}`, members: [] };
            continue;
        }
        // Members lines before the first wave heading are ignored.
        if (openWave === null)
            continue;
        const csv = extractMembersListFromLine(line);
        if (csv === null)
            continue;
        for (const token of csv.split(/,/u)) {
            const ids = tokenToSliceAndUnit(token);
            if (!ids)
                continue;
            if (seenSlices.has(ids.sliceId)) {
                throw new WavePlanDuplicateSliceError(`duplicate slice ${ids.sliceId} in Parallel Execution Plan managed block`);
            }
            seenSlices.add(ids.sliceId);
            openWave.members.push(ids);
        }
    }
    closeWave();
    return waves;
}
|
|
110
|
+
/**
 * Parse a single wave-NN.md body. A `Members:` line within the first 120
 * lines wins; otherwise every distinct `S-N` token anywhere in the body is
 * collected (legacy format). Duplicate slices on the Members line throw
 * {@link WavePlanDuplicateSliceError}; legacy-scan duplicates are deduped.
 *
 * @param {string} body - Contents of one wave file.
 * @param {string} waveId - Wave id derived from the file name (e.g. "W-01").
 * @returns {{waveId: string, members: Array<{unitId: string, sliceId: string}>}}
 */
export function parseWavePlanFileBody(body, waveId) {
    const seen = new Set();
    const members = [];
    // Look for an explicit Members line in the file header only.
    let csv = null;
    for (const line of body.split(/\r?\n/u).slice(0, 120)) {
        csv = extractMembersListFromLine(line.trim());
        if (csv !== null)
            break;
    }
    if (csv !== null) {
        for (const token of csv.split(/,/u)) {
            const ids = tokenToSliceAndUnit(token);
            if (!ids)
                continue;
            if (seen.has(ids.sliceId)) {
                throw new WavePlanDuplicateSliceError(`duplicate slice ${ids.sliceId} in ${waveId} wave file`);
            }
            seen.add(ids.sliceId);
            members.push(ids);
        }
    }
    // Legacy fallback: scan the whole body for bare S-N tokens.
    if (members.length === 0) {
        for (const [token] of body.matchAll(/\b(S-\d+)\b/gu)) {
            const ids = tokenToSliceAndUnit(token);
            if (ids && !seen.has(ids.sliceId)) {
                seen.add(ids.sliceId);
                members.push(ids);
            }
        }
    }
    return { waveId, members };
}
|
|
151
|
+
/**
 * Read `wave-NN.md` files under `<artifactsDir>/wave-plans` (sorted by file
 * name) and parse each into a wave definition. Waves with no members are
 * dropped; a missing or unreadable directory yields an empty list.
 *
 * @param {string} artifactsDir - Artifacts root containing `wave-plans/`.
 * @returns {Promise<Array<{waveId: string, members: Array<{unitId: string, sliceId: string}>}>>}
 */
export async function parseWavePlanDirectory(artifactsDir) {
    const dir = path.join(artifactsDir, "wave-plans");
    let names;
    try {
        names = await fs.readdir(dir);
    }
    catch {
        // Best-effort: no directory means no wave files.
        return [];
    }
    const waves = [];
    for (const name of [...names].sort()) {
        const fileMatch = /^wave-(\d+)\.md$/u.exec(name);
        if (!fileMatch)
            continue;
        const body = await fs.readFile(path.join(dir, name), "utf8");
        const wave = parseWavePlanFileBody(body, `W-${fileMatch[1].padStart(2, "0")}`);
        if (wave.members.length > 0)
            waves.push(wave);
    }
    return waves;
}
|
|
174
|
+
/**
 * Merge wave definitions from two sources (managed Parallel Execution Plan
 * first, then wave-NN.md files). A slice bound to different wave ids or unit
 * ids across sources throws {@link WavePlanMergeConflictError}.
 *
 * @param {Array<{waveId: string, members: Array<{unitId: string, sliceId: string}>}>} primary
 * @param {Array<{waveId: string, members: Array<{unitId: string, sliceId: string}>}>} secondary
 * @returns {Array<{waveId: string, members: Array<{unitId: string, sliceId: string}>}>}
 *   Waves sorted by wave id, members sorted by slice id.
 */
export function mergeParallelWaveDefinitions(primary, secondary) {
    const membersByWave = new Map();
    const bindingBySlice = new Map();
    const ingest = (wave) => {
        const memMap = membersByWave.get(wave.waveId) ?? new Map();
        membersByWave.set(wave.waveId, memMap);
        for (const member of wave.members) {
            const bound = bindingBySlice.get(member.sliceId);
            if (!bound) {
                bindingBySlice.set(member.sliceId, { waveId: wave.waveId, unitId: member.unitId });
            }
            else if (bound.waveId !== wave.waveId || bound.unitId !== member.unitId) {
                throw new WavePlanMergeConflictError(`slice ${member.sliceId}: conflicting wave plan sources (wave ${bound.waveId} vs ${wave.waveId}, unit ${bound.unitId} vs ${member.unitId})`);
            }
            // Later sources overwrite the stored member object for a slice.
            memMap.set(member.sliceId, member);
        }
    };
    for (const wave of primary)
        ingest(wave);
    for (const wave of secondary)
        ingest(wave);
    // NOTE(review): ordering uses localeCompare, i.e. lexicographic — S-10
    // sorts before S-2. Padded wave ids keep wave order numeric up to W-99.
    return [...membersByWave.entries()]
        .sort(([a], [b]) => a.localeCompare(b))
        .map(([waveId, memMap]) => ({
            waveId,
            members: [...memMap.values()].sort((x, y) => x.sliceId.localeCompare(y.sliceId))
        }));
}
|
|
212
|
+
/**
 * One-line operator hint shown after sync, naming the first wave that has
 * two or more parallel members.
 *
 * @param {Array<{waveId: string, members: Array<{sliceId: string}>}>} merged
 * @returns {string|null} Hint text, or null when no multi-member wave exists.
 */
export function formatNextParallelWaveSyncHint(merged) {
    for (const wave of merged) {
        if (wave.members.length < 2)
            continue;
        const ids = wave.members.map((m) => m.sliceId).join(", ");
        return `Parallel Execution Plan: ${wave.waveId} has ${wave.members.length} parallel members (${ids}).`;
    }
    return null;
}
|
|
13
222
|
/**
|
|
14
223
|
* Parse v6.13 parallel-metadata bullets from an implementation unit body.
|
|
15
224
|
* Missing keys use conservative defaults (`dependsOn: []`, `parallelizable: true`
|
|
@@ -18,12 +227,17 @@ const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
|
18
227
|
export function parseImplementationUnitParallelFields(unit, options) {
|
|
19
228
|
const text = unit.body;
|
|
20
229
|
const pick = (label) => {
|
|
21
|
-
const
|
|
230
|
+
const esc = label.replace(/[.*+?^${}()|[\]\\]/gu, "\\$&");
|
|
231
|
+
const bold = new RegExp(`^[-*]\\s*\\*\\*${esc}:\\*\\*\\s*(.*)$`, "imu");
|
|
232
|
+
const legacy = new RegExp(`^[-*]\\s*\\*{0,2}${esc}\\*{0,2}\\s*:\\s*(.*)$`, "imu");
|
|
22
233
|
for (const rawLine of text.split(/\r?\n/u)) {
|
|
23
234
|
const line = rawLine.trim();
|
|
24
|
-
const
|
|
25
|
-
if (
|
|
26
|
-
return
|
|
235
|
+
const mb = bold.exec(line);
|
|
236
|
+
if (mb)
|
|
237
|
+
return mb[1]?.trim();
|
|
238
|
+
const ml = legacy.exec(line);
|
|
239
|
+
if (ml)
|
|
240
|
+
return ml[1]?.trim();
|
|
27
241
|
}
|
|
28
242
|
return undefined;
|
|
29
243
|
};
|
|
@@ -53,8 +267,13 @@ export function parseImplementationUnitParallelFields(unit, options) {
|
|
|
53
267
|
return { unitId: id, dependsOn, claimedPaths, parallelizable, riskTier, lane };
|
|
54
268
|
}
|
|
55
269
|
/**
 * True when any line of the unit body is a v6.13-style parallel-metadata
 * bullet for `label`, in either bold (`- **Label:**`) or legacy
 * (`- Label:` with optional asterisks) form.
 *
 * @param {string} body - Implementation unit markdown body.
 * @param {string} label - Bullet label to look for (treated literally).
 * @returns {boolean}
 */
function unitBodyHasV613ParallelBullet(body, label) {
    // Escape regex metacharacters so the label is matched literally.
    const esc = label.replace(/[.*+?^${}()|[\]\\]/gu, "\\$&");
    const patterns = [
        new RegExp(`^[-*]\\s*\\*\\*${esc}:\\*\\*`, "imu"),
        new RegExp(`^[-*]\\s*\\*{0,2}${esc}\\*{0,2}\\s*:`, "imu")
    ];
    for (const raw of body.split(/\r?\n/u)) {
        const line = raw.trim();
        if (patterns.some((p) => p.test(line)))
            return true;
    }
    return false;
}
|
|
59
278
|
/**
|
|
60
279
|
* True when the plan has implementation units but any unit is missing v6.13.0
|
package/dist/run-persistence.js
CHANGED
|
@@ -473,6 +473,8 @@ function coerceFlowState(parsed) {
|
|
|
473
473
|
const completedStageMeta = sanitizeCompletedStageMeta(parsed.completedStageMeta);
|
|
474
474
|
const tddCutoverSliceId = coerceTddCutoverSliceId(parsed.tddCutoverSliceId);
|
|
475
475
|
const worktreeExecutionMode = coerceWorktreeExecutionMode(parsed.worktreeExecutionMode);
|
|
476
|
+
const tddCheckpointMode = coerceTddCheckpointMode(parsed.tddCheckpointMode);
|
|
477
|
+
const integrationOverseerMode = coerceIntegrationOverseerMode(parsed.integrationOverseerMode);
|
|
476
478
|
const legacyContinuation = typeof parsed.legacyContinuation === "boolean" ? parsed.legacyContinuation : undefined;
|
|
477
479
|
const state = {
|
|
478
480
|
schemaVersion: FLOW_STATE_SCHEMA_VERSION,
|
|
@@ -488,6 +490,8 @@ function coerceFlowState(parsed) {
|
|
|
488
490
|
...(completedStageMeta ? { completedStageMeta } : {}),
|
|
489
491
|
...(tddCutoverSliceId ? { tddCutoverSliceId } : {}),
|
|
490
492
|
...(worktreeExecutionMode !== undefined ? { worktreeExecutionMode } : {}),
|
|
493
|
+
...(tddCheckpointMode !== undefined ? { tddCheckpointMode } : {}),
|
|
494
|
+
...(integrationOverseerMode !== undefined ? { integrationOverseerMode } : {}),
|
|
491
495
|
...(legacyContinuation !== undefined ? { legacyContinuation } : {}),
|
|
492
496
|
skippedStages: sanitizeSkippedStages(parsed.skippedStages, track),
|
|
493
497
|
staleStages: sanitizeStaleStages(parsed.staleStages),
|
|
@@ -514,6 +518,16 @@ function coerceWorktreeExecutionMode(value) {
|
|
|
514
518
|
return value;
|
|
515
519
|
return undefined;
|
|
516
520
|
}
|
|
521
|
+
/**
 * Validate a persisted TDD checkpoint mode; unknown values become undefined
 * (persisted flow state may be hand-edited, so only known modes are kept).
 *
 * @param {unknown} value
 * @returns {"per-slice"|"global-red"|undefined}
 */
function coerceTddCheckpointMode(value) {
    return value === "per-slice" || value === "global-red" ? value : undefined;
}
|
|
526
|
+
/**
 * Validate a persisted integration-overseer mode; unknown values become
 * undefined so corrupt or hand-edited state degrades to the default.
 *
 * @param {unknown} value
 * @returns {"conditional"|"always"|undefined}
 */
function coerceIntegrationOverseerMode(value) {
    return value === "conditional" || value === "always" ? value : undefined;
}
|
|
517
531
|
export class CorruptFlowStateError extends Error {
|
|
518
532
|
statePath;
|
|
519
533
|
quarantinedPath;
|