cclaw-cli 6.12.0 → 6.13.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/artifact-linter/plan.js +60 -2
  2. package/dist/artifact-linter/shared.d.ts +9 -0
  3. package/dist/artifact-linter/spec.js +14 -0
  4. package/dist/artifact-linter/tdd.d.ts +19 -6
  5. package/dist/artifact-linter/tdd.js +225 -47
  6. package/dist/artifact-linter.js +10 -1
  7. package/dist/content/hooks.js +88 -1
  8. package/dist/content/skills.js +17 -10
  9. package/dist/content/stages/plan.js +2 -1
  10. package/dist/content/stages/spec.js +2 -2
  11. package/dist/content/stages/tdd.js +7 -6
  12. package/dist/content/start-command.js +6 -3
  13. package/dist/content/templates.js +10 -4
  14. package/dist/delegation.d.ts +82 -3
  15. package/dist/delegation.js +244 -6
  16. package/dist/flow-state.d.ts +20 -0
  17. package/dist/flow-state.js +7 -0
  18. package/dist/gate-evidence.d.ts +5 -0
  19. package/dist/gate-evidence.js +58 -1
  20. package/dist/install.js +90 -2
  21. package/dist/integration-fanin.d.ts +44 -0
  22. package/dist/integration-fanin.js +180 -0
  23. package/dist/internal/advance-stage/advance.js +16 -1
  24. package/dist/internal/advance-stage/start-flow.js +3 -1
  25. package/dist/internal/advance-stage.js +13 -4
  26. package/dist/internal/plan-split-waves.d.ts +85 -1
  27. package/dist/internal/plan-split-waves.js +409 -6
  28. package/dist/internal/set-worktree-mode.d.ts +10 -0
  29. package/dist/internal/set-worktree-mode.js +28 -0
  30. package/dist/managed-resources.js +2 -0
  31. package/dist/run-persistence.js +9 -0
  32. package/dist/worktree-manager.d.ts +50 -0
  33. package/dist/worktree-manager.js +136 -0
  34. package/dist/worktree-types.d.ts +36 -0
  35. package/dist/worktree-types.js +6 -0
  36. package/package.json +1 -1
@@ -3,11 +3,415 @@ import path from "node:path";
3
3
  import { resolveArtifactPath } from "../artifact-paths.js";
4
4
  import { exists, writeFileSafe } from "../fs-utils.js";
5
5
  import { readFlowState } from "../runs.js";
6
- export const PLAN_SPLIT_DEFAULT_WAVE_SIZE = 25;
6
+ export const PLAN_SPLIT_DEFAULT_WAVE_SIZE = 5;
7
7
  export const PLAN_SPLIT_SMALL_PLAN_THRESHOLD = 50;
8
8
  const WAVE_PLANS_DIR = "wave-plans";
9
9
  const WAVE_MANAGED_START = "<!-- wave-split-managed-start -->";
10
10
  const WAVE_MANAGED_END = "<!-- wave-split-managed-end -->";
11
+ const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
12
+ const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
13
/**
 * Raised when the same slice id appears more than once in a single wave-plan
 * source (managed block or wave file).
 */
export class WavePlanDuplicateSliceError extends Error {
    constructor(message) {
        super(message);
        this.name = "WavePlanDuplicateSliceError";
    }
}
19
/**
 * Raised when two wave-plan sources bind the same slice to different waves
 * or unit ids.
 */
export class WavePlanMergeConflictError extends Error {
    constructor(message) {
        super(message);
        this.name = "WavePlanMergeConflictError";
    }
}
25
/**
 * Raw body between the parallel-execution managed markers (markers excluded,
 * surrounding whitespace trimmed). Returns null when either marker is absent
 * or the end marker precedes the start marker.
 */
export function extractParallelExecutionManagedBody(planMarkdown) {
    const start = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_START);
    if (start < 0)
        return null;
    const end = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_END);
    if (end <= start)
        return null;
    const inner = planMarkdown.slice(start + PARALLEL_EXEC_MANAGED_START.length, end);
    return inner.trim();
}
35
/**
 * Normalize one member token into its paired ids. Accepts `U-N` or `S-N`,
 * optionally wrapped in backticks, quotes, brackets, or parens; both spellings
 * map to the same `{ unitId: U-N, sliceId: S-N }` pair. Returns null for
 * anything else.
 */
function tokenToSliceAndUnit(token) {
    const bare = token.trim().replace(/^[`"'[\]()]+|[`"'[\]()]+$/gu, "");
    const match = /^(U|S)-(\d+)$/u.exec(bare);
    if (!match)
        return null;
    const n = match[2];
    return { unitId: `U-${n}`, sliceId: `S-${n}` };
}
49
/**
 * Members list after `Members:` in Parallel Execution Plan / wave-NN headers.
 * Supports markdown bold `**Members:**` (colon inside the closing `**`) and
 * plain `Members:`; an optional leading list bullet is tolerated. Returns the
 * trimmed remainder of the line, or null when the line is not a Members line.
 */
export function extractMembersListFromLine(trimmedLine) {
    const patterns = [
        /^[-*]?\s*\*\*Members:\*\*\s*(.+)$/iu,
        /^[-*]?\s*Members\s*:\s*(.+)$/iu,
    ];
    for (const pattern of patterns) {
        const match = pattern.exec(trimmedLine);
        if (match)
            return match[1].trim();
    }
    return null;
}
63
/**
 * Parse the `## Parallel Execution Plan` managed block for `### Wave N`
 * headings and their `Members:` lines. Malformed member tokens are skipped;
 * member lines before the first wave heading are ignored; waves with no
 * members are dropped. Throws WavePlanDuplicateSliceError when a slice id
 * repeats anywhere in the block.
 */
export function parseParallelExecutionPlanWaves(planMarkdown) {
    const body = extractParallelExecutionManagedBody(planMarkdown);
    if (!body)
        return [];
    const waves = [];
    const seenSlices = new Set();
    let current = null;
    const commit = () => {
        if (current !== null && current.members.length > 0) {
            waves.push(current);
        }
    };
    for (const rawLine of body.split(/\r?\n/u)) {
        const line = rawLine.trim();
        const heading = /^###\s+Wave\s+(\d+)\s*$/iu.exec(line);
        if (heading) {
            commit();
            current = { waveId: `W-${heading[1].padStart(2, "0")}`, members: [] };
            continue;
        }
        if (current === null)
            continue;
        const csv = extractMembersListFromLine(line);
        if (csv === null)
            continue;
        for (const token of csv.split(/,/u)) {
            const ids = tokenToSliceAndUnit(token);
            if (!ids)
                continue;
            if (seenSlices.has(ids.sliceId)) {
                throw new WavePlanDuplicateSliceError(`duplicate slice ${ids.sliceId} in Parallel Execution Plan managed block`);
            }
            seenSlices.add(ids.sliceId);
            current.members.push(ids);
        }
    }
    commit();
    return waves;
}
110
/**
 * Parse a single wave-NN.md body: prefer a `Members:` line within the first
 * 120 lines; otherwise fall back to collecting distinct `S-N` tokens anywhere
 * in the body (legacy format). Throws WavePlanDuplicateSliceError when the
 * Members line repeats a slice id.
 */
export function parseWavePlanFileBody(body, waveId) {
    const members = [];
    const seen = new Set();
    // Look for an explicit Members header near the top of the file.
    let csv = null;
    for (const raw of body.split(/\r?\n/u).slice(0, 120)) {
        csv = extractMembersListFromLine(raw.trim());
        if (csv !== null)
            break;
    }
    if (csv !== null) {
        for (const token of csv.split(/,/u)) {
            const ids = tokenToSliceAndUnit(token);
            if (!ids)
                continue;
            if (seen.has(ids.sliceId)) {
                throw new WavePlanDuplicateSliceError(`duplicate slice ${ids.sliceId} in ${waveId} wave file`);
            }
            seen.add(ids.sliceId);
            members.push(ids);
        }
    }
    if (members.length === 0) {
        // Legacy fallback: dedupe S-N tokens scattered through the body.
        for (const match of body.matchAll(/\b(S-\d+)\b/gu)) {
            const ids = tokenToSliceAndUnit(match[1]);
            if (!ids || seen.has(ids.sliceId))
                continue;
            seen.add(ids.sliceId);
            members.push(ids);
        }
    }
    return { waveId, members };
}
151
/**
 * Read `wave-plans/wave-NN.md` files under the artifacts directory and parse
 * each into a wave definition. Files are processed in sorted name order;
 * files that yield no members are dropped. Returns [] when the directory is
 * missing or unreadable.
 */
export async function parseWavePlanDirectory(artifactsDir) {
    // Use the module-level WAVE_PLANS_DIR constant (instead of a repeated
    // "wave-plans" literal) so the reader stays in sync with the writer side.
    const wavePlansDir = path.join(artifactsDir, WAVE_PLANS_DIR);
    let entries = [];
    try {
        entries = await fs.readdir(wavePlansDir);
    }
    catch {
        // A missing directory is the normal "no wave plans yet" case.
        return [];
    }
    const out = [];
    for (const name of [...entries].sort()) {
        const match = /^wave-(\d+)\.md$/u.exec(name);
        if (!match)
            continue;
        const waveId = `W-${match[1].padStart(2, "0")}`;
        const body = await fs.readFile(path.join(wavePlansDir, name), "utf8");
        const wave = parseWavePlanFileBody(body, waveId);
        if (wave.members.length > 0) {
            out.push(wave);
        }
    }
    return out;
}
174
/**
 * Merge wave definitions: managed Parallel Execution Plan first, then the
 * wave-NN.md files. A slice must map to the same wave id and unit id in both
 * sources, otherwise WavePlanMergeConflictError is thrown. Output waves are
 * sorted by wave id and members by slice id.
 */
export function mergeParallelWaveDefinitions(primary, secondary) {
    const byWave = new Map();
    const sliceBinding = new Map();
    const ingest = (waves) => {
        for (const wave of waves) {
            let members = byWave.get(wave.waveId);
            if (members === undefined) {
                members = new Map();
                byWave.set(wave.waveId, members);
            }
            for (const member of wave.members) {
                const bound = sliceBinding.get(member.sliceId);
                if (bound === undefined) {
                    sliceBinding.set(member.sliceId, { waveId: wave.waveId, unitId: member.unitId });
                }
                else if (bound.waveId !== wave.waveId || bound.unitId !== member.unitId) {
                    throw new WavePlanMergeConflictError(`slice ${member.sliceId}: conflicting wave plan sources (wave ${bound.waveId} vs ${wave.waveId}, unit ${bound.unitId} vs ${member.unitId})`);
                }
                members.set(member.sliceId, member);
            }
        }
    };
    ingest(primary);
    ingest(secondary);
    const waveIds = [...byWave.keys()].sort((a, b) => a.localeCompare(b));
    return waveIds.map((waveId) => {
        const members = [...byWave.get(waveId).values()];
        members.sort((p, q) => p.sliceId.localeCompare(q.sliceId));
        return { waveId, members };
    });
}
212
/**
 * One-line operator hint after sync: reports the first wave with two or more
 * parallel members, or null when every wave is a singleton.
 */
export function formatNextParallelWaveSyncHint(merged) {
    for (const wave of merged) {
        if (wave.members.length < 2)
            continue;
        const ids = wave.members.map((m) => m.sliceId).join(", ");
        return `Parallel Execution Plan: ${wave.waveId} has ${wave.members.length} parallel members (${ids}).`;
    }
    return null;
}
222
/**
 * Parse v6.13 parallel-metadata bullets from an implementation unit body.
 * Missing keys use conservative defaults: `dependsOn: []`, `claimedPaths`
 * falls back to `unit.paths`, `parallelizable: true` unless
 * `legacyParallelDefaultSerial` is set, `riskTier: "standard"`.
 */
export function parseImplementationUnitParallelFields(unit, options) {
    const lines = unit.body.split(/\r?\n/u).map((raw) => raw.trim());
    // Value of the first `- **label:** value` or `- label: value` bullet.
    const pick = (label) => {
        const esc = label.replace(/[.*+?^${}()|[\]\\]/gu, "\\$&");
        const boldRe = new RegExp(`^[-*]\\s*\\*\\*${esc}:\\*\\*\\s*(.*)$`, "imu");
        const legacyRe = new RegExp(`^[-*]\\s*\\*{0,2}${esc}\\*{0,2}\\s*:\\s*(.*)$`, "imu");
        for (const line of lines) {
            const hit = boldRe.exec(line) ?? legacyRe.exec(line);
            if (hit)
                return hit[1]?.trim();
        }
        return undefined;
    };
    const unitId = pick("id") ?? unit.id;
    const dependsOn = (pick("dependsOn") ?? pick("depends on") ?? "")
        .split(/,/u)
        .map((s) => s.trim())
        .filter((s) => s.length > 0 && !/^none$/iu.test(s));
    const pathsRaw = pick("claimedPaths") ?? pick("claimed paths") ?? "";
    const claimedPaths = pathsRaw.length > 0
        ? pathsRaw
            .split(",")
            // Stripping backticks and all whitespace also covers trimming.
            .map((s) => s.replace(/[`\s]/gu, ""))
            .filter((s) => s.length > 0)
        : [...unit.paths];
    const explicitParallel = pick("parallelizable");
    let parallelizable = ["true", "yes", "y"].includes((explicitParallel ?? "true").toLowerCase());
    if (options?.legacyParallelDefaultSerial && explicitParallel === undefined) {
        // Legacy plans carried no flag; treat them as serial when asked.
        parallelizable = false;
    }
    const riskRaw = (pick("riskTier") ?? pick("risk tier") ?? "standard").toLowerCase();
    const riskTier = riskRaw === "low" || riskRaw === "high" ? riskRaw : "standard";
    const laneRaw = pick("lane");
    return {
        unitId,
        dependsOn,
        claimedPaths,
        parallelizable,
        riskTier,
        lane: laneRaw && laneRaw.length > 0 ? laneRaw : undefined,
    };
}
269
/**
 * True when the unit body contains a `- **label:**` or `- label:` bullet for
 * the given label on any line.
 */
function unitBodyHasV613ParallelBullet(body, label) {
    const esc = label.replace(/[.*+?^${}()|[\]\\]/gu, "\\$&");
    const patterns = [
        new RegExp(`^[-*]\\s*\\*\\*${esc}:\\*\\*`, "imu"),
        new RegExp(`^[-*]\\s*\\*{0,2}${esc}\\*{0,2}\\s*:`, "imu"),
    ];
    return body
        .split(/\r?\n/u)
        .some((raw) => patterns.some((p) => p.test(raw.trim())));
}
278
/**
 * True when the plan has implementation units but at least one unit is
 * missing any of the v6.13.0 `dependsOn` / `claimedPaths` / `parallelizable`
 * / `riskTier` bullets. A plan with no units reports false.
 */
export function planArtifactLacksV613ParallelMetadata(planMarkdown) {
    const units = parseImplementationUnits(planMarkdown);
    if (units.length === 0)
        return false;
    const required = ["dependsOn", "claimedPaths", "parallelizable", "riskTier"];
    return units.some((unit) => required.some((label) => !unitBodyHasV613ParallelBullet(unit.body, label)));
}
289
/**
 * Comparator for unit ids: numeric order when both match `U-<n>`, otherwise
 * locale string order.
 */
export function compareCanonicalUnitIds(a, b) {
    const na = /^U-(\d+)$/u.exec(a);
    const nb = /^U-(\d+)$/u.exec(b);
    if (na && nb) {
        return Number(na[1]) - Number(nb[1]);
    }
    return a.localeCompare(b);
}
296
/**
 * Kahn topological sort of unit metadata by `dependsOn`, breaking ties with
 * canonical unit-id order. Dependency ids that don't name a known unit are
 * ignored; duplicate entries in a unit's `dependsOn` count as one edge.
 * Falls back to a plain canonical sort when a cycle prevents a full ordering.
 */
function topoSortPlanUnits(meta) {
    const idSet = new Set(meta.map((m) => m.unitId));
    const incoming = new Map();
    // dep unit id -> metas that wait on it (adjacency, built once — avoids
    // rescanning every unit's dependsOn on each dequeue).
    const dependents = new Map();
    for (const m of meta) {
        // Dedupe: the original counted indegree once per dependsOn entry but
        // released each dependency only once, so a duplicated entry made the
        // unit look like part of a cycle and forced the fallback sort.
        const deps = new Set(m.dependsOn.filter((d) => idSet.has(d)));
        incoming.set(m.unitId, deps.size);
        for (const d of deps) {
            const list = dependents.get(d);
            if (list) {
                list.push(m);
            }
            else {
                dependents.set(d, [m]);
            }
        }
    }
    const queue = meta
        .filter((m) => (incoming.get(m.unitId) ?? 0) === 0)
        .sort((a, b) => compareCanonicalUnitIds(a.unitId, b.unitId));
    const out = [];
    while (queue.length > 0) {
        const m = queue.shift();
        out.push(m);
        for (const other of dependents.get(m.unitId) ?? []) {
            const v = (incoming.get(other.unitId) ?? 0) - 1;
            incoming.set(other.unitId, v);
            if (v === 0) {
                queue.push(other);
                queue.sort((a, b) => compareCanonicalUnitIds(a.unitId, b.unitId));
            }
        }
    }
    // A cycle left some units unemitted: fall back to deterministic order.
    if (out.length !== meta.length) {
        return [...meta].sort((a, b) => compareCanonicalUnitIds(a.unitId, b.unitId));
    }
    return out;
}
331
/**
 * Group implementation units into waves: topological order first, then greedy
 * placement into the earliest wave that (a) has room under `cap`, (b) already
 * has every known dependency satisfied by a strictly earlier wave, and
 * (c) claims no overlapping `claimedPaths`. Units that fit nowhere open a new
 * wave. Dependency ids not present in the unit set are ignored.
 */
export function buildConflictAwareWavesFromUnits(units, cap) {
    // Parse each unit's parallel metadata exactly once; the original version
    // re-parsed the markdown inside the nested placement loops (per candidate
    // wave, per prior wave, per wave member), which is quadratic-to-cubic in
    // regex work for identical results.
    const metaList = units.map((u) => parseImplementationUnitParallelFields(u));
    const unitById = new Map();
    for (let i = 0; i < units.length; i += 1) {
        unitById.set(metaList[i].unitId, units[i]);
    }
    const allMetaIds = new Set(metaList.map((m) => m.unitId));
    const ordered = topoSortPlanUnits(metaList);
    const waves = [];
    // Incremental per-wave bookkeeping: member unit ids and claimed-path union.
    const waveIds = [];
    const wavePaths = [];
    for (const m of ordered) {
        const unit = unitById.get(m.unitId);
        if (!unit)
            continue;
        let placed = false;
        // Unit ids of all waves strictly before the wave under consideration.
        const priorIds = new Set();
        for (let wi = 0; wi < waves.length; wi += 1) {
            const hasRoom = waves[wi].length < cap;
            const depsOk = m.dependsOn.every((d) => priorIds.has(d) || !allMetaIds.has(d));
            const clash = m.claimedPaths.some((p) => wavePaths[wi].has(p));
            if (hasRoom && depsOk && !clash) {
                waves[wi].push(unit);
                waveIds[wi].add(m.unitId);
                for (const p of m.claimedPaths) {
                    wavePaths[wi].add(p);
                }
                placed = true;
                break;
            }
            for (const id of waveIds[wi]) {
                priorIds.add(id);
            }
        }
        if (!placed) {
            waves.push([unit]);
            waveIds.push(new Set([m.unitId]));
            wavePaths.push(new Set(m.claimedPaths));
        }
    }
    return waves;
}
376
/**
 * Render the managed `## Parallel Execution Plan` block for the given waves:
 * a cap note, then one `### Wave NN` per wave listing member unit ids and the
 * sorted union of their claimed paths.
 */
export function buildParallelExecutionPlanSection(waves, cap) {
    const out = [
        PARALLEL_EXEC_MANAGED_START,
        "## Parallel Execution Plan",
        "",
        `- **Cap:** ${cap} parallel units per wave (conflict-aware via \`claimedPaths\`).`,
        "",
    ];
    waves.forEach((wave, index) => {
        const memberIds = [];
        const claimedUnion = new Set();
        for (const unit of wave) {
            const meta = parseImplementationUnitParallelFields(unit);
            memberIds.push(meta.unitId);
            for (const p of meta.claimedPaths) {
                claimedUnion.add(p);
            }
        }
        out.push(`### Wave ${padWaveIndex(index + 1)}`);
        out.push(`- **Members:** ${memberIds.join(", ")}`);
        out.push(`- **Claimed paths union:** ${[...claimedUnion].sort().join(", ") || "(none)"}`);
        out.push("");
    });
    out.push(PARALLEL_EXEC_MANAGED_END);
    return out.join("\n");
}
400
/**
 * Replace the existing managed Parallel Execution Plan block in place, or
 * append one (after trimming trailing whitespace) when no well-formed block
 * exists. The result always ends with a newline.
 */
export function upsertParallelExecutionPlanSection(planMarkdown, managedBlock) {
    const start = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_START);
    const end = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_END);
    if (start < 0 || end <= start) {
        return `${planMarkdown.replace(/\s+$/u, "")}\n\n${managedBlock}\n`;
    }
    const prefix = planMarkdown.slice(0, start);
    const suffix = planMarkdown.slice(end + PARALLEL_EXEC_MANAGED_END.length);
    const merged = prefix + managedBlock + suffix;
    return merged.endsWith("\n") ? merged : `${merged}\n`;
}
11
415
  /**
12
416
  * Parse `## Implementation Units` section into individual unit blocks.
13
417
  * Recognizes the canonical heading shape in the TDD-velocity plan template
@@ -193,10 +597,7 @@ export async function runPlanSplitWaves(projectRoot, args, io) {
193
597
  }
194
598
  return 0;
195
599
  }
196
- const waves = [];
197
- for (let i = 0; i < units.length; i += args.waveSize) {
198
- waves.push(units.slice(i, i + args.waveSize));
199
- }
600
+ const waves = buildConflictAwareWavesFromUnits(units, args.waveSize);
200
601
  const artifactsDir = path.dirname(planResolved.absPath);
201
602
  const wavePlansAbsDir = path.join(artifactsDir, WAVE_PLANS_DIR);
202
603
  const waveFileNames = waves.map((_, idx) => `${WAVE_PLANS_DIR}/wave-${padWaveIndex(idx + 1)}.md`);
@@ -217,7 +618,9 @@ export async function runPlanSplitWaves(projectRoot, args, io) {
217
618
  await writeFileSafe(path.join(artifactsDir, fileName), body);
218
619
  }
219
620
  const managed = buildWavePlansSection(waveFileNames);
220
- const updatedPlan = upsertWavePlansSection(raw, managed);
621
+ let updatedPlan = upsertWavePlansSection(raw, managed);
622
+ const parallelBlock = buildParallelExecutionPlanSection(waves, args.waveSize);
623
+ updatedPlan = upsertParallelExecutionPlanSection(updatedPlan, parallelBlock);
221
624
  if (updatedPlan !== raw) {
222
625
  await writeFileSafe(planResolved.absPath, updatedPlan);
223
626
  }
@@ -0,0 +1,10 @@
1
+ import type { Writable } from "node:stream";
2
+ export declare function parseSetWorktreeModeArgs(tokens: string[]): {
3
+ mode: "single-tree" | "worktree-first";
4
+ } | null;
5
+ /**
6
+ * Set `flow-state.json::worktreeExecutionMode` without advancing the stage DAG.
7
+ */
8
+ export declare function runSetWorktreeMode(projectRoot: string, tokens: string[], io: {
9
+ stderr: Writable;
10
+ }): Promise<number>;
@@ -0,0 +1,28 @@
1
+ import { readFlowState, writeFlowState } from "../runs.js";
2
/**
 * Parse `--mode=<value>` tokens. Only `single-tree` and `worktree-first` are
 * accepted; when the flag repeats, the last valid occurrence wins. Returns
 * null when no valid mode flag is present.
 */
export function parseSetWorktreeModeArgs(tokens) {
    const prefix = "--mode=";
    let mode = null;
    for (const token of tokens) {
        if (!token.startsWith(prefix))
            continue;
        const value = token.slice(prefix.length).trim();
        if (value === "single-tree" || value === "worktree-first") {
            mode = value;
        }
    }
    return mode === null ? null : { mode };
}
16
/**
 * Set `flow-state.json::worktreeExecutionMode` without advancing the stage
 * DAG. Returns 0 on success; on bad arguments writes a usage line to
 * `io.stderr` and returns 1.
 */
export async function runSetWorktreeMode(projectRoot, tokens, io) {
    const parsed = parseSetWorktreeModeArgs(tokens);
    if (parsed === null) {
        io.stderr.write("cclaw internal set-worktree-mode: usage: --mode=single-tree|worktree-first\n");
        return 1;
    }
    const state = await readFlowState(projectRoot);
    const next = { ...state, worktreeExecutionMode: parsed.mode };
    await writeFlowState(projectRoot, next, { writerSubsystem: "set-worktree-mode" });
    return 0;
}
@@ -60,6 +60,8 @@ export function isManagedGeneratedPath(relPath) {
60
60
  return false;
61
61
  if (relPath.startsWith(`${RUNTIME_ROOT}/artifacts/`))
62
62
  return false;
63
+ if (relPath.startsWith(`${RUNTIME_ROOT}/worktrees/`))
64
+ return false;
63
65
  if (relPath.startsWith(`${RUNTIME_ROOT}/archive/`))
64
66
  return false;
65
67
  if (relPath === `${RUNTIME_ROOT}/state/flow-state.json`)
@@ -472,6 +472,8 @@ function coerceFlowState(parsed) {
472
472
  const repoSignals = coerceRepoSignals(parsed.repoSignals);
473
473
  const completedStageMeta = sanitizeCompletedStageMeta(parsed.completedStageMeta);
474
474
  const tddCutoverSliceId = coerceTddCutoverSliceId(parsed.tddCutoverSliceId);
475
+ const worktreeExecutionMode = coerceWorktreeExecutionMode(parsed.worktreeExecutionMode);
476
+ const legacyContinuation = typeof parsed.legacyContinuation === "boolean" ? parsed.legacyContinuation : undefined;
475
477
  const state = {
476
478
  schemaVersion: FLOW_STATE_SCHEMA_VERSION,
477
479
  activeRunId,
@@ -485,6 +487,8 @@ function coerceFlowState(parsed) {
485
487
  ...(repoSignals ? { repoSignals } : {}),
486
488
  ...(completedStageMeta ? { completedStageMeta } : {}),
487
489
  ...(tddCutoverSliceId ? { tddCutoverSliceId } : {}),
490
+ ...(worktreeExecutionMode !== undefined ? { worktreeExecutionMode } : {}),
491
+ ...(legacyContinuation !== undefined ? { legacyContinuation } : {}),
488
492
  skippedStages: sanitizeSkippedStages(parsed.skippedStages, track),
489
493
  staleStages: sanitizeStaleStages(parsed.staleStages),
490
494
  rewinds: sanitizeRewinds(parsed.rewinds),
@@ -505,6 +509,11 @@ function coerceTddCutoverSliceId(value) {
505
509
  const trimmed = value.trim();
506
510
  return /^S-\d+$/u.test(trimmed) ? trimmed : null;
507
511
  }
512
/**
 * Validate a persisted worktree execution mode; anything other than the two
 * known modes coerces to undefined (field omitted from state).
 */
function coerceWorktreeExecutionMode(value) {
    switch (value) {
        case "single-tree":
        case "worktree-first":
            return value;
        default:
            return undefined;
    }
}
508
517
  export class CorruptFlowStateError extends Error {
509
518
  statePath;
510
519
  quarantinedPath;
@@ -0,0 +1,50 @@
1
+ import type { GitBaseRef, WorktreeLaneId } from "./worktree-types.js";
2
+ export interface CreateLaneOptions {
3
+ /** Repository root that owns `.cclaw/`. */
4
+ projectRoot: string;
5
+ /** TDD slice id (e.g. `S-7`). */
6
+ sliceId: string;
7
+ /** Git ref to create the worktree from (e.g. `HEAD`, branch name). */
8
+ baseRef: GitBaseRef;
9
+ }
10
+ export interface CreateLaneResult {
11
+ laneId: WorktreeLaneId;
12
+ workdir: string;
13
+ branchName: string;
14
+ }
15
+ /**
16
+ * Create a dedicated git worktree for a slice under `.cclaw/worktrees/`.
17
+ * Uses branch namespace `cclaw/lane/<sliceId>-<suffix>`. Does not commit.
18
+ */
19
+ export declare function createLane(options: CreateLaneOptions): Promise<CreateLaneResult>;
20
+ /**
21
+ * Assert the lane worktree exists, has a clean working tree, and matches
22
+ * the expected baseline ref (merge-base check with `baseRef`).
23
+ */
24
+ export declare function verifyLaneClean(projectRoot: string, laneId: WorktreeLaneId, baseRef: GitBaseRef): Promise<{
25
+ ok: true;
26
+ } | {
27
+ ok: false;
28
+ reason: string;
29
+ }>;
30
+ /**
31
+ * Prepare the lane for interactive work (no-op placeholder for harness parity).
32
+ */
33
+ export declare function attachLane(_laneId: WorktreeLaneId): Promise<void>;
34
+ /**
35
+ * Release local harness attachment (no-op).
36
+ */
37
+ export declare function detachLane(_laneId: WorktreeLaneId): Promise<void>;
38
+ /**
39
+ * Remove the worktree directory and prune git metadata.
40
+ */
41
+ export declare function cleanupLane(projectRoot: string, laneId: WorktreeLaneId, options?: {
42
+ force?: boolean;
43
+ }): Promise<void>;
44
+ export interface PruneStaleLanesOptions {
45
+ olderThanHours: number;
46
+ }
47
+ /**
48
+ * Remove lane worktrees older than the threshold based on directory mtime.
49
+ */
50
+ export declare function pruneStaleLanes(projectRoot: string, options: PruneStaleLanesOptions): Promise<string[]>;