cclaw-cli 6.12.0 → 6.13.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/artifact-linter/plan.js +60 -2
  2. package/dist/artifact-linter/shared.d.ts +9 -0
  3. package/dist/artifact-linter/spec.js +14 -0
  4. package/dist/artifact-linter/tdd.d.ts +19 -6
  5. package/dist/artifact-linter/tdd.js +225 -47
  6. package/dist/artifact-linter.js +10 -1
  7. package/dist/content/hooks.js +88 -1
  8. package/dist/content/skills.js +17 -10
  9. package/dist/content/stages/plan.js +2 -1
  10. package/dist/content/stages/spec.js +2 -2
  11. package/dist/content/stages/tdd.js +7 -6
  12. package/dist/content/start-command.js +6 -3
  13. package/dist/content/templates.js +10 -4
  14. package/dist/delegation.d.ts +82 -3
  15. package/dist/delegation.js +244 -6
  16. package/dist/flow-state.d.ts +20 -0
  17. package/dist/flow-state.js +7 -0
  18. package/dist/gate-evidence.d.ts +5 -0
  19. package/dist/gate-evidence.js +58 -1
  20. package/dist/install.js +90 -2
  21. package/dist/integration-fanin.d.ts +44 -0
  22. package/dist/integration-fanin.js +180 -0
  23. package/dist/internal/advance-stage/advance.js +16 -1
  24. package/dist/internal/advance-stage/start-flow.js +3 -1
  25. package/dist/internal/advance-stage.js +13 -4
  26. package/dist/internal/plan-split-waves.d.ts +85 -1
  27. package/dist/internal/plan-split-waves.js +409 -6
  28. package/dist/internal/set-worktree-mode.d.ts +10 -0
  29. package/dist/internal/set-worktree-mode.js +28 -0
  30. package/dist/managed-resources.js +2 -0
  31. package/dist/run-persistence.js +9 -0
  32. package/dist/worktree-manager.d.ts +50 -0
  33. package/dist/worktree-manager.js +136 -0
  34. package/dist/worktree-types.d.ts +36 -0
  35. package/dist/worktree-types.js +6 -0
  36. package/package.json +1 -1
@@ -4,9 +4,9 @@ import { exists } from "../fs-utils.js";
4
4
  import { FORBIDDEN_PLACEHOLDER_TOKENS, CONFIDENCE_FINDING_REGEX_SOURCE } from "../content/skills.js";
5
5
  import fs from "node:fs/promises";
6
6
  import path from "node:path";
7
- import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits } from "../internal/plan-split-waves.js";
7
+ import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImplementationUnitParallelFields } from "../internal/plan-split-waves.js";
8
8
  export async function lintPlanStage(ctx) {
9
- const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride } = ctx;
9
+ const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride, legacyContinuation } = ctx;
10
10
  evaluateInvestigationTrace(ctx, "Implementation Units");
11
11
  const strictPlanGuards = parsedFrontmatter.hasFrontmatter ||
12
12
  headingPresent(sections, "Plan Quality Scan") ||
@@ -219,4 +219,62 @@ export async function lintPlanStage(ctx) {
219
219
  : `Unwaived FAIL/PARTIAL statuses: ${layeredDocumentReview.failOrPartialWithoutWaiver.join(", ")}.`
220
220
  });
221
221
  }
222
+ const planUnits = parseImplementationUnits(raw);
223
+ const parallelMetaApplies = strictPlanGuards && planUnits.length > 0;
224
+ if (parallelMetaApplies) {
225
+ const metaRulesRequired = !legacyContinuation;
226
+ const missingDepends = [];
227
+ const missingPaths = [];
228
+ const missingParallelMeta = [];
229
+ for (const unit of planUnits) {
230
+ const id = unit.id;
231
+ if (!/\bdependsOn\s*:/iu.test(unit.body)) {
232
+ missingDepends.push(id);
233
+ }
234
+ if (!/\bclaimedPaths\s*:/iu.test(unit.body)) {
235
+ missingPaths.push(id);
236
+ }
237
+ if (!/\bparallelizable\s*:/iu.test(unit.body) || !/\briskTier\s*:/iu.test(unit.body)) {
238
+ missingParallelMeta.push(id);
239
+ }
240
+ }
241
+ findings.push({
242
+ section: "plan_units_missing_dependsOn",
243
+ required: metaRulesRequired,
244
+ rule: "Every implementation unit must declare `dependsOn:` (v6.13.0) — use comma-separated unit ids or `none`.",
245
+ found: missingDepends.length === 0,
246
+ details: missingDepends.length === 0
247
+ ? "All implementation units declare dependsOn."
248
+ : `Missing dependsOn on: ${missingDepends.join(", ")}. Remediation: add a bullet \`- **dependsOn:** U-2, U-3\` or \`- **dependsOn:** none\`.`
249
+ });
250
+ findings.push({
251
+ section: "plan_units_missing_claimedPaths",
252
+ required: metaRulesRequired,
253
+ rule: "Every implementation unit must declare explicit `claimedPaths:` predictions for parallel scheduling (v6.13.0).",
254
+ found: missingPaths.length === 0,
255
+ details: missingPaths.length === 0
256
+ ? "All implementation units declare claimedPaths."
257
+ : `Missing claimedPaths on: ${missingPaths.join(", ")}. Remediation: add \`- **claimedPaths:** path/a, path/b\` (repo-relative globs or files).`
258
+ });
259
+ findings.push({
260
+ section: "plan_units_missing_parallel_metadata",
261
+ required: metaRulesRequired,
262
+ rule: "Every implementation unit must declare `parallelizable:` and `riskTier:` (low|standard|high) (v6.13.0).",
263
+ found: missingParallelMeta.length === 0,
264
+ details: missingParallelMeta.length === 0
265
+ ? "All implementation units carry parallelizable + riskTier."
266
+ : `Missing parallel metadata on: ${missingParallelMeta.join(", ")}. Remediation: add \`- **parallelizable:** true|false\` and \`- **riskTier:** low|standard|high\`.`
267
+ });
268
+ const parallelizableCount = planUnits.filter((u) => parseImplementationUnitParallelFields(u).parallelizable).length;
269
+ const advisorySerial = parallelizableCount === 0 && planUnits.length > 1;
270
+ findings.push({
271
+ section: "plan_no_parallel_lanes_detected",
272
+ required: false,
273
+ rule: "When multiple independent units exist, consider marking at least one `parallelizable: true` with disjoint claimedPaths.",
274
+ found: !advisorySerial,
275
+ details: advisorySerial
276
+ ? "All units are marked parallelizable false; scheduler will serialize. If surfaces are independent, opt units into parallelism explicitly."
277
+ : "Parallel-ready units detected or plan is single-unit."
278
+ });
279
+ }
222
280
  }
@@ -630,4 +630,13 @@ export interface StageLintContext {
630
630
  * expansion-strategist delegation) from required → advisory.
631
631
  */
632
632
  taskClass: "software-standard" | "software-trivial" | "software-bugfix" | null;
633
+ /**
634
+ * v6.13.0 — when true, plan parallel-metadata rules downgrade to advisory
635
+ * for legacy continuation projects (hox-style).
636
+ */
637
+ legacyContinuation: boolean;
638
+ /**
639
+ * v6.13.0 — effective worktree execution mode for TDD linters.
640
+ */
641
+ worktreeExecutionMode: "single-tree" | "worktree-first";
633
642
  }
@@ -127,4 +127,18 @@ export async function lintSpecStage(ctx) {
127
127
  : `Unwaived FAIL/PARTIAL statuses: ${layeredDocumentReview.failOrPartialWithoutWaiver.join(", ")}.`
128
128
  });
129
129
  }
130
+ const acceptanceCriteriaBody = sectionBodyByName(sections, "Acceptance Criteria");
131
+ if (acceptanceCriteriaBody !== null && /\|/u.test(acceptanceCriteriaBody)) {
132
+ const hasParallel = /\bparallelSafe\b/iu.test(acceptanceCriteriaBody);
133
+ const hasTouch = /\btouchSurface\b/iu.test(acceptanceCriteriaBody);
134
+ findings.push({
135
+ section: "spec_acs_not_sliceable",
136
+ required: false,
137
+ rule: "Acceptance criteria should declare `parallelSafe` and `touchSurface` per row (v6.13.0) so plan/TDD can schedule slices safely.",
138
+ found: hasParallel && hasTouch,
139
+ details: hasParallel && hasTouch
140
+ ? "Acceptance Criteria mentions parallelSafe and touchSurface."
141
+ : "Add columns or inline markers for parallelSafe (true|false) and touchSurface (short area description) for each AC."
142
+ });
143
+ }
130
144
  }
@@ -1,4 +1,4 @@
1
- import type { DelegationEntry } from "../delegation.js";
1
+ import type { DelegationEntry, DelegationEvent } from "../delegation.js";
2
2
  import { type LintFinding, type StageLintContext } from "./shared.js";
3
3
  /**
4
4
  * v6.11.0 — TDD stage linter.
@@ -52,15 +52,28 @@ interface RedCheckpointResult {
52
52
  ok: boolean;
53
53
  details: string;
54
54
  }
55
+ /**
56
+ * v6.13.1 — detect single-slice dispatch when the merged wave plan
57
+ * requires parallel-ready slice-implementer fan-out.
58
+ */
59
+ export declare function evaluateWavePlanDispatchIgnored(params: {
60
+ artifactsDir: string;
61
+ planMarkdown: string;
62
+ runEvents: DelegationEvent[];
63
+ runId: string;
64
+ slices: Map<string, DelegationEntry[]>;
65
+ legacyContinuation: boolean;
66
+ }): Promise<LintFinding | null>;
55
67
  /**
56
68
  * v6.12.0 Phase W — RED checkpoint enforcement. The wave protocol
57
69
  * requires ALL Phase A REDs to land before ANY Phase B GREEN starts.
58
70
  * The rule is enforced on a per-wave basis, where a wave is defined by
59
- * `<artifacts-dir>/wave-plans/wave-NN.md` files (when present) listing
60
- * slice ids. When no wave manifest exists, the linter falls back to a
61
- * conservative implicit detection: a wave is a contiguous run of
62
- * `phase=red` events with no other-phase events between them; the rule
63
- * fires only when the implicit wave has 2+ members.
71
+ * the managed `## Parallel Execution Plan` block in `05-plan.md` and/or
72
+ * `<artifacts-dir>/wave-plans/wave-NN.md` files. When no wave manifest
73
+ * exists, the linter falls back to a conservative implicit detection: a
74
+ * wave is a contiguous run of `phase=red` events with no other-phase
75
+ * events between them; the rule fires only when the implicit wave has
76
+ * 2+ members.
64
77
  *
65
78
  * @param waveMembers Optional explicit wave manifest. Map key is wave
66
79
  * name (e.g. `"W-01"`); value is the set of slice ids in that wave.
@@ -1,6 +1,7 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
- import { readDelegationLedger } from "../delegation.js";
3
+ import { loadTddReadySlicePool, readDelegationLedger, readDelegationEvents, selectReadySlices } from "../delegation.js";
4
+ import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "../internal/plan-split-waves.js";
4
5
  import { evaluateInvestigationTrace, sectionBodyByName } from "./shared.js";
5
6
  const SLICE_SUMMARY_START = "<!-- auto-start: tdd-slice-summary -->";
6
7
  const SLICE_SUMMARY_END = "<!-- auto-end: tdd-slice-summary -->";
@@ -26,9 +27,17 @@ const SLICES_INDEX_END = "<!-- auto-end: slices-index -->";
26
27
  * via `## Slices Index`.
27
28
  */
28
29
  export async function lintTddStage(ctx) {
29
- const { projectRoot, discoveryMode, raw, absFile, sections, findings, parsedFrontmatter } = ctx;
30
- void projectRoot;
30
+ const { projectRoot, discoveryMode, raw, absFile, sections, findings, parsedFrontmatter, worktreeExecutionMode, legacyContinuation } = ctx;
31
31
  void parsedFrontmatter;
32
+ const artifactsDir = path.dirname(absFile);
33
+ const planPath = path.join(artifactsDir, "05-plan.md");
34
+ let planRaw = "";
35
+ try {
36
+ planRaw = await fs.readFile(planPath, "utf8");
37
+ }
38
+ catch {
39
+ planRaw = "";
40
+ }
32
41
  evaluateInvestigationTrace(ctx, "Watched-RED Proof");
33
42
  const delegationLedger = await readDelegationLedger(ctx.projectRoot);
34
43
  const activeRunEntries = delegationLedger.entries.filter((entry) => entry.stage === "tdd" && entry.runId === delegationLedger.runId);
@@ -177,7 +186,7 @@ export async function lintTddStage(ctx) {
177
186
  // (size >= 2). Sequential per-slice runs (red→green→refactor in a
178
187
  // tight loop) form size-1 implicit waves and are unaffected.
179
188
  if (eventsActive) {
180
- const waveManifest = await readWaveManifest(path.dirname(absFile));
189
+ const waveManifest = await readMergedWaveManifestForCheckpoint(artifactsDir, planRaw);
181
190
  const checkpointResult = evaluateRedCheckpoint(slicesByEvents, waveManifest);
182
191
  if (!checkpointResult.ok) {
183
192
  findings.push({
@@ -202,6 +211,118 @@ export async function lintTddStage(ctx) {
202
211
  if (cutoverFinding) {
203
212
  findings.push(cutoverFinding);
204
213
  }
214
+ const { events: jsonlEvents, fanInAudits } = await readDelegationEvents(projectRoot);
215
+ const runEvents = jsonlEvents.filter((e) => e.runId === delegationLedger.runId);
216
+ if (eventsActive && planRaw.length > 0) {
217
+ const ignoredWave = await evaluateWavePlanDispatchIgnored({
218
+ artifactsDir,
219
+ planMarkdown: planRaw,
220
+ runEvents,
221
+ runId: delegationLedger.runId,
222
+ slices: slicesByEvents,
223
+ legacyContinuation
224
+ });
225
+ if (ignoredWave) {
226
+ findings.push(ignoredWave);
227
+ }
228
+ }
229
+ if (eventsActive && worktreeExecutionMode === "worktree-first") {
230
+ const terminalPhases = new Set([
231
+ "refactor",
232
+ "refactor-deferred",
233
+ "resolve-conflict"
234
+ ]);
235
+ const missingGreenMeta = new Set();
236
+ for (const ev of runEvents) {
237
+ if (ev.stage !== "tdd" || ev.agent !== "slice-implementer")
238
+ continue;
239
+ if (ev.status !== "completed" || ev.phase !== "green")
240
+ continue;
241
+ if (typeof ev.sliceId !== "string")
242
+ continue;
243
+ const tok = ev.claimToken?.trim() ?? "";
244
+ const lane = ev.ownerLaneId?.trim() ?? "";
245
+ const lease = ev.leasedUntil?.trim() ?? "";
246
+ if (tok.length === 0 || lane.length === 0 || lease.length === 0) {
247
+ missingGreenMeta.add(ev.sliceId);
248
+ }
249
+ }
250
+ if (missingGreenMeta.size > 0) {
251
+ findings.push({
252
+ section: "tdd_slice_lane_metadata_missing",
253
+ required: true,
254
+ rule: "Worktree-first: every completed slice-implementer phase=green row must record claimToken, ownerLaneId (--lane-id), and leasedUntil (--lease-until).",
255
+ found: false,
256
+ details: `Slices missing one or more lane fields on GREEN: ${[...missingGreenMeta].sort().join(", ")}. Remediation: include --claim-token, --lane-id, and --lease-until on every slice-implementer --phase green delegation-record write (schedule through completion); the hook fails fast with dispatch_lane_metadata_missing when they are omitted.`
257
+ });
258
+ }
259
+ const missingClaim = new Set();
260
+ for (const ev of runEvents) {
261
+ if (ev.stage !== "tdd" || ev.agent !== "slice-implementer")
262
+ continue;
263
+ if (ev.status !== "completed" && ev.status !== "failed")
264
+ continue;
265
+ if (!ev.phase || !terminalPhases.has(ev.phase))
266
+ continue;
267
+ const tok = ev.claimToken?.trim() ?? "";
268
+ if (tok.length === 0 && typeof ev.sliceId === "string") {
269
+ missingClaim.add(ev.sliceId);
270
+ }
271
+ }
272
+ if (missingClaim.size > 0) {
273
+ findings.push({
274
+ section: "tdd_slice_claim_token_missing",
275
+ required: true,
276
+ rule: "Worktree-first: terminal slice-implementer rows (refactor / refactor-deferred / resolve-conflict) must echo --claim-token. Remediation: pass the same --claim-token used on the scheduled row for every completed/failed terminal phase.",
277
+ found: false,
278
+ details: `Slices missing claim token on non-GREEN terminal rows: ${[...missingClaim].join(", ")}.`
279
+ });
280
+ }
281
+ const conflictSlices = [
282
+ ...new Set([
283
+ ...runEvents
284
+ .filter((e) => e.integrationState === "conflict")
285
+ .map((e) => e.sliceId)
286
+ .filter((s) => typeof s === "string"),
287
+ ...fanInAudits
288
+ .filter((a) => a.runId === delegationLedger.runId &&
289
+ a.event === "cclaw_fanin_conflict" &&
290
+ Array.isArray(a.sliceIds))
291
+ .flatMap((a) => a.sliceIds ?? [])
292
+ ].filter((s) => typeof s === "string" && s.length > 0))
293
+ ];
294
+ if (conflictSlices.length > 0) {
295
+ findings.push({
296
+ section: "tdd_fanin_conflict_unresolved",
297
+ required: true,
298
+ rule: "Resolve fan-in conflicts before stage-complete: dispatch slice-implementer --phase resolve-conflict or abandon the slice explicitly.",
299
+ found: false,
300
+ details: `integrationState=conflict for slice(s): ${conflictSlices.join(", ")}. Remediation: finish deterministic fan-in or mark integrationState=resolved after manual merge evidence.`
301
+ });
302
+ }
303
+ const now = Date.now();
304
+ const leaseStale = new Set();
305
+ for (const ev of runEvents) {
306
+ if (typeof ev.leasedUntil !== "string")
307
+ continue;
308
+ const until = Date.parse(ev.leasedUntil);
309
+ if (!Number.isFinite(until) || until >= now)
310
+ continue;
311
+ if (ev.leaseState === "reclaimed" || ev.leaseState === "released")
312
+ continue;
313
+ if (typeof ev.sliceId === "string")
314
+ leaseStale.add(ev.sliceId);
315
+ }
316
+ if (leaseStale.size > 0) {
317
+ findings.push({
318
+ section: "tdd_lease_expired_unreclaimed",
319
+ required: true,
320
+ rule: "Leases past leasedUntil must be reclaimed or released. Remediation: run scheduler reclaim or emit leaseState=reclaimed audit rows after controller action.",
321
+ found: false,
322
+ details: `Expired leases not reclaimed for slice(s): ${[...leaseStale].join(", ")}.`
323
+ });
324
+ }
325
+ }
205
326
  const assertionBody = sectionBodyByName(sections, "Assertion Correctness Notes");
206
327
  if (assertionBody !== null) {
207
328
  const tableRows = assertionBody.split("\n").filter((line) => /^\|/u.test(line));
@@ -244,7 +365,6 @@ export async function lintTddStage(ctx) {
244
365
  const completedSliceImplementers = activeRunEntries.filter((entry) => entry.agent === "slice-implementer" && entry.status === "completed");
245
366
  const fanOutDetected = completedSliceImplementers.length > 1;
246
367
  if (fanOutDetected) {
247
- const artifactsDir = path.dirname(absFile);
248
368
  const cohesionContractMarkdownPath = path.join(artifactsDir, "cohesion-contract.md");
249
369
  const cohesionContractJsonPath = path.join(artifactsDir, "cohesion-contract.json");
250
370
  let cohesionContractFound = true;
@@ -324,7 +444,6 @@ export async function lintTddStage(ctx) {
324
444
  // Phase S — sharded slice files. Validate per-slice file presence
325
445
  // and required headings. `tdd-slices/` is optional; missing folder
326
446
  // simply means main-only mode (legacy fallback).
327
- const artifactsDir = path.dirname(absFile);
328
447
  const slicesDir = path.join(artifactsDir, "tdd-slices");
329
448
  const sliceFiles = await listSliceFiles(slicesDir);
330
449
  for (const sliceFile of sliceFiles) {
@@ -600,15 +719,110 @@ export function evaluateSliceImplementerCoverage(slices) {
600
719
  }
601
720
  return { missing };
602
721
  }
722
+ async function readMergedWaveManifestForCheckpoint(artifactsDir, planMarkdown) {
723
+ try {
724
+ const merged = mergeParallelWaveDefinitions(parseParallelExecutionPlanWaves(planMarkdown), await parseWavePlanDirectory(artifactsDir));
725
+ if (merged.length === 0)
726
+ return null;
727
+ const map = new Map();
728
+ for (const w of merged) {
729
+ map.set(w.waveId, new Set(w.members.map((m) => m.sliceId)));
730
+ }
731
+ return map.size > 0 ? map : null;
732
+ }
733
+ catch {
734
+ return null;
735
+ }
736
+ }
737
+ function sliceRefactorTerminal(sliceId, slices) {
738
+ const rows = slices.get(sliceId);
739
+ if (!rows)
740
+ return false;
741
+ return rows.some((e) => e.agent === "slice-implementer" &&
742
+ (e.phase === "refactor" || e.phase === "refactor-deferred") &&
743
+ (e.status === "completed" || e.status === "failed"));
744
+ }
745
+ /**
746
+ * v6.13.1 — detect single-slice dispatch when the merged wave plan
747
+ * requires parallel-ready slice-implementer fan-out.
748
+ */
749
+ export async function evaluateWavePlanDispatchIgnored(params) {
750
+ let merged;
751
+ try {
752
+ merged = mergeParallelWaveDefinitions(parseParallelExecutionPlanWaves(params.planMarkdown), await parseWavePlanDirectory(params.artifactsDir));
753
+ }
754
+ catch {
755
+ return null;
756
+ }
757
+ if (merged.length === 0)
758
+ return null;
759
+ let pool;
760
+ try {
761
+ pool = await loadTddReadySlicePool(params.planMarkdown, params.artifactsDir, {
762
+ legacyParallelDefaultSerial: params.legacyContinuation
763
+ });
764
+ }
765
+ catch {
766
+ return null;
767
+ }
768
+ if (pool.length === 0)
769
+ return null;
770
+ const completedUnitIds = new Set();
771
+ for (const u of pool) {
772
+ if (sliceRefactorTerminal(u.sliceId, params.slices)) {
773
+ completedUnitIds.add(u.unitId);
774
+ }
775
+ }
776
+ const scoped = params.runEvents.filter((e) => e.runId === params.runId);
777
+ const tail = scoped.slice(-20);
778
+ const implInTail = new Set();
779
+ for (const e of tail) {
780
+ if (e.agent === "slice-implementer" && typeof e.sliceId === "string" && e.sliceId.length > 0) {
781
+ implInTail.add(e.sliceId);
782
+ }
783
+ }
784
+ if (implInTail.size !== 1)
785
+ return null;
786
+ for (const wave of merged) {
787
+ const waveSliceSet = new Set(wave.members.map((m) => m.sliceId));
788
+ const wavePool = pool.filter((u) => waveSliceSet.has(u.sliceId));
789
+ if (wavePool.length < 2)
790
+ continue;
791
+ const waveIncomplete = wave.members.some((m) => !sliceRefactorTerminal(m.sliceId, params.slices));
792
+ if (!waveIncomplete)
793
+ continue;
794
+ const ready = selectReadySlices(wavePool, {
795
+ cap: Math.max(32, wavePool.length),
796
+ completedUnitIds,
797
+ activePathHolders: [],
798
+ legacyContinuation: params.legacyContinuation
799
+ });
800
+ if (ready.length < 2)
801
+ continue;
802
+ const only = [...implInTail][0];
803
+ const missed = ready.map((r) => r.sliceId).filter((s) => s !== only);
804
+ if (missed.length === 0)
805
+ continue;
806
+ return {
807
+ section: "tdd_wave_plan_ignored",
808
+ required: true,
809
+ rule: "When the Parallel Execution Plan (or wave-plans/) defines an open wave with two or more ready parallelizable slices, the controller must fan out slice-implementer work for each ready slice instead of serializing to one slice only.",
810
+ found: false,
811
+ details: `Wave ${wave.waveId}: scheduler-ready members ${ready.map((r) => r.sliceId).join(", ")}; last 20 delegation events show slice-implementer only for ${only}. Missed parallel dispatch: ${missed.join(", ")}. Remediation: load \`05-plan.md\` (Parallel Execution Plan) and \`wave-plans/\` before routing, launch the wave (AskQuestion only when waveCount>=2 and single-slice is a real alternative), then dispatch GREEN+DOC for every ready slice with mandatory worktree-first flags on GREEN.`
812
+ };
813
+ }
814
+ return null;
815
+ }
603
816
  /**
604
817
  * v6.12.0 Phase W — RED checkpoint enforcement. The wave protocol
605
818
  * requires ALL Phase A REDs to land before ANY Phase B GREEN starts.
606
819
  * The rule is enforced on a per-wave basis, where a wave is defined by
607
- * `<artifacts-dir>/wave-plans/wave-NN.md` files (when present) listing
608
- * slice ids. When no wave manifest exists, the linter falls back to a
609
- * conservative implicit detection: a wave is a contiguous run of
610
- * `phase=red` events with no other-phase events between them; the rule
611
- * fires only when the implicit wave has 2+ members.
820
+ * the managed `## Parallel Execution Plan` block in `05-plan.md` and/or
821
+ * `<artifacts-dir>/wave-plans/wave-NN.md` files. When no wave manifest
822
+ * exists, the linter falls back to a conservative implicit detection: a
823
+ * wave is a contiguous run of `phase=red` events with no other-phase
824
+ * events between them; the rule fires only when the implicit wave has
825
+ * 2+ members.
612
826
  *
613
827
  * @param waveMembers Optional explicit wave manifest. Map key is wave
614
828
  * name (e.g. `"W-01"`); value is the set of slice ids in that wave.
@@ -689,42 +903,6 @@ export function evaluateRedCheckpoint(slices, waveMembers = null) {
689
903
  "Dispatch ALL Phase A test-author --phase red calls in one message, verify every phase=red event lands with non-empty evidenceRefs, and only then dispatch Phase B slice-implementer --phase green + slice-documenter --phase doc fan-out."
690
904
  };
691
905
  }
692
- /**
693
- * Read explicit wave manifest from `<artifacts-dir>/wave-plans/wave-NN.md`
694
- * files. Returns a map from wave name to the set of slice ids it
695
- * contains. Slice ids are extracted via `S-<digits>` regex matches in
696
- * each wave file. Returns null when no wave files exist or all are
697
- * empty/unparseable.
698
- */
699
- async function readWaveManifest(artifactsDir) {
700
- const wavePlansDir = path.join(artifactsDir, "wave-plans");
701
- let entries = [];
702
- try {
703
- entries = await fs.readdir(wavePlansDir);
704
- }
705
- catch {
706
- return null;
707
- }
708
- const waves = new Map();
709
- for (const name of entries) {
710
- const match = /^wave-(\d+)\.md$/u.exec(name);
711
- if (!match)
712
- continue;
713
- const wavePath = path.join(wavePlansDir, name);
714
- let body = "";
715
- try {
716
- body = await fs.readFile(wavePath, "utf8");
717
- }
718
- catch {
719
- continue;
720
- }
721
- const ids = extractSliceIdsFromBody(body);
722
- if (ids.length === 0)
723
- continue;
724
- waves.set(`W-${match[1]}`, new Set(ids));
725
- }
726
- return waves.size > 0 ? waves : null;
727
- }
728
906
  const LEGACY_PER_SLICE_SECTIONS = [
729
907
  "Test Discovery",
730
908
  "RED Evidence",
@@ -1,6 +1,7 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
3
  import { resolveArtifactPath as resolveStageArtifactPath } from "./artifact-paths.js";
4
+ import { effectiveWorktreeExecutionMode } from "./flow-state.js";
4
5
  import { exists } from "./fs-utils.js";
5
6
  import { stageSchema } from "./content/stage-schema.js";
6
7
  import { readFlowState } from "./run-persistence.js";
@@ -121,6 +122,8 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
121
122
  let activeRunId = null;
122
123
  let completedStagesForAudit = [];
123
124
  let completedStageMetaForAudit;
125
+ let legacyContinuation = false;
126
+ let worktreeExecutionMode = "single-tree";
124
127
  try {
125
128
  const flowState = await readFlowState(projectRoot);
126
129
  const hint = flowState.interactionHints?.[stage];
@@ -131,6 +134,8 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
131
134
  activeRunId = flowState.activeRunId ?? null;
132
135
  completedStagesForAudit = flowState.completedStages;
133
136
  completedStageMetaForAudit = flowState.completedStageMeta;
137
+ legacyContinuation = flowState.legacyContinuation === true;
138
+ worktreeExecutionMode = effectiveWorktreeExecutionMode(flowState);
134
139
  }
135
140
  catch {
136
141
  activeStageFlags = [];
@@ -139,6 +144,8 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
139
144
  activeRunId = null;
140
145
  completedStagesForAudit = [];
141
146
  completedStageMetaForAudit = undefined;
147
+ legacyContinuation = false;
148
+ worktreeExecutionMode = "single-tree";
142
149
  }
143
150
  for (const extra of options.extraStageFlags ?? []) {
144
151
  if (typeof extra === "string" && extra.length > 0 && !activeStageFlags.includes(extra)) {
@@ -274,7 +281,9 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
274
281
  isTrivialOverride,
275
282
  overrideSet,
276
283
  activeStageFlags,
277
- taskClass
284
+ taskClass,
285
+ legacyContinuation,
286
+ worktreeExecutionMode
278
287
  };
279
288
  switch (stage) {
280
289
  case "brainstorm":
@@ -294,6 +294,19 @@ async function readRunId(root) {
294
294
  }
295
295
  }
296
296
 
297
+ async function readWorktreeExecutionModeInline(root) {
298
+ try {
299
+ const raw = await fs.readFile(path.join(root, RUNTIME_ROOT, "state", "flow-state.json"), "utf8");
300
+ const parsed = JSON.parse(raw);
301
+ if (parsed && parsed.worktreeExecutionMode === "worktree-first") {
302
+ return "worktree-first";
303
+ }
304
+ return "single-tree";
305
+ } catch {
306
+ return "single-tree";
307
+ }
308
+ }
309
+
297
310
  async function readDelegationEvents(root) {
298
311
  try {
299
312
  const raw = await fs.readFile(path.join(root, RUNTIME_ROOT, "state", "delegation-events.jsonl"), "utf8");
@@ -350,6 +363,11 @@ function usage() {
350
363
  " --slice=<id> TDD slice identifier (e.g. S-1) used by the linter to auto-derive the Watched-RED + Vertical Slice Cycle tables.",
351
364
  " --phase=<phase> one of " + VALID_DELEGATION_PHASES.join(", ") + ". Pair with --slice to record a TDD slice phase event.",
352
365
  " --refactor-rationale=<t> required when --phase=refactor-deferred unless --evidence-ref carries the rationale text.",
366
+ " --claim-token=<opaque> v6.13 — required for worktree-first slice-implementer schedules with --slice (echo on all terminal rows for the span).",
367
+ " --lane-id=<id> v6.13 — worktree lane id (ownerLaneId metadata).",
368
+ " --lease-until=<iso> v6.13 — ISO8601 lease expiry for reclaim tooling.",
369
+ " --depends-on=<a,b> v6.13 — comma-separated plan unit ids for scheduler diagnostics.",
370
+ " --integration-state=<s> v6.13 — one of pending|applied|conflict|resolved|abandoned.",
353
371
  ""
354
372
  ].join("\\n") + "\\n");
355
373
  }
@@ -484,6 +502,42 @@ function buildRow(args, status, runId, now, options) {
484
502
  resolvedEvidenceRefs = [rationale, ...resolvedEvidenceRefs];
485
503
  }
486
504
  }
505
+ const integrationStateRaw =
506
+ typeof args["integration-state"] === "string" ? args["integration-state"].trim() : "";
507
+ const integrationStateAllowed = new Set([
508
+ "pending",
509
+ "applied",
510
+ "conflict",
511
+ "resolved",
512
+ "abandoned"
513
+ ]);
514
+ const integrationState =
515
+ integrationStateRaw.length > 0 && integrationStateAllowed.has(integrationStateRaw)
516
+ ? integrationStateRaw
517
+ : undefined;
518
+ const claimToken =
519
+ typeof args["claim-token"] === "string" && args["claim-token"].trim().length > 0
520
+ ? args["claim-token"].trim()
521
+ : undefined;
522
+ const ownerLaneId =
523
+ typeof args["lane-id"] === "string" && args["lane-id"].trim().length > 0
524
+ ? args["lane-id"].trim()
525
+ : undefined;
526
+ const leasedUntil =
527
+ typeof args["lease-until"] === "string" && args["lease-until"].trim().length > 0
528
+ ? args["lease-until"].trim()
529
+ : undefined;
530
+ const dependsOnRaw =
531
+ typeof args["depends-on"] === "string" ? args["depends-on"].trim() : "";
532
+ const dependsOn =
533
+ dependsOnRaw.length > 0
534
+ ? dependsOnRaw
535
+ .split(",")
536
+ .map((value) => value.trim())
537
+ .filter((value) => value.length > 0)
538
+ : undefined;
539
+ const leaseState =
540
+ leasedUntil && status === "scheduled" ? "claimed" : undefined;
487
541
  return {
488
542
  stage: args.stage,
489
543
  agent: args.agent,
@@ -508,7 +562,13 @@ function buildRow(args, status, runId, now, options) {
508
562
  allowParallel: args["allow-parallel"] === true ? true : undefined,
509
563
  claimedPaths: claimedPaths.length > 0 ? claimedPaths : undefined,
510
564
  sliceId,
511
- phase
565
+ phase,
566
+ claimToken,
567
+ ownerLaneId,
568
+ leasedUntil,
569
+ leaseState,
570
+ dependsOn,
571
+ integrationState
512
572
  };
513
573
  }
514
574
 
@@ -1218,6 +1278,33 @@ async function main() {
1218
1278
  }
1219
1279
  }
1220
1280
 
1281
+ if (
1282
+ clean.stage === "tdd" &&
1283
+ clean.agent === "slice-implementer" &&
1284
+ clean.phase === "green" &&
1285
+ (await readWorktreeExecutionModeInline(root)) === "worktree-first"
1286
+ ) {
1287
+ const tok = typeof clean.claimToken === "string" ? clean.claimToken.trim() : "";
1288
+ const lane = typeof clean.ownerLaneId === "string" ? clean.ownerLaneId.trim() : "";
1289
+ const lease = typeof clean.leasedUntil === "string" ? clean.leasedUntil.trim() : "";
1290
+ if (tok.length === 0 || lane.length === 0 || lease.length === 0) {
1291
+ const missing = [];
1292
+ if (tok.length === 0) missing.push("--claim-token");
1293
+ if (lane.length === 0) missing.push("--lane-id");
1294
+ if (lease.length === 0) missing.push("--lease-until");
1295
+ emitErrorJson(
1296
+ "dispatch_lane_metadata_missing",
1297
+ {
1298
+ missing,
1299
+ remediation:
1300
+ "worktree-first mode requires --claim-token, --lane-id, and --lease-until on every slice-implementer --phase green delegation-record write (from scheduled through completed)."
1301
+ },
1302
+ json
1303
+ );
1304
+ return;
1305
+ }
1306
+ }
1307
+
1221
1308
  await persistEntry(root, runId, clean, event);
1222
1309
  process.stdout.write(JSON.stringify({ ok: true, event }, null, 2) + "\\n");
1223
1310
  }