cclaw-cli 7.5.0 → 7.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +2 -1
  2. package/dist/artifact-linter/plan.js +238 -26
  3. package/dist/artifact-linter/tdd.js +4 -3
  4. package/dist/config.d.ts +18 -1
  5. package/dist/config.js +176 -5
  6. package/dist/content/core-agents.d.ts +1 -1
  7. package/dist/content/core-agents.js +17 -2
  8. package/dist/content/hooks.js +37 -0
  9. package/dist/content/meta-skill.js +4 -4
  10. package/dist/content/skills.js +12 -8
  11. package/dist/content/stage-schema.js +3 -2
  12. package/dist/content/stages/plan.js +18 -17
  13. package/dist/content/stages/tdd.js +13 -10
  14. package/dist/content/start-command.js +3 -3
  15. package/dist/content/subagent-context-skills.js +2 -2
  16. package/dist/content/subagents.js +6 -6
  17. package/dist/content/templates.js +12 -7
  18. package/dist/delegation.d.ts +43 -3
  19. package/dist/delegation.js +80 -9
  20. package/dist/execution-topology.d.ts +36 -0
  21. package/dist/execution-topology.js +73 -0
  22. package/dist/gate-evidence.js +10 -12
  23. package/dist/internal/advance-stage/start-flow.js +13 -4
  24. package/dist/internal/cohesion-contract-stub.js +2 -14
  25. package/dist/internal/plan-split-waves.d.ts +5 -2
  26. package/dist/internal/plan-split-waves.js +27 -16
  27. package/dist/internal/slice-commit.js +161 -7
  28. package/dist/internal/wave-status.d.ts +4 -0
  29. package/dist/internal/wave-status.js +50 -9
  30. package/dist/stack-detection.d.ts +94 -0
  31. package/dist/stack-detection.js +431 -0
  32. package/dist/tdd-cycle.js +7 -5
  33. package/dist/types.d.ts +67 -0
  34. package/dist/util/slice-id.d.ts +58 -0
  35. package/dist/util/slice-id.js +89 -0
  36. package/package.json +1 -1
package/README.md CHANGED
@@ -68,7 +68,8 @@ That gives you:
68
68
 
69
69
  - **One path** from idea to ship, with one user-chosen discovery mode (`lean`, `guided`, `deep`) and internal `quick` / `medium` / `standard` tracks.
70
70
  - **Real gates** for evidence, tests, review, delegation, stale-stage recovery, and closeout.
71
- - **Subagents with accountability**: controller owns state, workers do bounded tasks, overseers validate, evidence lands in `delegation-log.json`.
71
+ - **Adaptive TDD execution**: feature-atomic slices carry internal 2-5 minute RED/GREEN/REFACTOR steps; cclaw routes them inline, through one builder, through parallel builders, or through strict micro-slices when risk demands it.
72
+ - **Subagents with accountability**: controller owns state, workers do bounded implementation units, overseers validate, evidence lands in `delegation-log.json`.
72
73
  - **Recovery instead of confusion**: `npx cclaw-cli sync` tells you blockers and next fixes.
73
74
  - **Portable harness behavior** across Claude Code, Cursor, OpenCode, and Codex.
74
75
 
@@ -4,10 +4,17 @@ import { exists } from "../fs-utils.js";
4
4
  import { FORBIDDEN_PLACEHOLDER_TOKENS, CONFIDENCE_FINDING_REGEX_SOURCE } from "../content/skills.js";
5
5
  import fs from "node:fs/promises";
6
6
  import path from "node:path";
7
- import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImplementationUnitParallelFields } from "../internal/plan-split-waves.js";
7
+ import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImplementationUnitParallelFields, parseParallelExecutionPlanWaves } from "../internal/plan-split-waves.js";
8
+ import { compareSliceIds, parseSliceId } from "../util/slice-id.js";
9
+ import { execFile } from "node:child_process";
10
+ import { promisify } from "node:util";
11
+ import { loadStackAdapter } from "../stack-detection.js";
12
+ import { readConfig, resolveExecutionStrictness, resolveExecutionTopology, resolvePlanMicroTaskPolicy, resolvePlanSliceGranularity } from "../config.js";
13
+ const execFileAsync = promisify(execFile);
8
14
  const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
9
15
  const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
10
16
  const TASK_ID_PATTERN = /\bT-\d{3}[a-z]?(?:\.\d{1,3})?\b/giu;
17
+ const UNIT_ID_PATTERN = /\bU-\d+(?:[a-z][a-z0-9]*)?\b/giu;
11
18
  const ACCEPTANCE_ID_PATTERN = /\bAC-\d+\b/giu;
12
19
  const PLAN_LANE_WHITELIST = new Set([
13
20
  "production",
@@ -31,6 +38,13 @@ function extractTaskIds(body) {
31
38
  }
32
39
  return ids;
33
40
  }
41
+ function extractUnitIds(body) {
42
+ const ids = new Set();
43
+ for (const match of body.matchAll(UNIT_ID_PATTERN)) {
44
+ ids.add(match[0].toUpperCase());
45
+ }
46
+ return ids;
47
+ }
34
48
  function extractAcceptanceTaskLinks(body) {
35
49
  const links = [];
36
50
  for (const line of body.split(/\r?\n/u)) {
@@ -111,7 +125,11 @@ function parseParallelWaveTableMetadata(planMarkdown) {
111
125
  if (cells.length === 0)
112
126
  continue;
113
127
  const first = cells[0].toLowerCase();
114
- const isHeader = first === "sliceid" || first === "slice id";
128
+ const isHeader = first === "sliceid" ||
129
+ first === "slice id" ||
130
+ first === "unitid" ||
131
+ first === "unit id" ||
132
+ first === "unit";
115
133
  if (isHeader) {
116
134
  headerIdx = headerIndexByName(cells);
117
135
  continue;
@@ -119,14 +137,18 @@ function parseParallelWaveTableMetadata(planMarkdown) {
119
137
  if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
120
138
  continue;
121
139
  }
122
- const sliceCell = cells[0];
123
- if (!/^S-\d+$/iu.test(sliceCell))
140
+ const sliceCell = cells[0].replace(/^`|`$/gu, "").trim();
141
+ const parsedSlice = parseSliceId(sliceCell);
142
+ const parsedUnit = /^U-(\d+(?:[a-z][a-z0-9]*)?)$/iu.exec(sliceCell);
143
+ if (!parsedSlice && !parsedUnit)
124
144
  continue;
145
+ const sliceId = parsedSlice?.id ?? `S-${parsedUnit[1].toLowerCase()}`;
125
146
  const idx = headerIdx ?? new Map();
126
147
  const unitIdx = idx.get("unit") ?? idx.get("taskid") ?? 1;
127
148
  const pathsIdx = idx.get("claimedpaths");
128
149
  const parallelizableIdx = idx.get("parallelizable");
129
150
  const laneIdx = idx.get("lane");
151
+ const dependsOnIdx = idx.get("dependson");
130
152
  const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
131
153
  const claimedPaths = rawPaths.length === 0
132
154
  ? []
@@ -141,12 +163,22 @@ function parseParallelWaveTableMetadata(planMarkdown) {
141
163
  if (rawParallel === "false" || rawParallel === "no")
142
164
  parallelizable = false;
143
165
  const laneRaw = laneIdx !== undefined ? (cells[laneIdx] ?? "").trim().toLowerCase() : "";
166
+ const rawDeps = dependsOnIdx !== undefined ? (cells[dependsOnIdx] ?? "") : "";
167
+ const dependsOn = rawDeps.length === 0
168
+ ? []
169
+ : rawDeps
170
+ .replace(/^\[|\]$/gu, "")
171
+ .split(/[,\s]+/u)
172
+ .map((token) => token.trim().replace(/^`|`$/gu, ""))
173
+ .map((token) => parseSliceId(token)?.id ?? "")
174
+ .filter((id) => id.length > 0);
144
175
  current.rows.push({
145
- sliceId: sliceCell.toUpperCase(),
176
+ sliceId,
146
177
  unit: (cells[unitIdx] ?? "").trim(),
147
178
  claimedPaths,
148
179
  parallelizable,
149
- lane: laneRaw.length > 0 ? laneRaw : null
180
+ lane: laneRaw.length > 0 ? laneRaw : null,
181
+ dependsOn
150
182
  });
151
183
  }
152
184
  flush();
@@ -156,8 +188,72 @@ function waveHasSequentialModeHint(wave) {
156
188
  const noteText = wave.notes.join("\n").toLowerCase();
157
189
  return /mode\s*:\s*sequential/iu.test(noteText) || /\bsequential\b/iu.test(noteText) || /\bserial\b/iu.test(noteText);
158
190
  }
191
+ /**
192
+ * Capture the set of repo-relative paths tracked at HEAD. Returns an
193
+ * empty set when the project root is not a git repo or `git ls-files`
194
+ * fails — the wiring linter degrades to "no aggregator required" in
195
+ * that case rather than crashing the whole stage check.
196
+ */
197
+ async function readHeadFiles(projectRoot) {
198
+ try {
199
+ const { stdout } = await execFileAsync("git", ["ls-files", "-z"], { cwd: projectRoot, maxBuffer: 64 * 1024 * 1024 });
200
+ const out = new Set();
201
+ for (const segment of stdout.split("\u0000")) {
202
+ const trimmed = segment.trim();
203
+ if (trimmed.length === 0)
204
+ continue;
205
+ out.add(trimmed.replace(/\\/gu, "/"));
206
+ }
207
+ return out;
208
+ }
209
+ catch {
210
+ return new Set();
211
+ }
212
+ }
213
+ function buildSliceClaimGraph(waves) {
214
+ const bySliceId = new Map();
215
+ for (const wave of waves) {
216
+ for (const row of wave.rows) {
217
+ bySliceId.set(row.sliceId, row);
218
+ }
219
+ }
220
+ return { bySliceId };
221
+ }
222
+ /**
223
+ * Walk the dependsOn graph from `sliceId` and return the set of
224
+ * predecessor slice ids (transitive). Skips ids that aren't in the
225
+ * graph and handles cycles via a `visiting` set so a malformed plan
226
+ * doesn't lock the linter.
227
+ */
228
+ function transitivePredecessors(sliceId, graph) {
229
+ const out = new Set();
230
+ const stack = [sliceId];
231
+ const visiting = new Set();
232
+ while (stack.length > 0) {
233
+ const current = stack.pop();
234
+ if (visiting.has(current))
235
+ continue;
236
+ visiting.add(current);
237
+ const row = graph.bySliceId.get(current);
238
+ if (!row)
239
+ continue;
240
+ for (const predecessor of row.dependsOn) {
241
+ const normalized = parseSliceId(predecessor)?.id ?? predecessor;
242
+ if (out.has(normalized))
243
+ continue;
244
+ out.add(normalized);
245
+ stack.push(normalized);
246
+ }
247
+ }
248
+ return out;
249
+ }
159
250
  export async function lintPlanStage(ctx) {
160
251
  const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride } = ctx;
252
+ const config = await readConfig(projectRoot).catch(() => null);
253
+ const executionStrictness = resolveExecutionStrictness(config);
254
+ const executionTopology = resolveExecutionTopology(config);
255
+ const planSliceGranularity = resolvePlanSliceGranularity(config);
256
+ const planMicroTaskPolicy = resolvePlanMicroTaskPolicy(config);
161
257
  evaluateInvestigationTrace(ctx, "Implementation Units");
162
258
  const strictPlanGuards = parsedFrontmatter.hasFrontmatter ||
163
259
  headingPresent(sections, "Plan Quality Scan") ||
@@ -424,6 +520,29 @@ export async function lintPlanStage(ctx) {
424
520
  });
425
521
  }
426
522
  const planUnits = parseImplementationUnits(raw);
523
+ const authoredTaskIdsForShape = extractTaskIds(sectionBodyByName(sections, "Task List") ?? "");
524
+ const microtaskOnlyPlan = authoredTaskIdsForShape.size > 1 &&
525
+ planUnits.length === 0 &&
526
+ executionTopology !== "strict-micro" &&
527
+ planSliceGranularity !== "strict-micro";
528
+ const strictMicroPolicy = executionStrictness === "strict" ||
529
+ executionTopology === "strict-micro" ||
530
+ planSliceGranularity === "strict-micro" ||
531
+ planMicroTaskPolicy === "strict";
532
+ const microtaskOnlyAdvisoryApplies = microtaskOnlyPlan &&
533
+ !strictMicroPolicy &&
534
+ (executionStrictness === "fast" || executionStrictness === "balanced");
535
+ findings.push({
536
+ section: "plan_microtask_only_advisory",
537
+ required: false,
538
+ rule: "Balanced/fast execution should plan feature-atomic implementation units/slices with internal 2-5 minute TDD steps; reserve one-task-one-slice microtask plans for `execution.topology: strict-micro`, `execution.strictness: strict`, or `plan.microTaskPolicy: strict`.",
539
+ found: !microtaskOnlyAdvisoryApplies,
540
+ details: microtaskOnlyAdvisoryApplies
541
+ ? `Task List has ${authoredTaskIdsForShape.size} tiny task id(s) but no Implementation Units. In execution.strictness=${executionStrictness} with plan.microTaskPolicy=${planMicroTaskPolicy}, group related tasks into U-* feature-atomic slices with internal RED/GREEN/REFACTOR steps, or set execution.topology=strict-micro / plan.microTaskPolicy=strict for high-risk micro-slice execution.`
542
+ : strictMicroPolicy
543
+ ? "Strict micro-slice posture is configured; microtask-only planning is allowed."
544
+ : "Plan includes implementation units or does not look microtask-only."
545
+ });
427
546
  const parallelMetaApplies = strictPlanGuards && planUnits.length > 0;
428
547
  if (parallelMetaApplies) {
429
548
  const metaRulesRequired = true;
@@ -497,6 +616,7 @@ export async function lintPlanStage(ctx) {
497
616
  if (strictPlanGuards) {
498
617
  const taskListSection = sectionBodyByName(sections, "Task List") ?? "";
499
618
  const authoredTaskIds = extractTaskIds(taskListSection);
619
+ const authoredUnitIds = new Set(planUnits.map((unit) => unit.id.toUpperCase()));
500
620
  // Collect deferred / backlog task ids so they don't trigger the
501
621
  // "uncovered" finding. Both heading variants are accepted.
502
622
  const deferredBody = (sectionBodyByName(sections, "Deferred Tasks") ?? "") +
@@ -505,29 +625,58 @@ export async function lintPlanStage(ctx) {
505
625
  const deferredIds = extractTaskIds(deferredBody);
506
626
  const parallelExecBody = extractParallelExecManagedBody(raw);
507
627
  const claimedIds = extractTaskIds(parallelExecBody);
628
+ const claimedUnitIds = extractUnitIds(parallelExecBody);
629
+ try {
630
+ for (const wave of parseParallelExecutionPlanWaves(raw)) {
631
+ for (const member of wave.members) {
632
+ if (/^U-\d+(?:[a-z][a-z0-9]*)?$/iu.test(member.unitId)) {
633
+ claimedUnitIds.add(member.unitId.toUpperCase());
634
+ }
635
+ }
636
+ }
637
+ }
638
+ catch {
639
+ // Duplicate/malformed wave plans are reported by the wave parser/status
640
+ // path; this coverage gate falls back to raw token extraction.
641
+ }
642
+ const useImplementationUnitCoverage = authoredUnitIds.size > 0;
508
643
  const uncovered = [];
509
- for (const id of authoredTaskIds) {
510
- if (claimedIds.has(id))
511
- continue;
512
- if (deferredIds.has(id))
513
- continue;
514
- uncovered.push(id);
644
+ if (useImplementationUnitCoverage) {
645
+ for (const id of authoredUnitIds) {
646
+ if (claimedUnitIds.has(id))
647
+ continue;
648
+ uncovered.push(id);
649
+ }
650
+ }
651
+ else {
652
+ for (const id of authoredTaskIds) {
653
+ if (claimedIds.has(id))
654
+ continue;
655
+ if (deferredIds.has(id))
656
+ continue;
657
+ uncovered.push(id);
658
+ }
515
659
  }
516
660
  uncovered.sort();
517
661
  const blockPresent = parallelExecBody.length > 0;
518
- const taskListPresent = authoredTaskIds.size > 0;
662
+ const coverageTargetPresent = useImplementationUnitCoverage || authoredTaskIds.size > 0;
663
+ const coverageTargetLabel = useImplementationUnitCoverage
664
+ ? "implementation unit"
665
+ : "task id";
519
666
  findings.push({
520
667
  section: "plan_parallel_exec_full_coverage",
521
- required: taskListPresent,
522
- rule: "Every T-NNN task in `## Task List` must be assigned to at least one slice inside the `<!-- parallel-exec-managed-start -->` block (or moved to an explicit `## Deferred Tasks` / `## Backlog` section). TDD cannot fan out waves the plan never authored.",
523
- found: taskListPresent && blockPresent && uncovered.length === 0,
524
- details: !taskListPresent
525
- ? "Task List section is empty or missing T-NNN ids; full-coverage check skipped."
668
+ required: coverageTargetPresent,
669
+ rule: "Every feature-atomic Implementation Unit (`U-*`) must be assigned to at least one slice/wave inside the `<!-- parallel-exec-managed-start -->` block. Legacy strict-micro plans without units may instead cover every non-deferred `T-NNN` task. TDD cannot fan out waves the plan never authored.",
670
+ found: coverageTargetPresent && blockPresent && uncovered.length === 0,
671
+ details: !coverageTargetPresent
672
+ ? "No Implementation Units or T-NNN task ids found; full-coverage check skipped."
526
673
  : !blockPresent
527
- ? "`<!-- parallel-exec-managed-start -->` block is missing or empty. Author the Parallel Execution Plan with W-02..W-N covering every task before plan-final-approval."
674
+ ? "`<!-- parallel-exec-managed-start -->` block is missing or empty. Author the Parallel Execution Plan with W-02..W-N covering every implementation unit/slice before plan-final-approval."
528
675
  : uncovered.length === 0
529
- ? `Parallel Execution Plan covers all ${authoredTaskIds.size} authored task id(s); ${deferredIds.size} task id(s) are explicitly deferred.`
530
- : `Uncovered task id(s) author waves for: ${uncovered.slice(0, 25).join(", ")}${uncovered.length > 25 ? `, … (${uncovered.length - 25} more)` : ""}. Either add slices for them inside <!-- parallel-exec-managed-start --> or move them under \`## Deferred Tasks\` with a reason.`
676
+ ? useImplementationUnitCoverage
677
+ ? `Parallel Execution Plan covers all ${authoredUnitIds.size} implementation unit(s); internal ${authoredTaskIds.size} T-NNN step(s) remain inside those units.`
678
+ : `Parallel Execution Plan covers all ${authoredTaskIds.size} authored task id(s); ${deferredIds.size} task id(s) are explicitly deferred.`
679
+ : `Uncovered ${coverageTargetLabel}(s) — author waves for: ${uncovered.slice(0, 25).join(", ")}${uncovered.length > 25 ? `, … (${uncovered.length - 25} more)` : ""}. ${useImplementationUnitCoverage ? "Add U-* rows/members inside <!-- parallel-exec-managed-start -->." : "Either add slices for them inside <!-- parallel-exec-managed-start --> or move them under `## Deferred Tasks` with a reason."}`
531
680
  });
532
681
  const waveMeta = parseParallelWaveTableMetadata(raw);
533
682
  const pathConflicts = [];
@@ -547,11 +696,11 @@ export async function lintPlanStage(ctx) {
547
696
  }
548
697
  findings.push({
549
698
  section: "plan_wave_paths_disjoint",
550
- required: taskListPresent,
699
+ required: coverageTargetPresent,
551
700
  rule: "Slices within the same wave must keep `claimedPaths` disjoint so TDD can safely fan out parallel slice-builders.",
552
- found: taskListPresent && blockPresent && pathConflicts.length === 0,
553
- details: !taskListPresent
554
- ? "Task List section is empty or missing T-NNN ids; disjoint-path wave check skipped."
701
+ found: coverageTargetPresent && blockPresent && pathConflicts.length === 0,
702
+ details: !coverageTargetPresent
703
+ ? "No Implementation Units or T-NNN task ids found; disjoint-path wave check skipped."
555
704
  : !blockPresent
556
705
  ? "`<!-- parallel-exec-managed-start -->` block is missing or empty; cannot validate wave path disjointness."
557
706
  : pathConflicts.length === 0
@@ -600,7 +749,7 @@ export async function lintPlanStage(ctx) {
600
749
  : `Serial slice(s) found without sequential wave mode hints in: ${inconsistentParallelizable.join(", ")}. Add a wave mode/note indicating sequential execution.`
601
750
  });
602
751
  const mermaidBlocks = raw.match(/```mermaid[\s\S]*?```/giu) ?? [];
603
- const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d+\b/iu.test(block));
752
+ const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d+(?:[a-z][a-z0-9]*)?\b/iu.test(block));
604
753
  findings.push({
605
754
  section: "plan_parallel_exec_mermaid_present",
606
755
  required: false,
@@ -610,5 +759,68 @@ export async function lintPlanStage(ctx) {
610
759
  ? "Mermaid visualization for parallel execution waves is present."
611
760
  : "No mermaid parallel-execution visualization found (advisory). Add a ` ```mermaid ` flowchart or gantt with W-* and S-* nodes."
612
761
  });
762
+ // 7.6.0 — plan_module_introducing_slice_wires_root.
763
+ // Stack-aware: stack-adapter exposes a `wiringAggregator` contract
764
+ // for stacks where introducing a new module file requires a
765
+ // sibling aggregator update (Rust lib.rs, Python __init__.py,
766
+ // optional Node-TS index.ts). For each NEW path in a slice's
767
+ // claim, if the adapter says an aggregator is required, the
768
+ // aggregator path must appear in the slice's own claim or in any
769
+ // transitive predecessor's claim within the same flow.
770
+ //
771
+ // For unknown stacks (Go, Java, Ruby, Swift, .NET, Elixir, …)
772
+ // the adapter returns `wiringAggregator: undefined`, so this
773
+ // gate is a no-op and `found: true`.
774
+ const stackAdapter = await loadStackAdapter(projectRoot);
775
+ const headFiles = await readHeadFiles(projectRoot);
776
+ const wiringIssues = [];
777
+ if (stackAdapter.wiringAggregator) {
778
+ const claimGraph = buildSliceClaimGraph(waveMeta);
779
+ for (const wave of waveMeta) {
780
+ for (const row of [...wave.rows].sort((a, b) => compareSliceIds(a.sliceId, b.sliceId))) {
781
+ const predecessors = transitivePredecessors(row.sliceId, claimGraph);
782
+ const predecessorClaims = new Set();
783
+ for (const predId of predecessors) {
784
+ const predRow = claimGraph.bySliceId.get(predId);
785
+ if (!predRow)
786
+ continue;
787
+ for (const claim of predRow.claimedPaths) {
788
+ predecessorClaims.add(normalizePathToken(claim));
789
+ }
790
+ }
791
+ const ownClaims = new Set(row.claimedPaths.map(normalizePathToken));
792
+ for (const rawClaim of row.claimedPaths) {
793
+ const claim = normalizePathToken(rawClaim);
794
+ if (claim.length === 0)
795
+ continue;
796
+ // Only NEW paths (not present at HEAD) require an
797
+ // aggregator update — existing modules are already wired.
798
+ if (headFiles.size > 0 && headFiles.has(claim))
799
+ continue;
800
+ const required = stackAdapter.wiringAggregator.resolveAggregatorFor(claim, { headFiles });
801
+ if (!required)
802
+ continue;
803
+ const aggregatorPath = normalizePathToken(required);
804
+ if (ownClaims.has(aggregatorPath))
805
+ continue;
806
+ if (predecessorClaims.has(aggregatorPath))
807
+ continue;
808
+ wiringIssues.push(`${wave.waveId}/${row.sliceId} introduces ${claim} but wiring aggregator ${aggregatorPath} is not in its claim or any predecessor's claim`);
809
+ }
810
+ }
811
+ }
812
+ }
813
+ const wiringApplies = stackAdapter.wiringAggregator !== undefined;
814
+ findings.push({
815
+ section: "plan_module_introducing_slice_wires_root",
816
+ required: coverageTargetPresent && wiringApplies,
817
+ rule: "When a slice introduces a new module file, the stack-adapter's wiring aggregator (e.g. Rust `lib.rs`, Python `__init__.py`, Node-TS barrel `index.*` when present) must be in the same slice's claim or in a transitive predecessor's claim, otherwise the new module is dead code and RED can't be expressed.",
818
+ found: !wiringApplies || wiringIssues.length === 0,
819
+ details: !wiringApplies
820
+ ? `Stack adapter (id=${stackAdapter.id}) does not declare a wiring aggregator; gate is a no-op for this stack.`
821
+ : wiringIssues.length === 0
822
+ ? `Stack adapter (id=${stackAdapter.id}) wiring aggregator coverage verified across all wave slices.`
823
+ : `Wiring aggregator coverage gaps: ${wiringIssues.slice(0, 12).join(" | ")}${wiringIssues.length > 12 ? ` | … (${wiringIssues.length - 12} more)` : ""}.`
824
+ });
613
825
  }
614
826
  }
@@ -6,6 +6,7 @@ import { loadTddReadySlicePool, readDelegationLedger, readDelegationEvents, sele
6
6
  import { resolveArtifactPath as resolveStageArtifactPath } from "../artifact-paths.js";
7
7
  import { exists } from "../fs-utils.js";
8
8
  import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "../internal/plan-split-waves.js";
9
+ import { compareSliceIds } from "../util/slice-id.js";
9
10
  import { extractAcceptanceCriterionIdsFromMarkdown, extractH2Sections, evaluateInvestigationTrace, sectionBodyByName } from "./shared.js";
10
11
  const SLICE_SUMMARY_START = "<!-- auto-start: tdd-slice-summary -->";
11
12
  const SLICE_SUMMARY_END = "<!-- auto-end: tdd-slice-summary -->";
@@ -509,7 +510,7 @@ async function listSliceFiles(slicesDir) {
509
510
  continue;
510
511
  files.push({ sliceId: match[1], absPath: path.join(slicesDir, name) });
511
512
  }
512
- files.sort((a, b) => (a.sliceId < b.sliceId ? -1 : a.sliceId > b.sliceId ? 1 : 0));
513
+ files.sort((a, b) => compareSliceIds(a.sliceId, b.sliceId));
513
514
  return files;
514
515
  }
515
516
  function escapeForRegex(value) {
@@ -995,9 +996,9 @@ export async function evaluateWavePlanDispatchIgnored(params) {
995
996
  return {
996
997
  section: "tdd_wave_plan_ignored",
997
998
  required: true,
998
- rule: "When the Parallel Execution Plan (or wave-plans/) defines an open wave with two or more ready parallelizable slices, the controller must fan out slice-builder Tasks for each ready slice instead of serializing to one slice only.",
999
+ rule: "When the Parallel Execution Plan (or wave-plans/) defines an open wave with two or more ready parallelizable units/slices, the controller must honor the parallel-builders topology instead of serializing to one slice only.",
999
1000
  found: false,
1000
- details: `Wave ${wave.waveId}: scheduler-ready members ${ready.map((r) => r.sliceId).join(", ")}; last 20 delegation events show slice workers only for ${only}. Missed parallel dispatch: ${missed.join(", ")}. Remediation: load \`05-plan.md\` (Parallel Execution Plan) and \`wave-plans/\` before routing, launch the wave (AskQuestion only when waveCount>=2 and single-slice is a real alternative), then dispatch workers for every ready slice.`
1001
+ details: `Wave ${wave.waveId}: scheduler-ready members ${ready.map((r) => r.sliceId).join(", ")}; last 20 delegation events show slice workers only for ${only}. Missed parallel dispatch: ${missed.join(", ")}. Remediation: load \`05-plan.md\` (Parallel Execution Plan) and \`wave-plans/\` before routing, honor \`nextDispatch.topology=parallel-builders\`, then dispatch the routed ready builders in one controller message.`
1001
1002
  };
1002
1003
  }
1003
1004
  return null;
package/dist/config.d.ts CHANGED
@@ -1,9 +1,20 @@
1
- import type { CclawConfig, FlowTrack, HarnessId, LanguageRulePack, TddCommitMode, TddIsolationMode } from "./types.js";
1
+ import type { CclawConfig, ExecutionStrictnessProfile, ExecutionTopology, FlowTrack, HarnessId, LanguageRulePack, LockfileTwinPolicy, PlanMicroTaskPolicy, PlanSliceGranularity, TddCommitMode, TddIsolationMode } from "./types.js";
2
2
  export declare const TDD_COMMIT_MODES: readonly ["managed-per-slice", "agent-required", "checkpoint-only", "off"];
3
3
  export declare const DEFAULT_TDD_COMMIT_MODE: TddCommitMode;
4
4
  export declare const TDD_ISOLATION_MODES: readonly ["worktree", "in-place", "auto"];
5
5
  export declare const DEFAULT_TDD_ISOLATION_MODE: TddIsolationMode;
6
6
  export declare const DEFAULT_TDD_WORKTREE_ROOT = ".cclaw/worktrees";
7
+ export declare const LOCKFILE_TWIN_POLICIES: readonly ["auto-include", "auto-revert", "strict-fence"];
8
+ export declare const DEFAULT_LOCKFILE_TWIN_POLICY: LockfileTwinPolicy;
9
+ export declare const EXECUTION_TOPOLOGIES: readonly ["auto", "inline", "single-builder", "parallel-builders", "strict-micro"];
10
+ export declare const DEFAULT_EXECUTION_TOPOLOGY: ExecutionTopology;
11
+ export declare const EXECUTION_STRICTNESS_PROFILES: readonly ["fast", "balanced", "strict"];
12
+ export declare const DEFAULT_EXECUTION_STRICTNESS: ExecutionStrictnessProfile;
13
+ export declare const DEFAULT_MAX_BUILDERS = 5;
14
+ export declare const PLAN_SLICE_GRANULARITIES: readonly ["feature-atomic", "strict-micro"];
15
+ export declare const DEFAULT_PLAN_SLICE_GRANULARITY: PlanSliceGranularity;
16
+ export declare const PLAN_MICRO_TASK_POLICIES: readonly ["advisory", "strict"];
17
+ export declare const DEFAULT_PLAN_MICRO_TASK_POLICY: PlanMicroTaskPolicy;
7
18
  export declare const DEFAULT_TDD_TEST_PATH_PATTERNS: readonly string[];
8
19
  export declare const DEFAULT_TDD_TEST_GLOBS: readonly string[];
9
20
  export declare const DEFAULT_TDD_PRODUCTION_PATH_PATTERNS: readonly string[];
@@ -24,6 +35,12 @@ export declare function createDefaultConfig(harnesses?: HarnessId[], _defaultTra
24
35
  export declare function resolveTddCommitMode(config: Pick<CclawConfig, "tdd"> | null | undefined): TddCommitMode;
25
36
  export declare function resolveTddIsolationMode(config: Pick<CclawConfig, "tdd"> | null | undefined): TddIsolationMode;
26
37
  export declare function resolveTddWorktreeRoot(config: Pick<CclawConfig, "tdd"> | null | undefined): string;
38
+ export declare function resolveLockfileTwinPolicy(config: Pick<CclawConfig, "tdd"> | null | undefined): LockfileTwinPolicy;
39
+ export declare function resolveExecutionTopology(config: Pick<CclawConfig, "execution"> | null | undefined): ExecutionTopology;
40
+ export declare function resolveExecutionStrictness(config: Pick<CclawConfig, "execution"> | null | undefined): ExecutionStrictnessProfile;
41
+ export declare function resolveMaxBuilders(config: Pick<CclawConfig, "execution"> | null | undefined): number;
42
+ export declare function resolvePlanSliceGranularity(config: Pick<CclawConfig, "plan"> | null | undefined): PlanSliceGranularity;
43
+ export declare function resolvePlanMicroTaskPolicy(config: Pick<CclawConfig, "plan"> | null | undefined): PlanMicroTaskPolicy;
27
44
  export declare function detectLanguageRulePacks(_projectRoot: string): Promise<LanguageRulePack[]>;
28
45
  export declare function readConfig(projectRoot: string, _options?: ReadConfigOptions): Promise<CclawConfig>;
29
46
  export interface WriteConfigOptions {