cclaw-cli 7.1.0 → 7.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/plan.js +187 -1
- package/dist/content/skills.js +2 -2
- package/dist/content/stage-schema.js +1 -0
- package/dist/content/stages/plan.js +3 -0
- package/dist/content/start-command.js +2 -2
- package/dist/internal/wave-status.d.ts +1 -1
- package/dist/internal/wave-status.js +99 -2
- package/package.json +1 -1
|
@@ -8,6 +8,14 @@ import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImpleme
|
|
|
8
8
|
// Sentinel HTML comments that delimit the machine-managed Parallel
// Execution Plan region inside the plan markdown; only text between
// them is parsed by this linter.
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
// Matches task ids of the form T-123, T-123b, or T-123.45.
// NOTE(review): the `g` flag makes `.test()`/`.exec()` stateful via
// `lastIndex`; callers should prefer `String.prototype.match`/`matchAll`
// or reset `lastIndex` between uses.
const TASK_ID_PATTERN = /\bT-\d{3}[a-z]?(?:\.\d{1,3})?\b/giu;
|
|
11
|
+
/**
 * Approved `lane` values for slice rows in the managed Parallel
 * Execution Plan. Any declared lane outside this set is reported by
 * the `plan_lane_meaningful` finding.
 */
const PLAN_LANE_WHITELIST = new Set(
    ["production", "test", "docs", "infra", "scaffold", "migration"]
);
|
|
11
19
|
/**
|
|
12
20
|
* Extract every distinct T-NNN[a-z]?(.NNN)? id from a markdown body.
|
|
13
21
|
*
|
|
@@ -36,6 +44,102 @@ function extractParallelExecManagedBody(planMarkdown) {
|
|
|
36
44
|
}
|
|
37
45
|
return planMarkdown.slice(startIdx + PARALLEL_EXEC_MANAGED_START.length, endIdx);
|
|
38
46
|
}
|
|
47
|
+
/**
 * Normalize one claimed-path cell token: strip surrounding whitespace,
 * markdown backticks, and a leading "./" prefix.
 *
 * @param {string} raw - Raw comma-separated fragment from a table cell.
 * @returns {string} Normalized path token ("" when nothing remains).
 */
function normalizePathToken(raw) {
    return raw
        .trim()
        .replace(/^`|`$/gu, "")
        .replace(/^\.\/+/u, "")
        // Fix: re-trim so backtick-wrapped tokens with inner padding
        // (e.g. "` src/a.js `") do not keep stray spaces that would
        // defeat the disjoint-path overlap comparison.
        .trim();
}
|
|
50
|
+
/**
 * Split a markdown table row ("| a | b |") into trimmed cell strings.
 * Leading and trailing pipe delimiters are discarded before splitting.
 *
 * @param {string} trimmedLine - Row text already trimmed by the caller.
 * @returns {string[]} Cell contents, each trimmed.
 */
function parsePipeRow(trimmedLine) {
    const withoutLeadingPipe = trimmedLine.replace(/^\|/u, "");
    const rowBody = withoutLeadingPipe.replace(/\|\s*$/u, "");
    const rawCells = rowBody.split("|");
    return rawCells.map((cell) => cell.trim());
}
|
|
54
|
+
/**
 * Build a lookup from normalized header-cell name to column index.
 * Names are lowercased with every non-alphanumeric character removed,
 * so "Slice ID" and "sliceId" both normalize to "sliceid". The first
 * occurrence of a normalized name wins; empty names are skipped.
 *
 * @param {string[]} cells - Header row cells.
 * @returns {Map<string, number>} normalized name -> column index.
 */
function headerIndexByName(cells) {
    const byName = new Map();
    for (const [index, cell] of cells.entries()) {
        const normalized = cell.toLowerCase().replace(/[^a-z0-9]/gu, "");
        if (normalized.length === 0 || byName.has(normalized)) {
            continue;
        }
        byName.set(normalized, index);
    }
    return byName;
}
|
|
64
|
+
/**
 * Parse the managed Parallel Execution Plan block into per-wave
 * metadata used by the atomic wave checks (disjoint claimedPaths,
 * lane whitelist, parallelizable consistency).
 *
 * @param {string} planMarkdown - Full plan artifact markdown.
 * @returns {Array<{waveId: string, rows: Array<{sliceId: string,
 *   unit: string, claimedPaths: string[], parallelizable: boolean|null,
 *   lane: string|null}>, notes: string[]}>} One entry per `### Wave N`
 *   heading, in document order; empty when the managed block is absent
 *   or blank.
 */
function parseParallelWaveTableMetadata(planMarkdown) {
    const body = extractParallelExecManagedBody(planMarkdown);
    if (body.trim().length === 0)
        return [];
    const lines = body.split(/\r?\n/u);
    const out = [];
    // Wave currently being accumulated; null until the first heading.
    let current = null;
    // Column map from the most recent header row of the current wave.
    let headerIdx = null;
    const flush = () => {
        if (current)
            out.push(current);
    };
    for (const rawLine of lines) {
        const trimmed = rawLine.trim();
        // "### Wave 3" or "### Wave W-3" starts a new wave; the numeric
        // part is zero-padded into a canonical W-NN id.
        const waveMatch = /^###\s+Wave\s+(?:W-)?(\d+)\b/iu.exec(trimmed);
        if (waveMatch) {
            flush();
            current = {
                waveId: `W-${waveMatch[1].padStart(2, "0")}`,
                rows: [],
                notes: []
            };
            headerIdx = null;
            continue;
        }
        // Lines before the first wave heading are ignored entirely.
        if (!current)
            continue;
        // Every line under a wave (including table rows) is kept as a
        // note; waveHasSequentialModeHint scans this text later.
        current.notes.push(trimmed);
        if (!trimmed.startsWith("|"))
            continue;
        const cells = parsePipeRow(trimmed);
        if (cells.length === 0)
            continue;
        const first = cells[0].toLowerCase();
        const isHeader = first === "sliceid" || first === "slice id";
        if (isHeader) {
            headerIdx = headerIndexByName(cells);
            continue;
        }
        // Skip the markdown alignment row (| --- | :--: | ... |).
        if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
            continue;
        }
        const sliceCell = cells[0];
        // Only rows whose first cell is an S-N slice id are data rows.
        if (!/^S-\d+$/iu.test(sliceCell))
            continue;
        // If no header row was seen, fall back to positional defaults
        // (unit column defaults to index 1 below).
        const idx = headerIdx ?? new Map();
        const unitIdx = idx.get("unit") ?? idx.get("taskid") ?? 1;
        const pathsIdx = idx.get("claimedpaths");
        const parallelizableIdx = idx.get("parallelizable");
        const laneIdx = idx.get("lane");
        const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
        const claimedPaths = rawPaths.length === 0
            ? []
            : rawPaths
                .split(",")
                .map((p) => normalizePathToken(p))
                .filter((p) => p.length > 0);
        // parallelizable is tri-state: true/yes, false/no, or null
        // (column absent or unrecognized value).
        const rawParallel = parallelizableIdx !== undefined ? (cells[parallelizableIdx] ?? "").toLowerCase() : "";
        let parallelizable = null;
        if (rawParallel === "true" || rawParallel === "yes")
            parallelizable = true;
        if (rawParallel === "false" || rawParallel === "no")
            parallelizable = false;
        const laneRaw = laneIdx !== undefined ? (cells[laneIdx] ?? "").trim().toLowerCase() : "";
        current.rows.push({
            sliceId: sliceCell.toUpperCase(),
            unit: (cells[unitIdx] ?? "").trim(),
            claimedPaths,
            parallelizable,
            lane: laneRaw.length > 0 ? laneRaw : null
        });
    }
    // Push the final wave (the loop only flushes on the NEXT heading).
    flush();
    return out;
}
|
|
139
|
+
/**
 * Advisory heuristic: does this wave's collected note text signal that
 * it should run sequentially? Matches an explicit "mode: sequential"
 * marker (also catching forms like "mode: sequentially") or the
 * standalone words "sequential" / "serial" anywhere in the notes.
 *
 * @param {{notes: string[]}} wave - Parsed wave with its raw note lines.
 * @returns {boolean} true when any sequential/serial hint is present.
 */
function waveHasSequentialModeHint(wave) {
    const haystack = wave.notes.join("\n").toLowerCase();
    const hintPatterns = [
        /mode\s*:\s*sequential/iu,
        /\bsequential\b/iu,
        /\bserial\b/iu
    ];
    return hintPatterns.some((pattern) => pattern.test(haystack));
}
|
|
39
143
|
export async function lintPlanStage(ctx) {
|
|
40
144
|
const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride } = ctx;
|
|
41
145
|
evaluateInvestigationTrace(ctx, "Implementation Units");
|
|
@@ -308,7 +412,8 @@ export async function lintPlanStage(ctx) {
|
|
|
308
412
|
: "Parallel-ready units detected or plan is single-unit."
|
|
309
413
|
});
|
|
310
414
|
}
|
|
311
|
-
// plan_parallel_exec_full_coverage
|
|
415
|
+
// plan_parallel_exec_full_coverage + atomic wave metadata checks.
|
|
416
|
+
// Every T-NNN task listed in the
|
|
312
417
|
// plan's Task List must be assigned to a slice inside the
|
|
313
418
|
// <!-- parallel-exec-managed-start --> block. Without this, TDD
|
|
314
419
|
// cannot fan out work the plan never authored as waves; the previous
|
|
@@ -355,5 +460,86 @@ export async function lintPlanStage(ctx) {
|
|
|
355
460
|
? `Parallel Execution Plan covers all ${authoredTaskIds.size} authored task id(s); ${deferredIds.size} task id(s) are explicitly deferred.`
|
|
356
461
|
: `Uncovered task id(s) — author waves for: ${uncovered.slice(0, 25).join(", ")}${uncovered.length > 25 ? `, … (${uncovered.length - 25} more)` : ""}. Either add slices for them inside <!-- parallel-exec-managed-start --> or move them under \`## Deferred Tasks\` with a reason.`
|
|
357
462
|
});
|
|
463
|
+
const waveMeta = parseParallelWaveTableMetadata(raw);
|
|
464
|
+
const pathConflicts = [];
|
|
465
|
+
for (const wave of waveMeta) {
|
|
466
|
+
const rows = wave.rows;
|
|
467
|
+
for (let i = 0; i < rows.length; i += 1) {
|
|
468
|
+
for (let j = i + 1; j < rows.length; j += 1) {
|
|
469
|
+
const left = rows[i];
|
|
470
|
+
const right = rows[j];
|
|
471
|
+
const rightPathSet = new Set(right.claimedPaths);
|
|
472
|
+
const overlap = left.claimedPaths.filter((p) => rightPathSet.has(p));
|
|
473
|
+
if (overlap.length === 0)
|
|
474
|
+
continue;
|
|
475
|
+
pathConflicts.push(`${wave.waveId} ${left.sliceId}<->${right.sliceId} overlap: ${overlap.join(", ")}`);
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
}
|
|
479
|
+
findings.push({
|
|
480
|
+
section: "plan_wave_paths_disjoint",
|
|
481
|
+
required: taskListPresent,
|
|
482
|
+
rule: "Slices within the same wave must keep `claimedPaths` disjoint so TDD can safely fan out parallel slice-builders.",
|
|
483
|
+
found: taskListPresent && blockPresent && pathConflicts.length === 0,
|
|
484
|
+
details: !taskListPresent
|
|
485
|
+
? "Task List section is empty or missing T-NNN ids; disjoint-path wave check skipped."
|
|
486
|
+
: !blockPresent
|
|
487
|
+
? "`<!-- parallel-exec-managed-start -->` block is missing or empty; cannot validate wave path disjointness."
|
|
488
|
+
: pathConflicts.length === 0
|
|
489
|
+
? "All parsed same-wave slice rows have disjoint claimedPaths."
|
|
490
|
+
: `Overlapping claimedPaths detected: ${pathConflicts.slice(0, 12).join(" | ")}${pathConflicts.length > 12 ? ` | … (${pathConflicts.length - 12} more)` : ""}.`
|
|
491
|
+
});
|
|
492
|
+
const invalidLanes = [];
|
|
493
|
+
for (const wave of waveMeta) {
|
|
494
|
+
for (const row of wave.rows) {
|
|
495
|
+
if (!row.lane)
|
|
496
|
+
continue;
|
|
497
|
+
if (!PLAN_LANE_WHITELIST.has(row.lane)) {
|
|
498
|
+
invalidLanes.push(`${wave.waveId}/${row.sliceId}:${row.lane}`);
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
findings.push({
|
|
503
|
+
section: "plan_lane_meaningful",
|
|
504
|
+
required: false,
|
|
505
|
+
rule: "When a lane is declared, it must be one of: production, test, docs, infra, scaffold, migration.",
|
|
506
|
+
found: invalidLanes.length === 0,
|
|
507
|
+
details: invalidLanes.length === 0
|
|
508
|
+
? "All declared lane values are either omitted or in the approved lane whitelist."
|
|
509
|
+
: `Invalid lane value(s): ${invalidLanes.join(", ")}. Remove lane or use a whitelisted value.`
|
|
510
|
+
});
|
|
511
|
+
const inconsistentParallelizable = [];
|
|
512
|
+
for (const wave of waveMeta) {
|
|
513
|
+
const hasSerialSlice = wave.rows.some((row) => row.parallelizable === false);
|
|
514
|
+
if (!hasSerialSlice)
|
|
515
|
+
continue;
|
|
516
|
+
if (!waveHasSequentialModeHint(wave)) {
|
|
517
|
+
const serialSlices = wave.rows
|
|
518
|
+
.filter((row) => row.parallelizable === false)
|
|
519
|
+
.map((row) => row.sliceId)
|
|
520
|
+
.join(", ");
|
|
521
|
+
inconsistentParallelizable.push(`${wave.waveId} [${serialSlices}]`);
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
findings.push({
|
|
525
|
+
section: "plan_parallelizable_consistency",
|
|
526
|
+
required: false,
|
|
527
|
+
rule: "Waves containing `parallelizable: false` slices should be explicitly marked sequential in wave notes/mode.",
|
|
528
|
+
found: inconsistentParallelizable.length === 0,
|
|
529
|
+
details: inconsistentParallelizable.length === 0
|
|
530
|
+
? "No serial slices were found outside a sequentially-labeled wave context."
|
|
531
|
+
: `Serial slice(s) found without sequential wave mode hints in: ${inconsistentParallelizable.join(", ")}. Add a wave mode/note indicating sequential execution.`
|
|
532
|
+
});
|
|
533
|
+
const mermaidBlocks = raw.match(/```mermaid[\s\S]*?```/giu) ?? [];
|
|
534
|
+
const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d+\b/iu.test(block));
|
|
535
|
+
findings.push({
|
|
536
|
+
section: "plan_parallel_exec_mermaid_present",
|
|
537
|
+
required: false,
|
|
538
|
+
rule: "Plan should include a mermaid flowchart/gantt for parallel waves and slice dependencies to make fanout shape visually reviewable.",
|
|
539
|
+
found: hasParallelExecMermaid,
|
|
540
|
+
details: hasParallelExecMermaid
|
|
541
|
+
? "Mermaid visualization for parallel execution waves is present."
|
|
542
|
+
: "No mermaid parallel-execution visualization found (advisory). Add a ` ```mermaid ` flowchart or gantt with W-* and S-* nodes."
|
|
543
|
+
});
|
|
358
544
|
}
|
|
359
545
|
}
|
package/dist/content/skills.js
CHANGED
|
@@ -188,14 +188,14 @@ export function tddTopOfSkillBlock(stage) {
|
|
|
188
188
|
**Step 1 — Wave status (always first):**
|
|
189
189
|
\`node .cclaw/cli.mjs internal wave-status --json\`
|
|
190
190
|
|
|
191
|
-
The output names: \`waves[]\` (closed/open), \`nextDispatch.waveId\`, \`nextDispatch.mode\` (\`wave-fanout
|
|
191
|
+
The output names: \`waves[]\` (closed/open), \`nextDispatch.waveId\`, \`nextDispatch.mode\` (\`wave-fanout\`, \`single-slice\`, or \`blocked\`), \`nextDispatch.readyToDispatch\` (slice ids), and \`nextDispatch.pathConflicts\` (overlapping \`claimedPaths\` between members).
|
|
192
192
|
|
|
193
193
|
**Step 2 — Decide automatically (no user question when paths disjoint):**
|
|
194
194
|
|
|
195
195
|
| \`mode\` | \`pathConflicts\` | Action |
|
|
196
196
|
|------------------|-------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
|
|
197
197
|
| \`wave-fanout\` | \`[]\` | **Fan out the entire wave in one tool batch.** Emit one \`Task\` per ready slice in a single controller message. Do NOT ask the user. |
|
|
198
|
-
| \`
|
|
198
|
+
| \`blocked\` | non-empty | Issue exactly one AskQuestion (resolve overlap, split/serialize, or adjust claimedPaths), then re-run \`wave-status\`. |
|
|
199
199
|
| \`single-slice\` | — | One \`Task\` for the next ready slice. |
|
|
200
200
|
|
|
201
201
|
**Step 3 — Dispatch protocol per slice:** in the SAME controller message that issues the \`Task\` call:
|
|
@@ -52,6 +52,7 @@ export const PLAN = {
|
|
|
52
52
|
"Define validation points — mark where progress must be checked before continuing, with concrete command and expected evidence.",
|
|
53
53
|
"Define execution posture — record whether execution should be sequential, dependency-batched, parallel-safe, or blocked; include risk triggers and RED/GREEN/REFACTOR checkpoint/commit expectations when the repo workflow supports them. This fulfills the `plan_execution_posture_recorded` gate.",
|
|
54
54
|
"**Author the FULL Parallel Execution Plan.** Inside the `<!-- parallel-exec-managed-start -->` block, enumerate ALL waves W-02..W-N covering EVERY T-NNN task in `## Task List` — no `we'll author waves later`, `next batch only`, or open-ended Backlog handwave is acceptable. Each task gets a slice with `sliceId | taskId | dependsOn | claimedPaths | parallelizable | riskTier | lane`. Spike rows (`S-N`) and tasks marked `deferred` in an explicit `Deferred:` column may be omitted, but every other T-NNN must be claimed. This fulfills the `plan_parallel_exec_full_coverage` gate. The TDD stage downstream is a pure consumer of these waves — if the plan does not author them, TDD cannot fan out that work.",
|
|
55
|
+
"After authoring/refreshing the managed parallel-exec block, render a Mermaid `flowchart` or `gantt` covering waves (`W-*`) and slice dependencies (`S-*`) so parallelism and fan-in boundaries are visually auditable.",
|
|
55
56
|
"WAIT_FOR_CONFIRM — write plan artifact and explicitly pause. **STOP.** Do NOT proceed until user confirms. Then close the stage with `node .cclaw/hooks/stage-complete.mjs plan` and tell user to run `/cc`."
|
|
56
57
|
],
|
|
57
58
|
interactionProtocol: [
|
|
@@ -59,6 +60,7 @@ export const PLAN = {
|
|
|
59
60
|
"Split work into small vertical slices (target 2-5 minute tasks).",
|
|
60
61
|
"Publish explicit dependency batches with entry and exit checks for each batch.",
|
|
61
62
|
"Expose execution posture: sequential vs batch/parallel, stop conditions, and checkpoint cadence for the TDD handoff.",
|
|
63
|
+
"Keep same-wave `claimedPaths` disjoint; if overlap exists, split waves or serialize explicitly before handoff.",
|
|
62
64
|
"Attach exact verification command/manual step and expected evidence to every task.",
|
|
63
65
|
"Preserve locked scope boundaries: no silent scope reduction language in task rows.",
|
|
64
66
|
"Enforce WAIT_FOR_CONFIRM: present the plan summary with options (A) Approve / (B) Revise / (C) Reject.",
|
|
@@ -82,6 +84,7 @@ export const PLAN = {
|
|
|
82
84
|
{ id: "plan_acceptance_mapped", description: "Each task maps to a spec acceptance criterion." },
|
|
83
85
|
{ id: "plan_execution_posture_recorded", description: "Execution posture is recorded before implementation handoff." },
|
|
84
86
|
{ id: "plan_parallel_exec_full_coverage", description: "Every T-NNN task in `## Task List` (other than spikes/explicitly-deferred) is assigned to at least one slice inside the `<!-- parallel-exec-managed-start -->` block; TDD cannot fan out work that the plan never authored as waves." },
|
|
87
|
+
{ id: "plan_wave_paths_disjoint", description: "Within each authored wave, slice `claimedPaths` remain disjoint so `wave-fanout` can dispatch safely without overlap conflicts." },
|
|
85
88
|
{ id: "plan_wait_for_confirm", description: "Execution blocked until explicit user confirmation." }
|
|
86
89
|
],
|
|
87
90
|
requiredEvidence: [
|
|
@@ -119,7 +119,7 @@ If during any stage the agent discovers evidence that contradicts the initial Ph
|
|
|
119
119
|
|
|
120
120
|
**The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the entire wave in a SINGLE controller message: one harness \`Task(subagent_type=…, description="slice-builder S-<id>", prompt=<full slice context>)\` call per ready slice, **side by side in the same tool batch**. Each \`slice-builder\` span owns the full RED → GREEN → REFACTOR → DOC cycle for its slice and emits its own \`delegation-record --phase=red|green|refactor|refactor-deferred|doc\` rows. RED-before-GREEN is enforced per-slice by the linter.
|
|
121
121
|
|
|
122
|
-
When \`mode:
|
|
122
|
+
When \`mode: blocked\` with \`pathConflicts\`, surface exactly one AskQuestion that lets the user resolve the overlap (drop / split / serialize). When \`mode: single-slice\`, dispatch one \`Task\` for the next ready slice.
|
|
123
123
|
|
|
124
124
|
6. **Auto-advance after stage-complete:** when \`stage-complete\` returns \`ok\` with a new \`currentStage\`, immediately load the next stage skill and continue without waiting for the user to retype \`/cc\`. Announce \`Stage <prev> complete → entering <next>. Continuing.\` and proceed.
|
|
125
125
|
|
|
@@ -214,7 +214,7 @@ Progress the tracked flow only when one exists:
|
|
|
214
214
|
2. If missing, guide the user to run \`npx cclaw-cli init\` and stop.
|
|
215
215
|
3. If it is only a fresh init placeholder (\`completedStages: []\`, no passed gates, and no \`${RUNTIME_ROOT}/artifacts/00-idea.md\`), stop and ask for \`/cc <prompt>\` to start a tracked run. Do not silently create a brainstorm run.
|
|
216
216
|
4. Check gates for \`currentStage\`.
|
|
217
|
-
5. **TDD:** When \`currentStage\` is \`tdd\`, run \`wave-status --json\`, then reconcile the managed **Parallel Execution Plan** in \`05-plan.md\` with \`wave-plans/wave-NN.md\`. **The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the wave in a SINGLE controller message — one \`Task\` per ready slice, side by side. Each \`slice-builder\` span owns its full RED → GREEN → REFACTOR → DOC cycle. Mirror plan \`dependsOn\` ordering between waves.
|
|
217
|
+
5. **TDD:** When \`currentStage\` is \`tdd\`, run \`wave-status --json\`, then reconcile the managed **Parallel Execution Plan** in \`05-plan.md\` with \`wave-plans/wave-NN.md\`. **The controller never edits production code in TDD.** When \`mode: wave-fanout\` and \`pathConflicts: []\`, fan out the wave in a SINGLE controller message — one \`Task\` per ready slice, side by side. If \`mode: blocked\`, resolve overlaps first. Each \`slice-builder\` span owns its full RED → GREEN → REFACTOR → DOC cycle. Mirror plan \`dependsOn\` ordering between waves.
|
|
218
218
|
6. **Wave resume:** Parallelize unfinished members; never restart completed lanes. Integration-overseer follows \`integrationCheckRequired\`; when skipped, emit \`cclaw_integration_overseer_skipped\` per the hook contract.
|
|
219
219
|
7. If incomplete → load current stage skill and execute.
|
|
220
220
|
8. If complete → advance to next stage and execute. **Auto-advance:** when \`stage-complete\` returns \`ok\`, immediately load the next stage skill and continue without waiting for the user to retype \`/cc\`.
|
|
@@ -16,7 +16,7 @@ export interface WaveStatusNextDispatch {
|
|
|
16
16
|
waveId: string | null;
|
|
17
17
|
readyToDispatch: string[];
|
|
18
18
|
pathConflicts: string[];
|
|
19
|
-
mode: "single-slice" | "wave-fanout" | "none";
|
|
19
|
+
mode: "single-slice" | "wave-fanout" | "blocked" | "none";
|
|
20
20
|
}
|
|
21
21
|
export interface WaveStatusReport {
|
|
22
22
|
activeRunId: string;
|
|
@@ -4,6 +4,8 @@ import { RUNTIME_ROOT } from "../constants.js";
|
|
|
4
4
|
import { readDelegationEvents, readDelegationLedger } from "../delegation.js";
|
|
5
5
|
import { readFlowState } from "../runs.js";
|
|
6
6
|
import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "./plan-split-waves.js";
|
|
7
|
+
// Sentinel HTML comments delimiting the managed Parallel Execution Plan
// block in the plan markdown. The same literal values appear in the
// plan artifact linter; keep them in sync.
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
7
9
|
function parseArgs(tokens) {
|
|
8
10
|
const args = { format: "json" };
|
|
9
11
|
for (const token of tokens) {
|
|
@@ -33,6 +35,92 @@ function classifyWaveStatus(total, closedCount) {
|
|
|
33
35
|
return "closed";
|
|
34
36
|
return "partial";
|
|
35
37
|
}
|
|
38
|
+
/**
 * Split a markdown table row ("| a | b |") into trimmed cell strings,
 * discarding the leading and trailing pipe delimiters first.
 *
 * @param {string} trimmedLine - Row text already trimmed by the caller.
 * @returns {string[]} Cell contents, each trimmed.
 */
function parsePipeRow(trimmedLine) {
    const stripped = trimmedLine
        .replace(/^\|/u, "")
        .replace(/\|\s*$/u, "");
    return stripped.split("|").map((cellText) => cellText.trim());
}
|
|
42
|
+
/**
 * Normalize one claimed-path cell token: strip surrounding whitespace,
 * markdown backticks, and a leading "./" prefix. Mirrors the
 * normalization used by the plan artifact linter so both sides compare
 * identical tokens.
 *
 * @param {string} raw - Raw comma-separated fragment from a table cell.
 * @returns {string} Normalized path token ("" when nothing remains).
 */
function normalizePathToken(raw) {
    return raw
        .trim()
        .replace(/^`|`$/gu, "")
        .replace(/^\.\/+/u, "")
        // Fix: re-trim so backtick-wrapped tokens with inner padding
        // (e.g. "` src/a.js `") do not retain stray spaces that would
        // make identical paths compare unequal in conflict detection.
        .trim();
}
|
|
45
|
+
/**
 * Parse claimedPaths per slice, grouped by wave, from the managed
 * Parallel Execution Plan block of the plan markdown.
 *
 * @param {string} planMarkdown - Full plan artifact markdown.
 * @returns {Map<string, Map<string, string[]>>} waveId (W-NN) ->
 *   (sliceId (S-N) -> normalized claimed path tokens). Empty when the
 *   managed block markers are missing or inverted.
 */
function parseManagedWaveClaimedPaths(planMarkdown) {
    const out = new Map();
    const startIdx = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_START);
    const endIdx = planMarkdown.indexOf(PARALLEL_EXEC_MANAGED_END);
    // Both markers must exist and be correctly ordered.
    if (startIdx < 0 || endIdx <= startIdx)
        return out;
    const body = planMarkdown.slice(startIdx + PARALLEL_EXEC_MANAGED_START.length, endIdx);
    const lines = body.split(/\r?\n/u);
    // Wave the parser is currently inside; null before the first heading.
    let currentWaveId = null;
    // Column map built from the most recent header row of this wave.
    let headerIdx = new Map();
    for (const rawLine of lines) {
        const trimmed = rawLine.trim();
        // "### Wave 3" / "### Wave W-3" opens a wave; id is canonicalized
        // to zero-padded W-NN form.
        const waveMatch = /^###\s+Wave\s+(?:W-)?(\d+)\b/iu.exec(trimmed);
        if (waveMatch) {
            currentWaveId = `W-${waveMatch[1].padStart(2, "0")}`;
            if (!out.has(currentWaveId)) {
                out.set(currentWaveId, new Map());
            }
            headerIdx = new Map();
            continue;
        }
        // Only pipe-table rows inside a wave are relevant here.
        if (!currentWaveId || !trimmed.startsWith("|"))
            continue;
        const cells = parsePipeRow(trimmed);
        if (cells.length === 0)
            continue;
        const first = cells[0].toLowerCase();
        if (first === "sliceid" || first === "slice id") {
            // Header row: rebuild the column map (lowercase, strip
            // non-alphanumerics, first occurrence of a name wins).
            headerIdx = new Map();
            for (let i = 0; i < cells.length; i += 1) {
                const key = cells[i].toLowerCase().replace(/[^a-z0-9]/gu, "");
                if (key.length > 0 && !headerIdx.has(key)) {
                    headerIdx.set(key, i);
                }
            }
            continue;
        }
        // Skip the markdown alignment row (| --- | :--: | ... |).
        if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
            continue;
        }
        // Data rows are identified by an S-N slice id in column 0
        // (uppercased first, so lowercase ids also match).
        const sliceId = cells[0].trim().toUpperCase();
        if (!/^S-\d+$/u.test(sliceId))
            continue;
        const pathsIdx = headerIdx.get("claimedpaths");
        const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
        const claimedPaths = rawPaths.length === 0
            ? []
            : rawPaths
                .split(",")
                .map((p) => normalizePathToken(p))
                .filter((p) => p.length > 0);
        out.get(currentWaveId).set(sliceId, claimedPaths);
    }
    return out;
}
|
|
100
|
+
/**
 * Detect claimed-path overlaps among the ready slices of one wave.
 * Every unordered pair of slices is compared; each overlapping path
 * contributes a "S-<id>:<path>" token for BOTH slices involved.
 *
 * @param {string[]} readySlices - Slice ids ready to dispatch.
 * @param {Map<string, string[]>} bySlice - sliceId -> claimed paths.
 * @returns {string[]} Sorted, de-duplicated conflict tokens; empty when
 *   all claimed paths are disjoint (or unknown slices claim nothing).
 */
function detectPathConflicts(readySlices, bySlice) {
    const ordered = [...readySlices].sort();
    const conflicts = new Set();
    ordered.forEach((leftSlice, position) => {
        const leftPaths = bySlice.get(leftSlice) ?? [];
        if (leftPaths.length === 0) {
            return;
        }
        const leftSet = new Set(leftPaths);
        for (const rightSlice of ordered.slice(position + 1)) {
            const rightPaths = bySlice.get(rightSlice) ?? [];
            for (const pathToken of rightPaths) {
                if (leftSet.has(pathToken)) {
                    conflicts.add(`${leftSlice}:${pathToken}`);
                    conflicts.add(`${rightSlice}:${pathToken}`);
                }
            }
        }
    });
    return [...conflicts].sort();
}
|
|
36
124
|
const TERMINAL_PHASES = new Set([
|
|
37
125
|
"refactor",
|
|
38
126
|
"refactor-deferred",
|
|
@@ -202,11 +290,20 @@ export async function runWaveStatus(projectRoot, options = {}) {
|
|
|
202
290
|
}
|
|
203
291
|
else {
|
|
204
292
|
const readyToDispatch = [...firstOpenWave.readyMembers].sort();
|
|
293
|
+
const claimedPathsByWave = parseManagedWaveClaimedPaths(planRaw);
|
|
294
|
+
const conflicts = detectPathConflicts(readyToDispatch, claimedPathsByWave.get(firstOpenWave.waveId) ?? new Map());
|
|
295
|
+
const mode = conflicts.length > 0
|
|
296
|
+
? "blocked"
|
|
297
|
+
: readyToDispatch.length > 1
|
|
298
|
+
? "wave-fanout"
|
|
299
|
+
: readyToDispatch.length === 1
|
|
300
|
+
? "single-slice"
|
|
301
|
+
: "none";
|
|
205
302
|
nextDispatch = {
|
|
206
303
|
waveId: firstOpenWave.waveId,
|
|
207
304
|
readyToDispatch,
|
|
208
|
-
pathConflicts:
|
|
209
|
-
mode
|
|
305
|
+
pathConflicts: conflicts,
|
|
306
|
+
mode
|
|
210
307
|
};
|
|
211
308
|
}
|
|
212
309
|
return {
|