cclaw-cli 7.5.0 → 7.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/dist/artifact-linter/plan.js +238 -26
- package/dist/artifact-linter/tdd.js +4 -3
- package/dist/config.d.ts +18 -1
- package/dist/config.js +176 -5
- package/dist/content/core-agents.d.ts +1 -1
- package/dist/content/core-agents.js +17 -2
- package/dist/content/hooks.js +37 -0
- package/dist/content/meta-skill.js +4 -4
- package/dist/content/skills.js +12 -8
- package/dist/content/stage-schema.js +3 -2
- package/dist/content/stages/plan.js +18 -17
- package/dist/content/stages/tdd.js +13 -10
- package/dist/content/start-command.js +3 -3
- package/dist/content/subagent-context-skills.js +2 -2
- package/dist/content/subagents.js +6 -6
- package/dist/content/templates.js +12 -7
- package/dist/delegation.d.ts +43 -3
- package/dist/delegation.js +80 -9
- package/dist/execution-topology.d.ts +36 -0
- package/dist/execution-topology.js +73 -0
- package/dist/gate-evidence.js +10 -12
- package/dist/internal/advance-stage/start-flow.js +13 -4
- package/dist/internal/cohesion-contract-stub.js +2 -14
- package/dist/internal/plan-split-waves.d.ts +5 -2
- package/dist/internal/plan-split-waves.js +27 -16
- package/dist/internal/slice-commit.js +161 -7
- package/dist/internal/wave-status.d.ts +4 -0
- package/dist/internal/wave-status.js +50 -9
- package/dist/stack-detection.d.ts +94 -0
- package/dist/stack-detection.js +431 -0
- package/dist/tdd-cycle.js +7 -5
- package/dist/types.d.ts +67 -0
- package/dist/util/slice-id.d.ts +58 -0
- package/dist/util/slice-id.js +89 -0
- package/package.json +1 -1

--- a/package/dist/internal/cohesion-contract-stub.js
+++ b/package/dist/internal/cohesion-contract-stub.js
@@ -3,6 +3,7 @@ import path from "node:path";
 import { RUNTIME_ROOT } from "../constants.js";
 import { writeFileSafe } from "../fs-utils.js";
 import { readDelegationLedger, isParallelTddSliceWorker } from "../delegation.js";
+import { compareSliceIds } from "../util/slice-id.js";
 export function parseCohesionContractArgs(tokens) {
     const args = { stub: false, force: false, reason: null };
     for (const token of tokens) {

@@ -143,18 +144,5 @@ function collectSliceIds(entries)
             continue;
         set.add(entry.sliceId);
     }
-    return [...set].sort((a, b) => {
-        const an = parseSliceNum(a);
-        const bn = parseSliceNum(b);
-        if (an !== null && bn !== null)
-            return an - bn;
-        return a.localeCompare(b);
-    });
-}
-function parseSliceNum(sliceId) {
-    const m = /^S-(\d+)$/u.exec(sliceId);
-    if (!m)
-        return null;
-    const n = Number.parseInt(m[1], 10);
-    return Number.isFinite(n) ? n : null;
+    return [...set].sort(compareSliceIds);
 }
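
The deleted sorter moves into the new `package/dist/util/slice-id.js` (+89 lines; its body is not part of this diff). A minimal sketch of the two helpers, inferred only from how their call sites in this diff use them (`parseSliceId` yields `{ id, numeric, suffix }`; `compareSliceIds` orders numerically before falling back to lexical order). The shipped code may differ:

```ts
// Sketch inferred from call sites in this diff; not the shipped slice-id.js.
interface ParsedSliceId {
    id: string;      // canonical id, e.g. "S-12b"
    numeric: number; // 12
    suffix: string;  // "b" (lower-cased), "" when absent
}

function parseSliceId(token: string): ParsedSliceId | null {
    const m = /^S-(\d+)([a-z][a-z0-9]*)?$/iu.exec(token.trim());
    if (!m)
        return null;
    const suffix = (m[2] ?? "").toLowerCase();
    return { id: `S-${m[1]}${suffix}`, numeric: Number.parseInt(m[1], 10), suffix };
}

function compareSliceIds(a: string, b: string): number {
    const pa = parseSliceId(a);
    const pb = parseSliceId(b);
    if (pa && pb && pa.numeric !== pb.numeric)
        return pa.numeric - pb.numeric; // "S-2" before "S-10"
    return a.localeCompare(b);          // lexical fallback (covers suffixes)
}
```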

--- a/package/dist/internal/plan-split-waves.d.ts
+++ b/package/dist/internal/plan-split-waves.d.ts
@@ -64,8 +64,11 @@ export declare function extractMembersListFromLine(trimmedLine: string): string
  *
  * Rules:
  * - The line must start with `|` (after trimming).
- * - Column 1 (after stripping markdown noise)…
- *   …
+ * - Column 1 (after stripping markdown noise) may be either a slice id
+ *   (`S-N`) or an implementation-unit id (`U-N`). Unit ids derive their
+ *   execution slice as `S-N`, which lets 7.7+ plans schedule feature-atomic
+ *   units without inventing a tiny `T-NNN` row per dispatch lane. Header rows
+ *   (`| sliceId | …`, `| unit | …`) and separator rows (`|---|---|…`) are
  *   silently skipped.
  * - Column 2, when present and non-empty, becomes the `unitId`
  *   verbatim (after stripping whitespace + backticks/quotes/brackets).
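
For illustration (rows and paths invented here, not taken from the package), a plan table can now mix both id forms; the header and separator rows below are skipped, and the `U-3` row is scheduled as execution slice `S-3`:

```
| unit | claimedPaths         |
|------|----------------------|
| U-3  | src/wave/dispatch.ts |
| S-4  | src/wave/status.ts   |
```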

--- a/package/dist/internal/plan-split-waves.js
+++ b/package/dist/internal/plan-split-waves.js
@@ -3,6 +3,7 @@ import path from "node:path";
 import { resolveArtifactPath } from "../artifact-paths.js";
 import { exists, writeFileSafe } from "../fs-utils.js";
 import { readFlowState } from "../runs.js";
+import { compareSliceIds, parseSliceId } from "../util/slice-id.js";
 export const PLAN_SPLIT_DEFAULT_WAVE_SIZE = 5;
 export const PLAN_SPLIT_SMALL_PLAN_THRESHOLD = 50;
 const WAVE_PLANS_DIR = "wave-plans";

@@ -34,15 +35,17 @@ export function extractParallelExecutionManagedBody(planMarkdown) {
 }
 function tokenToSliceAndUnit(token) {
     const t = token.trim().replace(/^[`"'[\]()]+|[`"'[\]()]+$/gu, "");
-    const u = /^U-(\d+)…
+    const u = /^U-(\d+)([a-z][a-z0-9]*)?$/iu.exec(t);
     if (u) {
-        const …
-        …
+        const num = u[1];
+        const suffix = (u[2] ?? "").toLowerCase();
+        const tail = suffix.length > 0 ? `${num}${suffix}` : num;
+        return { unitId: `U-${tail}`, sliceId: `S-${tail}` };
     }
-    const …
-    if (…
-    const …
-    return { unitId: `U-${…
+    const parsed = parseSliceId(t);
+    if (parsed) {
+        const tail = parsed.suffix.length > 0 ? `${parsed.numeric}${parsed.suffix}` : `${parsed.numeric}`;
+        return { unitId: `U-${tail}`, sliceId: parsed.id };
     }
     return null;
 }
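
Traced against the rewritten body above (and assuming `parseSliceId` parses `S-12b` into `numeric: 12`, `suffix: "b"`), the token parser now accepts suffixed ids in either form:

```ts
// Expected results, traced from the body above (not a shipped test file).
tokenToSliceAndUnit("`U-7`");  // { unitId: "U-7",   sliceId: "S-7"   } (backticks stripped)
tokenToSliceAndUnit("U-12B");  // { unitId: "U-12b", sliceId: "S-12b" } (suffix lower-cased)
tokenToSliceAndUnit("S-12b");  // { unitId: "U-12b", sliceId: "S-12b" } (via parseSliceId)
tokenToSliceAndUnit("T-001");  // null (neither a unit id nor a slice id)
```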

@@ -68,8 +71,11 @@ export function extractMembersListFromLine(trimmedLine) {
  *
  * Rules:
  * - The line must start with `|` (after trimming).
- * - Column 1 (after stripping markdown noise)…
- *   …
+ * - Column 1 (after stripping markdown noise) may be either a slice id
+ *   (`S-N`) or an implementation-unit id (`U-N`). Unit ids derive their
+ *   execution slice as `S-N`, which lets 7.7+ plans schedule feature-atomic
+ *   units without inventing a tiny `T-NNN` row per dispatch lane. Header rows
+ *   (`| sliceId | …`, `| unit | …`) and separator rows (`|---|---|…`) are
  *   silently skipped.
  * - Column 2, when present and non-empty, becomes the `unitId`
  *   verbatim (after stripping whitespace + backticks/quotes/brackets).

@@ -90,12 +96,17 @@ export function parseTableRowMember(trimmedLine) {
         return null;
     const stripDecorations = (raw) => raw.replace(/^[`"'[\]()]+|[`"'[\]()]+$/gu, "").trim();
     const col1 = stripDecorations(cells[0]);
-    const …
-    …
+    const parsedSlice = parseSliceId(col1);
+    const parsedUnit = tokenToSliceAndUnit(col1);
+    if (!parsedSlice && !parsedUnit)
         return null;
-    const …
-    …
-    …
+    const sliceTail = parsedSlice
+        ? parsedSlice.suffix.length > 0
+            ? `${parsedSlice.numeric}${parsedSlice.suffix}`
+            : `${parsedSlice.numeric}`
+        : "";
+    const sliceId = parsedSlice ? parsedSlice.id : parsedUnit.sliceId;
+    let unitId = parsedSlice ? `U-${sliceTail}` : parsedUnit.unitId;
     if (cells.length >= 2) {
         const col2 = stripDecorations(cells[1]);
         if (col2.length > 0) {

@@ -235,7 +246,7 @@ export function parseWavePlanFileBody(body, waveId) {
         }
     }
     if (members.length === 0) {
-        const regex = /\b(S-\d+)\b/…
+        const regex = /\b(S-\d+(?:[a-z][a-z0-9]*)?)\b/giu;
         let match;
         while ((match = regex.exec(body)) !== null) {
             const ids = tokenToSliceAndUnit(match[1]);

@@ -307,7 +318,7 @@ export function mergeParallelWaveDefinitions(primary, secondary) {
         .sort(([a], [b]) => a.localeCompare(b))
         .map(([wid, memMap]) => ({
         waveId: wid,
-        members: [...memMap.values()].sort((p, q) => p.sliceId…
+        members: [...memMap.values()].sort((p, q) => compareSliceIds(p.sliceId, q.sliceId))
     }));
 }
 /**

--- a/package/dist/internal/slice-commit.js
+++ b/package/dist/internal/slice-commit.js
@@ -1,9 +1,10 @@
 import { execFile } from "node:child_process";
 import path from "node:path";
 import { promisify } from "node:util";
-import { readConfig, resolveTddCommitMode, resolveTddIsolationMode, resolveTddWorktreeRoot } from "../config.js";
+import { readConfig, resolveLockfileTwinPolicy, resolveTddCommitMode, resolveTddIsolationMode, resolveTddWorktreeRoot } from "../config.js";
 import { readDelegationLedger } from "../delegation.js";
 import { exists } from "../fs-utils.js";
+import { loadStackAdapter } from "../stack-detection.js";
 import { cleanupWorktree, commitAndMergeBack, createSliceWorktree, WorktreeMergeConflictError, WorktreeUnsupportedError } from "../worktree-manager.js";
 const execFileAsync = promisify(execFile);
 function parseCsv(raw) {

@@ -165,6 +166,71 @@ function matchesClaimedPath(changedPath, claimedPaths) {
         return changed.startsWith(`${claimed}/`);
     });
 }
+/**
+ * 7.6.0 — match a candidate path against a stack-adapter glob pattern.
+ *
+ * Adapter globs are intentionally simple: literal paths (`Cargo.toml`),
+ * recursive prefix (`**\/Cargo.toml`), or single-level wildcard
+ * (`*.csproj`). We translate those shapes here without pulling in a
+ * full glob library so the slice-commit hook stays dependency-light.
+ */
+function matchesAdapterGlob(candidate, glob) {
+    const normalizedCandidate = normalizePathLike(candidate);
+    const normalizedGlob = normalizePathLike(glob);
+    if (normalizedGlob.length === 0)
+        return false;
+    if (normalizedGlob.includes("**")) {
+        // `**/foo` → match either `foo` at root or any nested `foo`.
+        if (normalizedGlob.startsWith("**/")) {
+            const tail = normalizedGlob.slice(3);
+            if (tail === normalizedCandidate)
+                return true;
+            return normalizedCandidate.endsWith(`/${tail}`);
+        }
+        // Generic ** in the middle: collapse to suffix match for simplicity.
+        const tail = normalizedGlob.split("**/").pop() ?? "";
+        return tail.length > 0 && normalizedCandidate.endsWith(tail);
+    }
+    if (normalizedGlob.includes("*")) {
+        // Single-segment wildcard like `*.csproj`. Convert to a basic regex.
+        const regexSrc = normalizedGlob
+            .split("/")
+            .map((segment) => segment
+            .replace(/[.+?^${}()|[\]\\]/gu, "\\$&")
+            .replace(/\*/gu, "[^/]*"))
+            .join("/");
+        return new RegExp(`^${regexSrc}$`, "u").test(normalizedCandidate);
+    }
+    return normalizedGlob === normalizedCandidate;
+}
+/**
+ * Find lockfile twins whose manifestGlob matches at least one claimed
+ * path. The returned twins are the candidates whose lockfileGlob we
+ * should auto-include / auto-revert when they drift.
+ */
+function activeLockfileTwins(adapter, claimedPaths) {
+    if (adapter.lockfileTwins.length === 0)
+        return [];
+    const active = [];
+    for (const twin of adapter.lockfileTwins) {
+        const claimedManifest = claimedPaths.some((claimed) => matchesAdapterGlob(claimed, twin.manifestGlob));
+        if (claimedManifest)
+            active.push(twin);
+    }
+    return active;
+}
+/**
+ * Partition a candidate path: `is it a lockfile twin we should
+ * auto-handle?`. Returns the twin entry that matches, or null.
+ */
+function findMatchingLockfileTwin(changedPath, twins) {
+    for (const twin of twins) {
+        if (matchesAdapterGlob(changedPath, twin.lockfileGlob)) {
+            return twin;
+        }
+    }
+    return null;
+}
 async function resolveClaimedPathsFromLedger(projectRoot, args) {
     const ledger = await readDelegationLedger(projectRoot);
     const matches = ledger.entries.filter((entry) => entry.stage === "tdd" &&
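
Tracing `matchesAdapterGlob` above gives the following results (`normalizePathLike` is defined outside this hunk; it presumably normalizes separators and trims prefixes like `./`):

```ts
// Traced from the glob matcher above (not a shipped test file).
matchesAdapterGlob("Cargo.toml", "Cargo.toml");               // true: literal match
matchesAdapterGlob("crates/api/Cargo.toml", "**/Cargo.toml"); // true: nested, via endsWith("/Cargo.toml")
matchesAdapterGlob("Cargo.toml", "**/Cargo.toml");            // true: root-level special case
matchesAdapterGlob("App.csproj", "*.csproj");                 // true: `*` becomes [^/]*
matchesAdapterGlob("src/App.csproj", "*.csproj");             // false: [^/]* never crosses "/"
```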

@@ -195,6 +261,8 @@ export async function runSliceCommitCommand(projectRoot, tokens, io) {
     const commitMode = resolveTddCommitMode(config);
     const isolationMode = resolveTddIsolationMode(config);
     const worktreeRoot = resolveTddWorktreeRoot(config);
+    const lockfileTwinPolicy = resolveLockfileTwinPolicy(config);
+    const stackAdapter = await loadStackAdapter(projectRoot);
     const gitPresent = await exists(path.join(projectRoot, ".git"));
     if (args.prepareWorktree) {
         if (!gitPresent) {

@@ -354,8 +422,41 @@ export async function runSliceCommitCommand(projectRoot, tokens, io) {
         });
         return 0;
     }
-    const …
-    …
+    const initialDrift = changedPaths.filter((p) => !matchesClaimedPath(p, claimedPaths));
+    const twinsForCommit = activeLockfileTwins(stackAdapter, claimedPaths);
+    // 7.6.0 — split drift into "lockfile twin drift" (handle per policy)
+    // vs "true drift" (always rejected).
+    const lockfileTwinDrift = [];
+    const trueDrift = [];
+    for (const driftPath of initialDrift) {
+        const twin = findMatchingLockfileTwin(driftPath, twinsForCommit);
+        if (twin) {
+            lockfileTwinDrift.push({ path: driftPath, twin });
+        }
+        else {
+            trueDrift.push(driftPath);
+        }
+    }
+    // Report a separate true-drift error when there is actual non-twin
+    // drift, regardless of policy: the operator's claim should still
+    // cover everything they changed.
+    if (trueDrift.length > 0) {
+        output(io, args, {
+            ok: false,
+            errorCode: "slice_commit_path_drift",
+            details: {
+                sliceId: args.sliceId,
+                spanId: args.spanId,
+                claimedPaths,
+                driftPaths: trueDrift
+            },
+            message: `slice_commit_path_drift: ${trueDrift.join(", ")}`
+        }, "stderr");
+        return 2;
+    }
+    // strict-fence: lockfile twins still count as drift.
+    if (lockfileTwinDrift.length > 0 && lockfileTwinPolicy === "strict-fence") {
+        const driftPaths = lockfileTwinDrift.map((entry) => entry.path);
     output(io, args, {
         ok: false,
         errorCode: "slice_commit_path_drift",

@@ -363,13 +464,62 @@ export async function runSliceCommitCommand(projectRoot, tokens, io) {
             sliceId: args.sliceId,
             spanId: args.spanId,
             claimedPaths,
-            driftPaths
+            driftPaths,
+            lockfileTwinPolicy,
+            stackAdapterId: stackAdapter.id
         },
-        message: `slice_commit_path_drift: ${…
+        message: `slice_commit_path_drift: ${driftPaths.join(", ")} (lockfileTwinPolicy=strict-fence)`
     }, "stderr");
     return 2;
 }
-…
+    // auto-revert: restore the lockfile, then exclude from changed set.
+    const revertedTwinPaths = [];
+    if (lockfileTwinDrift.length > 0 && lockfileTwinPolicy === "auto-revert") {
+        for (const entry of lockfileTwinDrift) {
+            try {
+                await execFileAsync("git", ["restore", "--", entry.path], { cwd: activeCwd });
+                revertedTwinPaths.push(entry.path);
+            }
+            catch {
+                // Fall through; if restore fails the drift will reappear in the
+                // recomputed status and we'll reject as drift.
+            }
+        }
+        changedPaths = await gitChangedPaths(activeCwd);
+        const remainingDrift = changedPaths.filter((p) => !matchesClaimedPath(p, claimedPaths));
+        if (remainingDrift.length > 0) {
+            output(io, args, {
+                ok: false,
+                errorCode: "slice_commit_path_drift",
+                details: {
+                    sliceId: args.sliceId,
+                    spanId: args.spanId,
+                    claimedPaths,
+                    driftPaths: remainingDrift,
+                    lockfileTwinPolicy,
+                    stackAdapterId: stackAdapter.id
+                },
+                message: `slice_commit_path_drift: ${remainingDrift.join(", ")}`
+            }, "stderr");
+            return 2;
+        }
+    }
+    // auto-include: add the twin path(s) to the effective claim so the
+    // commit picks them up. We don't mutate the persisted claim — only
+    // the in-memory list used for the upcoming `git add`.
+    const effectiveCommitPaths = [...claimedPaths];
+    const includedTwinPaths = [];
+    if (lockfileTwinDrift.length > 0 && lockfileTwinPolicy === "auto-include") {
+        for (const entry of lockfileTwinDrift) {
+            if (!effectiveCommitPaths.includes(entry.path)) {
+                effectiveCommitPaths.push(entry.path);
+            }
+            includedTwinPaths.push(entry.path);
+        }
+    }
+    const changedInClaim = changedPaths.filter((p) => matchesClaimedPath(p, claimedPaths) ||
+        (lockfileTwinPolicy === "auto-include" &&
+            findMatchingLockfileTwin(p, twinsForCommit) !== null));
     if (changedInClaim.length === 0) {
         await cleanupManagedWorktree();
         output(io, args, {
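
A behavioural summary of the branches above, expressed as the policy union they resolve through `resolveLockfileTwinPolicy` (the config key name and default are not visible in this diff):

```ts
// Summary of the drift branches above (behavioural notes, not shipped code).
type LockfileTwinPolicy = "strict-fence" | "auto-revert" | "auto-include";
// "strict-fence": twin drift is still drift; slice_commit_path_drift, exit 2.
// "auto-revert":  `git restore -- <lockfile>` in the active worktree, then the
//                 changed set is recomputed; anything still drifting exits 2.
// "auto-include": the twin path joins effectiveCommitPaths for `git add`;
//                 the persisted claim itself is never mutated.
// True (non-twin) drift is rejected with exit 2 under every policy.
```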

@@ -381,7 +531,7 @@ export async function runSliceCommitCommand(projectRoot, tokens, io) {
         return 0;
     }
     try {
-        await execFileAsync("git", ["add", "--", ...…
+        await execFileAsync("git", ["add", "--", ...effectiveCommitPaths], {
             cwd: activeCwd
         });
         const taskPart = args.taskId && args.taskId.length > 0 ? args.taskId : "task";

@@ -459,6 +609,10 @@ export async function runSliceCommitCommand(projectRoot, tokens, io) {
         changedPaths: changedInClaim,
         worktreePath: managedWorktreePath ?? undefined,
         degradedToInPlace: degradedToInPlace || undefined,
+        lockfileTwinPolicy,
+        lockfileTwinsIncluded: includedTwinPaths.length > 0 ? includedTwinPaths : undefined,
+        lockfileTwinsReverted: revertedTwinPaths.length > 0 ? revertedTwinPaths : undefined,
+        stackAdapterId: stackAdapter.id,
         message: `slice commit created for ${args.sliceId}: ${commitSha}`
     });
     return 0;

--- a/package/dist/internal/wave-status.d.ts
+++ b/package/dist/internal/wave-status.d.ts
@@ -1,4 +1,5 @@
 import type { Writable } from "node:stream";
+import type { ExecutionTopology } from "../types.js";
 interface InternalIo {
     stdout: Writable;
     stderr: Writable;

@@ -17,6 +18,9 @@ export interface WaveStatusNextDispatch {
     readyToDispatch: string[];
     pathConflicts: string[];
     mode: "single-slice" | "wave-fanout" | "blocked" | "none";
+    topology: Exclude<ExecutionTopology, "auto"> | "none";
+    topologyReason: string;
+    maxBuilders: number;
 }
 export interface WaveStatusReport {
     activeRunId: string;

--- a/package/dist/internal/wave-status.js
+++ b/package/dist/internal/wave-status.js
@@ -4,7 +4,10 @@ import { RUNTIME_ROOT } from "../constants.js";
 import { readDelegationEvents, readDelegationLedger } from "../delegation.js";
 import { readFlowState } from "../runs.js";
 import { DEFAULT_SLICE_STREAM_REL_PATH, readEventStreamFile } from "../streaming/event-stream.js";
+import { readConfig, resolveExecutionStrictness, resolveExecutionTopology, resolveMaxBuilders } from "../config.js";
+import { routeExecutionTopology } from "../execution-topology.js";
 import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "./plan-split-waves.js";
+import { compareSliceIds, parseSliceId } from "../util/slice-id.js";
 const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
 const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
 function parseArgs(tokens) {

@@ -81,7 +84,11 @@ function parseManagedWaveClaimedPaths(planMarkdown) {
         if (cells.length === 0)
             continue;
         const first = cells[0].toLowerCase();
-        if (first === "sliceid" || …
+        if (first === "sliceid" ||
+            first === "slice id" ||
+            first === "unitid" ||
+            first === "unit id" ||
+            first === "unit") {
             headerIdx = new Map();
             for (let i = 0; i < cells.length; i += 1) {
                 const key = cells[i].toLowerCase().replace(/[^a-z0-9]/gu, "");

@@ -94,9 +101,12 @@ function parseManagedWaveClaimedPaths(planMarkdown) {
         if (cells.every((cell) => /^:?-{3,}:?$/u.test(cell))) {
             continue;
         }
-        const …
-        …
+        const firstCell = (cells[0] ?? "").replace(/^`|`$/gu, "").trim();
+        const parsedSlice = parseSliceId(firstCell);
+        const parsedUnit = /^U-(\d+(?:[a-z][a-z0-9]*)?)$/iu.exec(firstCell);
+        if (!parsedSlice && !parsedUnit)
             continue;
+        const sliceId = parsedSlice?.id ?? `S-${parsedUnit[1].toLowerCase()}`;
         const pathsIdx = headerIdx.get("claimedpaths");
         const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
         const claimedPaths = rawPaths.length === 0

@@ -111,7 +121,7 @@ function parseManagedWaveClaimedPaths(planMarkdown) {
 }
 function detectPathConflicts(readySlices, bySlice) {
     const conflicts = new Set();
-    const ordered = [...readySlices].sort();
+    const ordered = [...readySlices].sort(compareSliceIds);
     for (let i = 0; i < ordered.length; i += 1) {
         const leftSlice = ordered[i];
         const leftPaths = bySlice.get(leftSlice) ?? [];

@@ -173,7 +183,10 @@ export async function runWaveStatus(projectRoot, options = {}) {
                 waveId: null,
                 readyToDispatch: [],
                 pathConflicts: [],
-                mode: "none"
+                mode: "none",
+                topology: "none",
+                topologyReason: "wave plan could not be parsed",
+                maxBuilders: 0
             },
             warnings: [
                 `wave_plan_parse_error: ${err instanceof Error ? err.message : String(err)}`

@@ -201,7 +214,10 @@ export async function runWaveStatus(projectRoot, options = {}) {
                 waveId: null,
                 readyToDispatch: [],
                 pathConflicts: [],
-                mode: "none"
+                mode: "none",
+                topology: "none",
+                topologyReason: "wave plan sources conflict",
+                maxBuilders: 0
             },
             warnings: [
                 `wave_plan_merge_conflict: ${err instanceof Error ? err.message : String(err)}`

@@ -329,16 +345,23 @@ export async function runWaveStatus(projectRoot, options = {}) {
         warnings.push("wave_plan_managed_block_missing: <!-- parallel-exec-managed-start --> block not found in 05-plan.md and wave-plans/ has no parseable wave files.");
     }
     let nextDispatch;
+    const config = await readConfig(projectRoot).catch(() => null);
+    const configuredTopology = resolveExecutionTopology(config);
+    const strictness = resolveExecutionStrictness(config);
+    const maxBuilders = resolveMaxBuilders(config);
     if (firstOpenWave === null) {
         nextDispatch = {
             waveId: null,
             readyToDispatch: [],
             pathConflicts: [],
-            mode: "none"
+            mode: "none",
+            topology: "none",
+            topologyReason: "no open wave has ready units",
+            maxBuilders
         };
     }
     else {
-        const readyToDispatch = [...firstOpenWave.readyMembers].sort();
+        const readyToDispatch = [...firstOpenWave.readyMembers].sort(compareSliceIds);
         const claimedPathsByWave = parseManagedWaveClaimedPaths(planRaw);
         const conflicts = detectPathConflicts(readyToDispatch, claimedPathsByWave.get(firstOpenWave.waveId) ?? new Map());
         const mode = conflicts.length > 0

@@ -348,11 +371,28 @@ export async function runWaveStatus(projectRoot, options = {}) {
             : readyToDispatch.length === 1
                 ? "single-slice"
                 : "none";
+        const topologyDecision = mode === "none"
+            ? null
+            : routeExecutionTopology({
+                configuredTopology,
+                strictness,
+                maxBuilders,
+                shape: {
+                    unitCount: readyToDispatch.length,
+                    independentUnitCount: conflicts.length > 0 ? 0 : readyToDispatch.length,
+                    substantialUnitCount: readyToDispatch.length,
+                    hasPathConflicts: conflicts.length > 0,
+                    inlineSafe: false
+                }
+            });
         nextDispatch = {
             waveId: firstOpenWave.waveId,
             readyToDispatch,
             pathConflicts: conflicts,
-            mode
+            mode,
+            topology: topologyDecision?.topology ?? "none",
+            topologyReason: topologyDecision?.reason ?? "no ready units",
+            maxBuilders: topologyDecision?.maxBuilders ?? maxBuilders
         };
     }
     return {
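
The diff adds `package/dist/execution-topology.js` (+73 lines) but its body is not shown here; the call site above pins down most of its contract. A sketch inferred purely from that call site and the new `WaveStatusNextDispatch` fields; everything below is reconstruction, not the shipped declarations:

```ts
// Inferred contract only; the real one lives in execution-topology.d.ts.
// ExecutionTopology itself is declared in types.d.ts (+67 lines, also not
// shown); only the "auto" variant is visible in this diff.
interface TopologyDecision {
    topology: string;     // an Exclude<ExecutionTopology, "auto"> value
    reason: string;       // surfaced as nextDispatch.topologyReason
    maxBuilders: number;  // may differ from the configured resolveMaxBuilders value
}

declare function routeExecutionTopology(input: {
    configuredTopology: string;  // resolveExecutionTopology(config)
    strictness: unknown;         // resolveExecutionStrictness(config); variants not shown
    maxBuilders: number;
    shape: {
        unitCount: number;             // ready units in the open wave
        independentUnitCount: number;  // 0 as soon as any claimed paths overlap
        substantialUnitCount: number;
        hasPathConflicts: boolean;
        inlineSafe: boolean;           // wave-status always passes false
    };
}): TopologyDecision;
```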

@@ -380,6 +420,7 @@ function formatHumanReport(report) {
     }
     lines.push(`nextDispatch: wave=${report.nextDispatch.waveId ?? "(none)"} ` +
         `mode=${report.nextDispatch.mode} ` +
+        `topology=${report.nextDispatch.topology} ` +
         `ready=[${report.nextDispatch.readyToDispatch.join(",")}]`);
     if (report.warnings.length > 0) {
         lines.push("warnings:");

--- a/package/dist/stack-detection.d.ts
+++ b/package/dist/stack-detection.d.ts
@@ -20,3 +20,97 @@ export declare const STACK_DISCOVERY_MARKERS: readonly string[];
  * Directory markers (checked with pathExists) for stack discovery.
  */
 export declare const STACK_DISCOVERY_DIR_MARKERS: readonly string[];
+export type StackAdapterId = "rust" | "node" | "python" | "go" | "ruby" | "php" | "swift" | "dotnet" | "elixir" | "java" | "unknown";
+/** Twin describing manifest → lockfile coupling for a stack. */
+export interface ManifestLockfileTwin {
+    /** Manifest glob (path relative to repo root). */
+    manifestGlob: string;
+    /** Lockfile glob that the manifest's package manager regenerates. */
+    lockfileGlob: string;
+}
+/**
+ * Wiring-aggregator contract — describes whether a new file in a stack
+ * needs an explicit aggregator/parent module update for the new module to
+ * be reachable from the rest of the project.
+ *
+ * - `aggregatorPattern` is a human-facing description; consumers should
+ *   call `resolveAggregatorFor(filePath, repoState?)` to compute the
+ *   concrete aggregator path.
+ * - `resolveAggregatorFor` returns the concrete repo-relative path of
+ *   the aggregator file required to wire `filePath`, or `null` when no
+ *   aggregator is required (e.g. file IS the aggregator, or the stack
+ *   layout makes wiring implicit).
+ * - `repoState.headFiles` lets the resolver check whether sibling
+ *   aggregators already exist (so e.g. node-ts only requires
+ *   `index.ts` updates when an `index.ts` already exists in that
+ *   directory).
+ */
+export interface WiringAggregatorContract {
+    aggregatorPattern: string;
+    /**
+     * Resolve the aggregator path required to wire `filePath` into its
+     * parent module, given a snapshot of repo state. Return `null` when
+     * no aggregator update is required.
+     */
+    resolveAggregatorFor(filePath: string, repoState?: {
+        headFiles?: ReadonlySet<string>;
+    }): string | null;
+}
+/**
+ * Universal stack-adapter contract used by hooks (slice-commit lockfile
+ * twins), linters (`plan_module_introducing_slice_wires_root`), and
+ * future stack-specific evidence validators.
+ *
+ * Each stack returns:
+ * - `id` — short stable identifier; routes used elsewhere should match.
+ * - `displayName` — used in user-facing prose so error messages stay
+ *   stack-agnostic at the surface ("Rust workspace" vs "Node project"
+ *   are forbidden in generic code; use `adapter.displayName` instead).
+ * - `manifestGlobs` — repo-relative manifest paths the stack uses.
+ * - `lockfileTwins` — manifest→lockfile twin entries; auto-detected
+ *   from disk at adapter init so node projects with yarn.lock get
+ *   `yarn.lock`, pnpm gets `pnpm-lock.yaml`, etc.
+ * - `testCommandHints` — example test command lines for prompts and
+ *   evidence validators (advisory; not authoritative).
+ * - `wiringAggregator` — see contract above. `undefined` when the
+ *   stack has no aggregator pattern (Go, Java, Ruby, Swift, .NET,
+ *   Elixir use implicit/automatic wiring).
+ */
+export interface StackAdapter {
+    id: StackAdapterId;
+    displayName: string;
+    manifestGlobs: string[];
+    lockfileTwins: ManifestLockfileTwin[];
+    testCommandHints: string[];
+    wiringAggregator?: WiringAggregatorContract;
+}
+interface LoadStackAdapterOptions {
+    /**
+     * Override the project root for tests. Defaults to the supplied
+     * argument; primarily here so callers can inject a synthesized
+     * directory in fixtures.
+     */
+    projectRoot?: string;
+}
+/**
+ * Load the stack-adapter for a project. Walks the registered factories
+ * in order; the first detector that returns true wins. Returns the
+ * `unknown` adapter (no-op) when no detector matches.
+ *
+ * Adapter init reads the filesystem to auto-detect lockfile twins
+ * (e.g. yarn.lock vs package-lock.json). Callers should cache the
+ * adapter for the lifetime of the operation rather than calling this
+ * per-row.
+ */
+export declare function loadStackAdapter(projectRoot: string, options?: LoadStackAdapterOptions): Promise<StackAdapter>;
+/**
+ * Synthesize a stack adapter from explicit lockfile-twin overrides.
+ * Useful in tests that want to pin twins without a real filesystem
+ * scan, and for the linter test suite.
+ */
+export declare function buildStackAdapterForTests(partial: Partial<StackAdapter> & {
+    id: StackAdapterId;
+    displayName: string;
+}): StackAdapter;
+export declare const UNKNOWN_STACK: StackAdapter;
+export {};
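
A usage sketch against the declarations above; the adapter values in the comments are illustrative, and the relative import assumes code living next to `stack-detection.js` inside the package:

```ts
// Sketch only; logged values depend on what the detectors find on disk.
import { loadStackAdapter, buildStackAdapterForTests } from "./stack-detection.js";

const adapter = await loadStackAdapter(process.cwd());
console.log(adapter.id);          // e.g. "node" when a package.json is detected
console.log(adapter.displayName); // generic code should use this, per the contract
console.log(adapter.lockfileTwins);
// e.g. [{ manifestGlob: "**/package.json", lockfileGlob: "pnpm-lock.yaml" }]
// when a pnpm lockfile is found at adapter init.

// Tests can pin twins without a real filesystem scan:
const pinned = buildStackAdapterForTests({
    id: "rust",
    displayName: "Rust",
    lockfileTwins: [{ manifestGlob: "**/Cargo.toml", lockfileGlob: "Cargo.lock" }],
});
```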