cclaw-cli 7.5.0 → 7.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/plan.js +143 -4
- package/dist/artifact-linter/tdd.js +2 -1
- package/dist/config.d.ts +4 -1
- package/dist/config.js +24 -3
- package/dist/content/core-agents.js +15 -0
- package/dist/content/hooks.js +37 -0
- package/dist/content/stage-schema.js +1 -0
- package/dist/content/stages/plan.js +1 -0
- package/dist/delegation.d.ts +39 -0
- package/dist/delegation.js +66 -1
- package/dist/gate-evidence.js +10 -12
- package/dist/internal/advance-stage/start-flow.js +13 -4
- package/dist/internal/cohesion-contract-stub.js +2 -14
- package/dist/internal/plan-split-waves.js +19 -14
- package/dist/internal/slice-commit.js +161 -7
- package/dist/internal/wave-status.js +6 -4
- package/dist/stack-detection.d.ts +94 -0
- package/dist/stack-detection.js +431 -0
- package/dist/tdd-cycle.js +7 -5
- package/dist/types.d.ts +22 -0
- package/dist/util/slice-id.d.ts +58 -0
- package/dist/util/slice-id.js +89 -0
- package/package.json +1 -1
|
@@ -5,6 +5,11 @@ import { FORBIDDEN_PLACEHOLDER_TOKENS, CONFIDENCE_FINDING_REGEX_SOURCE } from ".
|
|
|
5
5
|
import fs from "node:fs/promises";
|
|
6
6
|
import path from "node:path";
|
|
7
7
|
import { PLAN_SPLIT_SMALL_PLAN_THRESHOLD, parseImplementationUnits, parseImplementationUnitParallelFields } from "../internal/plan-split-waves.js";
|
|
8
|
+
import { compareSliceIds, parseSliceId } from "../util/slice-id.js";
|
|
9
|
+
import { execFile } from "node:child_process";
|
|
10
|
+
import { promisify } from "node:util";
|
|
11
|
+
import { loadStackAdapter } from "../stack-detection.js";
|
|
12
|
+
const execFileAsync = promisify(execFile);
|
|
8
13
|
const PARALLEL_EXEC_MANAGED_START = "<!-- parallel-exec-managed-start -->";
|
|
9
14
|
const PARALLEL_EXEC_MANAGED_END = "<!-- parallel-exec-managed-end -->";
|
|
10
15
|
const TASK_ID_PATTERN = /\bT-\d{3}[a-z]?(?:\.\d{1,3})?\b/giu;
|
|
@@ -120,13 +125,15 @@ function parseParallelWaveTableMetadata(planMarkdown) {
|
|
|
120
125
|
continue;
|
|
121
126
|
}
|
|
122
127
|
const sliceCell = cells[0];
|
|
123
|
-
|
|
128
|
+
const parsedSlice = parseSliceId(sliceCell);
|
|
129
|
+
if (!parsedSlice)
|
|
124
130
|
continue;
|
|
125
131
|
const idx = headerIdx ?? new Map();
|
|
126
132
|
const unitIdx = idx.get("unit") ?? idx.get("taskid") ?? 1;
|
|
127
133
|
const pathsIdx = idx.get("claimedpaths");
|
|
128
134
|
const parallelizableIdx = idx.get("parallelizable");
|
|
129
135
|
const laneIdx = idx.get("lane");
|
|
136
|
+
const dependsOnIdx = idx.get("dependson");
|
|
130
137
|
const rawPaths = pathsIdx !== undefined ? (cells[pathsIdx] ?? "") : "";
|
|
131
138
|
const claimedPaths = rawPaths.length === 0
|
|
132
139
|
? []
|
|
@@ -141,12 +148,22 @@ function parseParallelWaveTableMetadata(planMarkdown) {
|
|
|
141
148
|
if (rawParallel === "false" || rawParallel === "no")
|
|
142
149
|
parallelizable = false;
|
|
143
150
|
const laneRaw = laneIdx !== undefined ? (cells[laneIdx] ?? "").trim().toLowerCase() : "";
|
|
151
|
+
const rawDeps = dependsOnIdx !== undefined ? (cells[dependsOnIdx] ?? "") : "";
|
|
152
|
+
const dependsOn = rawDeps.length === 0
|
|
153
|
+
? []
|
|
154
|
+
: rawDeps
|
|
155
|
+
.replace(/^\[|\]$/gu, "")
|
|
156
|
+
.split(/[,\s]+/u)
|
|
157
|
+
.map((token) => token.trim().replace(/^`|`$/gu, ""))
|
|
158
|
+
.map((token) => parseSliceId(token)?.id ?? "")
|
|
159
|
+
.filter((id) => id.length > 0);
|
|
144
160
|
current.rows.push({
|
|
145
|
-
sliceId:
|
|
161
|
+
sliceId: parsedSlice.id,
|
|
146
162
|
unit: (cells[unitIdx] ?? "").trim(),
|
|
147
163
|
claimedPaths,
|
|
148
164
|
parallelizable,
|
|
149
|
-
lane: laneRaw.length > 0 ? laneRaw : null
|
|
165
|
+
lane: laneRaw.length > 0 ? laneRaw : null,
|
|
166
|
+
dependsOn
|
|
150
167
|
});
|
|
151
168
|
}
|
|
152
169
|
flush();
|
|
@@ -156,6 +173,65 @@ function waveHasSequentialModeHint(wave) {
|
|
|
156
173
|
const noteText = wave.notes.join("\n").toLowerCase();
|
|
157
174
|
return /mode\s*:\s*sequential/iu.test(noteText) || /\bsequential\b/iu.test(noteText) || /\bserial\b/iu.test(noteText);
|
|
158
175
|
}
|
|
176
|
+
/**
|
|
177
|
+
* Capture the set of repo-relative paths tracked at HEAD. Returns an
|
|
178
|
+
* empty set when the project root is not a git repo or `git ls-files`
|
|
179
|
+
* fails — the wiring linter degrades to "no aggregator required" in
|
|
180
|
+
* that case rather than crashing the whole stage check.
|
|
181
|
+
*/
|
|
182
|
+
async function readHeadFiles(projectRoot) {
|
|
183
|
+
try {
|
|
184
|
+
const { stdout } = await execFileAsync("git", ["ls-files", "-z"], { cwd: projectRoot, maxBuffer: 64 * 1024 * 1024 });
|
|
185
|
+
const out = new Set();
|
|
186
|
+
for (const segment of stdout.split("\u0000")) {
|
|
187
|
+
const trimmed = segment.trim();
|
|
188
|
+
if (trimmed.length === 0)
|
|
189
|
+
continue;
|
|
190
|
+
out.add(trimmed.replace(/\\/gu, "/"));
|
|
191
|
+
}
|
|
192
|
+
return out;
|
|
193
|
+
}
|
|
194
|
+
catch {
|
|
195
|
+
return new Set();
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
function buildSliceClaimGraph(waves) {
|
|
199
|
+
const bySliceId = new Map();
|
|
200
|
+
for (const wave of waves) {
|
|
201
|
+
for (const row of wave.rows) {
|
|
202
|
+
bySliceId.set(row.sliceId, row);
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
return { bySliceId };
|
|
206
|
+
}
|
|
207
|
+
/**
|
|
208
|
+
* Walk the dependsOn graph from `sliceId` and return the set of
|
|
209
|
+
* predecessor slice ids (transitive). Skips ids that aren't in the
|
|
210
|
+
* graph and handles cycles via a `visiting` set so a malformed plan
|
|
211
|
+
* doesn't lock the linter.
|
|
212
|
+
*/
|
|
213
|
+
function transitivePredecessors(sliceId, graph) {
|
|
214
|
+
const out = new Set();
|
|
215
|
+
const stack = [sliceId];
|
|
216
|
+
const visiting = new Set();
|
|
217
|
+
while (stack.length > 0) {
|
|
218
|
+
const current = stack.pop();
|
|
219
|
+
if (visiting.has(current))
|
|
220
|
+
continue;
|
|
221
|
+
visiting.add(current);
|
|
222
|
+
const row = graph.bySliceId.get(current);
|
|
223
|
+
if (!row)
|
|
224
|
+
continue;
|
|
225
|
+
for (const predecessor of row.dependsOn) {
|
|
226
|
+
const normalized = parseSliceId(predecessor)?.id ?? predecessor;
|
|
227
|
+
if (out.has(normalized))
|
|
228
|
+
continue;
|
|
229
|
+
out.add(normalized);
|
|
230
|
+
stack.push(normalized);
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
return out;
|
|
234
|
+
}
|
|
159
235
|
export async function lintPlanStage(ctx) {
|
|
160
236
|
const { projectRoot, track, raw, absFile, sections, findings, parsedFrontmatter, brainstormShortCircuitBody, brainstormShortCircuitActivated, staleDiagramAuditEnabled, isTrivialOverride } = ctx;
|
|
161
237
|
evaluateInvestigationTrace(ctx, "Implementation Units");
|
|
@@ -600,7 +676,7 @@ export async function lintPlanStage(ctx) {
|
|
|
600
676
|
: `Serial slice(s) found without sequential wave mode hints in: ${inconsistentParallelizable.join(", ")}. Add a wave mode/note indicating sequential execution.`
|
|
601
677
|
});
|
|
602
678
|
const mermaidBlocks = raw.match(/```mermaid[\s\S]*?```/giu) ?? [];
|
|
603
|
-
const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d
|
|
679
|
+
const hasParallelExecMermaid = mermaidBlocks.some((block) => /(flowchart|gantt)/iu.test(block) && /\bW-\d+\b/iu.test(block) && /\bS-\d+(?:[a-z][a-z0-9]*)?\b/iu.test(block));
|
|
604
680
|
findings.push({
|
|
605
681
|
section: "plan_parallel_exec_mermaid_present",
|
|
606
682
|
required: false,
|
|
@@ -610,5 +686,68 @@ export async function lintPlanStage(ctx) {
|
|
|
610
686
|
? "Mermaid visualization for parallel execution waves is present."
|
|
611
687
|
: "No mermaid parallel-execution visualization found (advisory). Add a ` ```mermaid ` flowchart or gantt with W-* and S-* nodes."
|
|
612
688
|
});
|
|
689
|
+
// 7.6.0 — plan_module_introducing_slice_wires_root.
|
|
690
|
+
// Stack-aware: stack-adapter exposes a `wiringAggregator` contract
|
|
691
|
+
// for stacks where introducing a new module file requires a
|
|
692
|
+
// sibling aggregator update (Rust lib.rs, Python __init__.py,
|
|
693
|
+
// optional Node-TS index.ts). For each NEW path in a slice's
|
|
694
|
+
// claim, if the adapter says an aggregator is required, the
|
|
695
|
+
// aggregator path must appear in the slice's own claim or in any
|
|
696
|
+
// transitive predecessor's claim within the same flow.
|
|
697
|
+
//
|
|
698
|
+
// For unknown stacks (Go, Java, Ruby, Swift, .NET, Elixir, …)
|
|
699
|
+
// the adapter returns `wiringAggregator: undefined`, so this
|
|
700
|
+
// gate is a no-op and `found: true`.
|
|
701
|
+
const stackAdapter = await loadStackAdapter(projectRoot);
|
|
702
|
+
const headFiles = await readHeadFiles(projectRoot);
|
|
703
|
+
const wiringIssues = [];
|
|
704
|
+
if (stackAdapter.wiringAggregator) {
|
|
705
|
+
const claimGraph = buildSliceClaimGraph(waveMeta);
|
|
706
|
+
for (const wave of waveMeta) {
|
|
707
|
+
for (const row of [...wave.rows].sort((a, b) => compareSliceIds(a.sliceId, b.sliceId))) {
|
|
708
|
+
const predecessors = transitivePredecessors(row.sliceId, claimGraph);
|
|
709
|
+
const predecessorClaims = new Set();
|
|
710
|
+
for (const predId of predecessors) {
|
|
711
|
+
const predRow = claimGraph.bySliceId.get(predId);
|
|
712
|
+
if (!predRow)
|
|
713
|
+
continue;
|
|
714
|
+
for (const claim of predRow.claimedPaths) {
|
|
715
|
+
predecessorClaims.add(normalizePathToken(claim));
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
const ownClaims = new Set(row.claimedPaths.map(normalizePathToken));
|
|
719
|
+
for (const rawClaim of row.claimedPaths) {
|
|
720
|
+
const claim = normalizePathToken(rawClaim);
|
|
721
|
+
if (claim.length === 0)
|
|
722
|
+
continue;
|
|
723
|
+
// Only NEW paths (not present at HEAD) require an
|
|
724
|
+
// aggregator update — existing modules are already wired.
|
|
725
|
+
if (headFiles.size > 0 && headFiles.has(claim))
|
|
726
|
+
continue;
|
|
727
|
+
const required = stackAdapter.wiringAggregator.resolveAggregatorFor(claim, { headFiles });
|
|
728
|
+
if (!required)
|
|
729
|
+
continue;
|
|
730
|
+
const aggregatorPath = normalizePathToken(required);
|
|
731
|
+
if (ownClaims.has(aggregatorPath))
|
|
732
|
+
continue;
|
|
733
|
+
if (predecessorClaims.has(aggregatorPath))
|
|
734
|
+
continue;
|
|
735
|
+
wiringIssues.push(`${wave.waveId}/${row.sliceId} introduces ${claim} but wiring aggregator ${aggregatorPath} is not in its claim or any predecessor's claim`);
|
|
736
|
+
}
|
|
737
|
+
}
|
|
738
|
+
}
|
|
739
|
+
}
|
|
740
|
+
const wiringApplies = stackAdapter.wiringAggregator !== undefined;
|
|
741
|
+
findings.push({
|
|
742
|
+
section: "plan_module_introducing_slice_wires_root",
|
|
743
|
+
required: taskListPresent && wiringApplies,
|
|
744
|
+
rule: "When a slice introduces a new module file, the stack-adapter's wiring aggregator (e.g. Rust `lib.rs`, Python `__init__.py`, Node-TS barrel `index.*` when present) must be in the same slice's claim or in a transitive predecessor's claim, otherwise the new module is dead code and RED can't be expressed.",
|
|
745
|
+
found: !wiringApplies || wiringIssues.length === 0,
|
|
746
|
+
details: !wiringApplies
|
|
747
|
+
? `Stack adapter (id=${stackAdapter.id}) does not declare a wiring aggregator; gate is a no-op for this stack.`
|
|
748
|
+
: wiringIssues.length === 0
|
|
749
|
+
? `Stack adapter (id=${stackAdapter.id}) wiring aggregator coverage verified across all wave slices.`
|
|
750
|
+
: `Wiring aggregator coverage gaps: ${wiringIssues.slice(0, 12).join(" | ")}${wiringIssues.length > 12 ? ` | … (${wiringIssues.length - 12} more)` : ""}.`
|
|
751
|
+
});
|
|
613
752
|
}
|
|
614
753
|
}
|
|
@@ -6,6 +6,7 @@ import { loadTddReadySlicePool, readDelegationLedger, readDelegationEvents, sele
|
|
|
6
6
|
import { resolveArtifactPath as resolveStageArtifactPath } from "../artifact-paths.js";
|
|
7
7
|
import { exists } from "../fs-utils.js";
|
|
8
8
|
import { mergeParallelWaveDefinitions, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "../internal/plan-split-waves.js";
|
|
9
|
+
import { compareSliceIds } from "../util/slice-id.js";
|
|
9
10
|
import { extractAcceptanceCriterionIdsFromMarkdown, extractH2Sections, evaluateInvestigationTrace, sectionBodyByName } from "./shared.js";
|
|
10
11
|
const SLICE_SUMMARY_START = "<!-- auto-start: tdd-slice-summary -->";
|
|
11
12
|
const SLICE_SUMMARY_END = "<!-- auto-end: tdd-slice-summary -->";
|
|
@@ -509,7 +510,7 @@ async function listSliceFiles(slicesDir) {
|
|
|
509
510
|
continue;
|
|
510
511
|
files.push({ sliceId: match[1], absPath: path.join(slicesDir, name) });
|
|
511
512
|
}
|
|
512
|
-
files.sort((a, b) => (a.sliceId
|
|
513
|
+
files.sort((a, b) => compareSliceIds(a.sliceId, b.sliceId));
|
|
513
514
|
return files;
|
|
514
515
|
}
|
|
515
516
|
function escapeForRegex(value) {
|
package/dist/config.d.ts
CHANGED
|
@@ -1,9 +1,11 @@
|
|
|
1
|
-
import type { CclawConfig, FlowTrack, HarnessId, LanguageRulePack, TddCommitMode, TddIsolationMode } from "./types.js";
|
|
1
|
+
import type { CclawConfig, FlowTrack, HarnessId, LanguageRulePack, LockfileTwinPolicy, TddCommitMode, TddIsolationMode } from "./types.js";
|
|
2
2
|
export declare const TDD_COMMIT_MODES: readonly ["managed-per-slice", "agent-required", "checkpoint-only", "off"];
|
|
3
3
|
export declare const DEFAULT_TDD_COMMIT_MODE: TddCommitMode;
|
|
4
4
|
export declare const TDD_ISOLATION_MODES: readonly ["worktree", "in-place", "auto"];
|
|
5
5
|
export declare const DEFAULT_TDD_ISOLATION_MODE: TddIsolationMode;
|
|
6
6
|
export declare const DEFAULT_TDD_WORKTREE_ROOT = ".cclaw/worktrees";
|
|
7
|
+
export declare const LOCKFILE_TWIN_POLICIES: readonly ["auto-include", "auto-revert", "strict-fence"];
|
|
8
|
+
export declare const DEFAULT_LOCKFILE_TWIN_POLICY: LockfileTwinPolicy;
|
|
7
9
|
export declare const DEFAULT_TDD_TEST_PATH_PATTERNS: readonly string[];
|
|
8
10
|
export declare const DEFAULT_TDD_TEST_GLOBS: readonly string[];
|
|
9
11
|
export declare const DEFAULT_TDD_PRODUCTION_PATH_PATTERNS: readonly string[];
|
|
@@ -24,6 +26,7 @@ export declare function createDefaultConfig(harnesses?: HarnessId[], _defaultTra
|
|
|
24
26
|
export declare function resolveTddCommitMode(config: Pick<CclawConfig, "tdd"> | null | undefined): TddCommitMode;
|
|
25
27
|
export declare function resolveTddIsolationMode(config: Pick<CclawConfig, "tdd"> | null | undefined): TddIsolationMode;
|
|
26
28
|
export declare function resolveTddWorktreeRoot(config: Pick<CclawConfig, "tdd"> | null | undefined): string;
|
|
29
|
+
export declare function resolveLockfileTwinPolicy(config: Pick<CclawConfig, "tdd"> | null | undefined): LockfileTwinPolicy;
|
|
27
30
|
export declare function detectLanguageRulePacks(_projectRoot: string): Promise<LanguageRulePack[]>;
|
|
28
31
|
export declare function readConfig(projectRoot: string, _options?: ReadConfigOptions): Promise<CclawConfig>;
|
|
29
32
|
export interface WriteConfigOptions {
|
package/dist/config.js
CHANGED
|
@@ -20,6 +20,9 @@ export const TDD_ISOLATION_MODES = ["worktree", "in-place", "auto"];
|
|
|
20
20
|
const TDD_ISOLATION_MODE_SET = new Set(TDD_ISOLATION_MODES);
|
|
21
21
|
export const DEFAULT_TDD_ISOLATION_MODE = "worktree";
|
|
22
22
|
export const DEFAULT_TDD_WORKTREE_ROOT = `${RUNTIME_ROOT}/worktrees`;
|
|
23
|
+
export const LOCKFILE_TWIN_POLICIES = ["auto-include", "auto-revert", "strict-fence"];
|
|
24
|
+
const LOCKFILE_TWIN_POLICY_SET = new Set(LOCKFILE_TWIN_POLICIES);
|
|
25
|
+
export const DEFAULT_LOCKFILE_TWIN_POLICY = "auto-include";
|
|
23
26
|
// Kept for runtime modules that use these defaults directly.
|
|
24
27
|
export const DEFAULT_TDD_TEST_PATH_PATTERNS = [
|
|
25
28
|
"**/*.test.*",
|
|
@@ -68,7 +71,8 @@ export function createDefaultConfig(harnesses = DEFAULT_HARNESSES, _defaultTrack
|
|
|
68
71
|
tdd: {
|
|
69
72
|
commitMode: DEFAULT_TDD_COMMIT_MODE,
|
|
70
73
|
isolationMode: DEFAULT_TDD_ISOLATION_MODE,
|
|
71
|
-
worktreeRoot: DEFAULT_TDD_WORKTREE_ROOT
|
|
74
|
+
worktreeRoot: DEFAULT_TDD_WORKTREE_ROOT,
|
|
75
|
+
lockfileTwinPolicy: DEFAULT_LOCKFILE_TWIN_POLICY
|
|
72
76
|
}
|
|
73
77
|
};
|
|
74
78
|
}
|
|
@@ -93,6 +97,13 @@ export function resolveTddWorktreeRoot(config) {
|
|
|
93
97
|
}
|
|
94
98
|
return DEFAULT_TDD_WORKTREE_ROOT;
|
|
95
99
|
}
|
|
100
|
+
export function resolveLockfileTwinPolicy(config) {
|
|
101
|
+
const raw = config?.tdd?.lockfileTwinPolicy;
|
|
102
|
+
if (typeof raw === "string" && LOCKFILE_TWIN_POLICY_SET.has(raw)) {
|
|
103
|
+
return raw;
|
|
104
|
+
}
|
|
105
|
+
return DEFAULT_LOCKFILE_TWIN_POLICY;
|
|
106
|
+
}
|
|
96
107
|
function assertOnlySupportedKeys(parsed, fullPath) {
|
|
97
108
|
const unknownKeys = Object.keys(parsed).filter((key) => !ALLOWED_CONFIG_KEYS.has(key));
|
|
98
109
|
if (unknownKeys.length === 0)
|
|
@@ -153,6 +164,7 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
153
164
|
const rawCommitMode = parsedTdd.commitMode;
|
|
154
165
|
const rawIsolationMode = parsedTdd.isolationMode;
|
|
155
166
|
const rawWorktreeRoot = parsedTdd.worktreeRoot;
|
|
167
|
+
const rawLockfileTwinPolicy = parsedTdd.lockfileTwinPolicy;
|
|
156
168
|
if (rawCommitMode !== undefined &&
|
|
157
169
|
(typeof rawCommitMode !== "string" || !TDD_COMMIT_MODE_SET.has(rawCommitMode))) {
|
|
158
170
|
throw configValidationError(fullPath, `"tdd.commitMode" must be one of: ${TDD_COMMIT_MODES.join(", ")}`);
|
|
@@ -165,6 +177,10 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
165
177
|
(typeof rawWorktreeRoot !== "string" || rawWorktreeRoot.trim().length === 0)) {
|
|
166
178
|
throw configValidationError(fullPath, `"tdd.worktreeRoot" must be a non-empty string when provided`);
|
|
167
179
|
}
|
|
180
|
+
if (rawLockfileTwinPolicy !== undefined &&
|
|
181
|
+
(typeof rawLockfileTwinPolicy !== "string" || !LOCKFILE_TWIN_POLICY_SET.has(rawLockfileTwinPolicy))) {
|
|
182
|
+
throw configValidationError(fullPath, `"tdd.lockfileTwinPolicy" must be one of: ${LOCKFILE_TWIN_POLICIES.join(", ")}`);
|
|
183
|
+
}
|
|
168
184
|
const commitMode = typeof rawCommitMode === "string"
|
|
169
185
|
? rawCommitMode
|
|
170
186
|
: DEFAULT_TDD_COMMIT_MODE;
|
|
@@ -174,6 +190,9 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
174
190
|
const worktreeRoot = typeof rawWorktreeRoot === "string" && rawWorktreeRoot.trim().length > 0
|
|
175
191
|
? rawWorktreeRoot.trim()
|
|
176
192
|
: DEFAULT_TDD_WORKTREE_ROOT;
|
|
193
|
+
const lockfileTwinPolicy = typeof rawLockfileTwinPolicy === "string"
|
|
194
|
+
? rawLockfileTwinPolicy
|
|
195
|
+
: DEFAULT_LOCKFILE_TWIN_POLICY;
|
|
177
196
|
return {
|
|
178
197
|
version,
|
|
179
198
|
flowVersion,
|
|
@@ -181,7 +200,8 @@ export async function readConfig(projectRoot, _options = {}) {
|
|
|
181
200
|
tdd: {
|
|
182
201
|
commitMode,
|
|
183
202
|
isolationMode,
|
|
184
|
-
worktreeRoot
|
|
203
|
+
worktreeRoot,
|
|
204
|
+
lockfileTwinPolicy
|
|
185
205
|
}
|
|
186
206
|
};
|
|
187
207
|
}
|
|
@@ -193,7 +213,8 @@ export async function writeConfig(projectRoot, config, _options = {}) {
|
|
|
193
213
|
tdd: {
|
|
194
214
|
commitMode: resolveTddCommitMode(config),
|
|
195
215
|
isolationMode: resolveTddIsolationMode(config),
|
|
196
|
-
worktreeRoot: resolveTddWorktreeRoot(config)
|
|
216
|
+
worktreeRoot: resolveTddWorktreeRoot(config),
|
|
217
|
+
lockfileTwinPolicy: resolveLockfileTwinPolicy(config)
|
|
197
218
|
}
|
|
198
219
|
};
|
|
199
220
|
await writeFileSafe(configPath(projectRoot), stringify(serialisable));
|
|
@@ -165,6 +165,21 @@ export function sliceBuilderProtocol() {
|
|
|
165
165
|
"- Honor every `delegation-record`/`delegation-record.mjs` row shape the controller requests so artifact linters keep passing.",
|
|
166
166
|
"- The umbrella `slice-completed` row ties RED/GREEN/REFACTOR/DOC timestamps to your builder span.",
|
|
167
167
|
"",
|
|
168
|
+
"### Event → status flag table (7.6.0 — phase-event status validation)",
|
|
169
|
+
"",
|
|
170
|
+
"Phase-level granularity is only meaningful on terminal outcomes. The dispatch-level ack (no `--phase`) is the controller saying \"I see the dispatch surface back\" — it stays on `--status=acknowledged`. Phase events MUST use `--status=completed` or `--status=failed`. The hook rejects mismatches with `phase_event_requires_completed_or_failed_status` (exit 2) and prints a corrected-command hint.",
|
|
171
|
+
"",
|
|
172
|
+
"| event | --phase | --status |",
|
|
173
|
+
"|---|---|---|",
|
|
174
|
+
"| dispatch ack (controller-side) | (none) | `acknowledged` |",
|
|
175
|
+
"| RED watched-fail captured | `red` | `completed` |",
|
|
176
|
+
"| GREEN test passes | `green` | `completed` (with `--refactor-outcome=inline\\|deferred\\|...`) |",
|
|
177
|
+
"| REFACTOR landed | `refactor` | `completed` |",
|
|
178
|
+
"| DOC card landed (triggers slice-commit) | `doc` | `completed` |",
|
|
179
|
+
"| BLOCKED / unrecoverable | (any phase reached) | `failed` |",
|
|
180
|
+
"",
|
|
181
|
+
"Common slip: recording every phase event with `--status=acknowledged` (e.g. `--phase=doc --status=acknowledged`). The event row is silently dropped from terminal-phase aggregations, `slice-commit.mjs` never fires (it only triggers on `phase=doc status=completed`), and `wave-status` reports the slice as phantom-open. Recovery requires raw backfill commands. The 7.6.0 hook validator forbids this configuration up front.",
|
|
182
|
+
"",
|
|
168
183
|
"### Streaming output contract",
|
|
169
184
|
"- Emit one JSON line to stdout per completed phase: `{\"event\":\"phase-completed\",\"stage\":\"tdd\",\"sliceId\":\"S-<n>\",\"phase\":\"<red|green|refactor|refactor-deferred|doc>\",\"spanId\":\"<span>\",\"runId\":\"<run>\",\"ts\":\"<iso>\"}`.",
|
|
170
185
|
"- For `phase=green` with inline/deferred refactor folding, include `refactorOutcome.mode` in the same JSON line so live controllers can close the slice without waiting for file sync.",
|
package/dist/content/hooks.js
CHANGED
|
@@ -1489,6 +1489,43 @@ async function main() {
|
|
|
1489
1489
|
emitProblems(problems, json, 2);
|
|
1490
1490
|
return;
|
|
1491
1491
|
}
|
|
1492
|
+
// 7.6.0 — phase-event status validation.
|
|
1493
|
+
// \`--phase=<phase>\` carries phase-level granularity (RED/GREEN/REFACTOR/DOC
|
|
1494
|
+
// outcomes). It is only meaningful on terminal statuses
|
|
1495
|
+
// (\`completed\` or \`failed\`). The dispatch-level ack (no phase) keeps
|
|
1496
|
+
// \`--status=acknowledged\`. Refuse acknowledged/launched/scheduled/waived/stale
|
|
1497
|
+
// rows that carry a phase so phantom-open slices cannot be recorded.
|
|
1498
|
+
if (
|
|
1499
|
+
typeof args.phase === "string" &&
|
|
1500
|
+
args.phase.length > 0 &&
|
|
1501
|
+
args.status !== "completed" &&
|
|
1502
|
+
args.status !== "failed"
|
|
1503
|
+
) {
|
|
1504
|
+
const sliceFlag = typeof args.slice === "string" && args.slice.length > 0
|
|
1505
|
+
? "--slice=" + args.slice + " "
|
|
1506
|
+
: "";
|
|
1507
|
+
const spanFlag = typeof args["span-id"] === "string" && args["span-id"].length > 0
|
|
1508
|
+
? "--span-id=" + args["span-id"] + " "
|
|
1509
|
+
: "";
|
|
1510
|
+
const correctedCommandHint =
|
|
1511
|
+
"node .cclaw/hooks/delegation-record.mjs --stage=" + (args.stage || "<stage>") +
|
|
1512
|
+
" --agent=" + (args.agent || "<agent>") +
|
|
1513
|
+
" --mode=" + (args.mode || "mandatory") +
|
|
1514
|
+
" --status=completed --phase=" + args.phase +
|
|
1515
|
+
" " + sliceFlag + spanFlag +
|
|
1516
|
+
'--evidence-ref="<phase outcome>"';
|
|
1517
|
+
emitErrorJson(
|
|
1518
|
+
"phase_event_requires_completed_or_failed_status",
|
|
1519
|
+
{
|
|
1520
|
+
phase: args.phase,
|
|
1521
|
+
status: args.status,
|
|
1522
|
+
spanId: args["span-id"] || "unknown",
|
|
1523
|
+
correctedCommandHint
|
|
1524
|
+
},
|
|
1525
|
+
json
|
|
1526
|
+
);
|
|
1527
|
+
return;
|
|
1528
|
+
}
|
|
1492
1529
|
if (args.phase === "refactor-deferred") {
|
|
1493
1530
|
const rationaleQuality = validateDeferredRationaleInline(args["refactor-rationale"], args);
|
|
1494
1531
|
if (rationaleQuality !== "ok") {
|
|
@@ -85,6 +85,7 @@ export const PLAN = {
|
|
|
85
85
|
{ id: "plan_execution_posture_recorded", description: "Execution posture is recorded before implementation handoff." },
|
|
86
86
|
{ id: "plan_parallel_exec_full_coverage", description: "Every T-NNN task in `## Task List` (other than spikes/explicitly-deferred) is assigned to at least one slice inside the `<!-- parallel-exec-managed-start -->` block; TDD cannot fan out work that the plan never authored as waves." },
|
|
87
87
|
{ id: "plan_wave_paths_disjoint", description: "Within each authored wave, slice `claimedPaths` remain disjoint so `wave-fanout` can dispatch safely without overlap conflicts." },
|
|
88
|
+
{ id: "plan_module_introducing_slice_wires_root", description: "When a slice introduces a new module file, the stack-adapter's wiring aggregator (Rust `lib.rs`, Python `__init__.py`, Node-TS barrel when present) must appear in the same slice's claim or a transitive predecessor's claim so RED can be expressed." },
|
|
88
89
|
{ id: "plan_wait_for_confirm", description: "Execution blocked until explicit user confirmation." }
|
|
89
90
|
],
|
|
90
91
|
requiredEvidence: [
|
package/dist/delegation.d.ts
CHANGED
|
@@ -381,6 +381,45 @@ export declare class DispatchClaimedPathProtectedError extends Error {
|
|
|
381
381
|
* offending path so the operator can fix the dispatch in one pass.
|
|
382
382
|
*/
|
|
383
383
|
export declare function validateClaimedPathsNotProtected(stamped: DelegationEntry): void;
|
|
384
|
+
/**
|
|
385
|
+
* Thrown by `appendDelegation` (and the inline `delegation-record.mjs`
|
|
386
|
+
* helper) when an event with a non-null `phase` is recorded with
|
|
387
|
+
* `status="acknowledged"`. Phase-level granularity only makes sense on
|
|
388
|
+
* terminal outcomes (`completed` or `failed`); the dispatch-level ACK
|
|
389
|
+
* (no phase) is the controller saying "I see the dispatch surface back".
|
|
390
|
+
*
|
|
391
|
+
* Motivated by hox W-08/S-41: the slice-builder agent recorded all four
|
|
392
|
+
* phase events with `--status=acknowledged`, which the helper silently
|
|
393
|
+
* accepted but `slice-commit.mjs` only fires on `phase=doc status=completed`.
|
|
394
|
+
* `wave-status` then saw the slice as phantom-open even though the
|
|
395
|
+
* worker had finished. Recovery required raw backfill commands.
|
|
396
|
+
*
|
|
397
|
+
* 7.6.0 makes the constraint explicit: pair `--phase=<phase>` with
|
|
398
|
+
* `--status=completed` (or `--status=failed`) and use
|
|
399
|
+
* `--status=acknowledged` only for the dispatch-level ack (no phase).
|
|
400
|
+
*/
|
|
401
|
+
export declare class PhaseEventRequiresTerminalStatusError extends Error {
|
|
402
|
+
readonly phase: string;
|
|
403
|
+
readonly status: DelegationStatus;
|
|
404
|
+
readonly spanId: string;
|
|
405
|
+
readonly correctedCommandHint: string;
|
|
406
|
+
constructor(params: {
|
|
407
|
+
phase: string;
|
|
408
|
+
status: DelegationStatus;
|
|
409
|
+
spanId: string;
|
|
410
|
+
correctedCommandHint: string;
|
|
411
|
+
});
|
|
412
|
+
}
|
|
413
|
+
/**
|
|
414
|
+
* Reject delegation rows where `phase` is set but `status` is not
|
|
415
|
+
* `completed` or `failed`. Acknowledged/launched/scheduled/waived/stale
|
|
416
|
+
* rows must NOT carry a phase — the phase-level lifecycle exists only
|
|
417
|
+
* to record terminal outcomes per phase (RED/GREEN/REFACTOR/DOC).
|
|
418
|
+
*
|
|
419
|
+
* Throws `PhaseEventRequiresTerminalStatusError`; the message includes
|
|
420
|
+
* an actionable corrected-command hint that the controller can paste.
|
|
421
|
+
*/
|
|
422
|
+
export declare function validatePhaseEventStatus(stamped: DelegationEntry): void;
|
|
384
423
|
/**
|
|
385
424
|
* Thrown by `appendDelegation` when a new `scheduled` span would open a
|
|
386
425
|
* second TDD cycle for a slice that already has at least one closed span
|
package/dist/delegation.js
CHANGED
|
@@ -9,6 +9,7 @@ import { HARNESS_ADAPTERS } from "./harness-adapters.js";
|
|
|
9
9
|
import { readFlowState } from "./runs.js";
|
|
10
10
|
import { mandatoryAgentsFor, stageSchema } from "./content/stage-schema.js";
|
|
11
11
|
import { compareCanonicalUnitIds, mergeParallelWaveDefinitions, parseImplementationUnitParallelFields, parseImplementationUnits, parseParallelExecutionPlanWaves, parseWavePlanDirectory } from "./internal/plan-split-waves.js";
|
|
12
|
+
import { compareSliceIds } from "./util/slice-id.js";
|
|
12
13
|
const execFileAsync = promisify(execFile);
|
|
13
14
|
const TERMINAL_DELEGATION_STATUSES = new Set(["completed", "failed", "waived", "stale"]);
|
|
14
15
|
export const DELEGATION_DISPATCH_SURFACES = [
|
|
@@ -700,6 +701,69 @@ export function validateClaimedPathsNotProtected(stamped) {
|
|
|
700
701
|
spanId: stamped.spanId ?? "unknown"
|
|
701
702
|
});
|
|
702
703
|
}
|
|
704
|
+
/**
|
|
705
|
+
* Thrown by `appendDelegation` (and the inline `delegation-record.mjs`
|
|
706
|
+
* helper) when an event with a non-null `phase` is recorded with
|
|
707
|
+
* `status="acknowledged"`. Phase-level granularity only makes sense on
|
|
708
|
+
* terminal outcomes (`completed` or `failed`); the dispatch-level ACK
|
|
709
|
+
* (no phase) is the controller saying "I see the dispatch surface back".
|
|
710
|
+
*
|
|
711
|
+
* Motivated by hox W-08/S-41: the slice-builder agent recorded all four
|
|
712
|
+
* phase events with `--status=acknowledged`, which the helper silently
|
|
713
|
+
* accepted but `slice-commit.mjs` only fires on `phase=doc status=completed`.
|
|
714
|
+
* `wave-status` then saw the slice as phantom-open even though the
|
|
715
|
+
* worker had finished. Recovery required raw backfill commands.
|
|
716
|
+
*
|
|
717
|
+
* 7.6.0 makes the constraint explicit: pair `--phase=<phase>` with
|
|
718
|
+
* `--status=completed` (or `--status=failed`) and use
|
|
719
|
+
* `--status=acknowledged` only for the dispatch-level ack (no phase).
|
|
720
|
+
*/
|
|
721
|
+
export class PhaseEventRequiresTerminalStatusError extends Error {
|
|
722
|
+
phase;
|
|
723
|
+
status;
|
|
724
|
+
spanId;
|
|
725
|
+
correctedCommandHint;
|
|
726
|
+
constructor(params) {
|
|
727
|
+
super(`phase_event_requires_completed_or_failed_status — span ${params.spanId} recorded --phase=${params.phase} with --status=${params.status}; ` +
|
|
728
|
+
`phase-level events are only valid on terminal outcomes (--status=completed or --status=failed). ` +
|
|
729
|
+
`The dispatch-level ack (no --phase) can still use --status=acknowledged. ` +
|
|
730
|
+
`Corrected command: ${params.correctedCommandHint}`);
|
|
731
|
+
this.name = "PhaseEventRequiresTerminalStatusError";
|
|
732
|
+
this.phase = params.phase;
|
|
733
|
+
this.status = params.status;
|
|
734
|
+
this.spanId = params.spanId;
|
|
735
|
+
this.correctedCommandHint = params.correctedCommandHint;
|
|
736
|
+
}
|
|
737
|
+
}
|
|
738
|
+
/**
 * Reject delegation rows where `phase` is set but `status` is not
 * `completed` or `failed`. Acknowledged/launched/scheduled/waived/stale
 * rows must NOT carry a phase — the phase-level lifecycle exists only
 * to record terminal outcomes per phase (RED/GREEN/REFACTOR/DOC).
 *
 * Throws `PhaseEventRequiresTerminalStatusError`; the message includes
 * an actionable corrected-command hint that the controller can paste.
 *
 * @param {{ phase?: string, status: string, sliceId?: string, spanId?: string,
 *           stage: string, agent: string, mode: string }} stamped - the
 *   timestamped ledger entry about to be appended.
 * @returns {void} silently when no phase is present or the status is terminal.
 * @throws {PhaseEventRequiresTerminalStatusError} on a phase with a
 *   non-terminal status.
 */
export function validatePhaseEventStatus(stamped) {
    // No phase on the row → dispatch-level event; any status is allowed.
    if (typeof stamped.phase !== "string" || stamped.phase.length === 0)
        return;
    // Terminal outcomes are exactly the statuses a phase event may carry.
    if (stamped.status === "completed" || stamped.status === "failed")
        return;
    const phase = stamped.phase;
    // Treat empty-string ids the same as absent ones, consistently for
    // both the corrected-command flags and the error payload (previously
    // an empty-string spanId slipped through `?? "unknown"` and produced
    // a blank span in the message).
    const hasSliceId = typeof stamped.sliceId === "string" && stamped.sliceId.length > 0;
    const hasSpanId = typeof stamped.spanId === "string" && stamped.spanId.length > 0;
    const sliceFlag = hasSliceId ? `--slice=${stamped.sliceId} ` : "";
    const spanFlag = hasSpanId ? `--span-id=${stamped.spanId} ` : "";
    const spanId = hasSpanId ? stamped.spanId : "unknown";
    const correctedCommandHint = `node .cclaw/hooks/delegation-record.mjs --stage=${stamped.stage} --agent=${stamped.agent} --mode=${stamped.mode} --status=completed --phase=${phase} ${sliceFlag}${spanFlag}--evidence-ref="<phase outcome>"`;
    throw new PhaseEventRequiresTerminalStatusError({
        phase,
        status: stamped.status,
        spanId,
        correctedCommandHint
    });
}
|
|
703
767
|
/**
|
|
704
768
|
* Thrown by `appendDelegation` when a new `scheduled` span would open a
|
|
705
769
|
* second TDD cycle for a slice that already has at least one closed span
|
|
@@ -845,7 +909,7 @@ export function readySliceUnitsFromMergedWaves(mergedWaves, planMarkdown, option
|
|
|
845
909
|
}
|
|
846
910
|
}
|
|
847
911
|
const out = [];
|
|
848
|
-
for (const sliceId of [...sliceSet].sort(
|
|
912
|
+
for (const sliceId of [...sliceSet].sort(compareSliceIds)) {
|
|
849
913
|
const member = mergedWaves.flatMap((w) => w.members).find((x) => x.sliceId === sliceId);
|
|
850
914
|
if (!member)
|
|
851
915
|
continue;
|
|
@@ -1183,6 +1247,7 @@ export async function appendDelegation(projectRoot, entry) {
|
|
|
1183
1247
|
return;
|
|
1184
1248
|
}
|
|
1185
1249
|
validateMonotonicTimestamps(stamped, prior.entries);
|
|
1250
|
+
validatePhaseEventStatus(stamped);
|
|
1186
1251
|
if (stamped.status === "scheduled" &&
|
|
1187
1252
|
typeof stamped.sliceId === "string" &&
|
|
1188
1253
|
stamped.sliceId.length > 0 &&
|
package/dist/gate-evidence.js
CHANGED
|
@@ -11,6 +11,7 @@ import { computeEarlyLoopStatus, isEarlyLoopStage, normalizeEarlyLoopMaxIteratio
|
|
|
11
11
|
import { detectPublicApiChanges } from "./internal/detect-public-api-changes.js";
|
|
12
12
|
import { detectSupplyChainChanges } from "./internal/detect-supply-chain-changes.js";
|
|
13
13
|
import { readFlowState, writeFlowState } from "./runs.js";
|
|
14
|
+
import { loadStackAdapter } from "./stack-detection.js";
|
|
14
15
|
import { validateTddVerificationEvidence } from "./tdd-verification-evidence.js";
|
|
15
16
|
async function currentStageArtifactExists(projectRoot, stage, track) {
|
|
16
17
|
const resolved = await resolveArtifactPath(stage, {
|
|
@@ -88,20 +89,17 @@ async function discoverRealTestCommands(projectRoot) {
|
|
|
88
89
|
commands.push(name === "test" ? "bun test" : `bun run ${name}`);
|
|
89
90
|
}
|
|
90
91
|
}
|
|
91
|
-
|
|
92
|
-
|
|
92
|
+
// 7.6.0 — pull additional commands from the stack-adapter's
|
|
93
|
+
// testCommandHints rather than hardcoding pytest/go test/cargo
|
|
94
|
+
// test/mvn/gradle here. Adapters that don't apply to the project
|
|
95
|
+
// contribute no commands; pytest.ini support is kept as an
|
|
96
|
+
// explicit fallback because pyproject.toml-less projects exist.
|
|
97
|
+
const stackAdapter = await loadStackAdapter(projectRoot);
|
|
98
|
+
for (const hint of stackAdapter.testCommandHints) {
|
|
99
|
+
commands.push(hint);
|
|
100
|
+
}
|
|
93
101
|
if (await exists(path.join(projectRoot, "pytest.ini")))
|
|
94
102
|
commands.push("pytest");
|
|
95
|
-
if (await exists(path.join(projectRoot, "go.mod")))
|
|
96
|
-
commands.push("go test ./...");
|
|
97
|
-
if (await exists(path.join(projectRoot, "Cargo.toml")))
|
|
98
|
-
commands.push("cargo test");
|
|
99
|
-
if (await exists(path.join(projectRoot, "pom.xml")))
|
|
100
|
-
commands.push("mvn test");
|
|
101
|
-
if (await exists(path.join(projectRoot, "build.gradle")) ||
|
|
102
|
-
await exists(path.join(projectRoot, "build.gradle.kts"))) {
|
|
103
|
-
commands.push("gradle test", "./gradlew test");
|
|
104
|
-
}
|
|
105
103
|
return unique(commands);
|
|
106
104
|
}
|
|
107
105
|
async function verifyDiscoveredCommandEvidence(projectRoot, stage, gateId, flowState) {
|
|
@@ -4,7 +4,7 @@ import { RUNTIME_ROOT } from "../../constants.js";
|
|
|
4
4
|
import { createInitialFlowState } from "../../flow-state.js";
|
|
5
5
|
import { readFlowState, writeFlowState } from "../../runs.js";
|
|
6
6
|
import { listExistingFiles, listFilesUnder, pathExists } from "./helpers.js";
|
|
7
|
-
import { STACK_DISCOVERY_DIR_MARKERS, STACK_DISCOVERY_MARKERS } from "../../stack-detection.js";
|
|
7
|
+
import { STACK_DISCOVERY_DIR_MARKERS, STACK_DISCOVERY_MARKERS, loadStackAdapter } from "../../stack-detection.js";
|
|
8
8
|
import { TRACK_STAGES } from "../../types.js";
|
|
9
9
|
import { buildValidationReport } from "./advance.js";
|
|
10
10
|
import { carriedCompletedStageCatalog, completedStageClosureEvidenceIssues, firstIncompleteStageForTrack } from "./verify.js";
|
|
@@ -58,11 +58,20 @@ export async function collectRepoSignals(projectRoot) {
|
|
|
58
58
|
// ignore
|
|
59
59
|
}
|
|
60
60
|
}
|
|
61
|
-
|
|
61
|
+
// 7.6.0 — manifest detection now routes through the stack-adapter
|
|
62
|
+
// contract instead of hardcoding `package.json` / `pyproject.toml` /
|
|
63
|
+
// `Cargo.toml`. Adapters that declare manifestGlobs probe their
|
|
64
|
+
// declared paths; the unknown adapter is a no-op.
|
|
65
|
+
const stackAdapter = await loadStackAdapter(projectRoot);
|
|
66
|
+
for (const manifestGlob of stackAdapter.manifestGlobs) {
|
|
67
|
+
if (manifestGlob.includes("*"))
|
|
68
|
+
continue;
|
|
62
69
|
try {
|
|
63
|
-
const st = await fs.stat(path.join(projectRoot,
|
|
64
|
-
if (st.isFile())
|
|
70
|
+
const st = await fs.stat(path.join(projectRoot, manifestGlob));
|
|
71
|
+
if (st.isFile()) {
|
|
65
72
|
hasPackageManifest = true;
|
|
73
|
+
break;
|
|
74
|
+
}
|
|
66
75
|
}
|
|
67
76
|
catch {
|
|
68
77
|
// ignore
|
|
@@ -3,6 +3,7 @@ import path from "node:path";
|
|
|
3
3
|
import { RUNTIME_ROOT } from "../constants.js";
|
|
4
4
|
import { writeFileSafe } from "../fs-utils.js";
|
|
5
5
|
import { readDelegationLedger, isParallelTddSliceWorker } from "../delegation.js";
|
|
6
|
+
import { compareSliceIds } from "../util/slice-id.js";
|
|
6
7
|
export function parseCohesionContractArgs(tokens) {
|
|
7
8
|
const args = { stub: false, force: false, reason: null };
|
|
8
9
|
for (const token of tokens) {
|
|
@@ -143,18 +144,5 @@ function collectSliceIds(entries) {
|
|
|
143
144
|
continue;
|
|
144
145
|
set.add(entry.sliceId);
|
|
145
146
|
}
|
|
146
|
-
return [...set].sort(
|
|
147
|
-
const an = parseSliceNum(a);
|
|
148
|
-
const bn = parseSliceNum(b);
|
|
149
|
-
if (an !== null && bn !== null)
|
|
150
|
-
return an - bn;
|
|
151
|
-
return a.localeCompare(b);
|
|
152
|
-
});
|
|
153
|
-
}
|
|
154
|
-
function parseSliceNum(sliceId) {
|
|
155
|
-
const m = /^S-(\d+)$/u.exec(sliceId);
|
|
156
|
-
if (!m)
|
|
157
|
-
return null;
|
|
158
|
-
const n = Number.parseInt(m[1], 10);
|
|
159
|
-
return Number.isFinite(n) ? n : null;
|
|
147
|
+
return [...set].sort(compareSliceIds);
|
|
160
148
|
}
|