cclaw-cli 6.6.0 → 6.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/artifact-linter/findings-dedup.d.ts +56 -0
- package/dist/artifact-linter/findings-dedup.js +232 -0
- package/dist/artifact-linter/plan.js +3 -2
- package/dist/artifact-linter/shared.d.ts +49 -0
- package/dist/artifact-linter/shared.js +35 -0
- package/dist/artifact-linter.d.ts +1 -1
- package/dist/artifact-linter.js +45 -3
- package/dist/content/hooks.js +241 -7
- package/dist/content/node-hooks.js +43 -0
- package/dist/content/skills-elicitation.js +3 -6
- package/dist/content/skills.js +3 -1
- package/dist/content/stages/brainstorm.js +4 -4
- package/dist/content/stages/scope.js +2 -2
- package/dist/content/templates.js +3 -2
- package/dist/delegation.d.ts +107 -0
- package/dist/delegation.js +223 -6
- package/dist/internal/advance-stage/advance.js +23 -1
- package/dist/internal/advance-stage/parsers.d.ts +8 -0
- package/dist/internal/advance-stage/parsers.js +7 -0
- package/dist/internal/advance-stage/proactive-delegation-trace.d.ts +3 -0
- package/dist/internal/advance-stage/proactive-delegation-trace.js +8 -1
- package/dist/internal/advance-stage/rewind.js +2 -2
- package/dist/internal/advance-stage/start-flow.js +4 -1
- package/dist/internal/advance-stage.js +41 -2
- package/dist/internal/flow-state-repair.d.ts +13 -0
- package/dist/internal/flow-state-repair.js +65 -0
- package/dist/internal/waiver-grant.d.ts +62 -0
- package/dist/internal/waiver-grant.js +294 -0
- package/dist/run-persistence.d.ts +70 -0
- package/dist/run-persistence.js +215 -3
- package/dist/runs.d.ts +1 -1
- package/dist/runs.js +1 -1
- package/dist/runtime/run-hook.mjs +43 -0
- package/package.json +1 -1
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import type { FlowStage } from "../types.js";
|
|
2
|
+
import type { LintFinding } from "./shared.js";
|
|
3
|
+
export declare const FINDINGS_CACHE_SCHEMA_VERSION = 1;
/**
 * Dedup classification of a single finding relative to the cached
 * state from the previous linter run.
 */
export type FindingStatus = {
    kind: "new";
} | {
    kind: "repeat";
    // How many runs in a row the finding has been seen, this run included.
    count: number;
} | {
    kind: "resolved";
};
/** A finding paired with its stable fingerprint and dedup status. */
export interface ClassifiedFinding {
    finding: LintFinding;
    fingerprint: string;
    status: FindingStatus;
}
/** A previously-cached finding that did not reappear in the current run. */
export interface ResolvedFinding {
    fingerprint: string;
    rule: string;
    // ISO timestamp of the last run in which the finding was observed.
    lastSeenAt: string;
}
/** Aggregate counts for one dedup pass over a stage's findings. */
export interface FindingsDedupSummary {
    newCount: number;
    repeatCount: number;
    resolvedCount: number;
    resolved: ResolvedFinding[];
}
/** Full result of classifying and persisting one linter run's findings. */
export interface LintRunDedupResult {
    classified: ClassifiedFinding[];
    summary: FindingsDedupSummary;
    // One-line human summary; empty string when there is nothing to report.
    header: string;
}
/**
 * Normalize a finding detail string so volatile tokens (run IDs,
 * timestamps, counts, hex hashes, temp paths) don't cause a finding
 * to appear "new" on every invocation.
 */
export declare function normalizeFindingDetail(detail: string): string;
/** Stable short fingerprint of a finding within a stage (hash of stage, rule, normalized detail). */
export declare function fingerprintFinding(stage: FlowStage, finding: LintFinding): string;
/**
 * Classify each emitted finding as `new`, `repeat:N`, or `resolved`
 * relative to the cached sidecar for this stage. Persists the updated
 * fingerprint set under a directory lock so concurrent lint runs for
 * the same project don't clobber each other.
 *
 * The returned `header` is a short human string intended for inclusion
 * above the linter output; it's stable across runs when findings
 * repeat. Empty string when there is nothing meaningful to report
 * (no findings and no carry-over state).
 */
export declare function classifyAndPersistFindings(projectRoot: string, stage: FlowStage, findings: LintFinding[], options?: {
    // Injectable clock so callers/tests can pin the timestamp.
    now?: Date;
}): Promise<LintRunDedupResult>;
/** Render the one-line dedup summary header (empty when all counts are zero). */
export declare function buildDedupHeader(stage: FlowStage, summary: FindingsDedupSummary): string;
/** Render a status as its bracketed display tag: "[new]", "[resolved]", or "[repeat:N]". */
export declare function formatFindingStatusTag(status: FindingStatus): string;
/** Expose the on-disk location of the dedup cache sidecar for a project. */
export declare function findingsDedupCachePathFor(projectRoot: string): string;
|
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import fs from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { RUNTIME_ROOT } from "../constants.js";
|
|
5
|
+
import { ensureDir, exists, withDirectoryLock, writeFileSafe } from "../fs-utils.js";
|
|
6
|
+
/**
|
|
7
|
+
* Wave 26 (v6.7.0) linter-dedup cache. The linter persists a per-stage
|
|
8
|
+
* fingerprint of each finding between runs so authors can tell at a
|
|
9
|
+
* glance what's `new`, `repeat`, or `resolved` relative to the last run.
|
|
10
|
+
*
|
|
11
|
+
* Fingerprint = `sha256(stage | rule | normalizedDetail).slice(0, 8)`.
|
|
12
|
+
* Details are normalized to stabilize the digest: whitespace collapsed,
|
|
13
|
+
* run-ids/hashes/timestamps replaced with placeholders, and enumeration
|
|
14
|
+
* counts (e.g. "3 approach detail card(s)") replaced with `<N>`.
|
|
15
|
+
*
|
|
16
|
+
* The cache is intentionally bounded by `MAX_PER_STAGE` so a noisy stage
|
|
17
|
+
* can't grow the sidecar without bound. When the active run trims the
|
|
18
|
+
* cache we drop the oldest `firstSeenAt` entries first.
|
|
19
|
+
*/
|
|
20
|
+
// Sidecar cache location, relative to the project root; the lock file
// sits beside it so writers can serialize via `withDirectoryLock`.
const FINDINGS_CACHE_REL_PATH = `${RUNTIME_ROOT}/.linter-findings.json`;
const FINDINGS_CACHE_LOCK_REL_PATH = `${RUNTIME_ROOT}/.linter-findings.json.lock`;
// Version of the on-disk shape; readers degrade unknown shapes to empty.
export const FINDINGS_CACHE_SCHEMA_VERSION = 1;
// Hard cap on cached fingerprints per stage (oldest `firstSeenAt` dropped first).
const MAX_PER_STAGE = 200;
// Absolute path of the findings cache sidecar for a project.
function cachePath(projectRoot) {
    return path.join(projectRoot, FINDINGS_CACHE_REL_PATH);
}
// Absolute path of the lock guarding cache read-modify-write cycles.
function cacheLockPath(projectRoot) {
    return path.join(projectRoot, FINDINGS_CACHE_LOCK_REL_PATH);
}
// Fresh per-stage record: nothing seen yet, no prior run timestamp.
function emptyStageCache() {
    return { findings: [], lastRunAt: null };
}
// Fresh top-level cache file shape.
function emptyCacheFile() {
    return { schemaVersion: FINDINGS_CACHE_SCHEMA_VERSION, stages: {} };
}
|
|
36
|
+
/**
 * Normalize a finding detail string so volatile tokens (run IDs,
 * timestamps, counts, hex hashes, temp paths) don't cause a finding
 * to appear "new" on every invocation.
 *
 * Non-string / empty input normalizes to "".
 */
export function normalizeFindingDetail(detail) {
    if (typeof detail !== "string" || detail.length === 0)
        return "";
    let normalized = detail;
    // Volatile tokens → stable placeholders, most specific pattern first.
    normalized = normalized.replace(/\brun-[a-z0-9-]+\b/giu, "run-<id>");
    normalized = normalized.replace(/\b[0-9a-f]{16,}\b/giu, "<hex>");
    normalized = normalized.replace(/\b\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z?\b/gu, "<ts>");
    // Collapse every remaining digit run (counts, ids) to one placeholder.
    // Fix: the previous extra pass for 10+-digit runs was dead code — this
    // pattern already maps those same runs to the identical "<n>" token.
    normalized = normalized.replace(/\b\d+\b/gu, "<n>");
    // Flatten whitespace and case so cosmetic edits don't change the digest.
    normalized = normalized.replace(/[ \t]+/gu, " ");
    normalized = normalized.replace(/\r?\n/gu, " ");
    return normalized.trim().toLowerCase();
}
/**
 * Stable 8-hex-char fingerprint of a finding within a stage:
 * sha256("<stage>|<trimmed rule>|<normalized detail>"), truncated.
 */
export function fingerprintFinding(stage, finding) {
    const payload = `${stage}|${finding.rule.trim()}|${normalizeFindingDetail(finding.details)}`;
    return createHash("sha256").update(payload, "utf8").digest("hex").slice(0, 8);
}
|
|
58
|
+
/**
 * Load and sanitize the cache sidecar. Every failure mode (missing file,
 * unreadable file, invalid JSON, unexpected shape) degrades to an empty
 * cache instead of throwing, so a corrupt sidecar can never break a lint.
 */
async function readCacheFile(projectRoot) {
    const filePath = cachePath(projectRoot);
    if (!(await exists(filePath)))
        return emptyCacheFile();
    let raw;
    try {
        raw = await fs.readFile(filePath, "utf8");
    }
    catch {
        return emptyCacheFile();
    }
    let parsed;
    try {
        parsed = JSON.parse(raw);
    }
    catch {
        return emptyCacheFile();
    }
    // Top level must be a plain object (not null / array / scalar).
    if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
        return emptyCacheFile();
    }
    const typed = parsed;
    const stages = (typed.stages ?? {});
    // Rebuild a known-good structure field by field: malformed stage
    // entries and rows are skipped, malformed scalars get defaults.
    const next = emptyCacheFile();
    for (const [stageKey, value] of Object.entries(stages)) {
        if (!value || typeof value !== "object" || Array.isArray(value))
            continue;
        const rawStage = value;
        const findingsRaw = Array.isArray(rawStage.findings) ? rawStage.findings : [];
        const findings = [];
        for (const row of findingsRaw) {
            if (!row || typeof row !== "object" || Array.isArray(row))
                continue;
            const r = row;
            const fingerprint = typeof r.fingerprint === "string" ? r.fingerprint : "";
            const rule = typeof r.rule === "string" ? r.rule : "";
            const section = typeof r.section === "string" ? r.section : "";
            const firstSeenAt = typeof r.firstSeenAt === "string" ? r.firstSeenAt : "";
            const lastSeenAt = typeof r.lastSeenAt === "string" ? r.lastSeenAt : "";
            // runCount is clamped to a positive integer; anything else resets to 1.
            const runCount = typeof r.runCount === "number" && Number.isFinite(r.runCount)
                ? Math.max(1, Math.floor(r.runCount))
                : 1;
            // A row without its identity fields is useless — drop it.
            if (fingerprint.length === 0 || rule.length === 0)
                continue;
            findings.push({ fingerprint, rule, section, firstSeenAt, lastSeenAt, runCount });
        }
        next.stages[stageKey] = {
            findings,
            lastRunAt: typeof rawStage.lastRunAt === "string" ? rawStage.lastRunAt : null
        };
    }
    return next;
}
|
|
111
|
+
/**
 * Persist the cache sidecar as pretty-printed JSON with a trailing
 * newline. The parent directory is created on demand and the file is
 * written with owner-only permissions (0600).
 */
async function writeCacheFile(projectRoot, cache) {
    await ensureDir(path.dirname(cachePath(projectRoot)));
    await writeFileSafe(cachePath(projectRoot), `${JSON.stringify(cache, null, 2)}\n`, { mode: 0o600 });
}
|
|
115
|
+
/**
 * Classify each emitted finding as `new`, `repeat:N`, or `resolved`
 * relative to the cached sidecar for this stage. Persists the updated
 * fingerprint set under a directory lock so concurrent lint runs for
 * the same project don't clobber each other.
 *
 * The returned `header` is a short human string intended for inclusion
 * above the linter output; it's stable across runs when findings
 * repeat. Empty string when there is nothing meaningful to report
 * (no findings and no carry-over state).
 */
export async function classifyAndPersistFindings(projectRoot, stage, findings, options = {}) {
    // `options.now` lets callers/tests pin the timestamp; one ISO value is
    // used for the whole run so all entries agree.
    const nowIso = (options.now ?? new Date()).toISOString();
    // The entire read-modify-write cycle runs under the lock.
    return withDirectoryLock(cacheLockPath(projectRoot), async () => {
        const cache = await readCacheFile(projectRoot);
        const previous = cache.stages[stage] ?? emptyStageCache();
        // Index last run's entries by fingerprint for O(1) lookup.
        const previousByFingerprint = new Map();
        for (const entry of previous.findings) {
            previousByFingerprint.set(entry.fingerprint, entry);
        }
        const currentFingerprints = new Set();
        const classified = [];
        const nextFindings = [];
        let newCount = 0;
        let repeatCount = 0;
        for (const finding of findings) {
            const fingerprint = fingerprintFinding(stage, finding);
            currentFingerprints.add(fingerprint);
            const prior = previousByFingerprint.get(fingerprint);
            if (prior) {
                // Seen before: bump runCount and keep the original
                // firstSeenAt (falling back to now if it was empty).
                const nextEntry = {
                    fingerprint,
                    rule: finding.rule,
                    section: finding.section,
                    firstSeenAt: prior.firstSeenAt || nowIso,
                    lastSeenAt: nowIso,
                    runCount: prior.runCount + 1
                };
                nextFindings.push(nextEntry);
                repeatCount += 1;
                classified.push({
                    finding,
                    fingerprint,
                    status: { kind: "repeat", count: nextEntry.runCount }
                });
                continue;
            }
            // First sighting of this fingerprint.
            const nextEntry = {
                fingerprint,
                rule: finding.rule,
                section: finding.section,
                firstSeenAt: nowIso,
                lastSeenAt: nowIso,
                runCount: 1
            };
            nextFindings.push(nextEntry);
            newCount += 1;
            classified.push({
                finding,
                fingerprint,
                status: { kind: "new" }
            });
        }
        // Cached fingerprints that didn't reappear this run are resolved;
        // they are reported once and dropped from the next cache state.
        const resolved = [];
        for (const entry of previous.findings) {
            if (currentFingerprints.has(entry.fingerprint))
                continue;
            resolved.push({
                fingerprint: entry.fingerprint,
                rule: entry.rule,
                lastSeenAt: entry.lastSeenAt
            });
        }
        // Sort oldest-first by firstSeenAt so trimming below removes the
        // oldest entries. Unparsable timestamps compare as equal.
        nextFindings.sort((a, b) => {
            const aTime = Date.parse(a.firstSeenAt);
            const bTime = Date.parse(b.firstSeenAt);
            return Number.isFinite(aTime) && Number.isFinite(bTime) ? aTime - bTime : 0;
        });
        // Bound the sidecar: keep only the newest MAX_PER_STAGE entries.
        const trimmed = nextFindings.length > MAX_PER_STAGE
            ? nextFindings.slice(nextFindings.length - MAX_PER_STAGE)
            : nextFindings;
        cache.stages[stage] = {
            findings: trimmed,
            lastRunAt: nowIso
        };
        await writeCacheFile(projectRoot, cache);
        const summary = {
            newCount,
            repeatCount,
            resolvedCount: resolved.length,
            resolved
        };
        const header = buildDedupHeader(stage, summary);
        return { classified, summary, header };
    });
}
|
|
211
|
+
/**
 * Render the one-line human summary shown above the linter output,
 * e.g. `linter findings (stage=plan): 2 new, 1 repeat.`
 * Returns the empty string when every counter is zero.
 */
export function buildDedupHeader(stage, summary) {
    const segments = [];
    const append = (count, label) => {
        if (count > 0) {
            segments.push(`${count} ${label}`);
        }
    };
    append(summary.newCount, "new");
    append(summary.repeatCount, "repeat");
    append(summary.resolvedCount, "resolved");
    return segments.length === 0
        ? ""
        : `linter findings (stage=${stage}): ${segments.join(", ")}.`;
}
|
|
223
|
+
/**
 * Render a status as the short bracketed tag printed beside a finding:
 * "[new]", "[resolved]", or "[repeat:N]".
 */
export function formatFindingStatusTag(status) {
    switch (status.kind) {
        case "new":
            return "[new]";
        case "resolved":
            return "[resolved]";
        default:
            return `[repeat:${status.count}]`;
    }
}
|
|
230
|
+
/**
 * Expose the on-disk location of the dedup cache sidecar for a project
 * (thin public wrapper over the module-private `cachePath`).
 */
export function findingsDedupCachePathFor(projectRoot) {
    return cachePath(projectRoot);
}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { evaluateInvestigationTrace, evaluateLayeredDocumentReviewStatus, headingPresent, sectionBodyByName, collectPatternHits, PLACEHOLDER_PATTERNS, extractDecisionIds, SCOPE_REDUCTION_PATTERNS } from "./shared.js";
|
|
1
|
+
import { evaluateInvestigationTrace, evaluateLayeredDocumentReviewStatus, extractAuthoredBody, headingPresent, sectionBodyByName, collectPatternHits, PLACEHOLDER_PATTERNS, extractDecisionIds, SCOPE_REDUCTION_PATTERNS } from "./shared.js";
|
|
2
2
|
import { resolveArtifactPath as resolveStageArtifactPath } from "../artifact-paths.js";
|
|
3
3
|
import { exists } from "../fs-utils.js";
|
|
4
4
|
import { FORBIDDEN_PLACEHOLDER_TOKENS, CONFIDENCE_FINDING_REGEX_SOURCE } from "../content/skills.js";
|
|
@@ -86,7 +86,8 @@ export async function lintPlanStage(ctx) {
|
|
|
86
86
|
});
|
|
87
87
|
}
|
|
88
88
|
const allPlaceholderTokens = FORBIDDEN_PLACEHOLDER_TOKENS.map((token) => token.toLowerCase());
|
|
89
|
-
const
|
|
89
|
+
const authoredBody = extractAuthoredBody(raw);
|
|
90
|
+
const lowerRaw = authoredBody.toLowerCase();
|
|
90
91
|
const planWidePlaceholderHits = allPlaceholderTokens.filter((token) => lowerRaw.includes(token));
|
|
91
92
|
// Strip the "## NO PLACEHOLDERS Rule" section (which lists tokens) and
|
|
92
93
|
// any acknowledgement text from the scan to avoid false positives where
|
|
@@ -123,11 +123,36 @@ export interface LintFinding {
|
|
|
123
123
|
found: boolean;
|
|
124
124
|
details: string;
|
|
125
125
|
}
|
|
126
|
+
export interface LintFindingDedupSummary {
|
|
127
|
+
newCount: number;
|
|
128
|
+
repeatCount: number;
|
|
129
|
+
resolvedCount: number;
|
|
130
|
+
/**
|
|
131
|
+
* Short single-line human-facing summary of the dedup outcome. Empty
|
|
132
|
+
* string when there is nothing to report.
|
|
133
|
+
*/
|
|
134
|
+
header: string;
|
|
135
|
+
/**
|
|
136
|
+
* Parallel to the `findings` array on `LintResult`; each status tags
|
|
137
|
+
* the finding at the same index as `new`, `repeat`, or `resolved`.
|
|
138
|
+
* `null` slots correspond to findings that weren't classified (for
|
|
139
|
+
* example, when the dedup cache is unreadable).
|
|
140
|
+
*/
|
|
141
|
+
statuses: Array<{
|
|
142
|
+
kind: "new";
|
|
143
|
+
} | {
|
|
144
|
+
kind: "repeat";
|
|
145
|
+
count: number;
|
|
146
|
+
} | {
|
|
147
|
+
kind: "resolved";
|
|
148
|
+
} | null>;
|
|
149
|
+
}
|
|
126
150
|
export interface LintResult {
|
|
127
151
|
stage: string;
|
|
128
152
|
file: string;
|
|
129
153
|
passed: boolean;
|
|
130
154
|
findings: LintFinding[];
|
|
155
|
+
dedup?: LintFindingDedupSummary;
|
|
131
156
|
}
|
|
132
157
|
export declare function normalizeHeadingTitle(title: string): string;
|
|
133
158
|
export type H2SectionMap = Map<string, string>;
|
|
@@ -144,6 +169,30 @@ export type H2SectionMap = Map<string, string>;
|
|
|
144
169
|
*/
|
|
145
170
|
export declare function extractH2Sections(markdown: string): H2SectionMap;
|
|
146
171
|
export declare function duplicateH2Headings(markdown: string): string[];
|
|
172
|
+
/**
|
|
173
|
+
* Return the author-authored prose of an artifact, stripping linter meta
|
|
174
|
+
* regions so free-text scans (placeholder tokens, scope-reduction phrases,
|
|
175
|
+
* investigation trigger words) don't self-cannibalize by matching the
|
|
176
|
+
* linter's own templated meta-phrases.
|
|
177
|
+
*
|
|
178
|
+
* Stripping rules (in order):
|
|
179
|
+
* 1. `<!-- linter-meta --> ... <!-- /linter-meta -->` paired blocks.
|
|
180
|
+
* Both markers must appear on their own line; unterminated openings
|
|
181
|
+
* are left as-is so a malformed artifact cannot hide arbitrary
|
|
182
|
+
* content by omitting the closing marker.
|
|
183
|
+
* 2. Every other HTML comment (`<!-- ... -->`, possibly multi-line).
|
|
184
|
+
* 3. Fenced code blocks that are tagged `linter-rule` (e.g.
|
|
185
|
+
* ```` ```linter-rule ````). Plain fenced code blocks are preserved
|
|
186
|
+
* because many stages quote code samples that the linter should
|
|
187
|
+
* still see.
|
|
188
|
+
*
|
|
189
|
+
* The function guarantees the returned string is a strict subset of the
|
|
190
|
+
* original: no characters are synthesized, and line offsets are
|
|
191
|
+
* preserved for any surviving line (blank lines stand in for stripped
|
|
192
|
+
* regions). This keeps regex-based linter checks stable when authors
|
|
193
|
+
* add or remove linter-meta blocks between runs.
|
|
194
|
+
*/
|
|
195
|
+
export declare function extractAuthoredBody(rawArtifact: string): string;
|
|
147
196
|
export declare function headingPresent(sections: H2SectionMap, section: string): boolean;
|
|
148
197
|
export declare function sectionBodyByName(sections: H2SectionMap, section: string): string | null;
|
|
149
198
|
export declare function sectionBodyByAnyName(sections: H2SectionMap, sectionNames: string[]): string | null;
|
|
@@ -384,6 +384,41 @@ export function duplicateH2Headings(markdown) {
|
|
|
384
384
|
.filter(([, count]) => count > 1)
|
|
385
385
|
.map(([key]) => displayHeading.get(key) ?? key);
|
|
386
386
|
}
|
|
387
|
+
/**
 * Return only the author-written prose of an artifact. Linter meta
 * regions are blanked so free-text scans (placeholder tokens,
 * scope-reduction phrases, investigation trigger words) never match the
 * linter's own templated phrasing.
 *
 * Regions removed, in order:
 * 1. `<!-- linter-meta --> ... <!-- /linter-meta -->` paired blocks
 *    whose markers each sit on their own line. An opening marker with
 *    no closer is left untouched, so a malformed artifact can't hide
 *    content by dropping the closing marker.
 * 2. Every remaining HTML comment, including multi-line ones.
 * 3. Fenced code blocks tagged `linter-rule`. Ordinary fences survive,
 *    since stages legitimately quote code the linter should still see.
 *
 * The result is a strict subset of the input: nothing is synthesized,
 * and each stripped region is replaced by the same number of blank
 * lines, so line offsets of surviving text are preserved across runs.
 */
export function extractAuthoredBody(rawArtifact) {
    if (typeof rawArtifact !== "string" || rawArtifact.length === 0) {
        return "";
    }
    // Blank a matched region while keeping its newlines, so the
    // surviving lines stay at their original offsets.
    const blankOut = (region) => region.replace(/[^\n]/gu, "");
    const strippedPatterns = [
        // 1. Paired linter-meta comment blocks (markers on their own lines).
        /^[ \t]*<!--\s*linter-meta\s*-->[\s\S]*?^[ \t]*<!--\s*\/linter-meta\s*-->[ \t]*$/gmu,
        // 2. Any other HTML comment, possibly spanning lines.
        /<!--[\s\S]*?-->/gu,
        // 3. Fenced blocks tagged `linter-rule` (backreference matches the
        //    closing fence's indent and delimiter).
        /^([ \t]*)(`{3,}|~{3,})\s*linter-rule\b[^\n]*\n[\s\S]*?\n\1\2[ \t]*$/gmu
    ];
    let authored = rawArtifact;
    for (const pattern of strippedPatterns) {
        authored = authored.replace(pattern, blankOut);
    }
    return authored;
}
|
|
387
422
|
export function headingPresent(sections, section) {
|
|
388
423
|
const want = normalizeHeadingTitle(section).toLowerCase();
|
|
389
424
|
for (const h of sections.keys()) {
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import type { FlowStage, FlowTrack } from "./types.js";
|
|
2
2
|
import { type LintResult } from "./artifact-linter/shared.js";
|
|
3
3
|
export { validateReviewArmy, checkReviewVerdictConsistency, checkReviewSecurityNoChangeAttestation, checkReviewTddNoCrossArtifactDuplication, type ReviewVerdictConsistencyResult, type ReviewSecurityNoChangeAttestationResult, type ReviewTddDuplicationConflict, type ReviewTddDuplicationResult } from "./artifact-linter/review-army.js";
|
|
4
|
-
export { type LintFinding, type LintResult, type LearningEntryType, type LearningConfidence, type LearningSeverity, type LearningSource, type LearningSeedEntry, type LearningsParseResult, formatLearningsErrorsBullets, learningsParseFailureHumanSummary, extractMarkdownSectionBody, parseLearningsSection } from "./artifact-linter/shared.js";
|
|
4
|
+
export { type LintFinding, type LintResult, type LearningEntryType, type LearningConfidence, type LearningSeverity, type LearningSource, type LearningSeedEntry, type LearningsParseResult, extractAuthoredBody, formatLearningsErrorsBullets, learningsParseFailureHumanSummary, extractMarkdownSectionBody, parseLearningsSection } from "./artifact-linter/shared.js";
|
|
5
5
|
export interface LintArtifactOptions {
|
|
6
6
|
/**
|
|
7
7
|
* Stage-level flags supplied by the caller (typically `advance-stage`)
|
package/dist/artifact-linter.js
CHANGED
|
@@ -5,7 +5,8 @@ import { stageSchema } from "./content/stage-schema.js";
|
|
|
5
5
|
import { readFlowState } from "./run-persistence.js";
|
|
6
6
|
import { duplicateH2Headings, extractH2Sections, extractRequirementIdsFromMarkdown, isShortCircuitActivated, normalizeHeadingTitle, parseFrontmatter, parseLearningsSection, sectionBodyByAnyName, sectionBodyByHeadingPrefix, sectionBodyByName, validateSectionBody, formatLearningsErrorsBullets } from "./artifact-linter/shared.js";
|
|
7
7
|
import { shouldDemoteArtifactValidationByTrack } from "./content/stage-schema.js";
|
|
8
|
-
import { recordArtifactValidationDemotedByTrack } from "./delegation.js";
|
|
8
|
+
import { readDelegationLedger, recordArtifactValidationDemotedByTrack } from "./delegation.js";
|
|
9
|
+
import { classifyAndPersistFindings } from "./artifact-linter/findings-dedup.js";
|
|
9
10
|
import { lintBrainstormStage } from "./artifact-linter/brainstorm.js";
|
|
10
11
|
import { lintDesignStage } from "./artifact-linter/design.js";
|
|
11
12
|
import { lintPlanStage } from "./artifact-linter/plan.js";
|
|
@@ -15,7 +16,7 @@ import { lintTddStage } from "./artifact-linter/tdd.js";
|
|
|
15
16
|
import { lintReviewStage } from "./artifact-linter/review.js";
|
|
16
17
|
import { lintShipStage } from "./artifact-linter/ship.js";
|
|
17
18
|
export { validateReviewArmy, checkReviewVerdictConsistency, checkReviewSecurityNoChangeAttestation, checkReviewTddNoCrossArtifactDuplication } from "./artifact-linter/review-army.js";
|
|
18
|
-
export { formatLearningsErrorsBullets, learningsParseFailureHumanSummary, extractMarkdownSectionBody, parseLearningsSection } from "./artifact-linter/shared.js";
|
|
19
|
+
export { extractAuthoredBody, formatLearningsErrorsBullets, learningsParseFailureHumanSummary, extractMarkdownSectionBody, parseLearningsSection } from "./artifact-linter/shared.js";
|
|
19
20
|
const FRONTMATTER_REQUIRED_KEYS = [
|
|
20
21
|
"stage",
|
|
21
22
|
"schema_version",
|
|
@@ -328,6 +329,30 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
|
|
|
328
329
|
});
|
|
329
330
|
}
|
|
330
331
|
}
|
|
332
|
+
try {
|
|
333
|
+
const delegationLedger = await readDelegationLedger(projectRoot);
|
|
334
|
+
const legacyWaivers = delegationLedger.entries.filter((entry) => entry.status === "waived" &&
|
|
335
|
+
entry.mode === "proactive" &&
|
|
336
|
+
entry.stage === stage &&
|
|
337
|
+
(typeof entry.approvalToken !== "string" || entry.approvalToken.trim().length === 0));
|
|
338
|
+
if (legacyWaivers.length > 0) {
|
|
339
|
+
const descriptors = legacyWaivers
|
|
340
|
+
.map((entry) => [entry.agent, entry.spanId].filter((value) => typeof value === "string").join("@"))
|
|
341
|
+
.filter((value) => value.length > 0);
|
|
342
|
+
findings.push({
|
|
343
|
+
section: "waiver_legacy_provenance",
|
|
344
|
+
required: false,
|
|
345
|
+
rule: "waiver_legacy_provenance — proactive waiver(s) without approvalToken. Issue new waivers via `cclaw-cli internal waiver-grant --stage <stage> --reason <slug>` so the provenance trail is signed. Legacy waivers remain valid (advisory).",
|
|
346
|
+
found: false,
|
|
347
|
+
details: `Found ${legacyWaivers.length} proactive waiver(s) on stage="${stage}" without approvalToken` +
|
|
348
|
+
(descriptors.length > 0 ? ` (${descriptors.join(", ")})` : "") +
|
|
349
|
+
". Next waiver should be issued with `cclaw-cli internal waiver-grant` and consumed via `--accept-proactive-waiver=<token>`."
|
|
350
|
+
});
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
catch {
|
|
354
|
+
// Ledger absent or unreadable: no advisory to emit.
|
|
355
|
+
}
|
|
331
356
|
const demote = shouldDemoteArtifactValidationByTrack(track, taskClass);
|
|
332
357
|
const demotedSections = [];
|
|
333
358
|
if (demote) {
|
|
@@ -356,7 +381,24 @@ export async function lintArtifact(projectRoot, stage, track = "standard", optio
|
|
|
356
381
|
}
|
|
357
382
|
}
|
|
358
383
|
const passed = findings.every((f) => !f.required || f.found);
|
|
359
|
-
|
|
384
|
+
let dedup;
|
|
385
|
+
try {
|
|
386
|
+
const dedupResult = await classifyAndPersistFindings(projectRoot, stage, findings);
|
|
387
|
+
const statusByFingerprint = new Map(dedupResult.classified.map(({ fingerprint, status }) => [fingerprint, status]));
|
|
388
|
+
const statuses = dedupResult.classified.map(({ status }) => status);
|
|
389
|
+
void statusByFingerprint;
|
|
390
|
+
dedup = {
|
|
391
|
+
newCount: dedupResult.summary.newCount,
|
|
392
|
+
repeatCount: dedupResult.summary.repeatCount,
|
|
393
|
+
resolvedCount: dedupResult.summary.resolvedCount,
|
|
394
|
+
header: dedupResult.header,
|
|
395
|
+
statuses
|
|
396
|
+
};
|
|
397
|
+
}
|
|
398
|
+
catch {
|
|
399
|
+
dedup = undefined;
|
|
400
|
+
}
|
|
401
|
+
return { stage, file: relFile, passed, findings, ...(dedup ? { dedup } : {}) };
|
|
360
402
|
}
|
|
361
403
|
/**
|
|
362
404
|
* Wave 25 (v6.1.0) — section names whose required-finding outcome is
|