cclaw-cli 0.51.30 → 0.55.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -16
- package/dist/artifact-linter/brainstorm.d.ts +2 -0
- package/dist/artifact-linter/brainstorm.js +245 -0
- package/dist/artifact-linter/design.d.ts +2 -0
- package/dist/artifact-linter/design.js +323 -0
- package/dist/artifact-linter/plan.d.ts +2 -0
- package/dist/artifact-linter/plan.js +162 -0
- package/dist/artifact-linter/review-army.d.ts +24 -0
- package/dist/artifact-linter/review-army.js +365 -0
- package/dist/artifact-linter/review.d.ts +2 -0
- package/dist/artifact-linter/review.js +65 -0
- package/dist/artifact-linter/scope.d.ts +2 -0
- package/dist/artifact-linter/scope.js +115 -0
- package/dist/artifact-linter/shared.d.ts +246 -0
- package/dist/artifact-linter/shared.js +1488 -0
- package/dist/artifact-linter/ship.d.ts +2 -0
- package/dist/artifact-linter/ship.js +46 -0
- package/dist/artifact-linter/spec.d.ts +2 -0
- package/dist/artifact-linter/spec.js +108 -0
- package/dist/artifact-linter/tdd.d.ts +2 -0
- package/dist/artifact-linter/tdd.js +124 -0
- package/dist/artifact-linter.d.ts +4 -76
- package/dist/artifact-linter.js +56 -2949
- package/dist/cli.d.ts +1 -6
- package/dist/cli.js +4 -159
- package/dist/codex-feature-flag.d.ts +1 -1
- package/dist/codex-feature-flag.js +1 -1
- package/dist/config.d.ts +3 -2
- package/dist/config.js +67 -3
- package/dist/constants.d.ts +1 -7
- package/dist/constants.js +9 -15
- package/dist/content/cancel-command.js +2 -2
- package/dist/content/closeout-guidance.js +10 -7
- package/dist/content/core-agents.d.ts +18 -0
- package/dist/content/core-agents.js +46 -2
- package/dist/content/decision-protocol.d.ts +1 -1
- package/dist/content/decision-protocol.js +1 -1
- package/dist/content/examples.js +6 -6
- package/dist/content/harness-doc.js +20 -2
- package/dist/content/hook-inline-snippets.d.ts +17 -4
- package/dist/content/hook-inline-snippets.js +218 -5
- package/dist/content/hook-manifest.d.ts +2 -2
- package/dist/content/hook-manifest.js +2 -2
- package/dist/content/hooks.d.ts +1 -0
- package/dist/content/hooks.js +32 -137
- package/dist/content/idea-command.d.ts +8 -0
- package/dist/content/{ideate-command.js → idea-command.js} +57 -50
- package/dist/content/idea-frames.d.ts +31 -0
- package/dist/content/{ideate-frames.js → idea-frames.js} +9 -9
- package/dist/content/idea-ranking.d.ts +25 -0
- package/dist/content/{ideate-ranking.js → idea-ranking.js} +5 -5
- package/dist/content/iron-laws.d.ts +0 -1
- package/dist/content/iron-laws.js +31 -16
- package/dist/content/learnings.js +1 -1
- package/dist/content/meta-skill.js +7 -7
- package/dist/content/node-hooks.d.ts +10 -0
- package/dist/content/node-hooks.js +43 -9
- package/dist/content/opencode-plugin.js +3 -3
- package/dist/content/skills.js +19 -7
- package/dist/content/stage-schema.js +44 -2
- package/dist/content/stages/_lint-metadata/index.js +26 -2
- package/dist/content/stages/brainstorm.js +13 -7
- package/dist/content/stages/design.js +16 -11
- package/dist/content/stages/plan.js +7 -4
- package/dist/content/stages/review.js +4 -4
- package/dist/content/stages/schema-types.d.ts +1 -1
- package/dist/content/stages/scope.js +15 -12
- package/dist/content/stages/ship.js +2 -2
- package/dist/content/stages/spec.js +9 -3
- package/dist/content/stages/tdd.js +14 -4
- package/dist/content/start-command.js +11 -10
- package/dist/content/status-command.js +3 -3
- package/dist/content/subagents.js +60 -6
- package/dist/content/templates.d.ts +1 -1
- package/dist/content/templates.js +102 -150
- package/dist/content/tree-command.js +2 -2
- package/dist/content/utility-skills.d.ts +2 -2
- package/dist/content/utility-skills.js +2 -2
- package/dist/content/view-command.js +4 -2
- package/dist/delegation.d.ts +2 -0
- package/dist/delegation.js +2 -1
- package/dist/early-loop.d.ts +66 -0
- package/dist/early-loop.js +275 -0
- package/dist/gate-evidence.d.ts +8 -0
- package/dist/gate-evidence.js +141 -5
- package/dist/harness-adapters.d.ts +2 -2
- package/dist/harness-adapters.js +47 -18
- package/dist/install.js +153 -29
- package/dist/internal/advance-stage/advance.d.ts +50 -0
- package/dist/internal/advance-stage/advance.js +480 -0
- package/dist/internal/advance-stage/cancel-run.d.ts +8 -0
- package/dist/internal/advance-stage/cancel-run.js +19 -0
- package/dist/internal/advance-stage/flow-state-coercion.d.ts +3 -0
- package/dist/internal/advance-stage/flow-state-coercion.js +81 -0
- package/dist/internal/advance-stage/helpers.d.ts +14 -0
- package/dist/internal/advance-stage/helpers.js +145 -0
- package/dist/internal/advance-stage/hook.d.ts +8 -0
- package/dist/internal/advance-stage/hook.js +40 -0
- package/dist/internal/advance-stage/parsers.d.ts +54 -0
- package/dist/internal/advance-stage/parsers.js +307 -0
- package/dist/internal/advance-stage/review-loop.d.ts +7 -0
- package/dist/internal/advance-stage/review-loop.js +170 -0
- package/dist/internal/advance-stage/rewind.d.ts +14 -0
- package/dist/internal/advance-stage/rewind.js +108 -0
- package/dist/internal/advance-stage/start-flow.d.ts +11 -0
- package/dist/internal/advance-stage/start-flow.js +136 -0
- package/dist/internal/advance-stage/verify.d.ts +29 -0
- package/dist/internal/advance-stage/verify.js +225 -0
- package/dist/internal/advance-stage.js +21 -1470
- package/dist/internal/compound-readiness.d.ts +1 -1
- package/dist/internal/compound-readiness.js +2 -2
- package/dist/internal/early-loop-status.d.ts +7 -0
- package/dist/internal/early-loop-status.js +90 -0
- package/dist/internal/runtime-integrity.d.ts +7 -0
- package/dist/internal/runtime-integrity.js +288 -0
- package/dist/internal/tdd-red-evidence.js +1 -1
- package/dist/knowledge-store.d.ts +3 -8
- package/dist/knowledge-store.js +16 -29
- package/dist/managed-resources.js +24 -2
- package/dist/policy.js +4 -6
- package/dist/run-archive.d.ts +1 -1
- package/dist/run-archive.js +12 -12
- package/dist/run-persistence.js +111 -11
- package/dist/tdd-cycle.d.ts +3 -3
- package/dist/tdd-cycle.js +1 -1
- package/dist/types.d.ts +18 -10
- package/package.json +1 -1
- package/dist/content/ideate-command.d.ts +0 -8
- package/dist/content/ideate-frames.d.ts +0 -31
- package/dist/content/ideate-ranking.d.ts +0 -25
- package/dist/content/next-command.d.ts +0 -20
- package/dist/content/next-command.js +0 -298
- package/dist/content/seed-shelf.d.ts +0 -36
- package/dist/content/seed-shelf.js +0 -301
- package/dist/content/stage-common-guidance.d.ts +0 -1
- package/dist/content/stage-common-guidance.js +0 -106
- package/dist/doctor-registry.d.ts +0 -10
- package/dist/doctor-registry.js +0 -186
- package/dist/doctor.d.ts +0 -17
- package/dist/doctor.js +0 -2201
- package/dist/internal/hook-manifest.d.ts +0 -16
- package/dist/internal/hook-manifest.js +0 -77
package/dist/doctor.js
DELETED
|
@@ -1,2201 +0,0 @@
|
|
|
1
|
-
import fs from "node:fs/promises";
|
|
2
|
-
import path from "node:path";
|
|
3
|
-
import { execFile } from "node:child_process";
|
|
4
|
-
import { pathToFileURL } from "node:url";
|
|
5
|
-
import { promisify } from "node:util";
|
|
6
|
-
import { CCLAW_VERSION, REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
|
|
7
|
-
import { CCLAW_AGENTS } from "./content/core-agents.js";
|
|
8
|
-
import { detectAdvancedKeys, InvalidConfigError, readConfig } from "./config.js";
|
|
9
|
-
import { exists } from "./fs-utils.js";
|
|
10
|
-
import { hashManagedResourceContent, isManagedGeneratedPath, MANAGED_RESOURCE_MANIFEST_REL_PATH, readManagedResourceManifest, validateManagedResourceManifest } from "./managed-resources.js";
|
|
11
|
-
import { gitignoreHasRequiredPatterns } from "./gitignore.js";
|
|
12
|
-
import { HARNESS_ADAPTERS, CCLAW_MARKER_START, CCLAW_MARKER_END, harnessShimFileNames, harnessShimSkillNames } from "./harness-adapters.js";
|
|
13
|
-
import { policyChecks } from "./policy.js";
|
|
14
|
-
import { CorruptFlowStateError, readFlowState } from "./runs.js";
|
|
15
|
-
import { createInitialFlowState, skippedStagesForTrack } from "./flow-state.js";
|
|
16
|
-
import { FLOW_STAGES, TRACK_STAGES } from "./types.js";
|
|
17
|
-
import { checkMandatoryDelegations, readDelegationEvents } from "./delegation.js";
|
|
18
|
-
import { buildTraceMatrix } from "./trace-matrix.js";
|
|
19
|
-
import { classifyReconciliationNotices, reconcileAndWriteCurrentStageGateCatalog, readReconciliationNotices, RECONCILIATION_NOTICES_REL_PATH, verifyCompletedStagesGateClosure, verifyCurrentStageGateEvidence } from "./gate-evidence.js";
|
|
20
|
-
import { parseTddCycleLog, validateTddCycleOrder } from "./tdd-cycle.js";
|
|
21
|
-
import { stageSkillFolder } from "./content/skills.js";
|
|
22
|
-
import { stageCommandShimMarkdown } from "./content/stage-command.js";
|
|
23
|
-
import { doctorCheckMetadata } from "./doctor-registry.js";
|
|
24
|
-
import { resolveTrackFromPrompt } from "./track-heuristics.js";
|
|
25
|
-
import { detectHarnesses } from "./init-detect.js";
|
|
26
|
-
import { classifyCodexHooksFlag, codexConfigPath, readCodexConfig } from "./codex-feature-flag.js";
|
|
27
|
-
import { LANGUAGE_RULE_PACK_DIR, LANGUAGE_RULE_PACK_FILES, LEGACY_LANGUAGE_RULE_PACK_FOLDERS } from "./content/utility-skills.js";
|
|
28
|
-
import { validateHookDocument } from "./hook-schema.js";
|
|
29
|
-
import { HOOK_EVENTS_BY_HARNESS } from "./content/hook-events.js";
|
|
30
|
-
import { validateKnowledgeEntry } from "./knowledge-store.js";
|
|
31
|
-
import { readSeedShelf } from "./content/seed-shelf.js";
|
|
32
|
-
import { evaluateRetroGate } from "./retro-gate.js";
|
|
33
|
-
const execFileAsync = promisify(execFile);
|
|
34
|
-
async function isGitRepo(projectRoot) {
|
|
35
|
-
try {
|
|
36
|
-
await execFileAsync("git", ["rev-parse", "--is-inside-work-tree"], { cwd: projectRoot });
|
|
37
|
-
return true;
|
|
38
|
-
}
|
|
39
|
-
catch {
|
|
40
|
-
return false;
|
|
41
|
-
}
|
|
42
|
-
}
|
|
43
|
-
async function resolveGitHooksDir(projectRoot) {
|
|
44
|
-
try {
|
|
45
|
-
const { stdout } = await execFileAsync("git", ["rev-parse", "--git-path", "hooks"], { cwd: projectRoot });
|
|
46
|
-
const rel = stdout.trim();
|
|
47
|
-
if (rel.length === 0) {
|
|
48
|
-
return null;
|
|
49
|
-
}
|
|
50
|
-
return path.resolve(projectRoot, rel);
|
|
51
|
-
}
|
|
52
|
-
catch {
|
|
53
|
-
return null;
|
|
54
|
-
}
|
|
55
|
-
}
|
|
56
|
-
async function gitIgnoresRuntime(projectRoot) {
|
|
57
|
-
try {
|
|
58
|
-
await execFileAsync("git", ["check-ignore", "-q", `${RUNTIME_ROOT}/`], { cwd: projectRoot });
|
|
59
|
-
return true;
|
|
60
|
-
}
|
|
61
|
-
catch {
|
|
62
|
-
return false;
|
|
63
|
-
}
|
|
64
|
-
}
|
|
65
|
-
function toObject(value) {
|
|
66
|
-
if (!value || typeof value !== "object" || Array.isArray(value))
|
|
67
|
-
return null;
|
|
68
|
-
return value;
|
|
69
|
-
}
|
|
70
|
-
function collectHookCommands(value) {
|
|
71
|
-
if (Array.isArray(value)) {
|
|
72
|
-
return value.flatMap((item) => collectHookCommands(item));
|
|
73
|
-
}
|
|
74
|
-
const obj = toObject(value);
|
|
75
|
-
if (!obj)
|
|
76
|
-
return [];
|
|
77
|
-
const direct = typeof obj.command === "string" ? [obj.command] : [];
|
|
78
|
-
const nested = collectHookCommands(obj.hooks);
|
|
79
|
-
return [...direct, ...nested];
|
|
80
|
-
}
|
|
81
|
-
function extractGeneratedCliEntrypoints(scriptContent) {
|
|
82
|
-
const paths = [];
|
|
83
|
-
for (const match of scriptContent.matchAll(/const\s+CCLAW_CLI_ENTRYPOINT\s*=\s*("(?:\\.|[^"\\])*"|null);/gu)) {
|
|
84
|
-
const raw = match[1];
|
|
85
|
-
if (!raw || raw === "null")
|
|
86
|
-
continue;
|
|
87
|
-
try {
|
|
88
|
-
const parsed = JSON.parse(raw);
|
|
89
|
-
if (typeof parsed === "string" && parsed.trim().length > 0) {
|
|
90
|
-
paths.push(parsed);
|
|
91
|
-
}
|
|
92
|
-
}
|
|
93
|
-
catch {
|
|
94
|
-
// malformed generated constant; treat below as missing/unusable
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
for (const match of scriptContent.matchAll(/const\s+CCLAW_CLI_ARGS_PREFIX\s*=\s*(\[(?:\\.|[^\]])*\]);/gu)) {
|
|
98
|
-
try {
|
|
99
|
-
const parsed = JSON.parse(match[1] ?? "[]");
|
|
100
|
-
if (Array.isArray(parsed)) {
|
|
101
|
-
for (const item of parsed) {
|
|
102
|
-
if (typeof item === "string" && item.trim().length > 0 && !item.startsWith("-")) {
|
|
103
|
-
paths.push(item);
|
|
104
|
-
}
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
}
|
|
108
|
-
catch {
|
|
109
|
-
// malformed generated constant; treat below as missing/unusable
|
|
110
|
-
}
|
|
111
|
-
}
|
|
112
|
-
return paths;
|
|
113
|
-
}
|
|
114
|
-
async function walkGeneratedCandidates(projectRoot, relDir, candidates) {
|
|
115
|
-
const fullDir = path.join(projectRoot, relDir);
|
|
116
|
-
if (!(await exists(fullDir)))
|
|
117
|
-
return;
|
|
118
|
-
let entries = [];
|
|
119
|
-
try {
|
|
120
|
-
entries = await fs.readdir(fullDir, { withFileTypes: true });
|
|
121
|
-
}
|
|
122
|
-
catch {
|
|
123
|
-
return;
|
|
124
|
-
}
|
|
125
|
-
for (const entry of entries) {
|
|
126
|
-
const rel = path.join(relDir, entry.name).replace(/\\/gu, "/");
|
|
127
|
-
if (entry.isDirectory()) {
|
|
128
|
-
await walkGeneratedCandidates(projectRoot, rel, candidates);
|
|
129
|
-
}
|
|
130
|
-
else if (entry.isFile() && isManagedGeneratedPath(rel)) {
|
|
131
|
-
candidates.push(rel);
|
|
132
|
-
}
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
function formatManagedValidationIssue(issue) {
|
|
136
|
-
const subject = issue.path ?? (issue.index !== undefined ? `resources[${issue.index}]` : "manifest");
|
|
137
|
-
return `${subject} ${issue.field}: ${issue.message}`;
|
|
138
|
-
}
|
|
139
|
-
async function generatedCliEntrypointsOk(projectRoot) {
|
|
140
|
-
const hookScripts = ["stage-complete.mjs", "start-flow.mjs", "run-hook.mjs"];
|
|
141
|
-
const problems = [];
|
|
142
|
-
const checked = [];
|
|
143
|
-
for (const script of hookScripts) {
|
|
144
|
-
const scriptPath = path.join(projectRoot, RUNTIME_ROOT, "hooks", script);
|
|
145
|
-
if (!(await exists(scriptPath)))
|
|
146
|
-
continue;
|
|
147
|
-
const content = await fs.readFile(scriptPath, "utf8");
|
|
148
|
-
const entrypoints = extractGeneratedCliEntrypoints(content);
|
|
149
|
-
if (entrypoints.length === 0) {
|
|
150
|
-
problems.push(`${RUNTIME_ROOT}/hooks/${script} has no local CLI entrypoint`);
|
|
151
|
-
continue;
|
|
152
|
-
}
|
|
153
|
-
for (const entrypoint of entrypoints) {
|
|
154
|
-
checked.push(`${RUNTIME_ROOT}/hooks/${script} -> ${entrypoint}`);
|
|
155
|
-
try {
|
|
156
|
-
const stat = await fs.stat(entrypoint);
|
|
157
|
-
if (!stat.isFile()) {
|
|
158
|
-
problems.push(`${RUNTIME_ROOT}/hooks/${script} points to non-file ${entrypoint}`);
|
|
159
|
-
}
|
|
160
|
-
}
|
|
161
|
-
catch {
|
|
162
|
-
problems.push(`${RUNTIME_ROOT}/hooks/${script} points to missing ${entrypoint}`);
|
|
163
|
-
}
|
|
164
|
-
}
|
|
165
|
-
}
|
|
166
|
-
if (problems.length > 0) {
|
|
167
|
-
return { ok: false, details: problems.join("; ") };
|
|
168
|
-
}
|
|
169
|
-
return {
|
|
170
|
-
ok: true,
|
|
171
|
-
details: checked.length > 0
|
|
172
|
-
? `local CLI entrypoints valid: ${checked.join("; ")}`
|
|
173
|
-
: "local CLI entrypoint check skipped because generated hook scripts are absent"
|
|
174
|
-
};
|
|
175
|
-
}
|
|
176
|
-
function expectedArtifactPrefix(stage) {
|
|
177
|
-
const index = FLOW_STAGES.indexOf(stage);
|
|
178
|
-
return `${String(index + 1).padStart(2, "0")}-`;
|
|
179
|
-
}
|
|
180
|
-
function artifactStageFromFileName(fileName) {
|
|
181
|
-
if (!fileName.endsWith(".md"))
|
|
182
|
-
return null;
|
|
183
|
-
for (const stage of FLOW_STAGES) {
|
|
184
|
-
if (fileName.startsWith(expectedArtifactPrefix(stage))) {
|
|
185
|
-
return stage;
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
return null;
|
|
189
|
-
}
|
|
190
|
-
function extractUserPromptFromIdeaArtifact(markdown) {
|
|
191
|
-
const normalized = markdown.replace(/\r\n?/gu, "\n");
|
|
192
|
-
const heading = /^##\s+User prompt\s*$/imu.exec(normalized);
|
|
193
|
-
if (!heading || heading.index === undefined) {
|
|
194
|
-
return null;
|
|
195
|
-
}
|
|
196
|
-
const sectionStart = heading.index + heading[0].length;
|
|
197
|
-
const tail = normalized.slice(sectionStart).replace(/^\s*\n/gu, "");
|
|
198
|
-
const nextHeadingIndex = tail.search(/^##\s+/mu);
|
|
199
|
-
const body = (nextHeadingIndex >= 0 ? tail.slice(0, nextHeadingIndex) : tail).trim();
|
|
200
|
-
return body.length > 0 ? body : null;
|
|
201
|
-
}
|
|
202
|
-
function knowledgeRoutingSurfaceIsDiscoverable(content) {
|
|
203
|
-
const normalized = content.toLowerCase();
|
|
204
|
-
if (!normalized.includes(".cclaw/knowledge.jsonl"))
|
|
205
|
-
return false;
|
|
206
|
-
if (!/\b(rule|pattern|lesson|compound)\b/u.test(normalized))
|
|
207
|
-
return false;
|
|
208
|
-
return ["trigger", "action", "origin_run"].every((term) => normalized.includes(term));
|
|
209
|
-
}
|
|
210
|
-
async function commandAvailable(command) {
|
|
211
|
-
const version = await commandVersion(command);
|
|
212
|
-
return version.available;
|
|
213
|
-
}
|
|
214
|
-
async function commandVersion(command, args = ["--version"]) {
|
|
215
|
-
try {
|
|
216
|
-
if (process.platform === "win32") {
|
|
217
|
-
await execFileAsync("where", [command]);
|
|
218
|
-
}
|
|
219
|
-
const { stdout, stderr } = await execFileAsync(command, args);
|
|
220
|
-
return { available: true, output: `${stdout}${stderr}`.trim() };
|
|
221
|
-
}
|
|
222
|
-
catch {
|
|
223
|
-
return { available: false, output: "" };
|
|
224
|
-
}
|
|
225
|
-
}
|
|
226
|
-
function parseNodeMajor(versionOutput) {
|
|
227
|
-
const match = /v?(\d+)\./u.exec(versionOutput);
|
|
228
|
-
if (!match)
|
|
229
|
-
return null;
|
|
230
|
-
return Number(match[1]);
|
|
231
|
-
}
|
|
232
|
-
function gitVersionLooksUsable(versionOutput) {
|
|
233
|
-
return /git version \d+\.\d+/iu.test(versionOutput);
|
|
234
|
-
}
|
|
235
|
-
function stripJsonCommentsOutsideStrings(input) {
|
|
236
|
-
let out = "";
|
|
237
|
-
let i = 0;
|
|
238
|
-
let inString = false;
|
|
239
|
-
let escape = false;
|
|
240
|
-
while (i < input.length) {
|
|
241
|
-
const c = input[i];
|
|
242
|
-
if (inString) {
|
|
243
|
-
out += c;
|
|
244
|
-
if (escape) {
|
|
245
|
-
escape = false;
|
|
246
|
-
}
|
|
247
|
-
else if (c === "\\") {
|
|
248
|
-
escape = true;
|
|
249
|
-
}
|
|
250
|
-
else if (c === "\"") {
|
|
251
|
-
inString = false;
|
|
252
|
-
}
|
|
253
|
-
i += 1;
|
|
254
|
-
continue;
|
|
255
|
-
}
|
|
256
|
-
if (c === "\"") {
|
|
257
|
-
inString = true;
|
|
258
|
-
out += c;
|
|
259
|
-
i += 1;
|
|
260
|
-
continue;
|
|
261
|
-
}
|
|
262
|
-
const next = input[i + 1];
|
|
263
|
-
if (c === "/" && next === "/") {
|
|
264
|
-
while (i < input.length && input[i] !== "\n" && input[i] !== "\r")
|
|
265
|
-
i += 1;
|
|
266
|
-
continue;
|
|
267
|
-
}
|
|
268
|
-
if (c === "/" && next === "*") {
|
|
269
|
-
i += 2;
|
|
270
|
-
while (i < input.length - 1 && !(input[i] === "*" && input[i + 1] === "/"))
|
|
271
|
-
i += 1;
|
|
272
|
-
i = Math.min(i + 2, input.length);
|
|
273
|
-
continue;
|
|
274
|
-
}
|
|
275
|
-
out += c;
|
|
276
|
-
i += 1;
|
|
277
|
-
}
|
|
278
|
-
return out;
|
|
279
|
-
}
|
|
280
|
-
function parseJsonLike(raw) {
|
|
281
|
-
try {
|
|
282
|
-
return JSON.parse(raw);
|
|
283
|
-
}
|
|
284
|
-
catch {
|
|
285
|
-
// fall through
|
|
286
|
-
}
|
|
287
|
-
try {
|
|
288
|
-
const normalized = stripJsonCommentsOutsideStrings(raw).replace(/,\s*([}\]])/gu, "$1");
|
|
289
|
-
return JSON.parse(normalized);
|
|
290
|
-
}
|
|
291
|
-
catch {
|
|
292
|
-
return null;
|
|
293
|
-
}
|
|
294
|
-
}
|
|
295
|
-
async function readHookDocument(filePath) {
|
|
296
|
-
if (!(await exists(filePath)))
|
|
297
|
-
return null;
|
|
298
|
-
try {
|
|
299
|
-
const raw = await fs.readFile(filePath, "utf8");
|
|
300
|
-
const parsed = parseJsonLike(raw);
|
|
301
|
-
const obj = toObject(parsed);
|
|
302
|
-
return obj ?? null;
|
|
303
|
-
}
|
|
304
|
-
catch {
|
|
305
|
-
return null;
|
|
306
|
-
}
|
|
307
|
-
}
|
|
308
|
-
async function readJsonObjectStatus(filePath) {
|
|
309
|
-
if (!(await exists(filePath))) {
|
|
310
|
-
return { exists: false, ok: false, error: "file is missing" };
|
|
311
|
-
}
|
|
312
|
-
try {
|
|
313
|
-
const parsed = JSON.parse(await fs.readFile(filePath, "utf8"));
|
|
314
|
-
if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
|
|
315
|
-
return { exists: true, ok: false, error: "JSON root must be an object" };
|
|
316
|
-
}
|
|
317
|
-
return { exists: true, ok: true };
|
|
318
|
-
}
|
|
319
|
-
catch (error) {
|
|
320
|
-
return {
|
|
321
|
-
exists: true,
|
|
322
|
-
ok: false,
|
|
323
|
-
error: error instanceof Error ? error.message : String(error)
|
|
324
|
-
};
|
|
325
|
-
}
|
|
326
|
-
}
|
|
327
|
-
async function readPermissionBits(filePath) {
|
|
328
|
-
try {
|
|
329
|
-
const stat = await fs.stat(filePath);
|
|
330
|
-
return stat.mode & 0o777;
|
|
331
|
-
}
|
|
332
|
-
catch {
|
|
333
|
-
return null;
|
|
334
|
-
}
|
|
335
|
-
}
|
|
336
|
-
function normalizeOpenCodePluginEntry(entry) {
|
|
337
|
-
if (typeof entry === "string" && entry.trim().length > 0)
|
|
338
|
-
return entry.trim();
|
|
339
|
-
if (!entry || typeof entry !== "object" || Array.isArray(entry))
|
|
340
|
-
return null;
|
|
341
|
-
const obj = entry;
|
|
342
|
-
for (const key of ["path", "src", "plugin"]) {
|
|
343
|
-
const value = obj[key];
|
|
344
|
-
if (typeof value === "string" && value.trim().length > 0) {
|
|
345
|
-
return value.trim();
|
|
346
|
-
}
|
|
347
|
-
}
|
|
348
|
-
return null;
|
|
349
|
-
}
|
|
350
|
-
function generatedAgentShape(content, kind, agentName) {
|
|
351
|
-
if (kind === "opencode") {
|
|
352
|
-
return content.includes("mode: subagent") && content.includes(`# ${agentName}`) && content.includes("STRICT_RETURN_SCHEMA");
|
|
353
|
-
}
|
|
354
|
-
return content.includes(`name = "${agentName}"`) && content.includes("developer_instructions") && content.includes("STRICT_RETURN_SCHEMA");
|
|
355
|
-
}
|
|
356
|
-
function harnessRealityLabel(harness) {
|
|
357
|
-
const adapter = HARNESS_ADAPTERS[harness];
|
|
358
|
-
const declaredSupport = adapter.capabilities.nativeSubagentDispatch;
|
|
359
|
-
const runtimeLaunch = harness === "opencode" || harness === "codex" ? "prompt-level launch" : declaredSupport === "generic" ? "generic Task launch" : "native tool launch";
|
|
360
|
-
const proofRequired = adapter.capabilities.subagentFallback === "native" ? "dispatchId+spanId+ack" : "evidenceRefs";
|
|
361
|
-
const proofSource = harness === "opencode" ? ".opencode/agents + delegation-events.jsonl" : harness === "codex" ? ".codex/agents + delegation-events.jsonl" : ".cclaw/state/delegation-log.json";
|
|
362
|
-
return `declaredSupport=${declaredSupport}; runtimeLaunch=${runtimeLaunch}; proofRequired=${proofRequired}; proofSource=${proofSource}`;
|
|
363
|
-
}
|
|
364
|
-
const OPENCODE_PLUGIN_REL_PATH = ".opencode/plugins/cclaw-plugin.mjs";
|
|
365
|
-
function opencodeConfigCandidates(projectRoot) {
|
|
366
|
-
return [
|
|
367
|
-
path.join(projectRoot, "opencode.json"),
|
|
368
|
-
path.join(projectRoot, "opencode.jsonc"),
|
|
369
|
-
path.join(projectRoot, "oh-my-opencode.jsonc"),
|
|
370
|
-
path.join(projectRoot, "oh-my-openagent.jsonc"),
|
|
371
|
-
path.join(projectRoot, ".opencode/opencode.json"),
|
|
372
|
-
path.join(projectRoot, ".opencode/opencode.jsonc"),
|
|
373
|
-
path.join(projectRoot, ".opencode/oh-my-opencode.jsonc"),
|
|
374
|
-
path.join(projectRoot, ".opencode/oh-my-openagent.jsonc")
|
|
375
|
-
];
|
|
376
|
-
}
|
|
377
|
-
function openCodeConfigRegistersPlugin(parsed) {
|
|
378
|
-
const plugins = Array.isArray(parsed.plugin) ? parsed.plugin : [];
|
|
379
|
-
return plugins.some((entry) => normalizeOpenCodePluginEntry(entry) === OPENCODE_PLUGIN_REL_PATH);
|
|
380
|
-
}
|
|
381
|
-
async function opencodeRegistrationCheck(projectRoot) {
|
|
382
|
-
const mismatches = [];
|
|
383
|
-
let foundAnyConfig = false;
|
|
384
|
-
for (const configPath of opencodeConfigCandidates(projectRoot)) {
|
|
385
|
-
if (!(await exists(configPath))) {
|
|
386
|
-
continue;
|
|
387
|
-
}
|
|
388
|
-
foundAnyConfig = true;
|
|
389
|
-
const parsed = await readHookDocument(configPath);
|
|
390
|
-
if (!parsed) {
|
|
391
|
-
mismatches.push(`${path.relative(projectRoot, configPath)} is unreadable or invalid JSON`);
|
|
392
|
-
continue;
|
|
393
|
-
}
|
|
394
|
-
if (openCodeConfigRegistersPlugin(parsed)) {
|
|
395
|
-
return { ok: true, details: `${path.relative(projectRoot, configPath)} registers ${OPENCODE_PLUGIN_REL_PATH}` };
|
|
396
|
-
}
|
|
397
|
-
mismatches.push(`${path.relative(projectRoot, configPath)} missing plugin ${OPENCODE_PLUGIN_REL_PATH}`);
|
|
398
|
-
}
|
|
399
|
-
if (foundAnyConfig) {
|
|
400
|
-
return { ok: false, details: mismatches.join(" | ") };
|
|
401
|
-
}
|
|
402
|
-
return { ok: false, details: `No opencode.json/opencode.jsonc found with plugin ${OPENCODE_PLUGIN_REL_PATH}` };
|
|
403
|
-
}
|
|
404
|
-
async function opencodeQuestionPermissionCheck(projectRoot) {
|
|
405
|
-
const mismatches = [];
|
|
406
|
-
for (const configPath of opencodeConfigCandidates(projectRoot)) {
|
|
407
|
-
if (!(await exists(configPath)))
|
|
408
|
-
continue;
|
|
409
|
-
const parsed = await readHookDocument(configPath);
|
|
410
|
-
if (!parsed || !openCodeConfigRegistersPlugin(parsed))
|
|
411
|
-
continue;
|
|
412
|
-
const permission = toObject(parsed.permission) ?? {};
|
|
413
|
-
if (permission.question === "allow") {
|
|
414
|
-
return {
|
|
415
|
-
ok: true,
|
|
416
|
-
details: `${path.relative(projectRoot, configPath)} sets permission.question to "allow" for structured questions`
|
|
417
|
-
};
|
|
418
|
-
}
|
|
419
|
-
mismatches.push(`${path.relative(projectRoot, configPath)} registers ${OPENCODE_PLUGIN_REL_PATH} but must set permission.question to "allow"`);
|
|
420
|
-
}
|
|
421
|
-
if (mismatches.length > 0) {
|
|
422
|
-
return { ok: false, details: mismatches.join(" | ") };
|
|
423
|
-
}
|
|
424
|
-
return {
|
|
425
|
-
ok: false,
|
|
426
|
-
details: `No opencode config with ${OPENCODE_PLUGIN_REL_PATH} registration found; cannot verify permission.question = "allow"`
|
|
427
|
-
};
|
|
428
|
-
}
|
|
429
|
-
function opencodeQuestionEnvCheck() {
|
|
430
|
-
if (process.env.OPENCODE_ENABLE_QUESTION_TOOL === "1") {
|
|
431
|
-
return { ok: true, details: "OPENCODE_ENABLE_QUESTION_TOOL=1 is set for ACP question tooling" };
|
|
432
|
-
}
|
|
433
|
-
return {
|
|
434
|
-
ok: false,
|
|
435
|
-
details: "Set OPENCODE_ENABLE_QUESTION_TOOL=1 for OpenCode ACP clients so permission-gated structured questions can use the question tool."
|
|
436
|
-
};
|
|
437
|
-
}
|
|
438
|
-
function codexFlagInactiveDetail(configPath, state, error) {
|
|
439
|
-
if (state === "enabled") {
|
|
440
|
-
return `codex_hooks feature flag is enabled in ${configPath}; Codex hooks are active.`;
|
|
441
|
-
}
|
|
442
|
-
if (state === "read-error") {
|
|
443
|
-
return `Codex hooks are inactive: could not read ${configPath} (${error instanceof Error ? error.message : String(error)}).`;
|
|
444
|
-
}
|
|
445
|
-
if (state === "missing-file") {
|
|
446
|
-
return `Codex hooks are inactive: ${configPath} does not exist; .codex/hooks.json is ignored until [features] codex_hooks = true is configured.`;
|
|
447
|
-
}
|
|
448
|
-
if (state === "missing-section") {
|
|
449
|
-
return `Codex hooks are inactive: ${configPath} has no [features] section; add codex_hooks = true to activate configured hooks.`;
|
|
450
|
-
}
|
|
451
|
-
if (state === "missing-key") {
|
|
452
|
-
return `Codex hooks are inactive: ${configPath} is missing codex_hooks under [features]; add codex_hooks = true to activate configured hooks.`;
|
|
453
|
-
}
|
|
454
|
-
return `Codex hooks are inactive: ${configPath} sets codex_hooks to a non-true value; set codex_hooks = true under [features].`;
|
|
455
|
-
}
|
|
456
|
-
function hookCommandsWithMatchers(value) {
|
|
457
|
-
if (!Array.isArray(value)) {
|
|
458
|
-
return [];
|
|
459
|
-
}
|
|
460
|
-
const out = [];
|
|
461
|
-
for (const item of value) {
|
|
462
|
-
const obj = toObject(item);
|
|
463
|
-
if (!obj)
|
|
464
|
-
continue;
|
|
465
|
-
const matcher = typeof obj.matcher === "string" ? obj.matcher : undefined;
|
|
466
|
-
if (typeof obj.command === "string") {
|
|
467
|
-
out.push({ command: obj.command, matcher });
|
|
468
|
-
}
|
|
469
|
-
const nested = hookCommandsWithMatchers(obj.hooks);
|
|
470
|
-
for (const child of nested) {
|
|
471
|
-
out.push({ ...child, matcher: child.matcher ?? matcher });
|
|
472
|
-
}
|
|
473
|
-
}
|
|
474
|
-
return out;
|
|
475
|
-
}
|
|
476
|
-
function commandHasHandler(entries, handler) {
|
|
477
|
-
return entries.some((entry) => entry.command.includes(`run-hook.cmd ${handler}`) || entry.command.includes(`run-hook.mjs ${handler}`));
|
|
478
|
-
}
|
|
479
|
-
function codexBashOnly(entries, handler) {
|
|
480
|
-
const matches = entries.filter((entry) => entry.command.includes(`run-hook.cmd ${handler}`) || entry.command.includes(`run-hook.mjs ${handler}`));
|
|
481
|
-
return matches.length > 0 && matches.every((entry) => entry.matcher === "Bash|bash");
|
|
482
|
-
}
|
|
483
|
-
function codexStructuralWiringCheck(codexHooks) {
|
|
484
|
-
const problems = [];
|
|
485
|
-
const expectedSession = HOOK_EVENTS_BY_HARNESS.codex.session_rehydrate;
|
|
486
|
-
if (expectedSession !== "SessionStart matcher=startup|resume") {
|
|
487
|
-
problems.push("semantic session_rehydrate mapping must remain SessionStart matcher=startup|resume");
|
|
488
|
-
}
|
|
489
|
-
const session = hookCommandsWithMatchers(codexHooks.SessionStart);
|
|
490
|
-
if (!commandHasHandler(session, "session-start") || !session.some((entry) => entry.matcher === "startup|resume")) {
|
|
491
|
-
problems.push("SessionStart must run session-start with matcher startup|resume");
|
|
492
|
-
}
|
|
493
|
-
const userPrompt = hookCommandsWithMatchers(codexHooks.UserPromptSubmit);
|
|
494
|
-
if (!commandHasHandler(userPrompt, "prompt-guard")) {
|
|
495
|
-
problems.push("UserPromptSubmit must run prompt-guard");
|
|
496
|
-
}
|
|
497
|
-
if (!commandHasHandler(userPrompt, "verify-current-state")) {
|
|
498
|
-
problems.push("UserPromptSubmit must run verify-current-state");
|
|
499
|
-
}
|
|
500
|
-
const pre = hookCommandsWithMatchers(codexHooks.PreToolUse);
|
|
501
|
-
if (!codexBashOnly(pre, "prompt-guard")) {
|
|
502
|
-
problems.push("PreToolUse prompt-guard must be Bash-only matcher Bash|bash");
|
|
503
|
-
}
|
|
504
|
-
if (!codexBashOnly(pre, "workflow-guard")) {
|
|
505
|
-
problems.push("PreToolUse workflow-guard must be Bash-only matcher Bash|bash");
|
|
506
|
-
}
|
|
507
|
-
const post = hookCommandsWithMatchers(codexHooks.PostToolUse);
|
|
508
|
-
if (!codexBashOnly(post, "context-monitor")) {
|
|
509
|
-
problems.push("PostToolUse context-monitor must be Bash-only matcher Bash|bash");
|
|
510
|
-
}
|
|
511
|
-
const stop = hookCommandsWithMatchers(codexHooks.Stop);
|
|
512
|
-
if (!commandHasHandler(stop, "stop-handoff")) {
|
|
513
|
-
problems.push("Stop must run stop-handoff");
|
|
514
|
-
}
|
|
515
|
-
return problems.length === 0
|
|
516
|
-
? { ok: true, details: "Codex hook events, matchers, and manifest semantic mappings are structurally valid" }
|
|
517
|
-
: { ok: false, details: problems.join("; ") };
|
|
518
|
-
}
|
|
519
|
-
/**
 * Doctor check: flag a previous `cclaw init`/`cclaw sync` that never finished.
 *
 * The generators drop a `.init-in-progress` sentinel before writing runtime
 * files and delete it on success, so its mere presence means the generated
 * runtime may be incomplete.
 *
 * @param {string} projectRoot absolute path to the project root
 * @returns {Promise<{ok: boolean, details: string}>} check result
 */
async function initRecoveryCheck(projectRoot) {
    const sentinel = path.join(projectRoot, RUNTIME_ROOT, "state", ".init-in-progress");
    if (!(await exists(sentinel))) {
        return { ok: true, details: "no partial init/sync sentinel found" };
    }
    // Try to enrich the report with the sentinel's recorded operation/start
    // time; malformed or unreadable payloads degrade gracefully.
    let detail = "";
    try {
        const payload = JSON.parse(await fs.readFile(sentinel, "utf8"));
        const operation = typeof payload.operation === "string" ? payload.operation : "unknown";
        const startedAt = typeof payload.startedAt === "string" ? payload.startedAt : "unknown";
        detail = ` (operation=${operation}, startedAt=${startedAt})`;
    } catch {
        detail = " (unreadable sentinel payload)";
    }
    const summary = `${RUNTIME_ROOT}/state/.init-in-progress sentinel present${detail}`;
    return {
        ok: false,
        details: `${summary}. Fix: inspect generated runtime files, then rerun cclaw sync or remove the sentinel only after confirming the runtime is complete.`
    };
}
|
|
539
|
-
/**
 * Doctor check: scan archived runs under `<runtime>/runs` for signs of an
 * interrupted archive — a leftover `.archive-in-progress` sentinel, a missing
 * or unreadable `archive-manifest.json`, or manifest-listed state snapshot
 * files that are absent on disk.
 *
 * @param {string} projectRoot absolute path to the project root
 * @returns {Promise<{ok: boolean, details: string}>} check result
 */
async function archiveIntegrityCheck(projectRoot) {
    const runsDir = path.join(projectRoot, RUNTIME_ROOT, "runs");
    if (!(await exists(runsDir))) {
        return { ok: true, details: `${RUNTIME_ROOT}/runs is absent; no archives to inspect yet` };
    }
    let dirents;
    try {
        dirents = await fs.readdir(runsDir, { withFileTypes: true });
    } catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        return { ok: false, details: `unable to inspect ${RUNTIME_ROOT}/runs (${reason})` };
    }
    const problems = [];
    for (const dirent of dirents.filter((d) => d.isDirectory())) {
        const runId = dirent.name;
        const runPath = path.join(runsDir, runId);
        const relRunPath = `${RUNTIME_ROOT}/runs/${runId}`;
        // A per-run sentinel means the archive step died mid-way.
        if (await exists(path.join(runPath, ".archive-in-progress"))) {
            problems.push(`${relRunPath}/.archive-in-progress sentinel present`);
        }
        const manifestPath = path.join(runPath, "archive-manifest.json");
        if (!(await exists(manifestPath))) {
            problems.push(`${relRunPath} missing archive-manifest.json`);
            continue;
        }
        let manifest;
        try {
            manifest = JSON.parse(await fs.readFile(manifestPath, "utf8"));
        } catch (error) {
            const reason = error instanceof Error ? error.message : String(error);
            problems.push(`${relRunPath}/archive-manifest.json unreadable (${reason})`);
            continue;
        }
        // Only string entries count; non-string noise in the array is ignored.
        const snapshotFiles = Array.isArray(manifest.snapshottedStateFiles)
            ? manifest.snapshottedStateFiles.filter((value) => typeof value === "string")
            : [];
        const stateDir = path.join(runPath, "state");
        if (snapshotFiles.length > 0 && !(await exists(stateDir))) {
            problems.push(`${relRunPath} manifest lists state snapshot files but state/ is missing`);
            continue;
        }
        for (const snapshotFile of snapshotFiles) {
            // Entries with a trailing slash denote directories; only files
            // are individually verified against the manifest.
            if (snapshotFile.endsWith("/")) {
                continue;
            }
            if (!(await exists(path.join(stateDir, snapshotFile)))) {
                problems.push(`${relRunPath}/state missing ${snapshotFile} listed in manifest`);
            }
        }
    }
    if (problems.length > 0) {
        return {
            ok: false,
            details: `${problems.join("; ")}. Fix: inspect the archive directory, retry archive if the active run was restored, or recover/rollback artifacts and state from the snapshot before removing the sentinel.`
        };
    }
    return { ok: true, details: "no partial archive sentinels or incomplete archive snapshots found" };
}
|
|
600
|
-
/**
 * Doctor check: dynamically import the deployed OpenCode plugin and verify
 * its runtime shape — a default-exported factory function whose product
 * exposes every handler the cclaw runtime invokes.
 *
 * @param {string} projectRoot absolute path to the project root
 * @returns {Promise<{ok: boolean, details: string}>} check result
 */
async function opencodePluginRuntimeShapeCheck(projectRoot) {
    const pluginPath = path.join(projectRoot, ".opencode/plugins/cclaw-plugin.mjs");
    if (!(await exists(pluginPath))) {
        return { ok: false, details: `${path.relative(projectRoot, pluginPath)} not found` };
    }
    try {
        // Cache-bust the ESM import so repeated doctor runs in one process
        // observe on-disk edits instead of Node's module cache.
        const moduleUrl = `${pathToFileURL(pluginPath).href}?doctor=${Date.now()}`;
        const imported = await import(moduleUrl);
        if (typeof imported.default !== "function") {
            return {
                ok: false,
                details: `${path.relative(projectRoot, pluginPath)} must export a default plugin factory function`
            };
        }
        // Await the factory result: plugin factories may be async, and a bare
        // Promise would otherwise slip past the object check below and be
        // misreported as "missing runtime handlers". `await` on a plain
        // object is a no-op, so synchronous factories behave exactly as before.
        const plugin = await imported.default({ directory: projectRoot });
        if (!plugin || typeof plugin !== "object" || Array.isArray(plugin)) {
            return {
                ok: false,
                details: `${path.relative(projectRoot, pluginPath)} factory must return a plugin object`
            };
        }
        // Handlers the cclaw runtime dispatches to; each must be a function.
        const requiredHandlers = [
            "event",
            "tool.execute.before",
            "tool.execute.after",
            "experimental.chat.system.transform"
        ];
        const missing = requiredHandlers.filter((name) => typeof plugin?.[name] !== "function");
        if (missing.length > 0) {
            return {
                ok: false,
                details: `${path.relative(projectRoot, pluginPath)} missing runtime handlers: ${missing.join(", ")}`
            };
        }
        return {
            ok: true,
            details: `${path.relative(projectRoot, pluginPath)} exports compatible runtime handler shape`
        };
    }
    catch (error) {
        // Import failures, throwing factories, and rejected async factories
        // all land here and are reported as a load failure.
        return {
            ok: false,
            details: `runtime load failed for .opencode/plugins/cclaw-plugin.mjs: ${error instanceof Error ? error.message : String(error)}`
        };
    }
}
|
|
646
|
-
export async function doctorChecks(projectRoot, options = {}) {
|
|
647
|
-
const checks = [];
|
|
648
|
-
for (const dir of REQUIRED_DIRS) {
|
|
649
|
-
const fullPath = path.join(projectRoot, dir);
|
|
650
|
-
checks.push({
|
|
651
|
-
name: `dir:${dir}`,
|
|
652
|
-
ok: await exists(fullPath),
|
|
653
|
-
details: fullPath
|
|
654
|
-
});
|
|
655
|
-
}
|
|
656
|
-
for (const stage of FLOW_STAGES) {
|
|
657
|
-
const skillPath = path.join(projectRoot, RUNTIME_ROOT, "skills", stageSkillFolder(stage), "SKILL.md");
|
|
658
|
-
const skillExists = await exists(skillPath);
|
|
659
|
-
checks.push({
|
|
660
|
-
name: `skill:${stage}`,
|
|
661
|
-
ok: skillExists,
|
|
662
|
-
details: skillPath
|
|
663
|
-
});
|
|
664
|
-
if (skillExists) {
|
|
665
|
-
const skillContent = await fs.readFile(skillPath, "utf8");
|
|
666
|
-
const lineCount = skillContent.split("\n").length;
|
|
667
|
-
const MIN_SKILL_LINES = 110;
|
|
668
|
-
// Soft max tightened from 650 → 500 after externalising the TDD
|
|
669
|
-
// batch-execution walkthrough and collapsing the duplicate "what
|
|
670
|
-
// goes wrong" lists. Stage skills beyond 500 lines drift into unread
|
|
671
|
-
// bloat; long-form content belongs in shared guidance sections instead.
|
|
672
|
-
const MAX_SKILL_LINES = 500;
|
|
673
|
-
checks.push({
|
|
674
|
-
name: `skill:${stage}:min_lines`,
|
|
675
|
-
ok: lineCount >= MIN_SKILL_LINES,
|
|
676
|
-
details: `${skillPath} has ${lineCount} lines (minimum ${MIN_SKILL_LINES})`
|
|
677
|
-
});
|
|
678
|
-
checks.push({
|
|
679
|
-
name: `skill:${stage}:max_lines`,
|
|
680
|
-
ok: lineCount <= MAX_SKILL_LINES,
|
|
681
|
-
details: `${skillPath} has ${lineCount} lines (soft max ${MAX_SKILL_LINES}; stage skills beyond this drift into unread bloat)`
|
|
682
|
-
});
|
|
683
|
-
const canonicalSections = [
|
|
684
|
-
{ id: "frontmatter", pattern: /^---\nname: [\w-]+\ndescription: /m, label: "YAML frontmatter (name + description)" },
|
|
685
|
-
{ id: "iron_law", pattern: /^\*\*IRON LAW — [A-Z]+:\*\* .+$/m, label: "Iron Law punchcard (<EXTREMELY-IMPORTANT> wrapper)" },
|
|
686
|
-
{ id: "hard_gate", pattern: /^## HARD-GATE$/m, label: "## HARD-GATE" },
|
|
687
|
-
{ id: "checklist", pattern: /^## Checklist$/m, label: "## Checklist" },
|
|
688
|
-
{ id: "completion_parameters", pattern: /^## Completion Parameters$/m, label: "## Completion Parameters" },
|
|
689
|
-
{ id: "shared_guidance", pattern: /^## Shared Stage Guidance$/m, label: "## Shared Stage Guidance" },
|
|
690
|
-
{ id: "anti_patterns", pattern: /^## Anti-Patterns & Red Flags$/m, label: "## Anti-Patterns & Red Flags" }
|
|
691
|
-
];
|
|
692
|
-
const missingSections = canonicalSections
|
|
693
|
-
.filter((section) => !section.pattern.test(skillContent))
|
|
694
|
-
.map((section) => section.label);
|
|
695
|
-
checks.push({
|
|
696
|
-
name: `skill:${stage}:canonical_sections`,
|
|
697
|
-
ok: missingSections.length === 0,
|
|
698
|
-
details: missingSections.length === 0
|
|
699
|
-
? `${skillPath} contains all canonical sections`
|
|
700
|
-
: `${skillPath} missing sections: ${missingSections.join(", ")}`
|
|
701
|
-
});
|
|
702
|
-
}
|
|
703
|
-
}
|
|
704
|
-
// Meta-skill health — the using-cclaw routing brain must always contain the
|
|
705
|
-
// signals that stage skills reference. When one of these drifts, every stage
|
|
706
|
-
// citation breaks silently.
|
|
707
|
-
const metaSkillPath = path.join(projectRoot, RUNTIME_ROOT, "skills", "using-cclaw", "SKILL.md");
|
|
708
|
-
if (await exists(metaSkillPath)) {
|
|
709
|
-
const metaContent = await fs.readFile(metaSkillPath, "utf8");
|
|
710
|
-
const requiredSignals = [
|
|
711
|
-
{ id: "instruction_priority", pattern: /Instruction Priority/i, label: "Instruction Priority" },
|
|
712
|
-
{ id: "routing_flow", pattern: /Routing flow/i, label: "Routing flow" },
|
|
713
|
-
{ id: "task_classification", pattern: /Task classification/i, label: "Task classification" },
|
|
714
|
-
{ id: "stage_map", pattern: /Stage quick map/i, label: "Stage quick map" },
|
|
715
|
-
{ id: "protocol_behavior", pattern: /Protocol Behavior/i, label: "Protocol Behavior" },
|
|
716
|
-
{ id: "knowledge_guidance", pattern: /Knowledge guidance/i, label: "Knowledge guidance" },
|
|
717
|
-
{ id: "failure_guardrails", pattern: /Failure guardrails/i, label: "Failure guardrails" }
|
|
718
|
-
];
|
|
719
|
-
const missingMeta = requiredSignals
|
|
720
|
-
.filter((signal) => !signal.pattern.test(metaContent))
|
|
721
|
-
.map((signal) => signal.label);
|
|
722
|
-
checks.push({
|
|
723
|
-
name: "skill:meta:signals",
|
|
724
|
-
ok: missingMeta.length === 0,
|
|
725
|
-
details: missingMeta.length === 0
|
|
726
|
-
? `${metaSkillPath} contains all required routing signals`
|
|
727
|
-
: `${metaSkillPath} missing signals: ${missingMeta.join(", ")}`
|
|
728
|
-
});
|
|
729
|
-
}
|
|
730
|
-
checks.push({
|
|
731
|
-
name: "gitignore:required_patterns",
|
|
732
|
-
ok: await gitignoreHasRequiredPatterns(projectRoot),
|
|
733
|
-
details: ".gitignore must include cclaw ignore block"
|
|
734
|
-
});
|
|
735
|
-
const managedManifestPath = path.join(projectRoot, MANAGED_RESOURCE_MANIFEST_REL_PATH);
|
|
736
|
-
let rawManagedManifest = null;
|
|
737
|
-
let managedManifestParseError = null;
|
|
738
|
-
if (await exists(managedManifestPath)) {
|
|
739
|
-
try {
|
|
740
|
-
rawManagedManifest = JSON.parse(await fs.readFile(managedManifestPath, "utf8"));
|
|
741
|
-
}
|
|
742
|
-
catch (error) {
|
|
743
|
-
managedManifestParseError = error instanceof Error ? error.message : String(error);
|
|
744
|
-
}
|
|
745
|
-
}
|
|
746
|
-
const managedManifestValidationIssues = rawManagedManifest === null
|
|
747
|
-
? []
|
|
748
|
-
: validateManagedResourceManifest(rawManagedManifest);
|
|
749
|
-
const managedManifest = await readManagedResourceManifest(projectRoot).catch(() => null);
|
|
750
|
-
checks.push({
|
|
751
|
-
name: "managed_resources:manifest_exists",
|
|
752
|
-
ok: managedManifest !== null,
|
|
753
|
-
details: managedManifest
|
|
754
|
-
? `${MANAGED_RESOURCE_MANIFEST_REL_PATH} tracks ${managedManifest.resources.length} managed generated file(s)`
|
|
755
|
-
: `${MANAGED_RESOURCE_MANIFEST_REL_PATH} missing; run cclaw sync to establish generated file ownership`
|
|
756
|
-
});
|
|
757
|
-
checks.push({
|
|
758
|
-
name: "managed_resources:manifest_valid",
|
|
759
|
-
ok: managedManifestParseError === null && managedManifestValidationIssues.length === 0,
|
|
760
|
-
details: managedManifestParseError
|
|
761
|
-
? `${MANAGED_RESOURCE_MANIFEST_REL_PATH} is unreadable JSON (${managedManifestParseError})`
|
|
762
|
-
: managedManifestValidationIssues.length === 0
|
|
763
|
-
? `${MANAGED_RESOURCE_MANIFEST_REL_PATH} metadata is structurally valid`
|
|
764
|
-
: `malformed managed resource metadata: ${managedManifestValidationIssues.slice(0, 12).map(formatManagedValidationIssue).join("; ")}`
|
|
765
|
-
});
|
|
766
|
-
if (managedManifest) {
|
|
767
|
-
checks.push({
|
|
768
|
-
name: "managed_resources:manifest_package_version",
|
|
769
|
-
ok: managedManifest.packageVersion === CCLAW_VERSION,
|
|
770
|
-
details: managedManifest.packageVersion === CCLAW_VERSION
|
|
771
|
-
? `${MANAGED_RESOURCE_MANIFEST_REL_PATH} packageVersion matches cclaw ${CCLAW_VERSION}`
|
|
772
|
-
: `${MANAGED_RESOURCE_MANIFEST_REL_PATH} packageVersion ${managedManifest.packageVersion} is stale; current cclaw is ${CCLAW_VERSION}. Run cclaw upgrade.`
|
|
773
|
-
});
|
|
774
|
-
const rawResources = toObject(rawManagedManifest)?.resources;
|
|
775
|
-
const stalePackageEntries = Array.isArray(rawResources)
|
|
776
|
-
? rawResources.flatMap((entry, index) => {
|
|
777
|
-
const obj = toObject(entry);
|
|
778
|
-
if (!obj)
|
|
779
|
-
return [];
|
|
780
|
-
const entryPath = typeof obj.path === "string" ? obj.path : `resources[${index}]`;
|
|
781
|
-
return typeof obj.packageVersion === "string" && obj.packageVersion !== CCLAW_VERSION
|
|
782
|
-
? [`${entryPath} (${obj.packageVersion})`]
|
|
783
|
-
: [];
|
|
784
|
-
})
|
|
785
|
-
: [];
|
|
786
|
-
const stale = [];
|
|
787
|
-
const missing = [];
|
|
788
|
-
for (const entry of managedManifest.resources) {
|
|
789
|
-
const filePath = path.join(projectRoot, entry.path);
|
|
790
|
-
if (!(await exists(filePath))) {
|
|
791
|
-
missing.push(entry.path);
|
|
792
|
-
continue;
|
|
793
|
-
}
|
|
794
|
-
const currentHash = hashManagedResourceContent(await fs.readFile(filePath));
|
|
795
|
-
if (currentHash !== entry.sha256) {
|
|
796
|
-
stale.push(entry.path);
|
|
797
|
-
}
|
|
798
|
-
}
|
|
799
|
-
checks.push({
|
|
800
|
-
name: "managed_resources:entry_package_versions",
|
|
801
|
-
ok: stalePackageEntries.length === 0,
|
|
802
|
-
details: stalePackageEntries.length === 0
|
|
803
|
-
? `all manifest entries match cclaw ${CCLAW_VERSION}`
|
|
804
|
-
: `manifest entries have stale packageVersion; run cclaw upgrade: ${stalePackageEntries.slice(0, 12).join(", ")}`
|
|
805
|
-
});
|
|
806
|
-
checks.push({
|
|
807
|
-
name: "managed_resources:user_modified",
|
|
808
|
-
ok: stale.length === 0,
|
|
809
|
-
details: stale.length === 0
|
|
810
|
-
? "all manifest-tracked managed files match recorded hashes"
|
|
811
|
-
: `manifest-tracked managed files have user modifications: ${stale.slice(0, 12).join(", ")}`
|
|
812
|
-
});
|
|
813
|
-
checks.push({
|
|
814
|
-
name: "managed_resources:stale_entries",
|
|
815
|
-
ok: missing.length === 0,
|
|
816
|
-
details: missing.length === 0
|
|
817
|
-
? "all manifest entries still exist"
|
|
818
|
-
: `manifest entries point to missing files: ${missing.slice(0, 12).join(", ")}`
|
|
819
|
-
});
|
|
820
|
-
const manifestPaths = new Set(managedManifest.resources.map((entry) => entry.path));
|
|
821
|
-
const candidates = [];
|
|
822
|
-
for (const relDir of [
|
|
823
|
-
`${RUNTIME_ROOT}/commands`,
|
|
824
|
-
`${RUNTIME_ROOT}/skills`,
|
|
825
|
-
`${RUNTIME_ROOT}/templates`,
|
|
826
|
-
`${RUNTIME_ROOT}/rules`,
|
|
827
|
-
`${RUNTIME_ROOT}/agents`,
|
|
828
|
-
`${RUNTIME_ROOT}/hooks`,
|
|
829
|
-
".claude/commands",
|
|
830
|
-
".cursor/commands",
|
|
831
|
-
".opencode/commands",
|
|
832
|
-
".opencode/agents",
|
|
833
|
-
".codex/agents",
|
|
834
|
-
".agents/skills",
|
|
835
|
-
".claude/hooks",
|
|
836
|
-
".cursor",
|
|
837
|
-
".codex",
|
|
838
|
-
".opencode/plugins"
|
|
839
|
-
]) {
|
|
840
|
-
await walkGeneratedCandidates(projectRoot, relDir, candidates);
|
|
841
|
-
}
|
|
842
|
-
for (const rel of ["AGENTS.md", "CLAUDE.md"]) {
|
|
843
|
-
if ((await exists(path.join(projectRoot, rel))) && isManagedGeneratedPath(rel)) {
|
|
844
|
-
candidates.push(rel);
|
|
845
|
-
}
|
|
846
|
-
}
|
|
847
|
-
const orphaned = [...new Set(candidates)].filter((rel) => !manifestPaths.has(rel)).sort();
|
|
848
|
-
checks.push({
|
|
849
|
-
name: "managed_resources:orphaned_generated_files",
|
|
850
|
-
ok: orphaned.length === 0,
|
|
851
|
-
details: orphaned.length === 0
|
|
852
|
-
? "no orphaned generated files detected across known cclaw surfaces"
|
|
853
|
-
: `warning: generated-looking files are not tracked in manifest: ${orphaned.slice(0, 12).join(", ")}`
|
|
854
|
-
});
|
|
855
|
-
}
|
|
856
|
-
let configuredHarnesses = [];
|
|
857
|
-
let parsedConfig = null;
|
|
858
|
-
const configFileExists = await exists(path.join(projectRoot, RUNTIME_ROOT, "config.yaml"));
|
|
859
|
-
if (!configFileExists) {
|
|
860
|
-
const detectedHarnesses = await detectHarnesses(projectRoot).catch(() => []);
|
|
861
|
-
checks.push({
|
|
862
|
-
name: "config:present",
|
|
863
|
-
ok: detectedHarnesses.length === 0,
|
|
864
|
-
details: detectedHarnesses.length > 0
|
|
865
|
-
? `${RUNTIME_ROOT}/config.yaml is missing but harness markers were detected (${detectedHarnesses.join(", ")}). Run cclaw sync --harnesses=${detectedHarnesses.join(",")} or cclaw sync --interactive.`
|
|
866
|
-
: `${RUNTIME_ROOT}/config.yaml missing and no harness markers were detected; run cclaw init or cclaw sync when ready.`
|
|
867
|
-
});
|
|
868
|
-
}
|
|
869
|
-
try {
|
|
870
|
-
const config = await readConfig(projectRoot);
|
|
871
|
-
parsedConfig = config;
|
|
872
|
-
configuredHarnesses = config.harnesses;
|
|
873
|
-
checks.push({
|
|
874
|
-
name: "config:valid",
|
|
875
|
-
ok: true,
|
|
876
|
-
details: `${RUNTIME_ROOT}/config.yaml parsed successfully`
|
|
877
|
-
});
|
|
878
|
-
}
|
|
879
|
-
catch (error) {
|
|
880
|
-
checks.push({
|
|
881
|
-
name: "config:valid",
|
|
882
|
-
ok: false,
|
|
883
|
-
severity: error instanceof InvalidConfigError ? "error" : "warning",
|
|
884
|
-
details: error instanceof Error ? error.message : "Invalid config"
|
|
885
|
-
});
|
|
886
|
-
}
|
|
887
|
-
if (parsedConfig) {
|
|
888
|
-
const advancedKeys = await detectAdvancedKeys(projectRoot).catch(() => new Set());
|
|
889
|
-
const hasLegacyTddTestGlobs = advancedKeys.has("tddTestGlobs");
|
|
890
|
-
const hasModernTddConfig = advancedKeys.has("tdd");
|
|
891
|
-
checks.push({
|
|
892
|
-
name: "warning:config:deprecated_tdd_test_globs",
|
|
893
|
-
ok: !hasLegacyTddTestGlobs,
|
|
894
|
-
details: hasLegacyTddTestGlobs
|
|
895
|
-
? hasModernTddConfig
|
|
896
|
-
? `warning: ${RUNTIME_ROOT}/config.yaml sets deprecated "tddTestGlobs" alongside "tdd.*"; "tdd.testPathPatterns" takes precedence. Remove legacy key.`
|
|
897
|
-
: `warning: ${RUNTIME_ROOT}/config.yaml uses deprecated "tddTestGlobs". Migrate to "tdd.testPathPatterns".`
|
|
898
|
-
: `no deprecated "tddTestGlobs" key detected in ${RUNTIME_ROOT}/config.yaml`
|
|
899
|
-
});
|
|
900
|
-
const expectedStrictness = parsedConfig.strictness === "strict" ? "strict" : "advisory";
|
|
901
|
-
const hookRuntimePath = path.join(projectRoot, RUNTIME_ROOT, "hooks", "run-hook.mjs");
|
|
902
|
-
let strictnessOk = false;
|
|
903
|
-
if (await exists(hookRuntimePath)) {
|
|
904
|
-
const runtimeContent = await fs.readFile(hookRuntimePath, "utf8");
|
|
905
|
-
strictnessOk = runtimeContent.includes(`const DEFAULT_STRICTNESS = "${expectedStrictness}"`);
|
|
906
|
-
}
|
|
907
|
-
checks.push({
|
|
908
|
-
name: "hook:runtime:strictness",
|
|
909
|
-
ok: strictnessOk,
|
|
910
|
-
details: `${hookRuntimePath} must embed DEFAULT_STRICTNESS = "${expectedStrictness}" matching config.strictness`
|
|
911
|
-
});
|
|
912
|
-
if (parsedConfig.gitHookGuards === true) {
|
|
913
|
-
const runtimePreCommit = path.join(projectRoot, RUNTIME_ROOT, "hooks", "git", "pre-commit.mjs");
|
|
914
|
-
const runtimePrePush = path.join(projectRoot, RUNTIME_ROOT, "hooks", "git", "pre-push.mjs");
|
|
915
|
-
const runtimeScriptsOk = (await exists(runtimePreCommit)) && (await exists(runtimePrePush));
|
|
916
|
-
checks.push({
|
|
917
|
-
name: "git_hooks:managed:runtime_scripts",
|
|
918
|
-
ok: runtimeScriptsOk,
|
|
919
|
-
details: `${RUNTIME_ROOT}/hooks/git/pre-commit.mjs and pre-push.mjs must exist when gitHookGuards=true`
|
|
920
|
-
});
|
|
921
|
-
const gitHooksDir = await resolveGitHooksDir(projectRoot);
|
|
922
|
-
if (!gitHooksDir) {
|
|
923
|
-
checks.push({
|
|
924
|
-
name: "git_hooks:managed:relays",
|
|
925
|
-
ok: true,
|
|
926
|
-
details: "git repository not detected; relay hook check skipped"
|
|
927
|
-
});
|
|
928
|
-
}
|
|
929
|
-
else {
|
|
930
|
-
const preCommitHookPath = path.join(gitHooksDir, "pre-commit");
|
|
931
|
-
const prePushHookPath = path.join(gitHooksDir, "pre-push");
|
|
932
|
-
let relaysOk = false;
|
|
933
|
-
if ((await exists(preCommitHookPath)) && (await exists(prePushHookPath))) {
|
|
934
|
-
const preCommitHook = await fs.readFile(preCommitHookPath, "utf8");
|
|
935
|
-
const prePushHook = await fs.readFile(prePushHookPath, "utf8");
|
|
936
|
-
relaysOk =
|
|
937
|
-
preCommitHook.includes("cclaw-managed-git-hook") &&
|
|
938
|
-
prePushHook.includes("cclaw-managed-git-hook");
|
|
939
|
-
}
|
|
940
|
-
checks.push({
|
|
941
|
-
name: "git_hooks:managed:relays",
|
|
942
|
-
ok: relaysOk,
|
|
943
|
-
details: `${path.relative(projectRoot, gitHooksDir)}/pre-commit and pre-push must contain managed relay marker`
|
|
944
|
-
});
|
|
945
|
-
}
|
|
946
|
-
}
|
|
947
|
-
}
|
|
948
|
-
for (const harness of configuredHarnesses) {
|
|
949
|
-
const adapter = HARNESS_ADAPTERS[harness];
|
|
950
|
-
if (!adapter) {
|
|
951
|
-
checks.push({
|
|
952
|
-
name: `harness:${harness}:supported`,
|
|
953
|
-
ok: false,
|
|
954
|
-
details: `Unsupported harness "${harness}" in ${RUNTIME_ROOT}/config.yaml`
|
|
955
|
-
});
|
|
956
|
-
continue;
|
|
957
|
-
}
|
|
958
|
-
// For command-kind harnesses we check flat files; skill-kind (codex) is
|
|
959
|
-
// validated in the codex-specific block below (`shim:codex:<name>:*`).
|
|
960
|
-
if (adapter.shimKind === "command") {
|
|
961
|
-
for (const shim of harnessShimFileNames()) {
|
|
962
|
-
const shimPath = path.join(projectRoot, adapter.commandDir, shim);
|
|
963
|
-
checks.push({
|
|
964
|
-
name: `shim:${harness}:${shim.replace(".md", "")}`,
|
|
965
|
-
ok: await exists(shimPath),
|
|
966
|
-
details: shimPath
|
|
967
|
-
});
|
|
968
|
-
}
|
|
969
|
-
}
|
|
970
|
-
}
|
|
971
|
-
for (const harness of configuredHarnesses) {
|
|
972
|
-
checks.push({
|
|
973
|
-
name: `harness:reality:${harness}`,
|
|
974
|
-
ok: true,
|
|
975
|
-
severity: "info",
|
|
976
|
-
details: harnessRealityLabel(harness)
|
|
977
|
-
});
|
|
978
|
-
}
|
|
979
|
-
const agentsFile = path.join(projectRoot, "AGENTS.md");
|
|
980
|
-
let agentsBlockOk = false;
|
|
981
|
-
if (await exists(agentsFile)) {
|
|
982
|
-
const content = await fs.readFile(agentsFile, "utf8");
|
|
983
|
-
const hasMarkers = content.includes(CCLAW_MARKER_START) && content.includes(CCLAW_MARKER_END);
|
|
984
|
-
const hasCcCommand = content.includes("/cc");
|
|
985
|
-
const hasCcIdeate = content.includes("/cc-ideate");
|
|
986
|
-
const hasCcCancel = content.includes("/cc-cancel");
|
|
987
|
-
const omitsFinish = !content.includes("/cc-finish");
|
|
988
|
-
const hasVerification = content.includes("Verification Discipline");
|
|
989
|
-
const hasMinimalMarker = content.includes("intentionally minimal for cross-project use");
|
|
990
|
-
const hasMetaSkillPointer = content.includes(".cclaw/skills/using-cclaw/SKILL.md");
|
|
991
|
-
agentsBlockOk = hasMarkers
|
|
992
|
-
&& hasCcCommand
|
|
993
|
-
&& hasCcIdeate
|
|
994
|
-
&& hasCcCancel
|
|
995
|
-
&& omitsFinish
|
|
996
|
-
&& hasVerification
|
|
997
|
-
&& hasMinimalMarker
|
|
998
|
-
&& hasMetaSkillPointer;
|
|
999
|
-
}
|
|
1000
|
-
checks.push({
|
|
1001
|
-
name: "agents:cclaw_block",
|
|
1002
|
-
ok: agentsBlockOk,
|
|
1003
|
-
details: `${agentsFile} must contain the managed cclaw marker block with routing, verification, and minimal detail pointer`
|
|
1004
|
-
});
|
|
1005
|
-
for (const cmd of ["start", "next", "ideate", "view", "cancel"]) {
|
|
1006
|
-
const cmdPath = path.join(projectRoot, RUNTIME_ROOT, "commands", `${cmd}.md`);
|
|
1007
|
-
checks.push({
|
|
1008
|
-
name: `utility_command:${cmd}`,
|
|
1009
|
-
ok: await exists(cmdPath),
|
|
1010
|
-
details: cmdPath
|
|
1011
|
-
});
|
|
1012
|
-
}
|
|
1013
|
-
for (const stage of FLOW_STAGES) {
|
|
1014
|
-
const cmdPath = path.join(projectRoot, RUNTIME_ROOT, "commands", `${stage}.md`);
|
|
1015
|
-
let stageCommandOk = false;
|
|
1016
|
-
if (await exists(cmdPath)) {
|
|
1017
|
-
const content = await fs.readFile(cmdPath, "utf8");
|
|
1018
|
-
stageCommandOk = content === stageCommandShimMarkdown(stage);
|
|
1019
|
-
}
|
|
1020
|
-
checks.push({
|
|
1021
|
-
name: `stage_command:${stage}`,
|
|
1022
|
-
ok: stageCommandOk,
|
|
1023
|
-
details: `${cmdPath} must be a thin shim to ${RUNTIME_ROOT}/skills/${stageSkillFolder(stage)}/SKILL.md and /cc`
|
|
1024
|
-
});
|
|
1025
|
-
}
|
|
1026
|
-
// Utility skills
|
|
1027
|
-
for (const [folder, label] of [
|
|
1028
|
-
["learnings", "learnings"],
|
|
1029
|
-
["flow-ideate", "flow-ideate"],
|
|
1030
|
-
["flow-view", "flow-view"],
|
|
1031
|
-
["flow-cancel", "flow-cancel"],
|
|
1032
|
-
["subagent-dev", "sdd"],
|
|
1033
|
-
["parallel-dispatch", "parallel-agents"],
|
|
1034
|
-
["session", "session"],
|
|
1035
|
-
["using-cclaw", "meta-skill"]
|
|
1036
|
-
]) {
|
|
1037
|
-
const skillPath = path.join(projectRoot, RUNTIME_ROOT, "skills", folder, "SKILL.md");
|
|
1038
|
-
checks.push({
|
|
1039
|
-
name: `utility_skill:${label}`,
|
|
1040
|
-
ok: await exists(skillPath),
|
|
1041
|
-
details: skillPath
|
|
1042
|
-
});
|
|
1043
|
-
}
|
|
1044
|
-
// Opt-in language rule packs: only check presence for packs the user enabled.
|
|
1045
|
-
// Canonical location is .cclaw/rules/lang/<pack>.md.
|
|
1046
|
-
for (const pack of parsedConfig?.languageRulePacks ?? []) {
|
|
1047
|
-
const fileName = LANGUAGE_RULE_PACK_FILES[pack];
|
|
1048
|
-
if (!fileName)
|
|
1049
|
-
continue;
|
|
1050
|
-
const packPath = path.join(projectRoot, RUNTIME_ROOT, ...LANGUAGE_RULE_PACK_DIR, fileName);
|
|
1051
|
-
checks.push({
|
|
1052
|
-
name: `language_rule_pack:${pack}`,
|
|
1053
|
-
ok: await exists(packPath),
|
|
1054
|
-
details: packPath
|
|
1055
|
-
});
|
|
1056
|
-
}
|
|
1057
|
-
// Drift: legacy per-language skill folders from v0.7.0 must not coexist with
|
|
1058
|
-
// the new rules/lang/ layout. `cclaw sync` removes them on the next run.
|
|
1059
|
-
for (const legacyFolder of LEGACY_LANGUAGE_RULE_PACK_FOLDERS) {
|
|
1060
|
-
const legacyPath = path.join(projectRoot, RUNTIME_ROOT, "skills", legacyFolder);
|
|
1061
|
-
const legacyPresent = await exists(legacyPath);
|
|
1062
|
-
checks.push({
|
|
1063
|
-
name: `language_rule_pack:no_legacy:${legacyFolder}`,
|
|
1064
|
-
ok: !legacyPresent,
|
|
1065
|
-
details: legacyPresent
|
|
1066
|
-
? `legacy ${legacyPath} must be removed — language packs moved to ${RUNTIME_ROOT}/${LANGUAGE_RULE_PACK_DIR.join("/")}/. Run \`cclaw sync\`.`
|
|
1067
|
-
: `no legacy ${legacyFolder} skill folder`
|
|
1068
|
-
});
|
|
1069
|
-
}
|
|
1070
|
-
// Agent definition files
|
|
1071
|
-
for (const agent of CCLAW_AGENTS) {
|
|
1072
|
-
const agentPath = path.join(projectRoot, RUNTIME_ROOT, "agents", `${agent.name}.md`);
|
|
1073
|
-
let agentOk = await exists(agentPath);
|
|
1074
|
-
if (agentOk) {
|
|
1075
|
-
const content = await fs.readFile(agentPath, "utf8");
|
|
1076
|
-
agentOk = content.includes(`name: ${agent.name}`) && content.includes("tools:");
|
|
1077
|
-
}
|
|
1078
|
-
checks.push({
|
|
1079
|
-
name: `agent:${agent.name}`,
|
|
1080
|
-
ok: agentOk,
|
|
1081
|
-
details: agentPath
|
|
1082
|
-
});
|
|
1083
|
-
}
|
|
1084
|
-
for (const agent of CCLAW_AGENTS) {
|
|
1085
|
-
if (configuredHarnesses.includes("opencode")) {
|
|
1086
|
-
const agentPath = path.join(projectRoot, ".opencode", "agents", `${agent.name}.md`);
|
|
1087
|
-
let ok = false;
|
|
1088
|
-
if (await exists(agentPath)) {
|
|
1089
|
-
ok = generatedAgentShape(await fs.readFile(agentPath, "utf8"), "opencode", agent.name);
|
|
1090
|
-
}
|
|
1091
|
-
checks.push({
|
|
1092
|
-
name: `agent:opencode:${agent.name}:shape`,
|
|
1093
|
-
ok,
|
|
1094
|
-
details: `${agentPath} must be a generated OpenCode subagent with mode: subagent and strict return schema`
|
|
1095
|
-
});
|
|
1096
|
-
}
|
|
1097
|
-
if (configuredHarnesses.includes("codex")) {
|
|
1098
|
-
const agentPath = path.join(projectRoot, ".codex", "agents", `${agent.name}.toml`);
|
|
1099
|
-
let ok = false;
|
|
1100
|
-
if (await exists(agentPath)) {
|
|
1101
|
-
ok = generatedAgentShape(await fs.readFile(agentPath, "utf8"), "codex", agent.name);
|
|
1102
|
-
}
|
|
1103
|
-
checks.push({
|
|
1104
|
-
name: `agent:codex:${agent.name}:shape`,
|
|
1105
|
-
ok,
|
|
1106
|
-
details: `${agentPath} must be a generated Codex custom agent TOML with developer_instructions and strict return schema`
|
|
1107
|
-
});
|
|
1108
|
-
}
|
|
1109
|
-
}
|
|
1110
|
-
// Hook scripts
|
|
1111
|
-
for (const script of [
|
|
1112
|
-
"run-hook.mjs",
|
|
1113
|
-
"run-hook.cmd",
|
|
1114
|
-
"stage-complete.mjs",
|
|
1115
|
-
"start-flow.mjs",
|
|
1116
|
-
"delegation-record.mjs",
|
|
1117
|
-
"opencode-plugin.mjs"
|
|
1118
|
-
]) {
|
|
1119
|
-
const scriptPath = path.join(projectRoot, RUNTIME_ROOT, "hooks", script);
|
|
1120
|
-
const scriptExists = await exists(scriptPath);
|
|
1121
|
-
checks.push({
|
|
1122
|
-
name: `hook:script:${script}`,
|
|
1123
|
-
ok: scriptExists,
|
|
1124
|
-
details: scriptPath
|
|
1125
|
-
});
|
|
1126
|
-
if (scriptExists) {
|
|
1127
|
-
let executable = false;
|
|
1128
|
-
try {
|
|
1129
|
-
const stat = await fs.stat(scriptPath);
|
|
1130
|
-
executable = (stat.mode & 0o111) !== 0;
|
|
1131
|
-
}
|
|
1132
|
-
catch {
|
|
1133
|
-
executable = false;
|
|
1134
|
-
}
|
|
1135
|
-
const executableCheckOk = process.platform === "win32" ? true : executable;
|
|
1136
|
-
checks.push({
|
|
1137
|
-
name: `hook:script:${script}:executable`,
|
|
1138
|
-
ok: executableCheckOk,
|
|
1139
|
-
details: process.platform === "win32"
|
|
1140
|
-
? `${scriptPath} executable-bit check skipped on Windows`
|
|
1141
|
-
: `${scriptPath} must be executable`
|
|
1142
|
-
});
|
|
1143
|
-
}
|
|
1144
|
-
}
|
|
1145
|
-
const localCliEntrypoints = await generatedCliEntrypointsOk(projectRoot);
|
|
1146
|
-
checks.push({
|
|
1147
|
-
name: "hook:script:local_cli_entrypoint",
|
|
1148
|
-
ok: localCliEntrypoints.ok,
|
|
1149
|
-
details: localCliEntrypoints.details
|
|
1150
|
-
});
|
|
1151
|
-
// Hook JSON files per harness. OpenCode ships hooks through its plugin
|
|
1152
|
-
// system (covered below). Codex joined the managed list in v0.40.0 — Codex
|
|
1153
|
-
// CLI ≥ v0.114 consumes `.codex/hooks.json` behind the `codex_hooks`
|
|
1154
|
-
// feature flag.
|
|
1155
|
-
const hookPaths = {
|
|
1156
|
-
claude: ".claude/hooks/hooks.json",
|
|
1157
|
-
cursor: ".cursor/hooks.json",
|
|
1158
|
-
codex: ".codex/hooks.json"
|
|
1159
|
-
};
|
|
1160
|
-
for (const harness of configuredHarnesses) {
|
|
1161
|
-
const hp = hookPaths[harness];
|
|
1162
|
-
if (!hp && harness !== "opencode") {
|
|
1163
|
-
checks.push({
|
|
1164
|
-
name: `hook:json:${harness}`,
|
|
1165
|
-
ok: false,
|
|
1166
|
-
details: `Unsupported harness "${harness}" in ${RUNTIME_ROOT}/config.yaml`
|
|
1167
|
-
});
|
|
1168
|
-
continue;
|
|
1169
|
-
}
|
|
1170
|
-
if (hp) {
|
|
1171
|
-
const fullPath = path.join(projectRoot, hp);
|
|
1172
|
-
const parsed = await readHookDocument(fullPath);
|
|
1173
|
-
const hookOk = !!(parsed && typeof parsed.hooks === "object" && parsed.hooks !== null);
|
|
1174
|
-
checks.push({
|
|
1175
|
-
name: `hook:json:${harness}`,
|
|
1176
|
-
ok: hookOk,
|
|
1177
|
-
details: fullPath
|
|
1178
|
-
});
|
|
1179
|
-
if (harness === "claude" || harness === "cursor" || harness === "codex") {
|
|
1180
|
-
const schema = validateHookDocument(harness, parsed);
|
|
1181
|
-
checks.push({
|
|
1182
|
-
name: `hook:schema:${harness}`,
|
|
1183
|
-
ok: schema.ok,
|
|
1184
|
-
details: schema.ok
|
|
1185
|
-
? `${fullPath} matches cclaw hook schema v1`
|
|
1186
|
-
: `${fullPath} schema issues: ${schema.errors.join("; ")}`
|
|
1187
|
-
});
|
|
1188
|
-
}
|
|
1189
|
-
}
|
|
1190
|
-
}
|
|
1191
|
-
// OpenCode plugin deployed path. (Presence of the source under
|
|
1192
|
-
// `${RUNTIME_ROOT}/hooks/opencode-plugin.mjs` is already asserted by the
|
|
1193
|
-
// generic `hook:script:opencode-plugin.mjs` check above; avoid a duplicate.)
|
|
1194
|
-
const opencodeEnabled = configuredHarnesses.includes("opencode");
|
|
1195
|
-
const opencodeDeployed = await exists(path.join(projectRoot, ".opencode/plugins/cclaw-plugin.mjs"));
|
|
1196
|
-
checks.push({
|
|
1197
|
-
name: "hook:opencode_plugin_deployed",
|
|
1198
|
-
ok: opencodeEnabled ? opencodeDeployed : true,
|
|
1199
|
-
details: opencodeEnabled
|
|
1200
|
-
? ".opencode/plugins/cclaw-plugin.mjs"
|
|
1201
|
-
: "opencode harness disabled; deployed plugin check skipped"
|
|
1202
|
-
});
|
|
1203
|
-
if (configuredHarnesses.includes("claude")) {
|
|
1204
|
-
const file = path.join(projectRoot, ".claude/hooks/hooks.json");
|
|
1205
|
-
const parsed = await readHookDocument(file);
|
|
1206
|
-
const hooks = toObject(parsed?.hooks) ?? {};
|
|
1207
|
-
const sessionStart = hooks.SessionStart;
|
|
1208
|
-
const ok = JSON.stringify(sessionStart ?? "").includes("startup|resume|clear|compact");
|
|
1209
|
-
checks.push({
|
|
1210
|
-
name: "lifecycle:claude:rehydration_matcher",
|
|
1211
|
-
ok,
|
|
1212
|
-
details: `${file} must include SessionStart matcher startup|resume|clear|compact`
|
|
1213
|
-
});
|
|
1214
|
-
const sessionCommands = collectHookCommands(hooks.SessionStart);
|
|
1215
|
-
const preCommands = collectHookCommands(hooks.PreToolUse);
|
|
1216
|
-
const postCommands = collectHookCommands(hooks.PostToolUse);
|
|
1217
|
-
const stopCommands = collectHookCommands(hooks.Stop);
|
|
1218
|
-
const wiringOk = sessionCommands.some((cmd) => cmd.includes("session-start")) &&
|
|
1219
|
-
preCommands.some((cmd) => cmd.includes("prompt-guard")) &&
|
|
1220
|
-
preCommands.some((cmd) => cmd.includes("workflow-guard")) &&
|
|
1221
|
-
postCommands.some((cmd) => cmd.includes("context-monitor")) &&
|
|
1222
|
-
stopCommands.some((cmd) => cmd.includes("stop-handoff"));
|
|
1223
|
-
checks.push({
|
|
1224
|
-
name: "hook:wiring:claude",
|
|
1225
|
-
ok: wiringOk,
|
|
1226
|
-
details: `${file} must wire session-start/prompt-guard/workflow-guard/context-monitor/stop-handoff`
|
|
1227
|
-
});
|
|
1228
|
-
}
|
|
1229
|
-
if (configuredHarnesses.includes("cursor")) {
|
|
1230
|
-
const file = path.join(projectRoot, ".cursor/hooks.json");
|
|
1231
|
-
const parsed = await readHookDocument(file);
|
|
1232
|
-
const hooks = toObject(parsed?.hooks) ?? {};
|
|
1233
|
-
const hasLifecycleKeys = Array.isArray(hooks.sessionStart) &&
|
|
1234
|
-
Array.isArray(hooks.sessionResume) &&
|
|
1235
|
-
Array.isArray(hooks.sessionClear) &&
|
|
1236
|
-
Array.isArray(hooks.sessionCompact);
|
|
1237
|
-
checks.push({
|
|
1238
|
-
name: "lifecycle:cursor:rehydration_events",
|
|
1239
|
-
ok: hasLifecycleKeys,
|
|
1240
|
-
details: `${file} must include sessionStart/sessionResume/sessionClear/sessionCompact hooks`
|
|
1241
|
-
});
|
|
1242
|
-
const sessionCommands = [
|
|
1243
|
-
...collectHookCommands(hooks.sessionStart),
|
|
1244
|
-
...collectHookCommands(hooks.sessionResume),
|
|
1245
|
-
...collectHookCommands(hooks.sessionClear),
|
|
1246
|
-
...collectHookCommands(hooks.sessionCompact)
|
|
1247
|
-
];
|
|
1248
|
-
const preCommands = collectHookCommands(hooks.preToolUse);
|
|
1249
|
-
const postCommands = collectHookCommands(hooks.postToolUse);
|
|
1250
|
-
const stopCommands = collectHookCommands(hooks.stop);
|
|
1251
|
-
const wiringOk = sessionCommands.some((cmd) => cmd.includes("session-start")) &&
|
|
1252
|
-
preCommands.some((cmd) => cmd.includes("prompt-guard")) &&
|
|
1253
|
-
preCommands.some((cmd) => cmd.includes("workflow-guard")) &&
|
|
1254
|
-
postCommands.some((cmd) => cmd.includes("context-monitor")) &&
|
|
1255
|
-
stopCommands.some((cmd) => cmd.includes("stop-handoff"));
|
|
1256
|
-
checks.push({
|
|
1257
|
-
name: "hook:wiring:cursor",
|
|
1258
|
-
ok: wiringOk,
|
|
1259
|
-
details: `${file} must wire session-start/prompt-guard/workflow-guard/context-monitor/stop-handoff`
|
|
1260
|
-
});
|
|
1261
|
-
const cursorRulePath = path.join(projectRoot, ".cursor/rules/cclaw-workflow.mdc");
|
|
1262
|
-
let cursorRuleOk = false;
|
|
1263
|
-
if (await exists(cursorRulePath)) {
|
|
1264
|
-
const content = await fs.readFile(cursorRulePath, "utf8");
|
|
1265
|
-
cursorRuleOk =
|
|
1266
|
-
content.includes("cclaw-managed-cursor-workflow-rule") &&
|
|
1267
|
-
content.includes(".cclaw/state/flow-state.json") &&
|
|
1268
|
-
content.includes("/cc");
|
|
1269
|
-
}
|
|
1270
|
-
checks.push({
|
|
1271
|
-
name: "rules:cursor:workflow",
|
|
1272
|
-
ok: cursorRuleOk,
|
|
1273
|
-
details: `${cursorRulePath} must include managed marker and core cclaw workflow guardrails`
|
|
1274
|
-
});
|
|
1275
|
-
}
|
|
1276
|
-
if (configuredHarnesses.includes("codex")) {
|
|
1277
|
-
// Codex CLI has no custom slash-command discovery (`.codex/commands/*`
|
|
1278
|
-
// was never read, even historically). cclaw ships codex entry points
|
|
1279
|
-
// as skills under `.agents/skills/cc*/SKILL.md`; Codex v0.114+ also
|
|
1280
|
-
// supports lifecycle hooks at `.codex/hooks.json` (gated by the
|
|
1281
|
-
// `codex_hooks` feature flag in `~/.codex/config.toml`).
|
|
1282
|
-
const skillsRoot = path.join(projectRoot, ".agents/skills");
|
|
1283
|
-
for (const skillName of harnessShimSkillNames()) {
|
|
1284
|
-
const skillPath = path.join(skillsRoot, skillName, "SKILL.md");
|
|
1285
|
-
let ok = false;
|
|
1286
|
-
let frontmatterOk = false;
|
|
1287
|
-
if (await exists(skillPath)) {
|
|
1288
|
-
ok = true;
|
|
1289
|
-
const content = await fs.readFile(skillPath, "utf8");
|
|
1290
|
-
frontmatterOk = new RegExp(`^---[\\s\\S]*?\\nname: ${skillName}\\b`, "u").test(content);
|
|
1291
|
-
}
|
|
1292
|
-
checks.push({
|
|
1293
|
-
name: `shim:codex:${skillName}:present`,
|
|
1294
|
-
ok,
|
|
1295
|
-
details: skillPath
|
|
1296
|
-
});
|
|
1297
|
-
checks.push({
|
|
1298
|
-
name: `shim:codex:${skillName}:frontmatter`,
|
|
1299
|
-
ok: frontmatterOk,
|
|
1300
|
-
details: frontmatterOk
|
|
1301
|
-
? `${skillPath} has \`name: ${skillName}\` frontmatter`
|
|
1302
|
-
: ok
|
|
1303
|
-
? `${skillPath} present but \`name: ${skillName}\` frontmatter is missing`
|
|
1304
|
-
: `${skillPath} absent; cannot validate frontmatter`
|
|
1305
|
-
});
|
|
1306
|
-
}
|
|
1307
|
-
// Hook wiring: the generated `.codex/hooks.json` must reference every
|
|
1308
|
-
// runtime script cclaw needs. Separate from the schema check above;
|
|
1309
|
-
// schema covers structure, this check covers semantic wiring.
|
|
1310
|
-
const codexHooksFile = path.join(projectRoot, ".codex/hooks.json");
|
|
1311
|
-
const codexDoc = await readHookDocument(codexHooksFile);
|
|
1312
|
-
const codexHooks = toObject(codexDoc?.hooks) ?? {};
|
|
1313
|
-
const codexSessionCmds = collectHookCommands(codexHooks.SessionStart);
|
|
1314
|
-
const codexUserPromptCmds = collectHookCommands(codexHooks.UserPromptSubmit);
|
|
1315
|
-
const codexPreCmds = collectHookCommands(codexHooks.PreToolUse);
|
|
1316
|
-
const codexPostCmds = collectHookCommands(codexHooks.PostToolUse);
|
|
1317
|
-
const codexStopCmds = collectHookCommands(codexHooks.Stop);
|
|
1318
|
-
const codexWiringOk = codexSessionCmds.some((cmd) => cmd.includes("session-start")) &&
|
|
1319
|
-
codexUserPromptCmds.some((cmd) => cmd.includes("prompt-guard")) &&
|
|
1320
|
-
codexUserPromptCmds.some((cmd) => cmd.includes("verify-current-state")) &&
|
|
1321
|
-
codexPreCmds.some((cmd) => cmd.includes("prompt-guard")) &&
|
|
1322
|
-
codexPreCmds.some((cmd) => cmd.includes("workflow-guard")) &&
|
|
1323
|
-
codexPostCmds.some((cmd) => cmd.includes("context-monitor")) &&
|
|
1324
|
-
codexStopCmds.some((cmd) => cmd.includes("stop-handoff"));
|
|
1325
|
-
checks.push({
|
|
1326
|
-
name: "hook:wiring:codex",
|
|
1327
|
-
ok: codexWiringOk,
|
|
1328
|
-
details: `${codexHooksFile} must wire SessionStart, UserPromptSubmit(prompt/verify-current-state), Bash-only PreToolUse(prompt/workflow), Bash-only PostToolUse(context-monitor), and Stop(stop-handoff). Codex workflow-guard is intentionally strict Bash-only.`
|
|
1329
|
-
});
|
|
1330
|
-
const codexStructural = codexStructuralWiringCheck(codexHooks);
|
|
1331
|
-
checks.push({
|
|
1332
|
-
name: "hook:wiring:codex:structure",
|
|
1333
|
-
ok: codexStructural.ok,
|
|
1334
|
-
details: codexStructural.details
|
|
1335
|
-
});
|
|
1336
|
-
// Codex ignores `.codex/hooks.json` unless the user has
|
|
1337
|
-
// `[features] codex_hooks = true` in `~/.codex/config.toml`.
|
|
1338
|
-
const codexConfig = codexConfigPath();
|
|
1339
|
-
let codexFlagState = "read-error";
|
|
1340
|
-
let codexFlagReadError;
|
|
1341
|
-
try {
|
|
1342
|
-
const content = await readCodexConfig(codexConfig);
|
|
1343
|
-
codexFlagState = classifyCodexHooksFlag(content);
|
|
1344
|
-
}
|
|
1345
|
-
catch (err) {
|
|
1346
|
-
codexFlagReadError = err;
|
|
1347
|
-
}
|
|
1348
|
-
const featureFlagNote = codexFlagInactiveDetail(codexConfig, codexFlagState, codexFlagReadError);
|
|
1349
|
-
const featureFlagOk = codexFlagState === "enabled";
|
|
1350
|
-
checks.push({
|
|
1351
|
-
name: "warning:codex:feature_flag",
|
|
1352
|
-
ok: featureFlagOk,
|
|
1353
|
-
details: featureFlagNote,
|
|
1354
|
-
summary: featureFlagOk
|
|
1355
|
-
? "Codex hooks are active."
|
|
1356
|
-
: "Codex hooks are inactive; configured hooks will be ignored.",
|
|
1357
|
-
fix: "Set `[features] codex_hooks = true` in the Codex config or run cclaw init/sync with Codex flag repair.",
|
|
1358
|
-
docRef: "docs/harnesses.md"
|
|
1359
|
-
});
|
|
1360
|
-
if (parsedConfig?.strictness === "strict") {
|
|
1361
|
-
checks.push({
|
|
1362
|
-
name: "hook:codex:feature_flag_active",
|
|
1363
|
-
ok: featureFlagOk,
|
|
1364
|
-
details: featureFlagNote,
|
|
1365
|
-
summary: featureFlagOk
|
|
1366
|
-
? "Codex hooks are active for strict runtime enforcement."
|
|
1367
|
-
: "Codex hooks are inactive; strict Codex hook enforcement is not ready.",
|
|
1368
|
-
fix: "Set `[features] codex_hooks = true` in the Codex config so strict Codex hooks can run.",
|
|
1369
|
-
docRef: "docs/harnesses.md"
|
|
1370
|
-
});
|
|
1371
|
-
}
|
|
1372
|
-
// Legacy `.codex/commands/*` must not linger from older cclaw installs.
|
|
1373
|
-
// (The `.codex/hooks.json` path is now managed and is validated above,
|
|
1374
|
-
// so there is no longer a legacy_hooks_json warning.)
|
|
1375
|
-
const legacyCommandsDir = path.join(projectRoot, ".codex/commands");
|
|
1376
|
-
const legacyCommandsPresent = await exists(legacyCommandsDir);
|
|
1377
|
-
checks.push({
|
|
1378
|
-
name: "warning:codex:legacy_commands_dir",
|
|
1379
|
-
ok: true,
|
|
1380
|
-
details: legacyCommandsPresent
|
|
1381
|
-
? `warning: ${legacyCommandsDir} still present; Codex never consumed this directory — run \`cclaw sync\` to remove it.`
|
|
1382
|
-
: `no legacy ${legacyCommandsDir} detected`
|
|
1383
|
-
});
|
|
1384
|
-
// Legacy v0.39.x skill layout under `.agents/skills/cclaw-cc*/`
|
|
1385
|
-
// must have been removed — cclaw sync deletes these automatically,
|
|
1386
|
-
// but flag leftovers so users notice an upgrade issue.
|
|
1387
|
-
const legacyCodexSkills = [];
|
|
1388
|
-
try {
|
|
1389
|
-
const entries = await fs.readdir(skillsRoot);
|
|
1390
|
-
for (const entry of entries) {
|
|
1391
|
-
if (/^cclaw-cc(?:-.*)?$/u.test(entry)) {
|
|
1392
|
-
legacyCodexSkills.push(entry);
|
|
1393
|
-
}
|
|
1394
|
-
}
|
|
1395
|
-
}
|
|
1396
|
-
catch {
|
|
1397
|
-
// skills root absent; nothing to warn about
|
|
1398
|
-
}
|
|
1399
|
-
checks.push({
|
|
1400
|
-
name: "warning:codex:legacy_cclaw_cc_skills",
|
|
1401
|
-
ok: legacyCodexSkills.length === 0,
|
|
1402
|
-
details: legacyCodexSkills.length === 0
|
|
1403
|
-
? `no legacy cclaw-cc* skill folders detected under .agents/skills/`
|
|
1404
|
-
: `warning: legacy skill folders from cclaw v0.39.x present (${legacyCodexSkills.join(", ")}); run \`cclaw sync\` to remove them.`
|
|
1405
|
-
});
|
|
1406
|
-
}
|
|
1407
|
-
if (configuredHarnesses.includes("opencode")) {
|
|
1408
|
-
const file = path.join(projectRoot, ".opencode/plugins/cclaw-plugin.mjs");
|
|
1409
|
-
let ok = false;
|
|
1410
|
-
let singleHandlerPathOk = false;
|
|
1411
|
-
let precompactHookOk = false;
|
|
1412
|
-
if (await exists(file)) {
|
|
1413
|
-
const content = await fs.readFile(file, "utf8");
|
|
1414
|
-
ok =
|
|
1415
|
-
content.includes("event: async") &&
|
|
1416
|
-
content.includes('"tool.execute.before"') &&
|
|
1417
|
-
content.includes('"tool.execute.after"') &&
|
|
1418
|
-
content.includes("prompt-guard") &&
|
|
1419
|
-
content.includes("workflow-guard") &&
|
|
1420
|
-
content.includes("context-monitor") &&
|
|
1421
|
-
content.includes("pre-compact") &&
|
|
1422
|
-
content.includes('"session.created"') &&
|
|
1423
|
-
content.includes('"session.idle"') &&
|
|
1424
|
-
content.includes('"session.resumed"') &&
|
|
1425
|
-
content.includes('"session.compacted"') &&
|
|
1426
|
-
content.includes('"session.cleared"') &&
|
|
1427
|
-
content.includes('"session.updated"') &&
|
|
1428
|
-
content.includes('"experimental.chat.system.transform"');
|
|
1429
|
-
singleHandlerPathOk =
|
|
1430
|
-
!content.includes('eventType === "tool.execute.before"') &&
|
|
1431
|
-
!content.includes('eventType === "tool.execute.after"') &&
|
|
1432
|
-
content.includes('"tool.execute.before": async') &&
|
|
1433
|
-
content.includes('"tool.execute.after": async');
|
|
1434
|
-
precompactHookOk =
|
|
1435
|
-
content.includes('eventType === "session.compacted"') &&
|
|
1436
|
-
content.includes('runHookScript("pre-compact"');
|
|
1437
|
-
}
|
|
1438
|
-
checks.push({
|
|
1439
|
-
name: "lifecycle:opencode:rehydration_events",
|
|
1440
|
-
ok,
|
|
1441
|
-
details: `${file} must include event lifecycle handler, session.created/updated/resumed/cleared/compacted rehydration, tool.execute.before/after with prompt/workflow/context hooks, session.idle handoff, and transform rehydration`
|
|
1442
|
-
});
|
|
1443
|
-
checks.push({
|
|
1444
|
-
name: "hook:opencode:single_tool_handler_path",
|
|
1445
|
-
ok: singleHandlerPathOk,
|
|
1446
|
-
details: `${file} must route tool.execute.before/after through dedicated handlers exactly once (no duplicate event() branches).`
|
|
1447
|
-
});
|
|
1448
|
-
checks.push({
|
|
1449
|
-
name: "hook:opencode:precompact_compat",
|
|
1450
|
-
ok: precompactHookOk,
|
|
1451
|
-
details: `${file} must run pre-compact on session.compacted before bootstrap refresh.`
|
|
1452
|
-
});
|
|
1453
|
-
const runtimeShape = await opencodePluginRuntimeShapeCheck(projectRoot);
|
|
1454
|
-
checks.push({
|
|
1455
|
-
name: "hook:opencode:runtime_shape",
|
|
1456
|
-
ok: runtimeShape.ok,
|
|
1457
|
-
details: runtimeShape.details
|
|
1458
|
-
});
|
|
1459
|
-
const registration = await opencodeRegistrationCheck(projectRoot);
|
|
1460
|
-
checks.push({
|
|
1461
|
-
name: "hook:opencode:config_registration",
|
|
1462
|
-
ok: registration.ok,
|
|
1463
|
-
details: registration.details
|
|
1464
|
-
});
|
|
1465
|
-
const questionPermission = await opencodeQuestionPermissionCheck(projectRoot);
|
|
1466
|
-
checks.push({
|
|
1467
|
-
name: "hook:opencode:question_permission",
|
|
1468
|
-
ok: questionPermission.ok,
|
|
1469
|
-
details: questionPermission.details
|
|
1470
|
-
});
|
|
1471
|
-
const questionEnv = opencodeQuestionEnvCheck();
|
|
1472
|
-
checks.push({
|
|
1473
|
-
name: "warning:opencode:question_tool_env",
|
|
1474
|
-
ok: questionEnv.ok,
|
|
1475
|
-
details: questionEnv.details
|
|
1476
|
-
});
|
|
1477
|
-
}
|
|
1478
|
-
const nodeVersion = await commandVersion("node");
|
|
1479
|
-
const nodeMajor = parseNodeMajor(nodeVersion.output);
|
|
1480
|
-
checks.push({
|
|
1481
|
-
name: "capability:required:node",
|
|
1482
|
-
ok: nodeVersion.available,
|
|
1483
|
-
details: nodeVersion.available
|
|
1484
|
-
? `node binary available (${nodeVersion.output || "version unknown"})`
|
|
1485
|
-
: "node is required for cclaw runtime scripts and CLI wiring"
|
|
1486
|
-
});
|
|
1487
|
-
checks.push({
|
|
1488
|
-
name: "capability:required:node_version",
|
|
1489
|
-
ok: nodeVersion.available && nodeMajor !== null && nodeMajor >= 20,
|
|
1490
|
-
details: nodeVersion.available
|
|
1491
|
-
? `node >=20 required; detected ${nodeVersion.output || "unknown version"}`
|
|
1492
|
-
: "node version check skipped because node binary is unavailable"
|
|
1493
|
-
});
|
|
1494
|
-
const gitVersion = await commandVersion("git");
|
|
1495
|
-
checks.push({
|
|
1496
|
-
name: "capability:required:git",
|
|
1497
|
-
ok: gitVersion.available,
|
|
1498
|
-
details: gitVersion.available
|
|
1499
|
-
? `git binary available (${gitVersion.output || "version unknown"})`
|
|
1500
|
-
: "git is required for repository detection, hook setup, and doctor checks"
|
|
1501
|
-
});
|
|
1502
|
-
checks.push({
|
|
1503
|
-
name: "capability:required:git_version",
|
|
1504
|
-
ok: gitVersion.available && gitVersionLooksUsable(gitVersion.output),
|
|
1505
|
-
details: gitVersion.available
|
|
1506
|
-
? `git version output: ${gitVersion.output || "unknown version"}`
|
|
1507
|
-
: "git version check skipped because git binary is unavailable"
|
|
1508
|
-
});
|
|
1509
|
-
const windowsHookConfigCandidates = [
|
|
1510
|
-
path.join(projectRoot, ".claude/hooks/hooks.json"),
|
|
1511
|
-
path.join(projectRoot, ".cursor/hooks.json"),
|
|
1512
|
-
path.join(projectRoot, ".codex/hooks.json")
|
|
1513
|
-
];
|
|
1514
|
-
const legacyDispatchFiles = [];
|
|
1515
|
-
for (const candidate of windowsHookConfigCandidates) {
|
|
1516
|
-
if (!(await exists(candidate)))
|
|
1517
|
-
continue;
|
|
1518
|
-
const content = (await fs.readFile(candidate, "utf8")).replace(/\\/gu, "/");
|
|
1519
|
-
if (/bash\s+\.cclaw\/hooks\/|\.cclaw\/hooks\/(?:session-start|stop-handoff|stop-checkpoint|pre-compact|prompt-guard|workflow-guard|context-monitor)\.sh/u.test(content)) {
|
|
1520
|
-
legacyDispatchFiles.push(path.relative(projectRoot, candidate));
|
|
1521
|
-
}
|
|
1522
|
-
}
|
|
1523
|
-
checks.push({
|
|
1524
|
-
name: "warning:windows:hook_dispatch_node_only",
|
|
1525
|
-
ok: legacyDispatchFiles.length === 0,
|
|
1526
|
-
details: legacyDispatchFiles.length === 0
|
|
1527
|
-
? "hook configs use managed .cclaw/hooks/run-hook.cmd dispatch commands"
|
|
1528
|
-
: `warning: legacy shell hook dispatch remains in ${legacyDispatchFiles.join(", ")}`
|
|
1529
|
-
});
|
|
1530
|
-
// Knowledge store exists (canonical JSONL, no markdown mirror)
|
|
1531
|
-
checks.push({
|
|
1532
|
-
name: "knowledge:store_exists",
|
|
1533
|
-
ok: await exists(path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl")),
|
|
1534
|
-
details: `${RUNTIME_ROOT}/knowledge.jsonl must exist`
|
|
1535
|
-
});
|
|
1536
|
-
// There must be NO legacy markdown knowledge store — JSONL is the only store.
|
|
1537
|
-
const legacyKnowledgeMdPath = path.join(projectRoot, RUNTIME_ROOT, "knowledge.md");
|
|
1538
|
-
const legacyExists = await exists(legacyKnowledgeMdPath);
|
|
1539
|
-
checks.push({
|
|
1540
|
-
name: "knowledge:no_legacy_markdown",
|
|
1541
|
-
ok: !legacyExists,
|
|
1542
|
-
details: legacyExists
|
|
1543
|
-
? `legacy ${RUNTIME_ROOT}/knowledge.md must be removed — cclaw is JSONL-native`
|
|
1544
|
-
: `no legacy markdown store present`
|
|
1545
|
-
});
|
|
1546
|
-
const knowledgePath = path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl");
|
|
1547
|
-
if (await exists(knowledgePath)) {
|
|
1548
|
-
let malformedKnowledgeLines = 0;
|
|
1549
|
-
let missingSchemaV2Fields = 0;
|
|
1550
|
-
let parsedKnowledgeLines = 0;
|
|
1551
|
-
let lowConfidenceLines = 0;
|
|
1552
|
-
let staleRawEntries = 0;
|
|
1553
|
-
const schemaErrors = [];
|
|
1554
|
-
const triggerActionCounts = new Map();
|
|
1555
|
-
// Stale threshold for raw entries: ~90 days with no re-observation.
|
|
1556
|
-
// Chosen to match the compound drift checklist language; anything newer is
|
|
1557
|
-
// recent enough to trust, anything older deserves a curate/supersede pass.
|
|
1558
|
-
const STALE_RAW_THRESHOLD_MS = 90 * 24 * 60 * 60 * 1000;
|
|
1559
|
-
const now = Date.now();
|
|
1560
|
-
const requiredV2Fields = [
|
|
1561
|
-
"type",
|
|
1562
|
-
"trigger",
|
|
1563
|
-
"action",
|
|
1564
|
-
"confidence",
|
|
1565
|
-
"domain",
|
|
1566
|
-
"stage",
|
|
1567
|
-
"origin_stage",
|
|
1568
|
-
"origin_run",
|
|
1569
|
-
"frequency",
|
|
1570
|
-
"universality",
|
|
1571
|
-
"maturity",
|
|
1572
|
-
"created",
|
|
1573
|
-
"first_seen_ts",
|
|
1574
|
-
"last_seen_ts",
|
|
1575
|
-
"project"
|
|
1576
|
-
];
|
|
1577
|
-
try {
|
|
1578
|
-
const raw = await fs.readFile(knowledgePath, "utf8");
|
|
1579
|
-
const lines = raw
|
|
1580
|
-
.split("\n")
|
|
1581
|
-
.map((line) => line.trim())
|
|
1582
|
-
.filter((line) => line.length > 0);
|
|
1583
|
-
for (const line of lines) {
|
|
1584
|
-
try {
|
|
1585
|
-
const parsed = JSON.parse(line);
|
|
1586
|
-
if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
|
|
1587
|
-
malformedKnowledgeLines += 1;
|
|
1588
|
-
continue;
|
|
1589
|
-
}
|
|
1590
|
-
parsedKnowledgeLines += 1;
|
|
1591
|
-
const validation = validateKnowledgeEntry(parsed);
|
|
1592
|
-
if (!validation.ok) {
|
|
1593
|
-
schemaErrors.push(`line ${parsedKnowledgeLines}: ${validation.errors.slice(0, 3).join(" ")}`);
|
|
1594
|
-
}
|
|
1595
|
-
const confidence = typeof parsed.confidence === "string" ? parsed.confidence.toLowerCase() : "";
|
|
1596
|
-
if (confidence === "low") {
|
|
1597
|
-
lowConfidenceLines += 1;
|
|
1598
|
-
}
|
|
1599
|
-
const trigger = typeof parsed.trigger === "string" ? parsed.trigger.trim().toLowerCase() : "";
|
|
1600
|
-
const action = typeof parsed.action === "string" ? parsed.action.trim().toLowerCase() : "";
|
|
1601
|
-
if (trigger.length > 0 && action.length > 0) {
|
|
1602
|
-
const key = `${trigger} => ${action}`;
|
|
1603
|
-
triggerActionCounts.set(key, (triggerActionCounts.get(key) ?? 0) + 1);
|
|
1604
|
-
}
|
|
1605
|
-
const missing = requiredV2Fields.some((field) => !Object.prototype.hasOwnProperty.call(parsed, field));
|
|
1606
|
-
if (missing) {
|
|
1607
|
-
missingSchemaV2Fields += 1;
|
|
1608
|
-
}
|
|
1609
|
-
const maturity = typeof parsed.maturity === "string" ? parsed.maturity.toLowerCase() : "";
|
|
1610
|
-
const lastSeenRaw = typeof parsed.last_seen_ts === "string" ? parsed.last_seen_ts : "";
|
|
1611
|
-
if (maturity === "raw" && lastSeenRaw.length > 0) {
|
|
1612
|
-
const lastSeenMs = Date.parse(lastSeenRaw);
|
|
1613
|
-
if (Number.isFinite(lastSeenMs) && now - lastSeenMs > STALE_RAW_THRESHOLD_MS) {
|
|
1614
|
-
staleRawEntries += 1;
|
|
1615
|
-
}
|
|
1616
|
-
}
|
|
1617
|
-
}
|
|
1618
|
-
catch {
|
|
1619
|
-
malformedKnowledgeLines += 1;
|
|
1620
|
-
}
|
|
1621
|
-
}
|
|
1622
|
-
}
|
|
1623
|
-
catch {
|
|
1624
|
-
malformedKnowledgeLines += 1;
|
|
1625
|
-
}
|
|
1626
|
-
checks.push({
|
|
1627
|
-
name: "knowledge:jsonl_parseable",
|
|
1628
|
-
ok: malformedKnowledgeLines === 0,
|
|
1629
|
-
details: malformedKnowledgeLines === 0
|
|
1630
|
-
? "knowledge.jsonl lines parse as JSON objects"
|
|
1631
|
-
: `knowledge.jsonl contains ${malformedKnowledgeLines} malformed line(s)`
|
|
1632
|
-
});
|
|
1633
|
-
checks.push({
|
|
1634
|
-
name: "warning:knowledge:schema_v2_fields",
|
|
1635
|
-
ok: true,
|
|
1636
|
-
details: parsedKnowledgeLines === 0
|
|
1637
|
-
? "knowledge.jsonl is empty"
|
|
1638
|
-
: missingSchemaV2Fields === 0
|
|
1639
|
-
? `all ${parsedKnowledgeLines} knowledge line(s) include schema v2 fields`
|
|
1640
|
-
: `warning: ${missingSchemaV2Fields}/${parsedKnowledgeLines} knowledge line(s) miss schema v2 fields (origin/maturity/frequency metadata)`
|
|
1641
|
-
});
|
|
1642
|
-
checks.push({
|
|
1643
|
-
name: "warning:knowledge:current_schema",
|
|
1644
|
-
ok: schemaErrors.length === 0,
|
|
1645
|
-
details: parsedKnowledgeLines === 0
|
|
1646
|
-
? "knowledge.jsonl is empty"
|
|
1647
|
-
: schemaErrors.length === 0
|
|
1648
|
-
? `all ${parsedKnowledgeLines} knowledge line(s) match the current strict schema`
|
|
1649
|
-
: `warning: ${schemaErrors.length}/${parsedKnowledgeLines} knowledge line(s) fail current schema validation (${schemaErrors.slice(0, 3).join("; ")})`
|
|
1650
|
-
});
|
|
1651
|
-
const lowConfidenceRatio = parsedKnowledgeLines === 0 ? 0 : lowConfidenceLines / parsedKnowledgeLines;
|
|
1652
|
-
checks.push({
|
|
1653
|
-
name: "warning:knowledge:low_confidence_density",
|
|
1654
|
-
ok: true,
|
|
1655
|
-
details: parsedKnowledgeLines === 0
|
|
1656
|
-
? "knowledge.jsonl is empty"
|
|
1657
|
-
: lowConfidenceRatio <= 0.35
|
|
1658
|
-
? `low-confidence entries: ${lowConfidenceLines}/${parsedKnowledgeLines}`
|
|
1659
|
-
: `warning: low-confidence entries are high (${lowConfidenceLines}/${parsedKnowledgeLines}, ${(lowConfidenceRatio * 100).toFixed(1)}%). Consider a learnings curation pass before adding more.`
|
|
1660
|
-
});
|
|
1661
|
-
const repeatedClusters = [...triggerActionCounts.entries()].filter(([, count]) => count >= 3);
|
|
1662
|
-
checks.push({
|
|
1663
|
-
name: "warning:knowledge:repeat_clusters",
|
|
1664
|
-
ok: true,
|
|
1665
|
-
details: repeatedClusters.length === 0
|
|
1666
|
-
? "no high-frequency repeated trigger/action clusters detected"
|
|
1667
|
-
: `warning: ${repeatedClusters.length} repeated learning cluster(s) detected (>=3 repeats). Consider curating knowledge lifts into durable rules/skills.`
|
|
1668
|
-
});
|
|
1669
|
-
checks.push({
|
|
1670
|
-
name: "warning:knowledge:stale_raw_entries",
|
|
1671
|
-
ok: true,
|
|
1672
|
-
details: parsedKnowledgeLines === 0
|
|
1673
|
-
? "knowledge.jsonl is empty"
|
|
1674
|
-
: staleRawEntries === 0
|
|
1675
|
-
? `no raw knowledge entries older than 90 days`
|
|
1676
|
-
: `warning: ${staleRawEntries} raw knowledge entry(ies) have last_seen_ts older than 90 days. Run a learnings curation pass or append a superseding entry before the next compound pass.`
|
|
1677
|
-
});
|
|
1678
|
-
}
|
|
1679
|
-
const routingKnowledgeSurfaces = [];
|
|
1680
|
-
for (const routingFileName of ["AGENTS.md", "CLAUDE.md"]) {
|
|
1681
|
-
const routingFilePath = path.join(projectRoot, routingFileName);
|
|
1682
|
-
if (!(await exists(routingFilePath)))
|
|
1683
|
-
continue;
|
|
1684
|
-
const content = await fs.readFile(routingFilePath, "utf8");
|
|
1685
|
-
if (knowledgeRoutingSurfaceIsDiscoverable(content)) {
|
|
1686
|
-
routingKnowledgeSurfaces.push(routingFileName);
|
|
1687
|
-
}
|
|
1688
|
-
}
|
|
1689
|
-
checks.push({
|
|
1690
|
-
name: "warning:knowledge:discoverability",
|
|
1691
|
-
ok: routingKnowledgeSurfaces.length > 0,
|
|
1692
|
-
details: routingKnowledgeSurfaces.length > 0
|
|
1693
|
-
? `knowledge store schema is discoverable from ${routingKnowledgeSurfaces.join(", ")}`
|
|
1694
|
-
: "warning: AGENTS.md or CLAUDE.md should mention .cclaw/knowledge.jsonl and its type/trigger/action/origin_run usage"
|
|
1695
|
-
});
|
|
1696
|
-
const seedEntries = await readSeedShelf(projectRoot);
|
|
1697
|
-
const orphanSeeds = seedEntries.filter((seed) => seed.sourceArtifact === null || seed.triggerWhen.length === 0 || seed.action === null || seed.action.trim().length === 0);
|
|
1698
|
-
checks.push({
|
|
1699
|
-
name: "warning:knowledge:orphan_seeds",
|
|
1700
|
-
ok: orphanSeeds.length === 0,
|
|
1701
|
-
details: seedEntries.length === 0
|
|
1702
|
-
? "no seed shelf entries present"
|
|
1703
|
-
: orphanSeeds.length === 0
|
|
1704
|
-
? `all ${seedEntries.length} seed shelf entr${seedEntries.length === 1 ? "y is" : "ies are"} discoverable`
|
|
1705
|
-
: `warning: ${orphanSeeds.length}/${seedEntries.length} seed shelf entr${seedEntries.length === 1 ? "y is" : "ies are"} missing source_artifact, trigger_when, or action (${orphanSeeds.slice(0, 3).map((seed) => seed.relPath).join(", ")})`
|
|
1706
|
-
});
|
|
1707
|
-
let flowState = createInitialFlowState();
|
|
1708
|
-
let flowStateCorruptError = null;
|
|
1709
|
-
try {
|
|
1710
|
-
flowState = await readFlowState(projectRoot, { repairFeatureSystem: false });
|
|
1711
|
-
}
|
|
1712
|
-
catch (error) {
|
|
1713
|
-
if (error instanceof CorruptFlowStateError) {
|
|
1714
|
-
flowStateCorruptError = error;
|
|
1715
|
-
checks.push({
|
|
1716
|
-
name: "flow_state:readable",
|
|
1717
|
-
ok: false,
|
|
1718
|
-
severity: "error",
|
|
1719
|
-
details: error.message
|
|
1720
|
-
});
|
|
1721
|
-
}
|
|
1722
|
-
else {
|
|
1723
|
-
throw error;
|
|
1724
|
-
}
|
|
1725
|
-
}
|
|
1726
|
-
if (options.reconcileCurrentStageGates === true && !flowStateCorruptError) {
|
|
1727
|
-
const reconciliation = await reconcileAndWriteCurrentStageGateCatalog(projectRoot);
|
|
1728
|
-
if (reconciliation.wrote) {
|
|
1729
|
-
flowState = {
|
|
1730
|
-
...flowState,
|
|
1731
|
-
stageGateCatalog: {
|
|
1732
|
-
...flowState.stageGateCatalog,
|
|
1733
|
-
[reconciliation.stage]: reconciliation.after
|
|
1734
|
-
}
|
|
1735
|
-
};
|
|
1736
|
-
}
|
|
1737
|
-
checks.push({
|
|
1738
|
-
name: "gates:reconcile:writeback",
|
|
1739
|
-
ok: true,
|
|
1740
|
-
details: reconciliation.wrote
|
|
1741
|
-
? `reconciled gate catalog for stage "${reconciliation.stage}": ${reconciliation.notes.join("; ")}`
|
|
1742
|
-
: `no gate reconciliation changes needed for stage "${reconciliation.stage}"`
|
|
1743
|
-
});
|
|
1744
|
-
}
|
|
1745
|
-
else if (options.reconcileCurrentStageGates === true && flowStateCorruptError) {
|
|
1746
|
-
checks.push({
|
|
1747
|
-
name: "gates:reconcile:writeback",
|
|
1748
|
-
ok: false,
|
|
1749
|
-
details: "skipped gate reconciliation because flow-state.json is corrupt"
|
|
1750
|
-
});
|
|
1751
|
-
}
|
|
1752
|
-
const activeRunId = typeof flowState.activeRunId === "string" ? flowState.activeRunId.trim() : "";
|
|
1753
|
-
checks.push({
|
|
1754
|
-
name: "flow_state:active_run_id",
|
|
1755
|
-
ok: activeRunId.length > 0,
|
|
1756
|
-
details: `${RUNTIME_ROOT}/state/flow-state.json must include activeRunId`
|
|
1757
|
-
});
|
|
1758
|
-
const sensitivePermissionTargets = [
|
|
1759
|
-
path.join(projectRoot, RUNTIME_ROOT, "state", "flow-state.json"),
|
|
1760
|
-
path.join(projectRoot, RUNTIME_ROOT, "state", "delegation-log.json"),
|
|
1761
|
-
path.join(projectRoot, RUNTIME_ROOT, "state", "reconciliation-notices.json"),
|
|
1762
|
-
path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl")
|
|
1763
|
-
];
|
|
1764
|
-
const permissiveStateFiles = [];
|
|
1765
|
-
for (const targetPath of sensitivePermissionTargets) {
|
|
1766
|
-
const bits = await readPermissionBits(targetPath);
|
|
1767
|
-
if (bits === null)
|
|
1768
|
-
continue;
|
|
1769
|
-
if (bits > 0o640) {
|
|
1770
|
-
permissiveStateFiles.push(`${path.relative(projectRoot, targetPath)}:${bits.toString(8)}`);
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
checks.push({
|
|
1774
|
-
name: "warning:state:file_permissions",
|
|
1775
|
-
ok: true,
|
|
1776
|
-
details: permissiveStateFiles.length === 0
|
|
1777
|
-
? "sensitive state files are <=0640 permissions"
|
|
1778
|
-
: `warning: sensitive state files are overly permissive (${permissiveStateFiles.join(", ")}). Run \`chmod 600 .cclaw/state/*.json .cclaw/state/*.jsonl .cclaw/knowledge.jsonl\` if this machine is multi-user.`
|
|
1779
|
-
});
|
|
1780
|
-
const reconciliationNotices = await readReconciliationNotices(projectRoot);
|
|
1781
|
-
checks.push({
|
|
1782
|
-
name: "state:reconciliation_notices_parse",
|
|
1783
|
-
ok: reconciliationNotices.parseOk && reconciliationNotices.schemaOk,
|
|
1784
|
-
details: !reconciliationNotices.parseOk
|
|
1785
|
-
? `unable to parse ${RECONCILIATION_NOTICES_REL_PATH}; reset with \`cclaw sync\` or repair JSON by hand`
|
|
1786
|
-
: !reconciliationNotices.schemaOk
|
|
1787
|
-
? `${RECONCILIATION_NOTICES_REL_PATH} schemaVersion mismatch; expected ${reconciliationNotices.schemaVersion}`
|
|
1788
|
-
: `${RECONCILIATION_NOTICES_REL_PATH} parsed successfully`
|
|
1789
|
-
});
|
|
1790
|
-
const noticeBuckets = classifyReconciliationNotices(flowState, reconciliationNotices.notices);
|
|
1791
|
-
const formatNoticeList = (items) => items
|
|
1792
|
-
.slice(0, 8)
|
|
1793
|
-
.map((notice) => `${notice.stage}.${notice.gateId}`)
|
|
1794
|
-
.join(", ");
|
|
1795
|
-
checks.push({
|
|
1796
|
-
name: "state:reconciliation_notices",
|
|
1797
|
-
ok: noticeBuckets.unsynced.length === 0,
|
|
1798
|
-
details: noticeBuckets.unsynced.length > 0
|
|
1799
|
-
? `reconciliation notices out of sync in ${RECONCILIATION_NOTICES_REL_PATH}: ${formatNoticeList(noticeBuckets.unsynced)}. Run \`cclaw doctor --reconcile-gates\` to resync and clear stale entries.`
|
|
1800
|
-
: noticeBuckets.currentStageBlocked.length > 0
|
|
1801
|
-
? `active reconciliation notices for current stage "${flowState.currentStage}": ${formatNoticeList(noticeBuckets.currentStageBlocked)}`
|
|
1802
|
-
: noticeBuckets.activeBlocked.length > 0
|
|
1803
|
-
? `active reconciliation notices for run "${flowState.activeRunId}": ${formatNoticeList(noticeBuckets.activeBlocked)}`
|
|
1804
|
-
: `no active reconciliation notices in ${RECONCILIATION_NOTICES_REL_PATH}`
|
|
1805
|
-
});
|
|
1806
|
-
const activeTrack = flowState.track ?? "standard";
|
|
1807
|
-
const trackStageList = TRACK_STAGES[activeTrack];
|
|
1808
|
-
const skippedFromState = Array.isArray(flowState.skippedStages) ? flowState.skippedStages : [];
|
|
1809
|
-
const expectedSkipped = skippedStagesForTrack(activeTrack);
|
|
1810
|
-
const skippedConsistent = expectedSkipped.length === skippedFromState.length &&
|
|
1811
|
-
expectedSkipped.every((stage) => skippedFromState.includes(stage));
|
|
1812
|
-
checks.push({
|
|
1813
|
-
name: "flow_state:track",
|
|
1814
|
-
ok: skippedConsistent,
|
|
1815
|
-
details: skippedConsistent
|
|
1816
|
-
? `active track "${activeTrack}" (${trackStageList.length}/${FLOW_STAGES.length} stages: ${trackStageList.join(" → ")})${expectedSkipped.length > 0 ? `; skippedStages=${expectedSkipped.join(", ")}` : ""}`
|
|
1817
|
-
: `track "${activeTrack}" expects skippedStages=[${expectedSkipped.join(", ")}] but flow-state has [${skippedFromState.join(", ")}] — run \`cclaw sync\` to repair`
|
|
1818
|
-
});
|
|
1819
|
-
if (parsedConfig?.trackHeuristics) {
|
|
1820
|
-
const ideaArtifactPath = path.join(projectRoot, RUNTIME_ROOT, "artifacts", "00-idea.md");
|
|
1821
|
-
let heuristicsAligned = true;
|
|
1822
|
-
let heuristicsDetails = "trackHeuristics configured; advisory alignment check skipped.";
|
|
1823
|
-
if (!(await exists(ideaArtifactPath))) {
|
|
1824
|
-
heuristicsDetails = `trackHeuristics configured but ${RUNTIME_ROOT}/artifacts/00-idea.md is missing; advisory alignment check skipped.`;
|
|
1825
|
-
}
|
|
1826
|
-
else {
|
|
1827
|
-
const ideaMarkdown = await fs.readFile(ideaArtifactPath, "utf8");
|
|
1828
|
-
if (/^Reclassification:\s*/imu.test(ideaMarkdown)) {
|
|
1829
|
-
heuristicsDetails = "00-idea.md contains Reclassification entry; advisory heuristic mismatch check skipped.";
|
|
1830
|
-
}
|
|
1831
|
-
else {
|
|
1832
|
-
const userPrompt = extractUserPromptFromIdeaArtifact(ideaMarkdown);
|
|
1833
|
-
if (!userPrompt) {
|
|
1834
|
-
heuristicsDetails = "00-idea.md has no `## User prompt` section; advisory heuristic mismatch check skipped.";
|
|
1835
|
-
}
|
|
1836
|
-
else {
|
|
1837
|
-
const resolution = resolveTrackFromPrompt(userPrompt, parsedConfig.trackHeuristics);
|
|
1838
|
-
const tokenNote = resolution.matchedTokens.length > 0
|
|
1839
|
-
? `matched: ${resolution.matchedTokens.join(", ")}`
|
|
1840
|
-
: "matched: none (fallback)";
|
|
1841
|
-
heuristicsAligned = resolution.track === activeTrack;
|
|
1842
|
-
heuristicsDetails = heuristicsAligned
|
|
1843
|
-
? `trackHeuristics advisory matches active track "${activeTrack}" (${tokenNote}).`
|
|
1844
|
-
: `warning: trackHeuristics advisory predicts "${resolution.track}" (${tokenNote}; ${resolution.reason}) but flow-state track is "${activeTrack}". Re-run classification or add Reclassification in 00-idea.md if override was intentional.`;
|
|
1845
|
-
}
|
|
1846
|
-
}
|
|
1847
|
-
}
|
|
1848
|
-
checks.push({
|
|
1849
|
-
name: "warning:track_heuristics:advisory_alignment",
|
|
1850
|
-
ok: heuristicsAligned,
|
|
1851
|
-
details: heuristicsDetails
|
|
1852
|
-
});
|
|
1853
|
-
}
|
|
1854
|
-
checks.push({
|
|
1855
|
-
name: "flow_state:track_completed_in_track",
|
|
1856
|
-
ok: flowState.completedStages.every((stage) => trackStageList.includes(stage) || expectedSkipped.includes(stage)),
|
|
1857
|
-
details: (() => {
|
|
1858
|
-
const offTrack = flowState.completedStages.filter((stage) => !trackStageList.includes(stage) && !expectedSkipped.includes(stage));
|
|
1859
|
-
return offTrack.length === 0
|
|
1860
|
-
? `every completed stage belongs to track "${activeTrack}" or its skipped set`
|
|
1861
|
-
: `completed stages contain entries outside track "${activeTrack}" and not in skipped set: ${offTrack.join(", ")}`;
|
|
1862
|
-
})()
|
|
1863
|
-
});
|
|
1864
|
-
checks.push({
|
|
1865
|
-
name: "artifacts:active_root",
|
|
1866
|
-
ok: await exists(path.join(projectRoot, RUNTIME_ROOT, "artifacts")),
|
|
1867
|
-
details: `${RUNTIME_ROOT}/artifacts must exist as the active artifact root`
|
|
1868
|
-
});
|
|
1869
|
-
const artifactsRoot = path.join(projectRoot, RUNTIME_ROOT, "artifacts");
|
|
1870
|
-
let artifactPlaceholderHits = [];
|
|
1871
|
-
let duplicateArtifactGroups = [];
|
|
1872
|
-
if (await exists(artifactsRoot)) {
|
|
1873
|
-
try {
|
|
1874
|
-
const entries = await fs.readdir(artifactsRoot, { withFileTypes: true });
|
|
1875
|
-
const placeholderPattern = /\b(?:TODO|TBD|FIXME)\b|<fill-in>|<your-.*-here>/giu;
|
|
1876
|
-
const stageArtifactFiles = new Map();
|
|
1877
|
-
for (const entry of entries) {
|
|
1878
|
-
if (!entry.isFile() || !entry.name.endsWith(".md"))
|
|
1879
|
-
continue;
|
|
1880
|
-
const stageForArtifact = artifactStageFromFileName(entry.name);
|
|
1881
|
-
if (stageForArtifact) {
|
|
1882
|
-
const files = stageArtifactFiles.get(stageForArtifact) ?? [];
|
|
1883
|
-
files.push(entry.name);
|
|
1884
|
-
stageArtifactFiles.set(stageForArtifact, files);
|
|
1885
|
-
}
|
|
1886
|
-
const filePath = path.join(artifactsRoot, entry.name);
|
|
1887
|
-
const content = await fs.readFile(filePath, "utf8");
|
|
1888
|
-
const matchCount = (content.match(placeholderPattern) ?? []).length;
|
|
1889
|
-
if (matchCount > 0) {
|
|
1890
|
-
artifactPlaceholderHits.push(`${entry.name}:${matchCount}`);
|
|
1891
|
-
}
|
|
1892
|
-
}
|
|
1893
|
-
duplicateArtifactGroups = [...stageArtifactFiles.entries()]
|
|
1894
|
-
.filter(([, files]) => files.length > 1)
|
|
1895
|
-
.map(([stageName, files]) => `${stageName}: ${files.sort().join(", ")}`);
|
|
1896
|
-
}
|
|
1897
|
-
catch {
|
|
1898
|
-
artifactPlaceholderHits = [];
|
|
1899
|
-
duplicateArtifactGroups = [];
|
|
1900
|
-
}
|
|
1901
|
-
}
|
|
1902
|
-
checks.push({
|
|
1903
|
-
name: "warning:artifacts:stale_placeholders",
|
|
1904
|
-
ok: true,
|
|
1905
|
-
details: artifactPlaceholderHits.length === 0
|
|
1906
|
-
? "no TODO/TBD/FIXME placeholder markers found in active artifacts"
|
|
1907
|
-
: `warning: placeholder markers detected in active artifacts (${artifactPlaceholderHits.join(", ")}). Clear before marking completion.`
|
|
1908
|
-
});
|
|
1909
|
-
checks.push({
|
|
1910
|
-
name: "warning:artifacts:duplicate_stage_artifacts",
|
|
1911
|
-
ok: duplicateArtifactGroups.length === 0,
|
|
1912
|
-
details: duplicateArtifactGroups.length === 0
|
|
1913
|
-
? "no duplicate stage artifacts detected in active artifacts"
|
|
1914
|
-
: `warning: duplicate stage artifacts detected (${duplicateArtifactGroups.join("; ")}). The resolver uses the newest matching file; archive or rename stale copies to avoid ambiguous operator handoff.`
|
|
1915
|
-
});
|
|
1916
|
-
const staleStages = Object.keys(flowState.staleStages).filter((value) => FLOW_STAGES.includes(value));
|
|
1917
|
-
checks.push({
|
|
1918
|
-
name: "state:stale_stages_resolved",
|
|
1919
|
-
ok: staleStages.length === 0,
|
|
1920
|
-
details: staleStages.length === 0
|
|
1921
|
-
? "no stale stages pending acknowledgement"
|
|
1922
|
-
: `stale stages pending acknowledgement: ${staleStages.join(", ")}. Re-run the current stale stage, then clear it with cclaw internal rewind --ack ${flowState.currentStage}.`
|
|
1923
|
-
});
|
|
1924
|
-
const retroGateStatus = await evaluateRetroGate(projectRoot, flowState);
|
|
1925
|
-
checks.push({
|
|
1926
|
-
name: "state:retro_gate",
|
|
1927
|
-
ok: retroGateStatus.completed,
|
|
1928
|
-
details: retroGateStatus.completed
|
|
1929
|
-
? retroGateStatus.required
|
|
1930
|
-
? retroGateStatus.skipped
|
|
1931
|
-
? "retro gate complete (retro skipped with recorded closeout decision)"
|
|
1932
|
-
: `retro gate complete (${retroGateStatus.compoundEntries} compound entries)`
|
|
1933
|
-
: "retro gate not required yet (ship not completed)"
|
|
1934
|
-
: "retro gate incomplete: ship flow requires recorded retrospective evidence or an explicit retro skip."
|
|
1935
|
-
});
|
|
1936
|
-
const tddLogPath = path.join(projectRoot, RUNTIME_ROOT, "state", "tdd-cycle-log.jsonl");
|
|
1937
|
-
const tddLogExists = await exists(tddLogPath);
|
|
1938
|
-
const tddCompleted = flowState.completedStages.includes("tdd")
|
|
1939
|
-
|| (flowState.currentStage === "review" || flowState.currentStage === "ship");
|
|
1940
|
-
checks.push({
|
|
1941
|
-
name: "state:tdd_cycle_log_exists",
|
|
1942
|
-
ok: tddLogExists || !tddCompleted,
|
|
1943
|
-
details: tddLogExists
|
|
1944
|
-
? `${RUNTIME_ROOT}/state/tdd-cycle-log.jsonl exists`
|
|
1945
|
-
: tddCompleted
|
|
1946
|
-
? `${RUNTIME_ROOT}/state/tdd-cycle-log.jsonl must exist once TDD is complete`
|
|
1947
|
-
: `${RUNTIME_ROOT}/state/tdd-cycle-log.jsonl will be created when TDD evidence is generated`
|
|
1948
|
-
});
|
|
1949
|
-
if (tddLogExists) {
|
|
1950
|
-
const tddLogRaw = await fs.readFile(tddLogPath, "utf8");
|
|
1951
|
-
const parsedCycles = parseTddCycleLog(tddLogRaw);
|
|
1952
|
-
const validation = validateTddCycleOrder(parsedCycles, { runId: activeRunId || undefined });
|
|
1953
|
-
const hasCoverage = validation.sliceCount > 0;
|
|
1954
|
-
checks.push({
|
|
1955
|
-
name: "state:tdd_cycle_order",
|
|
1956
|
-
ok: validation.ok && (!tddCompleted || hasCoverage),
|
|
1957
|
-
details: validation.ok
|
|
1958
|
-
? tddCompleted && !hasCoverage
|
|
1959
|
-
? "tdd stage complete but no RED/GREEN cycle evidence logged"
|
|
1960
|
-
: `tdd cycle log valid (${validation.sliceCount} slice(s), open_red=${validation.openRedSlices.length})`
|
|
1961
|
-
: `tdd cycle order issues: ${validation.issues.join("; ")}${validation.openRedSlices.length > 0
|
|
1962
|
-
? ` | open red slices: ${validation.openRedSlices.join(", ")}`
|
|
1963
|
-
: ""}`
|
|
1964
|
-
});
|
|
1965
|
-
}
|
|
1966
|
-
else {
|
|
1967
|
-
checks.push({
|
|
1968
|
-
name: "state:tdd_cycle_order",
|
|
1969
|
-
ok: !tddCompleted,
|
|
1970
|
-
details: tddCompleted
|
|
1971
|
-
? "tdd stage complete but tdd-cycle-log.jsonl is missing"
|
|
1972
|
-
: "tdd cycle order deferred until tdd stage evidence is generated"
|
|
1973
|
-
});
|
|
1974
|
-
}
|
|
1975
|
-
checks.push({
|
|
1976
|
-
name: "runs:archive_root",
|
|
1977
|
-
ok: await exists(path.join(projectRoot, RUNTIME_ROOT, "runs")),
|
|
1978
|
-
details: `${RUNTIME_ROOT}/runs must exist for archived run snapshots`
|
|
1979
|
-
});
|
|
1980
|
-
const initRecovery = await initRecoveryCheck(projectRoot);
|
|
1981
|
-
checks.push({
|
|
1982
|
-
name: "state:init_recovery",
|
|
1983
|
-
ok: initRecovery.ok,
|
|
1984
|
-
details: initRecovery.details
|
|
1985
|
-
});
|
|
1986
|
-
const archiveIntegrity = await archiveIntegrityCheck(projectRoot);
|
|
1987
|
-
checks.push({
|
|
1988
|
-
name: "runs:archive_integrity",
|
|
1989
|
-
ok: archiveIntegrity.ok,
|
|
1990
|
-
details: archiveIntegrity.details
|
|
1991
|
-
});
|
|
1992
|
-
const currentGateState = flowState.stageGateCatalog[flowState.currentStage];
|
|
1993
|
-
const currentStageUntouched = flowState.completedStages.length === 0 &&
|
|
1994
|
-
flowState.rewinds.length === 0 &&
|
|
1995
|
-
Object.keys(flowState.guardEvidence).length === 0 &&
|
|
1996
|
-
(currentGateState?.passed.length ?? 0) === 0 &&
|
|
1997
|
-
(currentGateState?.blocked.length ?? 0) === 0;
|
|
1998
|
-
const delegation = await checkMandatoryDelegations(projectRoot, flowState.currentStage, {
|
|
1999
|
-
repairFeatureSystem: false
|
|
2000
|
-
});
|
|
2001
|
-
const delegationEvents = await readDelegationEvents(projectRoot);
|
|
2002
|
-
const delegationSatisfiedForDoctor = currentStageUntouched || delegation.satisfied;
|
|
2003
|
-
const missingEvidenceNote = delegation.missingEvidence && delegation.missingEvidence.length > 0
|
|
2004
|
-
? ` (role-switch rows without evidenceRefs: ${delegation.missingEvidence.join(", ")})`
|
|
2005
|
-
: "";
|
|
2006
|
-
checks.push({
|
|
2007
|
-
name: "delegation:mandatory:current_stage",
|
|
2008
|
-
ok: delegationSatisfiedForDoctor,
|
|
2009
|
-
details: currentStageUntouched
|
|
2010
|
-
? `mandatory delegation check deferred for untouched stage "${flowState.currentStage}"; stage-complete enforces it when work begins`
|
|
2011
|
-
: delegation.satisfied
|
|
2012
|
-
? `All mandatory delegations satisfied for stage "${flowState.currentStage}" (mode: ${delegation.expectedMode})`
|
|
2013
|
-
: `Missing mandatory delegations for stage "${flowState.currentStage}": ${delegation.missing.join(", ")}${missingEvidenceNote}; missingDispatchProof=${delegation.missingDispatchProof.join(", ")}; staleWorkers=${delegation.staleWorkers.join(", ")}; corruptEventLines=${delegation.corruptEventLines.join(", ")}`
|
|
2014
|
-
});
|
|
2015
|
-
checks.push({
|
|
2016
|
-
name: "delegation:events:parse",
|
|
2017
|
-
ok: delegationEvents.corruptLines.length === 0,
|
|
2018
|
-
details: delegationEvents.corruptLines.length === 0
|
|
2019
|
-
? `${RUNTIME_ROOT}/state/delegation-events.jsonl parsed successfully (${delegationEvents.events.length} event(s))`
|
|
2020
|
-
: `corrupt delegation event line(s): ${delegationEvents.corruptLines.join(", ")}`
|
|
2021
|
-
});
|
|
2022
|
-
checks.push({
|
|
2023
|
-
name: "delegation:proof:current_stage",
|
|
2024
|
-
ok: currentStageUntouched || delegation.missingDispatchProof.length === 0,
|
|
2025
|
-
details: currentStageUntouched
|
|
2026
|
-
? `dispatch proof check deferred for untouched stage "${flowState.currentStage}"`
|
|
2027
|
-
: delegation.missingDispatchProof.length === 0
|
|
2028
|
-
? `no dispatch proof gaps for current stage "${flowState.currentStage}"`
|
|
2029
|
-
: `isolated completions missing dispatchId/dispatchSurface/agentDefinitionPath/ackTs/completedTs: ${delegation.missingDispatchProof.join(", ")}`
|
|
2030
|
-
});
|
|
2031
|
-
checks.push({
|
|
2032
|
-
name: "warning:delegation:legacy_inferred_completions",
|
|
2033
|
-
ok: true,
|
|
2034
|
-
details: delegation.legacyInferredCompletions.length > 0
|
|
2035
|
-
? `warning: legacy inferred isolated completion rows lack event-log proof: ${delegation.legacyInferredCompletions.join(", ")}`
|
|
2036
|
-
: "no legacy inferred isolated completions for current stage"
|
|
2037
|
-
});
|
|
2038
|
-
checks.push({
|
|
2039
|
-
name: "warning:delegation:waived",
|
|
2040
|
-
ok: true,
|
|
2041
|
-
details: delegation.waived.length > 0
|
|
2042
|
-
? `warning: waived mandatory delegations for stage "${flowState.currentStage}": ${delegation.waived.join(", ")}`
|
|
2043
|
-
: "no waived mandatory delegations for current stage"
|
|
2044
|
-
});
|
|
2045
|
-
checks.push({
|
|
2046
|
-
name: "warning:delegation:stale_runs",
|
|
2047
|
-
ok: true,
|
|
2048
|
-
details: delegation.staleIgnored.length > 0
|
|
2049
|
-
? `warning: ${delegation.staleIgnored.length} delegation entries from other runs were ignored: ${delegation.staleIgnored.join(", ")}`
|
|
2050
|
-
: "no stale delegation entries from prior runs"
|
|
2051
|
-
});
|
|
2052
|
-
const trace = await buildTraceMatrix(projectRoot);
|
|
2053
|
-
const artifactsDir = path.join(projectRoot, RUNTIME_ROOT, "artifacts");
|
|
2054
|
-
const specExists = await exists(path.join(artifactsDir, "04-spec.md"));
|
|
2055
|
-
const planExists = await exists(path.join(artifactsDir, "05-plan.md"));
|
|
2056
|
-
const tddExists = await exists(path.join(artifactsDir, "06-tdd.md"));
|
|
2057
|
-
const traceHasSignal = trace.entries.length > 0 ||
|
|
2058
|
-
trace.orphanedCriteria.length > 0 ||
|
|
2059
|
-
trace.orphanedTasks.length > 0 ||
|
|
2060
|
-
trace.orphanedTests.length > 0;
|
|
2061
|
-
const artifactsPresent = specExists || planExists || tddExists;
|
|
2062
|
-
const emptyMatrixWithArtifacts = !traceHasSignal && artifactsPresent;
|
|
2063
|
-
checks.push({
|
|
2064
|
-
name: "trace:matrix_populated",
|
|
2065
|
-
ok: !emptyMatrixWithArtifacts,
|
|
2066
|
-
details: emptyMatrixWithArtifacts
|
|
2067
|
-
? `trace matrix is empty but artifacts exist (${[
|
|
2068
|
-
specExists ? "04-spec.md" : null,
|
|
2069
|
-
planExists ? "05-plan.md" : null,
|
|
2070
|
-
tddExists ? "06-tdd.md" : null
|
|
2071
|
-
].filter(Boolean).join(", ")}). The extractors found no criterion/task/slice IDs — check heading conventions and ID formats.`
|
|
2072
|
-
: artifactsPresent
|
|
2073
|
-
? `trace matrix parsed ${trace.entries.length} criterion(s) from present artifacts`
|
|
2074
|
-
: "no downstream artifacts to trace yet"
|
|
2075
|
-
});
|
|
2076
|
-
checks.push({
|
|
2077
|
-
name: "trace:criteria_coverage",
|
|
2078
|
-
ok: !traceHasSignal || trace.orphanedCriteria.length === 0,
|
|
2079
|
-
details: trace.orphanedCriteria.length === 0
|
|
2080
|
-
? "all spec criteria are linked to plan tasks"
|
|
2081
|
-
: `orphaned criteria: ${trace.orphanedCriteria.join(", ")}`
|
|
2082
|
-
});
|
|
2083
|
-
checks.push({
|
|
2084
|
-
name: "trace:task_to_test_coverage",
|
|
2085
|
-
ok: !traceHasSignal || trace.orphanedTasks.length === 0,
|
|
2086
|
-
details: trace.orphanedTasks.length === 0
|
|
2087
|
-
? "all plan tasks are linked to test slices"
|
|
2088
|
-
: `orphaned tasks: ${trace.orphanedTasks.join(", ")}`
|
|
2089
|
-
});
|
|
2090
|
-
checks.push({
|
|
2091
|
-
name: "trace:test_to_criteria_coverage",
|
|
2092
|
-
ok: !traceHasSignal || trace.orphanedTests.length === 0,
|
|
2093
|
-
details: trace.orphanedTests.length === 0
|
|
2094
|
-
? "all test slices map to acceptance-linked tasks"
|
|
2095
|
-
: `orphaned test slices: ${trace.orphanedTests.join(", ")}`
|
|
2096
|
-
});
|
|
2097
|
-
// Slice-review warning (opt-in via config.sliceReview.enabled).
|
|
2098
|
-
// Fires when:
|
|
2099
|
-
// - sliceReview.enabled is true
|
|
2100
|
-
// - current track is listed in sliceReview.enforceOnTracks
|
|
2101
|
-
// - 06-tdd.md exists (so the slice loop actually started)
|
|
2102
|
-
// - artifact contains at least one slice marker (look for the tdd
|
|
2103
|
-
// "Acceptance Mapping" or any `### Slice` heading) AND the Per-Slice
|
|
2104
|
-
// Review heading is absent
|
|
2105
|
-
// Non-blocking — warnings guide the user toward adding the review
|
|
2106
|
-
// section without failing doctor.
|
|
2107
|
-
const sliceReviewConfig = parsedConfig?.sliceReview;
|
|
2108
|
-
const sliceReviewEnabled = sliceReviewConfig?.enabled === true;
|
|
2109
|
-
const sliceReviewEnforcedTracks = sliceReviewConfig?.enforceOnTracks ?? ["standard"];
|
|
2110
|
-
const sliceReviewEnforcedHere = sliceReviewEnabled && sliceReviewEnforcedTracks.includes(activeTrack);
|
|
2111
|
-
if (sliceReviewEnforcedHere && tddExists) {
|
|
2112
|
-
const tddMarkdown = await fs.readFile(path.join(artifactsDir, "06-tdd.md"), "utf8");
|
|
2113
|
-
const hasSliceSignal = /^###\s+Slice\b/im.test(tddMarkdown)
|
|
2114
|
-
|| /^##\s+Acceptance Mapping\b/im.test(tddMarkdown)
|
|
2115
|
-
|| /^##\s+RED\b/im.test(tddMarkdown);
|
|
2116
|
-
const hasReviewHeading = /^##\s+Per-Slice Review\b/im.test(tddMarkdown);
|
|
2117
|
-
const missing = hasSliceSignal && !hasReviewHeading;
|
|
2118
|
-
checks.push({
|
|
2119
|
-
name: "warning:slice_review:missing_section",
|
|
2120
|
-
ok: !missing,
|
|
2121
|
-
details: missing
|
|
2122
|
-
? `warning: sliceReview is enabled for track "${activeTrack}" and 06-tdd.md contains slice evidence but no "## Per-Slice Review" section. Add a Per-Slice Review entry for every triggered slice (touchCount >= ${sliceReviewConfig?.filesChangedThreshold ?? 5}, touchPaths match, or highRisk=true), or record "not triggered" explicitly.`
|
|
2123
|
-
: hasReviewHeading
|
|
2124
|
-
? `sliceReview section present in 06-tdd.md (track "${activeTrack}")`
|
|
2125
|
-
: `sliceReview enabled but no slice evidence yet in 06-tdd.md (track "${activeTrack}")`
|
|
2126
|
-
});
|
|
2127
|
-
}
|
|
2128
|
-
const gateEvidence = await verifyCurrentStageGateEvidence(projectRoot, flowState);
|
|
2129
|
-
checks.push({
|
|
2130
|
-
name: "gates:evidence:current_stage",
|
|
2131
|
-
ok: gateEvidence.ok,
|
|
2132
|
-
details: gateEvidence.ok
|
|
2133
|
-
? `stage "${gateEvidence.stage}" gate evidence is consistent (required=${gateEvidence.requiredCount}, recommended=${gateEvidence.recommendedCount}, conditional=${gateEvidence.conditionalCount}, triggered=${gateEvidence.triggeredConditionalCount}, passed=${gateEvidence.passedCount}, blocked=${gateEvidence.blockedCount})`
|
|
2134
|
-
: gateEvidence.issues.join(" ")
|
|
2135
|
-
});
|
|
2136
|
-
checks.push({
|
|
2137
|
-
name: "warning:gates:recommended:current_stage",
|
|
2138
|
-
ok: true,
|
|
2139
|
-
details: gateEvidence.missingRecommended.length > 0
|
|
2140
|
-
? `warning: stage "${gateEvidence.stage}" has unmet recommended gates: ${gateEvidence.missingRecommended.join(", ")}`
|
|
2141
|
-
: `no unmet recommended gates for stage "${gateEvidence.stage}"`
|
|
2142
|
-
});
|
|
2143
|
-
const completedClosure = verifyCompletedStagesGateClosure(flowState);
|
|
2144
|
-
checks.push({
|
|
2145
|
-
name: "gates:closure:completed_stages",
|
|
2146
|
-
ok: completedClosure.ok,
|
|
2147
|
-
details: completedClosure.ok
|
|
2148
|
-
? flowState.completedStages.length === 0
|
|
2149
|
-
? "no completed stages yet"
|
|
2150
|
-
: `all ${flowState.completedStages.length} completed stages have every required gate passed`
|
|
2151
|
-
: completedClosure.issues.join(" ")
|
|
2152
|
-
});
|
|
2153
|
-
const isRepo = await isGitRepo(projectRoot);
|
|
2154
|
-
checks.push({
|
|
2155
|
-
name: "git:cclaw_ignored_runtime",
|
|
2156
|
-
ok: isRepo ? await gitIgnoresRuntime(projectRoot) : true,
|
|
2157
|
-
details: isRepo
|
|
2158
|
-
? `git check-ignore must pass for ${RUNTIME_ROOT}/`
|
|
2159
|
-
: "repository not initialized; check skipped"
|
|
2160
|
-
});
|
|
2161
|
-
const rulesJsonPath = path.join(projectRoot, RUNTIME_ROOT, "rules", "rules.json");
|
|
2162
|
-
let hasRules = false;
|
|
2163
|
-
if (await exists(rulesJsonPath)) {
|
|
2164
|
-
try {
|
|
2165
|
-
const parsed = JSON.parse(await fs.readFile(rulesJsonPath, "utf8"));
|
|
2166
|
-
const hasCoreLists = Array.isArray(parsed.MUST_ALWAYS) && Array.isArray(parsed.MUST_NEVER);
|
|
2167
|
-
const stageOrder = parsed.stage_order;
|
|
2168
|
-
const stageGates = parsed.stage_gates;
|
|
2169
|
-
const hasStageOrder = Array.isArray(stageOrder) &&
|
|
2170
|
-
FLOW_STAGES.every((stage) => stageOrder.includes(stage));
|
|
2171
|
-
const hasStageGates = typeof stageGates === "object" &&
|
|
2172
|
-
stageGates !== null &&
|
|
2173
|
-
FLOW_STAGES.every((stage) => Array.isArray(stageGates[stage]));
|
|
2174
|
-
hasRules = hasCoreLists && hasStageOrder && hasStageGates;
|
|
2175
|
-
}
|
|
2176
|
-
catch {
|
|
2177
|
-
hasRules = false;
|
|
2178
|
-
}
|
|
2179
|
-
}
|
|
2180
|
-
checks.push({
|
|
2181
|
-
name: "rules:policy_schema",
|
|
2182
|
-
ok: hasRules,
|
|
2183
|
-
details: rulesJsonPath
|
|
2184
|
-
});
|
|
2185
|
-
const policy = await policyChecks(projectRoot, { harnesses: configuredHarnesses });
|
|
2186
|
-
checks.push(...policy);
|
|
2187
|
-
return checks.map((check) => {
|
|
2188
|
-
const metadata = doctorCheckMetadata(check.name);
|
|
2189
|
-
return {
|
|
2190
|
-
...check,
|
|
2191
|
-
severity: check.severity ?? metadata.severity,
|
|
2192
|
-
summary: check.summary ?? metadata.summary,
|
|
2193
|
-
fix: check.fix ?? metadata.fix,
|
|
2194
|
-
actionGroup: check.actionGroup ?? metadata.actionGroup,
|
|
2195
|
-
docRef: check.docRef ?? metadata.docRef
|
|
2196
|
-
};
|
|
2197
|
-
});
|
|
2198
|
-
}
|
|
2199
|
-
/**
 * Decide whether a doctor run passed overall.
 *
 * A run succeeds when no error-severity check failed; checks that failed
 * with a lesser severity (e.g. warnings) never block success, and an
 * empty check list trivially passes.
 *
 * @param {Array<{ok: boolean, severity?: string}>} checks - Annotated doctor check results.
 * @returns {boolean} True when there is no failing check with severity "error".
 */
export function doctorSucceeded(checks) {
  return !checks.some((check) => !check.ok && check.severity === "error");
}
|