cclaw-cli 0.47.0 → 0.48.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/artifact-linter.d.ts +7 -0
- package/dist/artifact-linter.js +43 -0
- package/dist/config.d.ts +6 -6
- package/dist/config.js +22 -0
- package/dist/constants.d.ts +10 -1
- package/dist/constants.js +19 -10
- package/dist/content/contracts.d.ts +1 -1
- package/dist/content/contracts.js +1 -1
- package/dist/content/{harnesses-doc.js → harness-doc.js} +32 -1
- package/dist/content/harness-playbooks.js +4 -4
- package/dist/content/ideate-command.js +19 -19
- package/dist/content/skills.js +2 -2
- package/dist/content/stage-schema.js +34 -8
- package/dist/content/stages/design.js +2 -2
- package/dist/content/stages/review.js +1 -1
- package/dist/content/stages/ship.js +2 -0
- package/dist/content/stages/tdd.js +8 -4
- package/dist/content/templates.js +4 -3
- package/dist/delegation.js +99 -33
- package/dist/doctor.js +77 -9
- package/dist/flow-state.d.ts +8 -0
- package/dist/flow-state.js +11 -8
- package/dist/gate-evidence.js +20 -1
- package/dist/harness-adapters.d.ts +2 -2
- package/dist/harness-adapters.js +2 -2
- package/dist/install.js +28 -6
- package/dist/internal/detect-public-api-changes.d.ts +5 -0
- package/dist/internal/detect-public-api-changes.js +45 -0
- package/dist/policy.js +3 -2
- package/dist/retro-gate.js +19 -2
- package/dist/run-persistence.js +5 -4
- package/dist/types.d.ts +6 -1
- package/package.json +4 -1
- /package/dist/content/{harnesses-doc.d.ts → harness-doc.d.ts} +0 -0
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { COMMAND_FILE_ORDER } from "../constants.js";
|
|
2
1
|
import { orderedStageSchemas } from "./stage-schema.js";
|
|
2
|
+
import { FLOW_STAGES } from "../types.js";
|
|
3
3
|
export const ARTIFACT_TEMPLATES = {
|
|
4
4
|
"01-brainstorm.md": `---
|
|
5
5
|
stage: brainstorm
|
|
@@ -522,6 +522,7 @@ inputs_hash: sha256:pending
|
|
|
522
522
|
| ID | Severity | Category | Description | Status |
|
|
523
523
|
|---|---|---|---|---|
|
|
524
524
|
| R-1 | Critical/Important/Suggestion | correctness/security/performance/architecture | | open/resolved |
|
|
525
|
+
- NO_CHANGE_ATTESTATION: <required when Category=security has no entries; explain why no security-relevant changes were detected>
|
|
525
526
|
|
|
526
527
|
## Incoming Feedback Queue
|
|
527
528
|
| ID | Source | Severity | File:line | Request | Status | Evidence |
|
|
@@ -802,7 +803,7 @@ Track-specific skips are allowed only when \`flow-state.track\` + \`skippedStage
|
|
|
802
803
|
export function buildRulesJson() {
|
|
803
804
|
return {
|
|
804
805
|
version: 1,
|
|
805
|
-
stage_order:
|
|
806
|
+
stage_order: FLOW_STAGES,
|
|
806
807
|
stage_gates: Object.fromEntries(orderedStageSchemas().map((schema) => [
|
|
807
808
|
schema.stage,
|
|
808
809
|
schema.requiredGates.map((gate) => gate.id)
|
|
@@ -820,7 +821,7 @@ export function buildRulesJson() {
|
|
|
820
821
|
"conventional_commits"
|
|
821
822
|
],
|
|
822
823
|
MUST_NEVER: [
|
|
823
|
-
"
|
|
824
|
+
"skip_tdd_stage",
|
|
824
825
|
"ship_with_critical_findings",
|
|
825
826
|
"implement_in_brainstorm",
|
|
826
827
|
"manual_edit_generated",
|
package/dist/delegation.js
CHANGED
|
@@ -1,11 +1,14 @@
|
|
|
1
1
|
import fs from "node:fs/promises";
|
|
2
2
|
import path from "node:path";
|
|
3
|
+
import { execFile } from "node:child_process";
|
|
4
|
+
import { promisify } from "node:util";
|
|
3
5
|
import { RUNTIME_ROOT } from "./constants.js";
|
|
4
6
|
import { readConfig } from "./config.js";
|
|
5
7
|
import { exists, withDirectoryLock, writeFileSafe } from "./fs-utils.js";
|
|
6
8
|
import { HARNESS_ADAPTERS } from "./harness-adapters.js";
|
|
7
9
|
import { readFlowState } from "./runs.js";
|
|
8
10
|
import { stageSchema } from "./content/stage-schema.js";
|
|
11
|
+
const execFileAsync = promisify(execFile);
|
|
9
12
|
function delegationLogPath(projectRoot) {
|
|
10
13
|
return path.join(projectRoot, RUNTIME_ROOT, "state", "delegation-log.json");
|
|
11
14
|
}
|
|
@@ -15,6 +18,82 @@ function delegationLockPath(projectRoot) {
|
|
|
15
18
|
function createSpanId() {
|
|
16
19
|
return `dspan-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`;
|
|
17
20
|
}
|
|
21
|
+
async function resolveReviewDiffBase(projectRoot) {
|
|
22
|
+
let head = "";
|
|
23
|
+
try {
|
|
24
|
+
head = (await execFileAsync("git", ["rev-parse", "HEAD"], { cwd: projectRoot })).stdout.trim();
|
|
25
|
+
}
|
|
26
|
+
catch {
|
|
27
|
+
return null;
|
|
28
|
+
}
|
|
29
|
+
const candidates = ["origin/main", "origin/master", "main", "master"];
|
|
30
|
+
for (const candidate of candidates) {
|
|
31
|
+
try {
|
|
32
|
+
await execFileAsync("git", ["rev-parse", "--verify", candidate], { cwd: projectRoot });
|
|
33
|
+
const { stdout } = await execFileAsync("git", ["merge-base", "HEAD", candidate], {
|
|
34
|
+
cwd: projectRoot
|
|
35
|
+
});
|
|
36
|
+
const base = stdout.trim();
|
|
37
|
+
if (base.length > 0 && base !== head) {
|
|
38
|
+
return base;
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
catch {
|
|
42
|
+
continue;
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
try {
|
|
46
|
+
const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD~1"], {
|
|
47
|
+
cwd: projectRoot
|
|
48
|
+
});
|
|
49
|
+
const base = stdout.trim();
|
|
50
|
+
return base.length > 0 ? base : null;
|
|
51
|
+
}
|
|
52
|
+
catch {
|
|
53
|
+
return null;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
async function detectReviewTriggers(projectRoot) {
|
|
57
|
+
const empty = {
|
|
58
|
+
changedFiles: 0,
|
|
59
|
+
changedLines: 0,
|
|
60
|
+
trustBoundaryChanged: false,
|
|
61
|
+
requireAdversarialReviewer: false
|
|
62
|
+
};
|
|
63
|
+
const base = await resolveReviewDiffBase(projectRoot);
|
|
64
|
+
if (!base) {
|
|
65
|
+
return empty;
|
|
66
|
+
}
|
|
67
|
+
try {
|
|
68
|
+
const range = `${base}..HEAD`;
|
|
69
|
+
const shortstat = await execFileAsync("git", ["diff", "--shortstat", range], {
|
|
70
|
+
cwd: projectRoot
|
|
71
|
+
});
|
|
72
|
+
const short = shortstat.stdout.trim();
|
|
73
|
+
const changedFiles = Number((/(\d+)\s+files?\s+changed/u.exec(short)?.[1] ?? "0"));
|
|
74
|
+
const insertions = Number((/(\d+)\s+insertions?\(\+\)/u.exec(short)?.[1] ?? "0"));
|
|
75
|
+
const deletions = Number((/(\d+)\s+deletions?\(-\)/u.exec(short)?.[1] ?? "0"));
|
|
76
|
+
const changedLines = insertions + deletions;
|
|
77
|
+
const names = await execFileAsync("git", ["diff", "--name-only", range], {
|
|
78
|
+
cwd: projectRoot
|
|
79
|
+
});
|
|
80
|
+
const changedPaths = names.stdout
|
|
81
|
+
.split(/\r?\n/gu)
|
|
82
|
+
.map((line) => line.trim())
|
|
83
|
+
.filter((line) => line.length > 0);
|
|
84
|
+
const trustBoundaryChanged = changedPaths.some((filePath) => /(auth|security|secret|token|credential|permission|acl|policy|oauth|session|encrypt|decrypt|input|validation)/iu.test(filePath));
|
|
85
|
+
const requireAdversarialReviewer = changedLines > 100 || changedFiles > 10 || trustBoundaryChanged;
|
|
86
|
+
return {
|
|
87
|
+
changedFiles,
|
|
88
|
+
changedLines,
|
|
89
|
+
trustBoundaryChanged,
|
|
90
|
+
requireAdversarialReviewer
|
|
91
|
+
};
|
|
92
|
+
}
|
|
93
|
+
catch {
|
|
94
|
+
return empty;
|
|
95
|
+
}
|
|
96
|
+
}
|
|
18
97
|
function isDelegationTokenUsage(value) {
|
|
19
98
|
if (!value || typeof value !== "object" || Array.isArray(value))
|
|
20
99
|
return false;
|
|
@@ -76,6 +155,8 @@ function parseLedger(raw, runId) {
|
|
|
76
155
|
for (const item of entriesRaw) {
|
|
77
156
|
if (isDelegationEntry(item)) {
|
|
78
157
|
const ts = item.startTs ?? item.ts ?? new Date().toISOString();
|
|
158
|
+
const inferredFulfillmentMode = item.fulfillmentMode
|
|
159
|
+
?? (item.status === "completed" ? "isolated" : undefined);
|
|
79
160
|
entries.push({
|
|
80
161
|
...item,
|
|
81
162
|
spanId: item.spanId ?? createSpanId(),
|
|
@@ -85,6 +166,7 @@ function parseLedger(raw, runId) {
|
|
|
85
166
|
? item.retryCount
|
|
86
167
|
: 0,
|
|
87
168
|
evidenceRefs: Array.isArray(item.evidenceRefs) ? item.evidenceRefs : [],
|
|
169
|
+
fulfillmentMode: inferredFulfillmentMode,
|
|
88
170
|
schemaVersion: 1
|
|
89
171
|
});
|
|
90
172
|
}
|
|
@@ -126,6 +208,12 @@ export async function appendDelegation(projectRoot, entry) {
|
|
|
126
208
|
if (!Array.isArray(stamped.evidenceRefs)) {
|
|
127
209
|
stamped.evidenceRefs = [];
|
|
128
210
|
}
|
|
211
|
+
if (stamped.status === "completed" && stamped.fulfillmentMode === undefined) {
|
|
212
|
+
const config = await readConfig(projectRoot).catch(() => null);
|
|
213
|
+
const harnesses = config?.harnesses ?? [];
|
|
214
|
+
const fallbacks = harnesses.map((h) => HARNESS_ADAPTERS[h].capabilities.subagentFallback);
|
|
215
|
+
stamped.fulfillmentMode = expectedFulfillmentMode(fallbacks);
|
|
216
|
+
}
|
|
129
217
|
// Idempotency: if a caller (or a retried hook) tries to append a row
|
|
130
218
|
// with a spanId that already exists in the ledger, treat it as a no-op
|
|
131
219
|
// instead of growing the log with duplicate entries that subsequent
|
|
@@ -174,51 +262,29 @@ export async function checkMandatoryDelegations(projectRoot, stage) {
|
|
|
174
262
|
const harnesses = config?.harnesses ?? [];
|
|
175
263
|
const fallbacks = harnesses.map((h) => HARNESS_ADAPTERS[h].capabilities.subagentFallback);
|
|
176
264
|
const expectedMode = expectedFulfillmentMode(fallbacks);
|
|
177
|
-
const
|
|
265
|
+
const reviewTriggers = stage === "review" ? await detectReviewTriggers(projectRoot) : null;
|
|
178
266
|
for (const agent of mandatory) {
|
|
179
267
|
const rows = forRun.filter((e) => e.agent === agent);
|
|
180
268
|
const completedRows = rows.filter((e) => e.status === "completed");
|
|
181
269
|
const waivedRows = rows.filter((e) => e.status === "waived");
|
|
182
|
-
const
|
|
270
|
+
const requiredCompletedCount = stage === "review" &&
|
|
271
|
+
agent === "reviewer" &&
|
|
272
|
+
reviewTriggers?.requireAdversarialReviewer
|
|
273
|
+
? 2
|
|
274
|
+
: 1;
|
|
275
|
+
const hasCompleted = completedRows.length >= requiredCompletedCount;
|
|
183
276
|
const hasWaived = waivedRows.length > 0;
|
|
184
277
|
const ok = hasCompleted || hasWaived;
|
|
185
278
|
if (!ok) {
|
|
186
|
-
|
|
187
|
-
const existingHarnessWaiver = rows.some((e) => e.status === "waived" && e.waiverReason === "harness_limitation");
|
|
188
|
-
if (!existingHarnessWaiver) {
|
|
189
|
-
await appendDelegation(projectRoot, {
|
|
190
|
-
stage,
|
|
191
|
-
agent,
|
|
192
|
-
mode: "mandatory",
|
|
193
|
-
status: "waived",
|
|
194
|
-
waiverReason: "harness_limitation",
|
|
195
|
-
fulfillmentMode: "harness-waiver",
|
|
196
|
-
ts: new Date().toISOString(),
|
|
197
|
-
runId: activeRunId
|
|
198
|
-
});
|
|
199
|
-
}
|
|
200
|
-
waived.push(agent);
|
|
201
|
-
autoWaived.push(agent);
|
|
202
|
-
}
|
|
203
|
-
else {
|
|
204
|
-
missing.push(agent);
|
|
205
|
-
}
|
|
279
|
+
missing.push(agent);
|
|
206
280
|
continue;
|
|
207
281
|
}
|
|
208
282
|
if (hasWaived) {
|
|
209
283
|
waived.push(agent);
|
|
210
284
|
}
|
|
211
|
-
// Evidence
|
|
212
|
-
//
|
|
213
|
-
|
|
214
|
-
// 2. Any completed row is explicitly stamped `fulfillmentMode:
|
|
215
|
-
// "role-switch"` — even in a mixed install. This closes the loop
|
|
216
|
-
// where a Codex session logs a role-switch completion inside a
|
|
217
|
-
// claude+codex project: the aggregate expectedMode is "isolated"
|
|
218
|
-
// (claude wins), so the role-switch row would previously sail
|
|
219
|
-
// through without evidenceRefs.
|
|
220
|
-
const hasExplicitRoleSwitchRow = completedRows.some((e) => e.fulfillmentMode === "role-switch");
|
|
221
|
-
const evidenceRequired = expectedMode === "role-switch" || hasExplicitRoleSwitchRow;
|
|
285
|
+
// Evidence is required for any non-isolated completion mode. Legacy rows
|
|
286
|
+
// without fulfillmentMode are inferred to `isolated` during parse.
|
|
287
|
+
const evidenceRequired = completedRows.some((e) => (e.fulfillmentMode ?? "isolated") !== "isolated");
|
|
222
288
|
if (hasCompleted &&
|
|
223
289
|
evidenceRequired &&
|
|
224
290
|
!completedRows.some((e) => Array.isArray(e.evidenceRefs) && e.evidenceRefs.length > 0)) {
|
package/dist/doctor.js
CHANGED
|
@@ -3,16 +3,16 @@ import path from "node:path";
|
|
|
3
3
|
import { execFile } from "node:child_process";
|
|
4
4
|
import { pathToFileURL } from "node:url";
|
|
5
5
|
import { promisify } from "node:util";
|
|
6
|
-
import {
|
|
6
|
+
import { REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
|
|
7
7
|
import { CCLAW_AGENTS } from "./content/core-agents.js";
|
|
8
|
-
import { readConfig } from "./config.js";
|
|
8
|
+
import { detectAdvancedKeys, readConfig } from "./config.js";
|
|
9
9
|
import { exists } from "./fs-utils.js";
|
|
10
10
|
import { gitignoreHasRequiredPatterns } from "./gitignore.js";
|
|
11
11
|
import { HARNESS_ADAPTERS, CCLAW_MARKER_START, CCLAW_MARKER_END, harnessShimFileNames, harnessShimSkillNames } from "./harness-adapters.js";
|
|
12
12
|
import { policyChecks } from "./policy.js";
|
|
13
13
|
import { readFlowState } from "./runs.js";
|
|
14
14
|
import { skippedStagesForTrack } from "./flow-state.js";
|
|
15
|
-
import { TRACK_STAGES } from "./types.js";
|
|
15
|
+
import { FLOW_STAGES, TRACK_STAGES } from "./types.js";
|
|
16
16
|
import { checkMandatoryDelegations } from "./delegation.js";
|
|
17
17
|
import { ensureFeatureSystem, listFeatures, readActiveFeature, readFeatureWorktreeRegistry, resolveFeatureWorkspacePath, worktreeRegistryPath } from "./feature-system.js";
|
|
18
18
|
import { buildTraceMatrix } from "./trace-matrix.js";
|
|
@@ -280,7 +280,7 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
280
280
|
details: fullPath
|
|
281
281
|
});
|
|
282
282
|
}
|
|
283
|
-
for (const stage of
|
|
283
|
+
for (const stage of FLOW_STAGES) {
|
|
284
284
|
const commandPath = path.join(projectRoot, RUNTIME_ROOT, "commands", `${stage}.md`);
|
|
285
285
|
checks.push({
|
|
286
286
|
name: `command:${stage}`,
|
|
@@ -377,7 +377,7 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
377
377
|
// skill's Examples section points here; the file MUST exist or the pointer
|
|
378
378
|
// is a dangling link.
|
|
379
379
|
const stageRefDir = path.join(projectRoot, RUNTIME_ROOT, "references", "stages");
|
|
380
|
-
for (const stage of
|
|
380
|
+
for (const stage of FLOW_STAGES) {
|
|
381
381
|
const refPath = path.join(stageRefDir, `${stage}-examples.md`);
|
|
382
382
|
checks.push({
|
|
383
383
|
name: `stage_examples_ref:${stage}`,
|
|
@@ -430,6 +430,18 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
430
430
|
});
|
|
431
431
|
}
|
|
432
432
|
if (parsedConfig) {
|
|
433
|
+
const advancedKeys = await detectAdvancedKeys(projectRoot).catch(() => new Set());
|
|
434
|
+
const hasLegacyTddTestGlobs = advancedKeys.has("tddTestGlobs");
|
|
435
|
+
const hasModernTddConfig = advancedKeys.has("tdd");
|
|
436
|
+
checks.push({
|
|
437
|
+
name: "warning:config:deprecated_tdd_test_globs",
|
|
438
|
+
ok: !hasLegacyTddTestGlobs,
|
|
439
|
+
details: hasLegacyTddTestGlobs
|
|
440
|
+
? hasModernTddConfig
|
|
441
|
+
? `warning: ${RUNTIME_ROOT}/config.yaml sets deprecated "tddTestGlobs" alongside "tdd.*"; "tdd.testPathPatterns" takes precedence. Remove legacy key.`
|
|
442
|
+
: `warning: ${RUNTIME_ROOT}/config.yaml uses deprecated "tddTestGlobs". Migrate to "tdd.testPathPatterns".`
|
|
443
|
+
: `no deprecated "tddTestGlobs" key detected in ${RUNTIME_ROOT}/config.yaml`
|
|
444
|
+
});
|
|
433
445
|
const expectedMode = parsedConfig.promptGuardMode === "strict" ? "strict" : "advisory";
|
|
434
446
|
const promptGuardPath = path.join(projectRoot, RUNTIME_ROOT, "hooks", "prompt-guard.sh");
|
|
435
447
|
let promptGuardModeOk = false;
|
|
@@ -1191,6 +1203,62 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
1191
1203
|
ok: await exists(path.join(projectRoot, RUNTIME_ROOT, "state", "harness-gaps.json")),
|
|
1192
1204
|
details: `${RUNTIME_ROOT}/state/harness-gaps.json must exist for tiered harness capability tracking`
|
|
1193
1205
|
});
|
|
1206
|
+
const adapterManifestPath = path.join(projectRoot, RUNTIME_ROOT, "adapters", "manifest.json");
|
|
1207
|
+
const adapterManifestExists = await exists(adapterManifestPath);
|
|
1208
|
+
checks.push({
|
|
1209
|
+
name: "state:adapter_manifest_exists",
|
|
1210
|
+
ok: adapterManifestExists,
|
|
1211
|
+
details: `${RUNTIME_ROOT}/adapters/manifest.json must exist for harness adapter provenance`
|
|
1212
|
+
});
|
|
1213
|
+
if (adapterManifestExists) {
|
|
1214
|
+
let harnessesOk = false;
|
|
1215
|
+
let harnessesDetails = "";
|
|
1216
|
+
let sourcesOk = false;
|
|
1217
|
+
let sourcesDetails = "";
|
|
1218
|
+
try {
|
|
1219
|
+
const parsed = JSON.parse(await fs.readFile(adapterManifestPath, "utf8"));
|
|
1220
|
+
const manifestHarnesses = Array.isArray(parsed.harnesses)
|
|
1221
|
+
? parsed.harnesses.filter((entry) => typeof entry === "string")
|
|
1222
|
+
: [];
|
|
1223
|
+
const expectedHarnesses = configuredHarnesses.length > 0
|
|
1224
|
+
? [...new Set(configuredHarnesses)].sort()
|
|
1225
|
+
: null;
|
|
1226
|
+
const actualHarnesses = [...new Set(manifestHarnesses)].sort();
|
|
1227
|
+
harnessesOk = expectedHarnesses
|
|
1228
|
+
? actualHarnesses.length === expectedHarnesses.length &&
|
|
1229
|
+
actualHarnesses.every((harness, index) => harness === expectedHarnesses[index])
|
|
1230
|
+
: actualHarnesses.length > 0;
|
|
1231
|
+
harnessesDetails = expectedHarnesses
|
|
1232
|
+
? harnessesOk
|
|
1233
|
+
? `adapter manifest harnesses match config.yaml: ${actualHarnesses.join(", ")}`
|
|
1234
|
+
: `adapter manifest harnesses [${actualHarnesses.join(", ")}] do not match config.yaml [${expectedHarnesses.join(", ")}]`
|
|
1235
|
+
: harnessesOk
|
|
1236
|
+
? `adapter manifest declares harnesses: ${actualHarnesses.join(", ")}`
|
|
1237
|
+
: "adapter manifest must declare at least one harness";
|
|
1238
|
+
const commandSource = typeof parsed.commandSource === "string" ? parsed.commandSource.trim() : "";
|
|
1239
|
+
const skillSource = typeof parsed.skillSource === "string" ? parsed.skillSource.trim() : "";
|
|
1240
|
+
sourcesOk = commandSource.length > 0 && skillSource.length > 0;
|
|
1241
|
+
sourcesDetails = sourcesOk
|
|
1242
|
+
? `adapter manifest source globs are set (commandSource=${commandSource}; skillSource=${skillSource})`
|
|
1243
|
+
: "adapter manifest must include non-empty commandSource and skillSource";
|
|
1244
|
+
}
|
|
1245
|
+
catch {
|
|
1246
|
+
harnessesOk = false;
|
|
1247
|
+
harnessesDetails = "adapter manifest must be valid JSON with a harnesses array";
|
|
1248
|
+
sourcesOk = false;
|
|
1249
|
+
sourcesDetails = "adapter manifest must be valid JSON with source globs";
|
|
1250
|
+
}
|
|
1251
|
+
checks.push({
|
|
1252
|
+
name: "state:adapter_manifest_harnesses",
|
|
1253
|
+
ok: harnessesOk,
|
|
1254
|
+
details: harnessesDetails
|
|
1255
|
+
});
|
|
1256
|
+
checks.push({
|
|
1257
|
+
name: "state:adapter_manifest_sources",
|
|
1258
|
+
ok: sourcesOk,
|
|
1259
|
+
details: sourcesDetails
|
|
1260
|
+
});
|
|
1261
|
+
}
|
|
1194
1262
|
const contextModeStatePath = path.join(projectRoot, RUNTIME_ROOT, "state", "context-mode.json");
|
|
1195
1263
|
checks.push({
|
|
1196
1264
|
name: "state:context_mode_exists",
|
|
@@ -1276,7 +1344,7 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
1276
1344
|
name: "flow_state:track",
|
|
1277
1345
|
ok: skippedConsistent,
|
|
1278
1346
|
details: skippedConsistent
|
|
1279
|
-
? `active track "${activeTrack}" (${trackStageList.length}/${
|
|
1347
|
+
? `active track "${activeTrack}" (${trackStageList.length}/${FLOW_STAGES.length} stages: ${trackStageList.join(" → ")})${expectedSkipped.length > 0 ? `; skippedStages=${expectedSkipped.join(", ")}` : ""}`
|
|
1280
1348
|
: `track "${activeTrack}" expects skippedStages=[${expectedSkipped.join(", ")}] but flow-state has [${skippedFromState.join(", ")}] — run \`cclaw sync\` to repair`
|
|
1281
1349
|
});
|
|
1282
1350
|
if (parsedConfig?.trackHeuristics) {
|
|
@@ -1441,7 +1509,7 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
1441
1509
|
? "no legacy .cclaw/features snapshot entries remain"
|
|
1442
1510
|
: `legacy snapshot entries still present (read-only): ${legacyWorkspaceEntries.join(", ")}`
|
|
1443
1511
|
});
|
|
1444
|
-
const staleStages = Object.keys(flowState.staleStages).filter((value) =>
|
|
1512
|
+
const staleStages = Object.keys(flowState.staleStages).filter((value) => FLOW_STAGES.includes(value));
|
|
1445
1513
|
checks.push({
|
|
1446
1514
|
name: "state:stale_stages_resolved",
|
|
1447
1515
|
ok: staleStages.length === 0,
|
|
@@ -1667,10 +1735,10 @@ export async function doctorChecks(projectRoot, options = {}) {
|
|
|
1667
1735
|
const stageOrder = parsed.stage_order;
|
|
1668
1736
|
const stageGates = parsed.stage_gates;
|
|
1669
1737
|
const hasStageOrder = Array.isArray(stageOrder) &&
|
|
1670
|
-
|
|
1738
|
+
FLOW_STAGES.every((stage) => stageOrder.includes(stage));
|
|
1671
1739
|
const hasStageGates = typeof stageGates === "object" &&
|
|
1672
1740
|
stageGates !== null &&
|
|
1673
|
-
|
|
1741
|
+
FLOW_STAGES.every((stage) => Array.isArray(stageGates[stage]));
|
|
1674
1742
|
hasRules = hasCoreLists && hasStageOrder && hasStageGates;
|
|
1675
1743
|
}
|
|
1676
1744
|
catch {
|
package/dist/flow-state.d.ts
CHANGED
|
@@ -43,6 +43,14 @@ export interface RetroState {
|
|
|
43
43
|
* automatic step.
|
|
44
44
|
* - `archived` — archive completed in this session (transient — archive
|
|
45
45
|
* resets flow-state so this value does not persist between runs).
|
|
46
|
+
*
|
|
47
|
+
* Layer separation (intentional):
|
|
48
|
+
* - `next: "done"` in stage schema means "the flow stage chain ended".
|
|
49
|
+
* - `shipSubstate: "archived"` is closeout-machine progress after ship.
|
|
50
|
+
* - `shipSubstate: "idle"` is the default closeout value before ship.
|
|
51
|
+
*
|
|
52
|
+
* These are not duplicates: `done` lives in stage transitions; `archived` /
|
|
53
|
+
* `idle` live in closeout lifecycle state.
|
|
46
54
|
*/
|
|
47
55
|
export declare const SHIP_SUBSTATES: readonly ["idle", "retro_review", "compound_review", "ready_to_archive", "archived"];
|
|
48
56
|
export type ShipSubstate = (typeof SHIP_SUBSTATES)[number];
|
package/dist/flow-state.js
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { COMMAND_FILE_ORDER } from "./constants.js";
|
|
2
1
|
import { buildTransitionRules, orderedStageSchemas, stageGateIds, stageRecommendedGateIds } from "./content/stage-schema.js";
|
|
3
2
|
import { FLOW_STAGES, FLOW_TRACKS, TRACK_STAGES } from "./types.js";
|
|
4
3
|
export const TRANSITION_RULES = buildTransitionRules();
|
|
@@ -17,6 +16,14 @@ export const TRANSITION_RULES = buildTransitionRules();
|
|
|
17
16
|
* automatic step.
|
|
18
17
|
* - `archived` — archive completed in this session (transient — archive
|
|
19
18
|
* resets flow-state so this value does not persist between runs).
|
|
19
|
+
*
|
|
20
|
+
* Layer separation (intentional):
|
|
21
|
+
* - `next: "done"` in stage schema means "the flow stage chain ended".
|
|
22
|
+
* - `shipSubstate: "archived"` is closeout-machine progress after ship.
|
|
23
|
+
* - `shipSubstate: "idle"` is the default closeout value before ship.
|
|
24
|
+
*
|
|
25
|
+
* These are not duplicates: `done` lives in stage transitions; `archived` /
|
|
26
|
+
* `idle` live in closeout lifecycle state.
|
|
20
27
|
*/
|
|
21
28
|
export const SHIP_SUBSTATES = [
|
|
22
29
|
"idle",
|
|
@@ -98,11 +105,7 @@ export function nextStage(stage, track = "standard") {
|
|
|
98
105
|
const ordered = TRACK_STAGES[track];
|
|
99
106
|
const index = ordered.indexOf(stage);
|
|
100
107
|
if (index < 0) {
|
|
101
|
-
|
|
102
|
-
if (fallback < 0 || fallback === COMMAND_FILE_ORDER.length - 1) {
|
|
103
|
-
return null;
|
|
104
|
-
}
|
|
105
|
-
return COMMAND_FILE_ORDER[fallback + 1];
|
|
108
|
+
return null;
|
|
106
109
|
}
|
|
107
110
|
if (index === ordered.length - 1) {
|
|
108
111
|
return null;
|
|
@@ -116,11 +119,11 @@ export function previousStage(stage, track = "standard") {
|
|
|
116
119
|
return null;
|
|
117
120
|
}
|
|
118
121
|
if (index < 0) {
|
|
119
|
-
const fallback =
|
|
122
|
+
const fallback = FLOW_STAGES.indexOf(stage);
|
|
120
123
|
if (fallback <= 0) {
|
|
121
124
|
return null;
|
|
122
125
|
}
|
|
123
|
-
return
|
|
126
|
+
return FLOW_STAGES[fallback - 1];
|
|
124
127
|
}
|
|
125
128
|
return ordered[index - 1];
|
|
126
129
|
}
|
package/dist/gate-evidence.js
CHANGED
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
import fs from "node:fs/promises";
|
|
2
2
|
import path from "node:path";
|
|
3
|
-
import { checkReviewVerdictConsistency, extractMarkdownSectionBody, lintArtifact, validateReviewArmy } from "./artifact-linter.js";
|
|
3
|
+
import { checkReviewSecurityNoChangeAttestation, checkReviewVerdictConsistency, extractMarkdownSectionBody, lintArtifact, validateReviewArmy } from "./artifact-linter.js";
|
|
4
4
|
import { RUNTIME_ROOT } from "./constants.js";
|
|
5
5
|
import { stageSchema } from "./content/stage-schema.js";
|
|
6
|
+
import { readDelegationLedger } from "./delegation.js";
|
|
6
7
|
import { ensureDir, exists, writeFileSafe } from "./fs-utils.js";
|
|
8
|
+
import { detectPublicApiChanges } from "./internal/detect-public-api-changes.js";
|
|
7
9
|
import { readFlowState, writeFlowState } from "./runs.js";
|
|
8
10
|
import { buildTraceMatrix } from "./trace-matrix.js";
|
|
9
11
|
import { FLOW_STAGES } from "./types.js";
|
|
@@ -228,6 +230,10 @@ export async function verifyCurrentStageGateEvidence(projectRoot, flowState) {
|
|
|
228
230
|
if (!verdictConsistency.ok) {
|
|
229
231
|
issues.push(`review verdict inconsistency: ${verdictConsistency.errors.join("; ")}`);
|
|
230
232
|
}
|
|
233
|
+
const securityAttestation = await checkReviewSecurityNoChangeAttestation(projectRoot);
|
|
234
|
+
if (!securityAttestation.ok) {
|
|
235
|
+
issues.push(`review security attestation failed: ${securityAttestation.errors.join("; ")}`);
|
|
236
|
+
}
|
|
231
237
|
const traceGateRequired = schema.requiredGates.some((gate) => gate.id === "review_trace_matrix_clean" && gate.tier === "required");
|
|
232
238
|
if (traceGateRequired) {
|
|
233
239
|
const trace = await buildTraceMatrix(projectRoot);
|
|
@@ -282,6 +288,19 @@ export async function verifyCurrentStageGateEvidence(projectRoot, flowState) {
|
|
|
282
288
|
}
|
|
283
289
|
}
|
|
284
290
|
}
|
|
291
|
+
if (stage === "tdd") {
|
|
292
|
+
const docsDriftDetection = await detectPublicApiChanges(projectRoot);
|
|
293
|
+
if (docsDriftDetection.triggered) {
|
|
294
|
+
const ledger = await readDelegationLedger(projectRoot);
|
|
295
|
+
const hasDocUpdaterCompletion = ledger.entries.some((entry) => entry.runId === flowState.activeRunId &&
|
|
296
|
+
entry.stage === "tdd" &&
|
|
297
|
+
entry.agent === "doc-updater" &&
|
|
298
|
+
entry.status === "completed");
|
|
299
|
+
if (!hasDocUpdaterCompletion) {
|
|
300
|
+
issues.push(`tdd docs drift gate blocked (tdd_docs_drift_check): public surface changes detected (${docsDriftDetection.changedFiles.join(", ")}) but no completed doc-updater delegation exists for the active run.`);
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
}
|
|
285
304
|
}
|
|
286
305
|
const passedSet = new Set(catalog.passed);
|
|
287
306
|
const missingRequired = required.filter((gateId) => !passedSet.has(gateId));
|
|
@@ -17,8 +17,8 @@ export type SubagentFallback =
|
|
|
17
17
|
*/
|
|
18
18
|
| "role-switch"
|
|
19
19
|
/**
|
|
20
|
-
*
|
|
21
|
-
*
|
|
20
|
+
* Reserved escape hatch for future harnesses with no parity path.
|
|
21
|
+
* Current shipped harnesses do not use this fallback.
|
|
22
22
|
*/
|
|
23
23
|
| "waiver";
|
|
24
24
|
/**
|
package/dist/harness-adapters.js
CHANGED
|
@@ -222,7 +222,7 @@ When in doubt, prefer **non-trivial** — the quick track is opt-in and only saf
|
|
|
222
222
|
|---|---|
|
|
223
223
|
| \`/cc\` | **Entry point.** No args = resume current stage. With prompt = classify task and start the right flow. |
|
|
224
224
|
| \`/cc-next\` | **Progression.** Advances to the next stage when current is complete. |
|
|
225
|
-
| \`/cc-ideate\` | **
|
|
225
|
+
| \`/cc-ideate\` | **Ideate mode.** Generates a ranked repo-improvement backlog before implementation. |
|
|
226
226
|
| \`/cc-view\` | **Read-only router.** Unified entry for status/tree/diff views. |
|
|
227
227
|
| \`/cc-ops\` | **Operations router.** Unified entry for feature/tdd-log/retro/compound/archive/rewind actions. |
|
|
228
228
|
|
|
@@ -356,7 +356,7 @@ function codexSkillDescription(command) {
|
|
|
356
356
|
case "next":
|
|
357
357
|
return `Advance the cclaw flow to the next stage. Use when the user types \`/cc-next\` or asks to "move to the next stage", "continue the flow", "advance cclaw", "progress the workflow", or when the current stage skill reports completion and gates have passed.`;
|
|
358
358
|
case "ideate":
|
|
359
|
-
return `Read-only repo-improvement
|
|
359
|
+
return `Read-only repo-improvement ideate mode for cclaw. Use when the user types \`/cc-ideate\` or asks to "ideate", "scan the repo for TODOs/tech debt", "generate a backlog", or wants a ranked list of candidate ideas before committing to a single flow. Does not mutate \`.cclaw/state/flow-state.json\`.`;
|
|
360
360
|
case "view":
|
|
361
361
|
return `Read-only router for cclaw flow views. Use when the user types \`/cc-view\`, \`/cc-view status\`, \`/cc-view tree\`, \`/cc-view diff\`, or asks to "show cclaw status", "show the flow tree", "diff flow state", or wants a snapshot without mutation.`;
|
|
362
362
|
case "ops":
|
package/dist/install.js
CHANGED
|
@@ -2,9 +2,9 @@ import { execFile } from "node:child_process";
|
|
|
2
2
|
import fs from "node:fs/promises";
|
|
3
3
|
import path from "node:path";
|
|
4
4
|
import { promisify } from "node:util";
|
|
5
|
-
import { CCLAW_VERSION,
|
|
5
|
+
import { CCLAW_VERSION, FLOW_VERSION, REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
|
|
6
6
|
import { writeConfig, createDefaultConfig, readConfig, configPath, detectLanguageRulePacks, detectAdvancedKeys } from "./config.js";
|
|
7
|
-
import {
|
|
7
|
+
import { stageCommandContract } from "./content/contracts.js";
|
|
8
8
|
import { contextModeFiles, createInitialContextModeState } from "./content/contexts.js";
|
|
9
9
|
import { learnSkillMarkdown, learnCommandContract } from "./content/learnings.js";
|
|
10
10
|
import { nextCommandContract, nextCommandSkillMarkdown } from "./content/next-command.js";
|
|
@@ -36,7 +36,7 @@ import { LANGUAGE_RULE_PACK_DIR, LANGUAGE_RULE_PACK_FILES, LANGUAGE_RULE_PACK_GE
|
|
|
36
36
|
import { RESEARCH_PLAYBOOKS } from "./content/research-playbooks.js";
|
|
37
37
|
import { HARNESS_TOOL_REFS_DIR, HARNESS_TOOL_REFS_INDEX_MD, harnessToolRefMarkdown } from "./content/harness-tool-refs.js";
|
|
38
38
|
import { DOCTOR_REFERENCE_MARKDOWN } from "./content/doctor-references.js";
|
|
39
|
-
import { harnessDocsOverviewMarkdown, harnessIntegrationDocMarkdown } from "./content/
|
|
39
|
+
import { harnessDocsOverviewMarkdown, harnessIntegrationDocMarkdown } from "./content/harness-doc.js";
|
|
40
40
|
import { HARNESS_PLAYBOOKS_DIR, harnessPlaybookFileName, harnessPlaybookMarkdown, harnessPlaybooksIndexMarkdown } from "./content/harness-playbooks.js";
|
|
41
41
|
import { HOOK_EVENTS_BY_HARNESS, HOOK_SEMANTIC_EVENTS } from "./content/hook-events.js";
|
|
42
42
|
import { createInitialFlowState } from "./flow-state.js";
|
|
@@ -45,6 +45,7 @@ import { ensureGitignore, removeGitignorePatterns } from "./gitignore.js";
|
|
|
45
45
|
import { HARNESS_ADAPTERS, harnessShimFileNames, harnessTier, syncHarnessShims, removeCclawFromAgentsMd } from "./harness-adapters.js";
|
|
46
46
|
import { validateHookDocument } from "./hook-schema.js";
|
|
47
47
|
import { ensureRunSystem, readFlowState } from "./runs.js";
|
|
48
|
+
import { FLOW_STAGES } from "./types.js";
|
|
48
49
|
const OPENCODE_PLUGIN_REL_PATH = ".opencode/plugins/cclaw-plugin.mjs";
|
|
49
50
|
const CURSOR_RULE_REL_PATH = ".cursor/rules/cclaw-workflow.mdc";
|
|
50
51
|
const GIT_HOOK_MANAGED_MARKER = "cclaw-managed-git-hook";
|
|
@@ -177,8 +178,8 @@ async function ensureStructure(projectRoot) {
|
|
|
177
178
|
}
|
|
178
179
|
}
|
|
179
180
|
/**
 * Materialize one command-contract markdown file per flow stage under the
 * runtime "commands" directory (e.g. commands/design.md, commands/tdd.md).
 *
 * @param {string} projectRoot - Absolute path to the project being installed.
 * @returns {Promise<void>}
 */
async function writeCommandContracts(projectRoot) {
  // Stages are written sequentially; writeFileSafe is best-effort and the
  // set of stages is small, so parallelism is not worth the complexity.
  for (const stageName of FLOW_STAGES) {
    const targetPath = runtimePath(projectRoot, "commands", `${stageName}.md`);
    await writeFileSafe(targetPath, stageCommandContract(stageName));
  }
}
|
|
184
185
|
async function writeArtifactTemplates(projectRoot) {
|
|
@@ -214,7 +215,7 @@ async function writeEvalScaffold(projectRoot) {
|
|
|
214
215
|
}
|
|
215
216
|
async function writeSkills(projectRoot, config) {
|
|
216
217
|
const skillTrack = config?.defaultTrack ?? "standard";
|
|
217
|
-
for (const stage of
|
|
218
|
+
for (const stage of FLOW_STAGES) {
|
|
218
219
|
const folder = stageSkillFolder(stage);
|
|
219
220
|
await writeFileSafe(runtimePath(projectRoot, "skills", folder, "SKILL.md"), stageSkillMarkdown(stage, skillTrack));
|
|
220
221
|
// Progressive disclosure (A.2#8): materialize the full example artifact as
|
|
@@ -1114,6 +1115,27 @@ async function cleanLegacyArtifacts(projectRoot) {
|
|
|
1114
1115
|
// best-effort cleanup
|
|
1115
1116
|
}
|
|
1116
1117
|
}
|
|
1118
|
+
// D-4 terminology migration: rename historical ideation artifacts to the
|
|
1119
|
+
// canonical ideate-* naming without deleting user-authored content.
|
|
1120
|
+
const artifactsDir = runtimePath(projectRoot, "artifacts");
|
|
1121
|
+
try {
|
|
1122
|
+
const entries = await fs.readdir(artifactsDir);
|
|
1123
|
+
for (const entry of entries) {
|
|
1124
|
+
const match = /^ideation-(.+\.md)$/u.exec(entry);
|
|
1125
|
+
if (!match)
|
|
1126
|
+
continue;
|
|
1127
|
+
const nextName = `ideate-${match[1]}`;
|
|
1128
|
+
const from = path.join(artifactsDir, entry);
|
|
1129
|
+
const to = path.join(artifactsDir, nextName);
|
|
1130
|
+
if (await exists(to)) {
|
|
1131
|
+
continue;
|
|
1132
|
+
}
|
|
1133
|
+
await fs.rename(from, to);
|
|
1134
|
+
}
|
|
1135
|
+
}
|
|
1136
|
+
catch {
|
|
1137
|
+
// no artifacts directory yet (fresh init) or read-only FS
|
|
1138
|
+
}
|
|
1117
1139
|
}
|
|
1118
1140
|
async function cleanStaleFiles(projectRoot) {
|
|
1119
1141
|
const expectedShimFiles = new Set(harnessShimFileNames());
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
// Promisified execFile so git invocations can be awaited directly.
const execFileAsync = promisify(execFile);
// Path patterns that mark a changed file as part of the project's public
// surface. Matching is case-insensitive and anchored to path segments
// (either the start of the path or a "/" boundary).
const PUBLIC_SURFACE_PATH_PATTERNS = [
  // CLI entry points, type declarations, and config modules: cli.js,
  // types.ts, type.mjs, config.cts, etc.
  /(^|\/)(cli|types?|config)\.[cm]?[jt]s$/iu,
  // API-description artifacts: any path segment or file name starting
  // with openapi/swagger/schema followed by "/", "-", "_" or ".".
  /(^|\/)(openapi|swagger|schema)(\/|[-_.])/iu,
  // API, command, and flag modules/directories (singular or plural).
  /(^|\/)(api|commands?|flags?)(\/|[-_.])/iu,
  // Package manifests whose changes can alter the published interface.
  /(^|\/)(package|tsconfig)\.json$/iu
];
|
|
10
|
+
/**
 * Resolve the git commit to diff against: the first parent of HEAD.
 *
 * @param {string} projectRoot - Directory in which to run git.
 * @returns {Promise<string|null>} The parent commit SHA, or null when it
 *   cannot be resolved (fresh repo with a single commit, not a git
 *   checkout, or git unavailable).
 */
async function resolveDiffBase(projectRoot) {
  try {
    const result = await execFileAsync("git", ["rev-parse", "HEAD~1"], {
      cwd: projectRoot
    });
    const sha = result.stdout.trim();
    return sha === "" ? null : sha;
  }
  catch {
    // Best-effort: callers treat null as "nothing to diff".
    return null;
  }
}
|
|
22
|
+
/**
 * Detect whether the most recent commit touched files that look like part
 * of the project's public surface (CLI/type/config modules, API schemas,
 * command/flag modules, package manifests).
 *
 * @param {string} projectRoot - Directory in which to run git.
 * @returns {Promise<{triggered: boolean, changedFiles: string[]}>}
 *   `triggered` is true when at least one public-surface file changed in
 *   HEAD~1..HEAD; `changedFiles` lists the matching paths. Best-effort:
 *   any git failure yields { triggered: false, changedFiles: [] }.
 */
export async function detectPublicApiChanges(projectRoot) {
  const base = await resolveDiffBase(projectRoot);
  if (base === null) {
    // Nothing to diff against (single-commit repo or no git).
    return { triggered: false, changedFiles: [] };
  }
  try {
    const { stdout } = await execFileAsync(
      "git",
      ["diff", "--name-only", `${base}..HEAD`],
      { cwd: projectRoot }
    );
    const changedFiles = [];
    for (const rawLine of stdout.split(/\r?\n/gu)) {
      const filePath = rawLine.trim();
      if (filePath.length === 0) {
        continue;
      }
      if (PUBLIC_SURFACE_PATH_PATTERNS.some((pattern) => pattern.test(filePath))) {
        changedFiles.push(filePath);
      }
    }
    return {
      triggered: changedFiles.length > 0,
      changedFiles
    };
  }
  catch {
    // git diff failed — treat as "no public API change detected".
    return { triggered: false, changedFiles: [] };
  }
}