cclaw-cli 0.47.0 → 0.48.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +3 -1
  2. package/dist/artifact-linter.d.ts +9 -2
  3. package/dist/artifact-linter.js +45 -2
  4. package/dist/config.d.ts +6 -6
  5. package/dist/config.js +22 -0
  6. package/dist/constants.d.ts +10 -1
  7. package/dist/constants.js +19 -10
  8. package/dist/content/contracts.d.ts +1 -1
  9. package/dist/content/contracts.js +1 -1
  10. package/dist/content/core-agents.d.ts +53 -1
  11. package/dist/content/core-agents.js +6 -0
  12. package/dist/content/{harnesses-doc.js → harness-doc.js} +32 -1
  13. package/dist/content/harness-playbooks.js +4 -4
  14. package/dist/content/ideate-command.js +19 -19
  15. package/dist/content/observe.js +22 -1
  16. package/dist/content/opencode-plugin.js +5 -1
  17. package/dist/content/skills.js +2 -2
  18. package/dist/content/stage-schema.js +36 -8
  19. package/dist/content/stages/design.js +2 -2
  20. package/dist/content/stages/review.js +1 -1
  21. package/dist/content/stages/ship.js +2 -0
  22. package/dist/content/stages/tdd.js +8 -4
  23. package/dist/content/templates.js +15 -13
  24. package/dist/content/utility-skills.d.ts +7 -1
  25. package/dist/content/utility-skills.js +5 -0
  26. package/dist/delegation.d.ts +10 -0
  27. package/dist/delegation.js +111 -33
  28. package/dist/doctor.js +80 -12
  29. package/dist/flow-state.d.ts +9 -1
  30. package/dist/flow-state.js +26 -9
  31. package/dist/fs-utils.d.ts +9 -0
  32. package/dist/fs-utils.js +35 -1
  33. package/dist/gate-evidence.js +21 -2
  34. package/dist/gitignore.js +6 -3
  35. package/dist/harness-adapters.d.ts +2 -2
  36. package/dist/harness-adapters.js +13 -3
  37. package/dist/install.js +68 -10
  38. package/dist/internal/detect-public-api-changes.d.ts +5 -0
  39. package/dist/internal/detect-public-api-changes.js +45 -0
  40. package/dist/knowledge-store.js +2 -2
  41. package/dist/policy.js +3 -2
  42. package/dist/retro-gate.js +41 -15
  43. package/dist/run-archive.js +63 -33
  44. package/dist/run-persistence.js +12 -4
  45. package/dist/tdd-cycle.js +6 -1
  46. package/dist/types.d.ts +6 -1
  47. package/package.json +4 -1
  48. package/dist/content/{harnesses-doc.d.ts → harness-doc.d.ts} +0 -0
package/dist/doctor.js CHANGED
@@ -3,16 +3,16 @@ import path from "node:path";
3
3
  import { execFile } from "node:child_process";
4
4
  import { pathToFileURL } from "node:url";
5
5
  import { promisify } from "node:util";
6
- import { COMMAND_FILE_ORDER, REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
6
+ import { REQUIRED_DIRS, RUNTIME_ROOT, UTILITY_COMMANDS } from "./constants.js";
7
7
  import { CCLAW_AGENTS } from "./content/core-agents.js";
8
- import { readConfig } from "./config.js";
8
+ import { detectAdvancedKeys, readConfig } from "./config.js";
9
9
  import { exists } from "./fs-utils.js";
10
10
  import { gitignoreHasRequiredPatterns } from "./gitignore.js";
11
11
  import { HARNESS_ADAPTERS, CCLAW_MARKER_START, CCLAW_MARKER_END, harnessShimFileNames, harnessShimSkillNames } from "./harness-adapters.js";
12
12
  import { policyChecks } from "./policy.js";
13
13
  import { readFlowState } from "./runs.js";
14
14
  import { skippedStagesForTrack } from "./flow-state.js";
15
- import { TRACK_STAGES } from "./types.js";
15
+ import { FLOW_STAGES, TRACK_STAGES } from "./types.js";
16
16
  import { checkMandatoryDelegations } from "./delegation.js";
17
17
  import { ensureFeatureSystem, listFeatures, readActiveFeature, readFeatureWorktreeRegistry, resolveFeatureWorkspacePath, worktreeRegistryPath } from "./feature-system.js";
18
18
  import { buildTraceMatrix } from "./trace-matrix.js";
@@ -280,7 +280,7 @@ export async function doctorChecks(projectRoot, options = {}) {
280
280
  details: fullPath
281
281
  });
282
282
  }
283
- for (const stage of COMMAND_FILE_ORDER) {
283
+ for (const stage of FLOW_STAGES) {
284
284
  const commandPath = path.join(projectRoot, RUNTIME_ROOT, "commands", `${stage}.md`);
285
285
  checks.push({
286
286
  name: `command:${stage}`,
@@ -377,7 +377,7 @@ export async function doctorChecks(projectRoot, options = {}) {
377
377
  // skill's Examples section points here; the file MUST exist or the pointer
378
378
  // is a dangling link.
379
379
  const stageRefDir = path.join(projectRoot, RUNTIME_ROOT, "references", "stages");
380
- for (const stage of COMMAND_FILE_ORDER) {
380
+ for (const stage of FLOW_STAGES) {
381
381
  const refPath = path.join(stageRefDir, `${stage}-examples.md`);
382
382
  checks.push({
383
383
  name: `stage_examples_ref:${stage}`,
@@ -430,6 +430,18 @@ export async function doctorChecks(projectRoot, options = {}) {
430
430
  });
431
431
  }
432
432
  if (parsedConfig) {
433
+ const advancedKeys = await detectAdvancedKeys(projectRoot).catch(() => new Set());
434
+ const hasLegacyTddTestGlobs = advancedKeys.has("tddTestGlobs");
435
+ const hasModernTddConfig = advancedKeys.has("tdd");
436
+ checks.push({
437
+ name: "warning:config:deprecated_tdd_test_globs",
438
+ ok: !hasLegacyTddTestGlobs,
439
+ details: hasLegacyTddTestGlobs
440
+ ? hasModernTddConfig
441
+ ? `warning: ${RUNTIME_ROOT}/config.yaml sets deprecated "tddTestGlobs" alongside "tdd.*"; "tdd.testPathPatterns" takes precedence. Remove legacy key.`
442
+ : `warning: ${RUNTIME_ROOT}/config.yaml uses deprecated "tddTestGlobs". Migrate to "tdd.testPathPatterns".`
443
+ : `no deprecated "tddTestGlobs" key detected in ${RUNTIME_ROOT}/config.yaml`
444
+ });
433
445
  const expectedMode = parsedConfig.promptGuardMode === "strict" ? "strict" : "advisory";
434
446
  const promptGuardPath = path.join(projectRoot, RUNTIME_ROOT, "hooks", "prompt-guard.sh");
435
447
  let promptGuardModeOk = false;
@@ -535,8 +547,8 @@ export async function doctorChecks(projectRoot, options = {}) {
535
547
  ok: agentsBlockOk,
536
548
  details: `${agentsFile} must contain the managed cclaw marker block with routing, verification, and minimal detail pointer`
537
549
  });
538
- // Utility commands
539
- for (const cmd of ["learn", "next", "ideate", "status", "tree", "diff", "feature", "tdd-log", "retro", "compound", "rewind"]) {
550
+ // Utility commands — keep in sync with UTILITY_COMMANDS (src/constants.ts)
551
+ for (const cmd of UTILITY_COMMANDS) {
540
552
  const cmdPath = path.join(projectRoot, RUNTIME_ROOT, "commands", `${cmd}.md`);
541
553
  checks.push({
542
554
  name: `utility_command:${cmd}`,
@@ -803,7 +815,7 @@ export async function doctorChecks(projectRoot, options = {}) {
803
815
  });
804
816
  checks.push({
805
817
  name: `shim:codex:${skillName}:frontmatter`,
806
- ok,
818
+ ok: frontmatterOk,
807
819
  details: frontmatterOk
808
820
  ? `${skillPath} has \`name: ${skillName}\` frontmatter`
809
821
  : ok
@@ -1191,6 +1203,62 @@ export async function doctorChecks(projectRoot, options = {}) {
1191
1203
  ok: await exists(path.join(projectRoot, RUNTIME_ROOT, "state", "harness-gaps.json")),
1192
1204
  details: `${RUNTIME_ROOT}/state/harness-gaps.json must exist for tiered harness capability tracking`
1193
1205
  });
1206
+ const adapterManifestPath = path.join(projectRoot, RUNTIME_ROOT, "adapters", "manifest.json");
1207
+ const adapterManifestExists = await exists(adapterManifestPath);
1208
+ checks.push({
1209
+ name: "state:adapter_manifest_exists",
1210
+ ok: adapterManifestExists,
1211
+ details: `${RUNTIME_ROOT}/adapters/manifest.json must exist for harness adapter provenance`
1212
+ });
1213
+ if (adapterManifestExists) {
1214
+ let harnessesOk = false;
1215
+ let harnessesDetails = "";
1216
+ let sourcesOk = false;
1217
+ let sourcesDetails = "";
1218
+ try {
1219
+ const parsed = JSON.parse(await fs.readFile(adapterManifestPath, "utf8"));
1220
+ const manifestHarnesses = Array.isArray(parsed.harnesses)
1221
+ ? parsed.harnesses.filter((entry) => typeof entry === "string")
1222
+ : [];
1223
+ const expectedHarnesses = configuredHarnesses.length > 0
1224
+ ? [...new Set(configuredHarnesses)].sort()
1225
+ : null;
1226
+ const actualHarnesses = [...new Set(manifestHarnesses)].sort();
1227
+ harnessesOk = expectedHarnesses
1228
+ ? actualHarnesses.length === expectedHarnesses.length &&
1229
+ actualHarnesses.every((harness, index) => harness === expectedHarnesses[index])
1230
+ : actualHarnesses.length > 0;
1231
+ harnessesDetails = expectedHarnesses
1232
+ ? harnessesOk
1233
+ ? `adapter manifest harnesses match config.yaml: ${actualHarnesses.join(", ")}`
1234
+ : `adapter manifest harnesses [${actualHarnesses.join(", ")}] do not match config.yaml [${expectedHarnesses.join(", ")}]`
1235
+ : harnessesOk
1236
+ ? `adapter manifest declares harnesses: ${actualHarnesses.join(", ")}`
1237
+ : "adapter manifest must declare at least one harness";
1238
+ const commandSource = typeof parsed.commandSource === "string" ? parsed.commandSource.trim() : "";
1239
+ const skillSource = typeof parsed.skillSource === "string" ? parsed.skillSource.trim() : "";
1240
+ sourcesOk = commandSource.length > 0 && skillSource.length > 0;
1241
+ sourcesDetails = sourcesOk
1242
+ ? `adapter manifest source globs are set (commandSource=${commandSource}; skillSource=${skillSource})`
1243
+ : "adapter manifest must include non-empty commandSource and skillSource";
1244
+ }
1245
+ catch {
1246
+ harnessesOk = false;
1247
+ harnessesDetails = "adapter manifest must be valid JSON with a harnesses array";
1248
+ sourcesOk = false;
1249
+ sourcesDetails = "adapter manifest must be valid JSON with source globs";
1250
+ }
1251
+ checks.push({
1252
+ name: "state:adapter_manifest_harnesses",
1253
+ ok: harnessesOk,
1254
+ details: harnessesDetails
1255
+ });
1256
+ checks.push({
1257
+ name: "state:adapter_manifest_sources",
1258
+ ok: sourcesOk,
1259
+ details: sourcesDetails
1260
+ });
1261
+ }
1194
1262
  const contextModeStatePath = path.join(projectRoot, RUNTIME_ROOT, "state", "context-mode.json");
1195
1263
  checks.push({
1196
1264
  name: "state:context_mode_exists",
@@ -1276,7 +1344,7 @@ export async function doctorChecks(projectRoot, options = {}) {
1276
1344
  name: "flow_state:track",
1277
1345
  ok: skippedConsistent,
1278
1346
  details: skippedConsistent
1279
- ? `active track "${activeTrack}" (${trackStageList.length}/${COMMAND_FILE_ORDER.length} stages: ${trackStageList.join(" → ")})${expectedSkipped.length > 0 ? `; skippedStages=${expectedSkipped.join(", ")}` : ""}`
1347
+ ? `active track "${activeTrack}" (${trackStageList.length}/${FLOW_STAGES.length} stages: ${trackStageList.join(" → ")})${expectedSkipped.length > 0 ? `; skippedStages=${expectedSkipped.join(", ")}` : ""}`
1280
1348
  : `track "${activeTrack}" expects skippedStages=[${expectedSkipped.join(", ")}] but flow-state has [${skippedFromState.join(", ")}] — run \`cclaw sync\` to repair`
1281
1349
  });
1282
1350
  if (parsedConfig?.trackHeuristics) {
@@ -1441,7 +1509,7 @@ export async function doctorChecks(projectRoot, options = {}) {
1441
1509
  ? "no legacy .cclaw/features snapshot entries remain"
1442
1510
  : `legacy snapshot entries still present (read-only): ${legacyWorkspaceEntries.join(", ")}`
1443
1511
  });
1444
- const staleStages = Object.keys(flowState.staleStages).filter((value) => COMMAND_FILE_ORDER.includes(value));
1512
+ const staleStages = Object.keys(flowState.staleStages).filter((value) => FLOW_STAGES.includes(value));
1445
1513
  checks.push({
1446
1514
  name: "state:stale_stages_resolved",
1447
1515
  ok: staleStages.length === 0,
@@ -1667,10 +1735,10 @@ export async function doctorChecks(projectRoot, options = {}) {
1667
1735
  const stageOrder = parsed.stage_order;
1668
1736
  const stageGates = parsed.stage_gates;
1669
1737
  const hasStageOrder = Array.isArray(stageOrder) &&
1670
- COMMAND_FILE_ORDER.every((stage) => stageOrder.includes(stage));
1738
+ FLOW_STAGES.every((stage) => stageOrder.includes(stage));
1671
1739
  const hasStageGates = typeof stageGates === "object" &&
1672
1740
  stageGates !== null &&
1673
- COMMAND_FILE_ORDER.every((stage) => Array.isArray(stageGates[stage]));
1741
+ FLOW_STAGES.every((stage) => Array.isArray(stageGates[stage]));
1674
1742
  hasRules = hasCoreLists && hasStageOrder && hasStageGates;
1675
1743
  }
1676
1744
  catch {
@@ -43,6 +43,14 @@ export interface RetroState {
43
43
  * automatic step.
44
44
  * - `archived` — archive completed in this session (transient — archive
45
45
  * resets flow-state so this value does not persist between runs).
46
+ *
47
+ * Layer separation (intentional):
48
+ * - `next: "done"` in stage schema means "the flow stage chain ended".
49
+ * - `shipSubstate: "archived"` is closeout-machine progress after ship.
50
+ * - `shipSubstate: "idle"` is the default closeout value before ship.
51
+ *
52
+ * These are not duplicates: `done` lives in stage transitions; `archived` /
53
+ * `idle` live in closeout lifecycle state.
46
54
  */
47
55
  export declare const SHIP_SUBSTATES: readonly ["idle", "retro_review", "compound_review", "ready_to_archive", "archived"];
48
56
  export type ShipSubstate = (typeof SHIP_SUBSTATES)[number];
@@ -86,6 +94,6 @@ export declare function skippedStagesForTrack(track: FlowTrack): FlowStage[];
86
94
  export declare function firstStageForTrack(track: FlowTrack): FlowStage;
87
95
  export declare function createInitialFlowState(activeRunIdOrOptions?: string | InitialFlowStateOptions, maybeTrack?: FlowTrack): FlowState;
88
96
  export declare function canTransition(from: FlowStage, to: FlowStage): boolean;
89
- export declare function getTransitionGuards(from: FlowStage, to: FlowStage): string[];
97
+ export declare function getTransitionGuards(from: FlowStage, to: FlowStage, track?: FlowTrack): string[];
90
98
  export declare function nextStage(stage: FlowStage, track?: FlowTrack): FlowStage | null;
91
99
  export declare function previousStage(stage: FlowStage, track?: FlowTrack): FlowStage | null;
@@ -1,4 +1,3 @@
1
- import { COMMAND_FILE_ORDER } from "./constants.js";
2
1
  import { buildTransitionRules, orderedStageSchemas, stageGateIds, stageRecommendedGateIds } from "./content/stage-schema.js";
3
2
  import { FLOW_STAGES, FLOW_TRACKS, TRACK_STAGES } from "./types.js";
4
3
  export const TRANSITION_RULES = buildTransitionRules();
@@ -17,6 +16,14 @@ export const TRANSITION_RULES = buildTransitionRules();
17
16
  * automatic step.
18
17
  * - `archived` — archive completed in this session (transient — archive
19
18
  * resets flow-state so this value does not persist between runs).
19
+ *
20
+ * Layer separation (intentional):
21
+ * - `next: "done"` in stage schema means "the flow stage chain ended".
22
+ * - `shipSubstate: "archived"` is closeout-machine progress after ship.
23
+ * - `shipSubstate: "idle"` is the default closeout value before ship.
24
+ *
25
+ * These are not duplicates: `done` lives in stage transitions; `archived` /
26
+ * `idle` live in closeout lifecycle state.
20
27
  */
21
28
  export const SHIP_SUBSTATES = [
22
29
  "idle",
@@ -90,7 +97,21 @@ export function createInitialFlowState(activeRunIdOrOptions = "active", maybeTra
90
97
  export function canTransition(from, to) {
91
98
  return TRANSITION_RULES.some((rule) => rule.from === from && rule.to === to);
92
99
  }
93
- export function getTransitionGuards(from, to) {
100
+ export function getTransitionGuards(from, to, track = "standard") {
101
+ // Natural forward edge on this track: derive guards fresh from the
102
+ // track-specific gate schema. `TRANSITION_RULES` collapses shared edges
103
+ // across tracks (first-registered wins), so reading guards directly
104
+ // from the track-aware schema avoids silently dropping gates that only
105
+ // the current track requires (e.g. `tdd_traceable_to_plan` on standard
106
+ // gets lost if quick was registered first).
107
+ const ordered = TRACK_STAGES[track];
108
+ const fromIdx = ordered.indexOf(from);
109
+ if (fromIdx >= 0 && ordered[fromIdx + 1] === to) {
110
+ return stageGateIds(from, track);
111
+ }
112
+ // Non-neighbour edges (e.g. `review -> tdd` with `review_verdict_blocked`)
113
+ // carry special guards not derivable from a stage's gate catalog; fall
114
+ // back to the pre-computed rule table.
94
115
  const match = TRANSITION_RULES.find((rule) => rule.from === from && rule.to === to);
95
116
  return match ? [...match.guards] : [];
96
117
  }
@@ -98,11 +119,7 @@ export function nextStage(stage, track = "standard") {
98
119
  const ordered = TRACK_STAGES[track];
99
120
  const index = ordered.indexOf(stage);
100
121
  if (index < 0) {
101
- const fallback = COMMAND_FILE_ORDER.indexOf(stage);
102
- if (fallback < 0 || fallback === COMMAND_FILE_ORDER.length - 1) {
103
- return null;
104
- }
105
- return COMMAND_FILE_ORDER[fallback + 1];
122
+ return null;
106
123
  }
107
124
  if (index === ordered.length - 1) {
108
125
  return null;
@@ -116,11 +133,11 @@ export function previousStage(stage, track = "standard") {
116
133
  return null;
117
134
  }
118
135
  if (index < 0) {
119
- const fallback = COMMAND_FILE_ORDER.indexOf(stage);
136
+ const fallback = FLOW_STAGES.indexOf(stage);
120
137
  if (fallback <= 0) {
121
138
  return null;
122
139
  }
123
- return COMMAND_FILE_ORDER[fallback - 1];
140
+ return FLOW_STAGES[fallback - 1];
124
141
  }
125
142
  return ordered[index - 1];
126
143
  }
@@ -1,4 +1,13 @@
1
1
  export declare function ensureDir(dirPath: string): Promise<void>;
2
+ /**
3
+ * Strip a leading UTF-8 BOM (U+FEFF) if present. Many editors (VS Code on
4
+ * Windows, Notepad, some CI tools) silently prepend a BOM when saving
5
+ * UTF-8; when the file is then split on `\n` the first line keeps the
6
+ * invisible BOM and `JSON.parse` rejects it, which caused the first
7
+ * knowledge.jsonl entry to be silently dropped on load. Treat BOM as a
8
+ * no-op at read time so the rest of the pipeline sees clean UTF-8.
9
+ */
10
+ export declare function stripBom(text: string): string;
2
11
  export interface DirectoryLockOptions {
3
12
  retries?: number;
4
13
  retryDelayMs?: number;
package/dist/fs-utils.js CHANGED
@@ -3,6 +3,17 @@ import path from "node:path";
3
3
  export async function ensureDir(dirPath) {
4
4
  await fs.mkdir(dirPath, { recursive: true });
5
5
  }
6
+ /**
7
+ * Strip a leading UTF-8 BOM (U+FEFF) if present. Many editors (VS Code on
8
+ * Windows, Notepad, some CI tools) silently prepend a BOM when saving
9
+ * UTF-8; when the file is then split on `\n` the first line keeps the
10
+ * invisible BOM and `JSON.parse` rejects it, which caused the first
11
+ * knowledge.jsonl entry to be silently dropped on load. Treat BOM as a
12
+ * no-op at read time so the rest of the pipeline sees clean UTF-8.
13
+ */
14
+ export function stripBom(text) {
15
+ return text.charCodeAt(0) === 0xfeff ? text.slice(1) : text;
16
+ }
6
17
  function sleep(ms) {
7
18
  return new Promise((resolve) => setTimeout(resolve, ms));
8
19
  }
@@ -54,7 +65,30 @@ export async function writeFileSafe(filePath, content) {
54
65
  await ensureDir(path.dirname(filePath));
55
66
  const tempPath = path.join(path.dirname(filePath), `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`);
56
67
  await fs.writeFile(tempPath, content, "utf8");
57
- await fs.rename(tempPath, filePath);
68
+ try {
69
+ await fs.rename(tempPath, filePath);
70
+ }
71
+ catch (error) {
72
+ const code = error?.code;
73
+ // `rename` fails with EXDEV when the temp file and target live on
74
+ // different filesystems (container bind mounts, tmpfs + rootfs,
75
+ // cross-volume setups). Fall back to copy + unlink so atomic writes
76
+ // still work — copyFile is not fully atomic but is the best we can
77
+ // do across devices, and we remove the temp even if copy fails.
78
+ if (code === "EXDEV") {
79
+ try {
80
+ await fs.copyFile(tempPath, filePath);
81
+ }
82
+ finally {
83
+ await fs.unlink(tempPath).catch(() => undefined);
84
+ }
85
+ return;
86
+ }
87
+ // Other errors: try to clean up the temp to avoid littering the
88
+ // directory with orphaned `.tmp-<pid>-*` files, then rethrow.
89
+ await fs.unlink(tempPath).catch(() => undefined);
90
+ throw error;
91
+ }
58
92
  }
59
93
  export async function exists(filePath) {
60
94
  try {
@@ -1,9 +1,11 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
- import { checkReviewVerdictConsistency, extractMarkdownSectionBody, lintArtifact, validateReviewArmy } from "./artifact-linter.js";
3
+ import { checkReviewSecurityNoChangeAttestation, checkReviewVerdictConsistency, extractMarkdownSectionBody, lintArtifact, validateReviewArmy } from "./artifact-linter.js";
4
4
  import { RUNTIME_ROOT } from "./constants.js";
5
5
  import { stageSchema } from "./content/stage-schema.js";
6
+ import { readDelegationLedger } from "./delegation.js";
6
7
  import { ensureDir, exists, writeFileSafe } from "./fs-utils.js";
8
+ import { detectPublicApiChanges } from "./internal/detect-public-api-changes.js";
7
9
  import { readFlowState, writeFlowState } from "./runs.js";
8
10
  import { buildTraceMatrix } from "./trace-matrix.js";
9
11
  import { FLOW_STAGES } from "./types.js";
@@ -210,7 +212,7 @@ export async function verifyCurrentStageGateEvidence(projectRoot, flowState) {
210
212
  const artifactPresent = await currentStageArtifactExists(projectRoot, stage, flowState.track);
211
213
  const shouldValidateArtifact = artifactPresent || catalog.passed.length > 0 || flowState.completedStages.includes(stage);
212
214
  if (shouldValidateArtifact) {
213
- const lint = await lintArtifact(projectRoot, stage);
215
+ const lint = await lintArtifact(projectRoot, stage, flowState.track);
214
216
  if (!lint.passed) {
215
217
  const failedRequired = lint.findings
216
218
  .filter((finding) => finding.required && !finding.found)
@@ -228,6 +230,10 @@ export async function verifyCurrentStageGateEvidence(projectRoot, flowState) {
228
230
  if (!verdictConsistency.ok) {
229
231
  issues.push(`review verdict inconsistency: ${verdictConsistency.errors.join("; ")}`);
230
232
  }
233
+ const securityAttestation = await checkReviewSecurityNoChangeAttestation(projectRoot);
234
+ if (!securityAttestation.ok) {
235
+ issues.push(`review security attestation failed: ${securityAttestation.errors.join("; ")}`);
236
+ }
231
237
  const traceGateRequired = schema.requiredGates.some((gate) => gate.id === "review_trace_matrix_clean" && gate.tier === "required");
232
238
  if (traceGateRequired) {
233
239
  const trace = await buildTraceMatrix(projectRoot);
@@ -282,6 +288,19 @@ export async function verifyCurrentStageGateEvidence(projectRoot, flowState) {
282
288
  }
283
289
  }
284
290
  }
291
+ if (stage === "tdd") {
292
+ const docsDriftDetection = await detectPublicApiChanges(projectRoot);
293
+ if (docsDriftDetection.triggered) {
294
+ const ledger = await readDelegationLedger(projectRoot);
295
+ const hasDocUpdaterCompletion = ledger.entries.some((entry) => entry.runId === flowState.activeRunId &&
296
+ entry.stage === "tdd" &&
297
+ entry.agent === "doc-updater" &&
298
+ entry.status === "completed");
299
+ if (!hasDocUpdaterCompletion) {
300
+ issues.push(`tdd docs drift gate blocked (tdd_docs_drift_check): public surface changes detected (${docsDriftDetection.changedFiles.join(", ")}) but no completed doc-updater delegation exists for the active run.`);
301
+ }
302
+ }
303
+ }
285
304
  }
286
305
  const passedSet = new Set(catalog.passed);
287
306
  const missingRequired = required.filter((gateId) => !passedSet.has(gateId));
package/dist/gitignore.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
3
  import { REQUIRED_GITIGNORE_PATTERNS } from "./constants.js";
4
- import { exists } from "./fs-utils.js";
4
+ import { exists, writeFileSafe } from "./fs-utils.js";
5
5
  export async function ensureGitignore(projectRoot) {
6
6
  const gitignorePath = path.join(projectRoot, ".gitignore");
7
7
  const currentContent = (await exists(gitignorePath))
@@ -15,7 +15,10 @@ export async function ensureGitignore(projectRoot) {
15
15
  }
16
16
  const base = lines.join("\n").replace(/\s+$/u, "");
17
17
  const suffix = `${base.length > 0 ? "\n" : ""}${missing.join("\n")}\n`;
18
- await fs.writeFile(gitignorePath, `${base}${suffix}`, "utf8");
18
+ // `writeFileSafe` performs a tmp-file + rename so a crash mid-write
19
+ // cannot leave `.gitignore` in a half-written state; the previous
20
+ // direct `fs.writeFile` could truncate the file on SIGKILL.
21
+ await writeFileSafe(gitignorePath, `${base}${suffix}`);
19
22
  }
20
23
  export async function removeGitignorePatterns(projectRoot) {
21
24
  const gitignorePath = path.join(projectRoot, ".gitignore");
@@ -30,7 +33,7 @@ export async function removeGitignorePatterns(projectRoot) {
30
33
  await fs.rm(gitignorePath, { force: true });
31
34
  }
32
35
  else {
33
- await fs.writeFile(gitignorePath, `${result}\n`, "utf8");
36
+ await writeFileSafe(gitignorePath, `${result}\n`);
34
37
  }
35
38
  }
36
39
  export async function gitignoreHasRequiredPatterns(projectRoot) {
@@ -17,8 +17,8 @@ export type SubagentFallback =
17
17
  */
18
18
  | "role-switch"
19
19
  /**
20
- * No meaningful fallback mandatory delegations can only be waived
21
- * under `waiverReason: "harness_limitation"`.
20
+ * Reserved escape hatch for future harnesses with no parity path.
21
+ * Current shipped harnesses do not use this fallback.
22
22
  */
23
23
  | "waiver";
24
24
  /**
@@ -54,7 +54,12 @@ const LEGACY_CODEX_SKILL_NAMES = [
54
54
  "cclaw-cc-next",
55
55
  "cclaw-cc-view",
56
56
  "cclaw-cc-ops",
57
- "cclaw-cc-ideate"
57
+ "cclaw-cc-ideate",
58
+ // Pre-v0.40 installed `/cc-learn` as a top-level skill before it was
59
+ // folded into `/cc-ops`. Without this entry the orphan stays behind
60
+ // after upgrade and Codex lists both the new in-thread workflow and
61
+ // the legacy slash command.
62
+ "cclaw-cc-learn"
58
63
  ];
59
64
  /**
60
65
  * Shims that older cclaw versions installed as top-level slash commands but
@@ -222,7 +227,7 @@ When in doubt, prefer **non-trivial** — the quick track is opt-in and only saf
222
227
  |---|---|
223
228
  | \`/cc\` | **Entry point.** No args = resume current stage. With prompt = classify task and start the right flow. |
224
229
  | \`/cc-next\` | **Progression.** Advances to the next stage when current is complete. |
225
- | \`/cc-ideate\` | **Discovery mode.** Generates a ranked repo-improvement backlog before implementation. |
230
+ | \`/cc-ideate\` | **Ideate mode.** Generates a ranked repo-improvement backlog before implementation. |
226
231
  | \`/cc-view\` | **Read-only router.** Unified entry for status/tree/diff views. |
227
232
  | \`/cc-ops\` | **Operations router.** Unified entry for feature/tdd-log/retro/compound/archive/rewind actions. |
228
233
 
@@ -356,7 +361,7 @@ function codexSkillDescription(command) {
356
361
  case "next":
357
362
  return `Advance the cclaw flow to the next stage. Use when the user types \`/cc-next\` or asks to "move to the next stage", "continue the flow", "advance cclaw", "progress the workflow", or when the current stage skill reports completion and gates have passed.`;
358
363
  case "ideate":
359
- return `Read-only repo-improvement discovery for cclaw. Use when the user types \`/cc-ideate\` or asks to "ideate", "brainstorm improvements", "scan the repo for TODOs/tech debt", "generate a backlog", or wants a ranked list of candidate ideas before committing to a single flow. Does not mutate \`.cclaw/state/flow-state.json\`.`;
364
+ return `Read-only repo-improvement ideate mode for cclaw. Use when the user types \`/cc-ideate\` or asks to "ideate", "scan the repo for TODOs/tech debt", "generate a backlog", or wants a ranked list of candidate ideas before committing to a single flow. Does not mutate \`.cclaw/state/flow-state.json\`.`;
360
365
  case "view":
361
366
  return `Read-only router for cclaw flow views. Use when the user types \`/cc-view\`, \`/cc-view status\`, \`/cc-view tree\`, \`/cc-view diff\`, or asks to "show cclaw status", "show the flow tree", "diff flow state", or wants a snapshot without mutation.`;
362
367
  case "ops":
@@ -417,6 +422,11 @@ what the hook surface does and does not cover.
417
422
  are **not** gated by hooks — read
418
423
  \`.cclaw/references/harnesses/codex-playbook.md\` for what cclaw
419
424
  substitutes with in-turn agent steps for those call classes.
425
+ - Codex's \`SessionStart\` matcher only supports \`startup|resume\`. Claude
426
+ and Cursor also fire on \`clear\` and \`compact\`, so mid-session
427
+ context resets there re-inject cclaw's bootstrap automatically. In
428
+ Codex you must re-announce the active stage yourself after any
429
+ \`/clear\` or compaction — the skill does not reload implicitly.
420
430
  `;
421
431
  }
422
432
  function codexSkillMarkdown(command, skillName, skillFolder, commandFile) {
package/dist/install.js CHANGED
@@ -2,9 +2,9 @@ import { execFile } from "node:child_process";
2
2
  import fs from "node:fs/promises";
3
3
  import path from "node:path";
4
4
  import { promisify } from "node:util";
5
- import { CCLAW_VERSION, COMMAND_FILE_ORDER, FLOW_VERSION, REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
5
+ import { CCLAW_VERSION, FLOW_VERSION, REQUIRED_DIRS, RUNTIME_ROOT } from "./constants.js";
6
6
  import { writeConfig, createDefaultConfig, readConfig, configPath, detectLanguageRulePacks, detectAdvancedKeys } from "./config.js";
7
- import { commandContract } from "./content/contracts.js";
7
+ import { stageCommandContract } from "./content/contracts.js";
8
8
  import { contextModeFiles, createInitialContextModeState } from "./content/contexts.js";
9
9
  import { learnSkillMarkdown, learnCommandContract } from "./content/learnings.js";
10
10
  import { nextCommandContract, nextCommandSkillMarkdown } from "./content/next-command.js";
@@ -36,7 +36,7 @@ import { LANGUAGE_RULE_PACK_DIR, LANGUAGE_RULE_PACK_FILES, LANGUAGE_RULE_PACK_GE
36
36
  import { RESEARCH_PLAYBOOKS } from "./content/research-playbooks.js";
37
37
  import { HARNESS_TOOL_REFS_DIR, HARNESS_TOOL_REFS_INDEX_MD, harnessToolRefMarkdown } from "./content/harness-tool-refs.js";
38
38
  import { DOCTOR_REFERENCE_MARKDOWN } from "./content/doctor-references.js";
39
- import { harnessDocsOverviewMarkdown, harnessIntegrationDocMarkdown } from "./content/harnesses-doc.js";
39
+ import { harnessDocsOverviewMarkdown, harnessIntegrationDocMarkdown } from "./content/harness-doc.js";
40
40
  import { HARNESS_PLAYBOOKS_DIR, harnessPlaybookFileName, harnessPlaybookMarkdown, harnessPlaybooksIndexMarkdown } from "./content/harness-playbooks.js";
41
41
  import { HOOK_EVENTS_BY_HARNESS, HOOK_SEMANTIC_EVENTS } from "./content/hook-events.js";
42
42
  import { createInitialFlowState } from "./flow-state.js";
@@ -44,7 +44,9 @@ import { ensureDir, exists, writeFileSafe } from "./fs-utils.js";
44
44
  import { ensureGitignore, removeGitignorePatterns } from "./gitignore.js";
45
45
  import { HARNESS_ADAPTERS, harnessShimFileNames, harnessTier, syncHarnessShims, removeCclawFromAgentsMd } from "./harness-adapters.js";
46
46
  import { validateHookDocument } from "./hook-schema.js";
47
- import { ensureRunSystem, readFlowState } from "./runs.js";
47
+ import { detectHarnesses } from "./init-detect.js";
48
+ import { CorruptFlowStateError, ensureRunSystem, readFlowState } from "./runs.js";
49
+ import { FLOW_STAGES } from "./types.js";
48
50
  const OPENCODE_PLUGIN_REL_PATH = ".opencode/plugins/cclaw-plugin.mjs";
49
51
  const CURSOR_RULE_REL_PATH = ".cursor/rules/cclaw-workflow.mdc";
50
52
  const GIT_HOOK_MANAGED_MARKER = "cclaw-managed-git-hook";
@@ -177,8 +179,8 @@ async function ensureStructure(projectRoot) {
177
179
  }
178
180
  }
179
181
  async function writeCommandContracts(projectRoot) {
180
- for (const stage of COMMAND_FILE_ORDER) {
181
- await writeFileSafe(runtimePath(projectRoot, "commands", `${stage}.md`), commandContract(stage));
182
+ for (const stage of FLOW_STAGES) {
183
+ await writeFileSafe(runtimePath(projectRoot, "commands", `${stage}.md`), stageCommandContract(stage));
182
184
  }
183
185
  }
184
186
  async function writeArtifactTemplates(projectRoot) {
@@ -214,7 +216,7 @@ async function writeEvalScaffold(projectRoot) {
214
216
  }
215
217
  async function writeSkills(projectRoot, config) {
216
218
  const skillTrack = config?.defaultTrack ?? "standard";
217
- for (const stage of COMMAND_FILE_ORDER) {
219
+ for (const stage of FLOW_STAGES) {
218
220
  const folder = stageSkillFolder(stage);
219
221
  await writeFileSafe(runtimePath(projectRoot, "skills", folder, "SKILL.md"), stageSkillMarkdown(stage, skillTrack));
220
222
  // Progressive disclosure (A.2#8): materialize the full example artifact as
@@ -852,7 +854,24 @@ Drop this section if no hard rule applies. Keep it crisp:
852
854
  async function ensureSessionStateFiles(projectRoot) {
853
855
  const stateDir = runtimePath(projectRoot, "state");
854
856
  await ensureDir(stateDir);
855
- const flow = await readFlowState(projectRoot);
857
+ // If flow-state.json is corrupt, `readFlowState` quarantines the bad
858
+ // file and throws. During install we'd rather continue than abort:
859
+ // the user just asked to set up cclaw, and the corrupt file is already
860
+ // preserved next to the original path. Fall back to a fresh initial
861
+ // state so the rest of install completes and the user can inspect the
862
+ // `.corrupt-<timestamp>.json` quarantine afterwards.
863
+ let flow;
864
+ try {
865
+ flow = await readFlowState(projectRoot);
866
+ }
867
+ catch (err) {
868
+ if (err instanceof CorruptFlowStateError) {
869
+ flow = createInitialFlowState();
870
+ }
871
+ else {
872
+ throw err;
873
+ }
874
+ }
856
875
  const activityPath = path.join(stateDir, "stage-activity.jsonl");
857
876
  if (!(await exists(activityPath))) {
858
877
  await writeFileSafe(activityPath, "");
@@ -1020,6 +1039,14 @@ async function writeHarnessGapsState(projectRoot, harnesses) {
1020
1039
  break;
1021
1040
  }
1022
1041
  for (const event of missingHookEvents) {
1042
+ if (harness === "codex" && event === "precompact_digest") {
1043
+ // Codex CLI has no PreCompact event. Generic "schedule the script
1044
+ // manually" copy doesn't help; instead, point the agent at the
1045
+ // in-thread substitute that already exists in cclaw content
1046
+ // (`/cc-ops retro` reads the same digest the hook would emit).
1047
+ remediation.push("hook event precompact_digest → Codex has no PreCompact event; run `/cc-ops retro` in-thread before compaction instead of relying on a hook");
1048
+ continue;
1049
+ }
1023
1050
  remediation.push(`hook event ${event} → schedule the corresponding script manually or accept reduced observability`);
1024
1051
  }
1025
1052
  return {
@@ -1114,6 +1141,27 @@ async function cleanLegacyArtifacts(projectRoot) {
1114
1141
  // best-effort cleanup
1115
1142
  }
1116
1143
  }
1144
+ // D-4 terminology migration: rename historical ideation artifacts to the
1145
+ // canonical ideate-* naming without deleting user-authored content.
1146
+ const artifactsDir = runtimePath(projectRoot, "artifacts");
1147
+ try {
1148
+ const entries = await fs.readdir(artifactsDir);
1149
+ for (const entry of entries) {
1150
+ const match = /^ideation-(.+\.md)$/u.exec(entry);
1151
+ if (!match)
1152
+ continue;
1153
+ const nextName = `ideate-${match[1]}`;
1154
+ const from = path.join(artifactsDir, entry);
1155
+ const to = path.join(artifactsDir, nextName);
1156
+ if (await exists(to)) {
1157
+ continue;
1158
+ }
1159
+ await fs.rename(from, to);
1160
+ }
1161
+ }
1162
+ catch {
1163
+ // no artifacts directory yet (fresh init) or read-only FS
1164
+ }
1117
1165
  }
1118
1166
  async function cleanStaleFiles(projectRoot) {
1119
1167
  const expectedShimFiles = new Set(harnessShimFileNames());
@@ -1188,9 +1236,19 @@ export async function initCclaw(options) {
1188
1236
  }
1189
1237
  export async function syncCclaw(projectRoot) {
1190
1238
  const configExists = await exists(configPath(projectRoot));
1191
- const config = await readConfig(projectRoot);
1239
+ let config = await readConfig(projectRoot);
1192
1240
  if (!configExists) {
1193
- await writeConfig(projectRoot, createDefaultConfig(config.harnesses));
1241
+ // Prefer detected harness markers over the hardcoded default list.
1242
+ // Without this, a user running `cclaw sync` in a `.claude`-only
1243
+ // project ends up with a config that also enables cursor/opencode/
1244
+ // codex, which then fails doctor checks for missing shim folders.
1245
+ // Fall back to the previous default (config.harnesses) if no markers
1246
+ // are found so brand-new projects still bootstrap cleanly.
1247
+ const detected = await detectHarnesses(projectRoot);
1248
+ const harnesses = detected.length > 0 ? detected : config.harnesses;
1249
+ const defaultConfig = createDefaultConfig(harnesses);
1250
+ await writeConfig(projectRoot, defaultConfig);
1251
+ config = defaultConfig;
1194
1252
  }
1195
1253
  await materializeRuntime(projectRoot, config, false);
1196
1254
  }
@@ -0,0 +1,5 @@
1
+ export interface PublicApiChangeDetection {
2
+ triggered: boolean;
3
+ changedFiles: string[];
4
+ }
5
+ export declare function detectPublicApiChanges(projectRoot: string): Promise<PublicApiChangeDetection>;