cclaw-cli 0.47.0 → 0.48.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +3 -1
  2. package/dist/artifact-linter.d.ts +9 -2
  3. package/dist/artifact-linter.js +45 -2
  4. package/dist/config.d.ts +6 -6
  5. package/dist/config.js +22 -0
  6. package/dist/constants.d.ts +10 -1
  7. package/dist/constants.js +19 -10
  8. package/dist/content/contracts.d.ts +1 -1
  9. package/dist/content/contracts.js +1 -1
  10. package/dist/content/core-agents.d.ts +53 -1
  11. package/dist/content/core-agents.js +6 -0
  12. package/dist/content/{harnesses-doc.js → harness-doc.js} +32 -1
  13. package/dist/content/harness-playbooks.js +4 -4
  14. package/dist/content/ideate-command.js +19 -19
  15. package/dist/content/observe.js +22 -1
  16. package/dist/content/opencode-plugin.js +5 -1
  17. package/dist/content/skills.js +2 -2
  18. package/dist/content/stage-schema.js +36 -8
  19. package/dist/content/stages/design.js +2 -2
  20. package/dist/content/stages/review.js +1 -1
  21. package/dist/content/stages/ship.js +2 -0
  22. package/dist/content/stages/tdd.js +8 -4
  23. package/dist/content/templates.js +15 -13
  24. package/dist/content/utility-skills.d.ts +7 -1
  25. package/dist/content/utility-skills.js +5 -0
  26. package/dist/delegation.d.ts +10 -0
  27. package/dist/delegation.js +111 -33
  28. package/dist/doctor.js +80 -12
  29. package/dist/flow-state.d.ts +9 -1
  30. package/dist/flow-state.js +26 -9
  31. package/dist/fs-utils.d.ts +9 -0
  32. package/dist/fs-utils.js +35 -1
  33. package/dist/gate-evidence.js +21 -2
  34. package/dist/gitignore.js +6 -3
  35. package/dist/harness-adapters.d.ts +2 -2
  36. package/dist/harness-adapters.js +13 -3
  37. package/dist/install.js +68 -10
  38. package/dist/internal/detect-public-api-changes.d.ts +5 -0
  39. package/dist/internal/detect-public-api-changes.js +45 -0
  40. package/dist/knowledge-store.js +2 -2
  41. package/dist/policy.js +3 -2
  42. package/dist/retro-gate.js +41 -15
  43. package/dist/run-archive.js +63 -33
  44. package/dist/run-persistence.js +12 -4
  45. package/dist/tdd-cycle.js +6 -1
  46. package/dist/types.d.ts +6 -1
  47. package/package.json +4 -1
  48. package/dist/content/{harnesses-doc.d.ts → harness-doc.d.ts} +0 -0
@@ -0,0 +1,45 @@
1
+ import { execFile } from "node:child_process";
2
+ import { promisify } from "node:util";
3
+ const execFileAsync = promisify(execFile);
4
+ const PUBLIC_SURFACE_PATH_PATTERNS = [
5
+ /(^|\/)(cli|types?|config)\.[cm]?[jt]s$/iu,
6
+ /(^|\/)(openapi|swagger|schema)(\/|[-_.])/iu,
7
+ /(^|\/)(api|commands?|flags?)(\/|[-_.])/iu,
8
+ /(^|\/)(package|tsconfig)\.json$/iu
9
+ ];
10
+ async function resolveDiffBase(projectRoot) {
11
+ try {
12
+ const { stdout } = await execFileAsync("git", ["rev-parse", "HEAD~1"], {
13
+ cwd: projectRoot
14
+ });
15
+ const base = stdout.trim();
16
+ return base.length > 0 ? base : null;
17
+ }
18
+ catch {
19
+ return null;
20
+ }
21
+ }
22
+ export async function detectPublicApiChanges(projectRoot) {
23
+ const base = await resolveDiffBase(projectRoot);
24
+ if (!base) {
25
+ return { triggered: false, changedFiles: [] };
26
+ }
27
+ try {
28
+ const range = `${base}..HEAD`;
29
+ const { stdout } = await execFileAsync("git", ["diff", "--name-only", range], {
30
+ cwd: projectRoot
31
+ });
32
+ const changedFiles = stdout
33
+ .split(/\r?\n/gu)
34
+ .map((line) => line.trim())
35
+ .filter((line) => line.length > 0)
36
+ .filter((filePath) => PUBLIC_SURFACE_PATH_PATTERNS.some((pattern) => pattern.test(filePath)));
37
+ return {
38
+ triggered: changedFiles.length > 0,
39
+ changedFiles
40
+ };
41
+ }
42
+ catch {
43
+ return { triggered: false, changedFiles: [] };
44
+ }
45
+ }
@@ -1,7 +1,7 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
3
  import { RUNTIME_ROOT } from "./constants.js";
4
- import { withDirectoryLock } from "./fs-utils.js";
4
+ import { stripBom, withDirectoryLock } from "./fs-utils.js";
5
5
  import { FLOW_STAGES } from "./types.js";
6
6
  const KNOWLEDGE_TYPE_SET = new Set(["rule", "pattern", "lesson", "compound"]);
7
7
  const KNOWLEDGE_CONFIDENCE_SET = new Set(["high", "medium", "low"]);
@@ -179,7 +179,7 @@ export function materializeKnowledgeEntry(seed, defaults = {}) {
179
179
  async function readExistingKnowledgeKeys(filePath) {
180
180
  const keys = new Set();
181
181
  try {
182
- const raw = await fs.readFile(filePath, "utf8");
182
+ const raw = stripBom(await fs.readFile(filePath, "utf8"));
183
183
  const lines = raw.split(/\r?\n/u).map((line) => line.trim()).filter((line) => line.length > 0);
184
184
  for (const line of lines) {
185
185
  try {
package/dist/policy.js CHANGED
@@ -1,6 +1,7 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
- import { COMMAND_FILE_ORDER, RUNTIME_ROOT } from "./constants.js";
3
+ import { RUNTIME_ROOT } from "./constants.js";
4
+ import { FLOW_STAGES } from "./types.js";
4
5
  import { stageSchema, stagePolicyNeedles } from "./content/stage-schema.js";
5
6
  import { stageSkillFolder } from "./content/skills.js";
6
7
  import { exists } from "./fs-utils.js";
@@ -10,7 +11,7 @@ export async function policyChecks(projectRoot, options = {}) {
10
11
  const checks = [];
11
12
  const rules = [...POLICY_RULES];
12
13
  const activeHarnesses = new Set(options.harnesses && options.harnesses.length > 0 ? options.harnesses : ALL_HARNESSES);
13
- for (const stage of COMMAND_FILE_ORDER) {
14
+ for (const stage of FLOW_STAGES) {
14
15
  const folder = stageSkillFolder(stage);
15
16
  const schema = stageSchema(stage);
16
17
  const commandFile = `${RUNTIME_ROOT}/commands/${stage}.md`;
@@ -1,13 +1,20 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
3
  import { RUNTIME_ROOT } from "./constants.js";
4
- import { exists } from "./fs-utils.js";
4
+ import { exists, stripBom } from "./fs-utils.js";
5
5
  function activeArtifactsPath(projectRoot) {
6
6
  return path.join(projectRoot, RUNTIME_ROOT, "artifacts");
7
7
  }
8
8
  function retroArtifactPath(projectRoot) {
9
9
  return path.join(activeArtifactsPath(projectRoot), "09-retro.md");
10
10
  }
11
+ // Fallback window for compound-entry scanning when `retroDraftedAt` /
12
+ // `retroAcceptedAt` are not set (legacy runs or imports): use the retro
13
+ // artifact's mtime ± 7 days. 24h was too narrow for long-running retros
14
+ // that are edited over several days or runs imported from another
15
+ // machine with slightly different clocks; 7 days is still tight enough
16
+ // that entries from an unrelated future run are excluded.
17
+ const RETRO_ARTIFACT_MTIME_FALLBACK_WINDOW_MS = 7 * 24 * 60 * 60 * 1000;
11
18
  function parseIsoTimestamp(value) {
12
19
  if (!value || value.trim().length === 0)
13
20
  return null;
@@ -35,13 +42,29 @@ export async function evaluateRetroGate(projectRoot, state) {
35
42
  }
36
43
  }
37
44
  let compoundEntries = state.retro.compoundEntries;
38
- const windowStartMs = parseIsoTimestamp(state.closeout.retroDraftedAt);
39
- const windowEndMs = parseIsoTimestamp(state.closeout.retroAcceptedAt) ?? parseIsoTimestamp(state.retro.completedAt);
45
+ let windowStartMs = parseIsoTimestamp(state.closeout.retroDraftedAt);
46
+ let windowEndMs = parseIsoTimestamp(state.closeout.retroAcceptedAt) ?? parseIsoTimestamp(state.retro.completedAt);
47
+ if (compoundEntries <= 0 &&
48
+ hasRetroArtifact &&
49
+ windowStartMs === null &&
50
+ windowEndMs === null) {
51
+ try {
52
+ const stats = await fs.stat(artifactFile);
53
+ const anchor = stats.mtimeMs;
54
+ if (Number.isFinite(anchor) && anchor > 0) {
55
+ windowStartMs = anchor - RETRO_ARTIFACT_MTIME_FALLBACK_WINDOW_MS;
56
+ windowEndMs = anchor + RETRO_ARTIFACT_MTIME_FALLBACK_WINDOW_MS;
57
+ }
58
+ }
59
+ catch {
60
+ // fallback scan remains disabled when mtime cannot be read
61
+ }
62
+ }
40
63
  const shouldFallbackScan = compoundEntries <= 0 && (windowStartMs !== null || windowEndMs !== null);
41
64
  const knowledgeFile = path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl");
42
65
  if (shouldFallbackScan && (await exists(knowledgeFile))) {
43
66
  try {
44
- const raw = await fs.readFile(knowledgeFile, "utf8");
67
+ const raw = stripBom(await fs.readFile(knowledgeFile, "utf8"));
45
68
  compoundEntries = 0;
46
69
  for (const line of raw.split(/\r?\n/)) {
47
70
  const trimmed = line.trim();
@@ -73,17 +96,20 @@ export async function evaluateRetroGate(projectRoot, state) {
73
96
  compoundEntries = 0;
74
97
  }
75
98
  }
76
- // A retro is considered complete when either:
77
- // - at least one compound learning was promoted during the retro window, or
78
- // - the operator explicitly skipped retro or compound (`retroSkipped` /
79
- // `compoundSkipped` recorded in the closeout substate) after reviewing
80
- // the draft. Previously the gate required `compoundEntries > 0`
81
- // unconditionally, which dead-locked ship closeout whenever the retro
82
- // yielded no new patterns worth promoting.
83
- const explicitSkip = Boolean(state.closeout.retroSkipped || state.closeout.compoundSkipped);
84
- const completed = required
85
- ? hasRetroArtifact && (compoundEntries > 0 || explicitSkip)
86
- : true;
99
+ // A retro is considered complete when any of:
100
+ // - the retro artifact exists AND (at least one compound learning was
101
+ // promoted during the retro window OR compound was explicitly skipped
102
+ // after reviewing the draft), or
103
+ // - the operator explicitly skipped the retro step itself
104
+ // (`retroSkipped === true` with a reason). `retroSkipped` is an
105
+ // operator-level override of the artifact requirement, so it must
106
+ // bypass `hasRetroArtifact` otherwise a run that legitimately had
107
+ // nothing worth retro-ing dead-locks at closeout waiting for a
108
+ // file that will never exist.
109
+ const retroSkipped = state.closeout.retroSkipped === true;
110
+ const compoundSkipped = state.closeout.compoundSkipped === true;
111
+ const artifactPathComplete = hasRetroArtifact && (compoundEntries > 0 || compoundSkipped);
112
+ const completed = required ? retroSkipped || artifactPathComplete : true;
87
113
  return {
88
114
  required,
89
115
  completed,
@@ -204,40 +204,70 @@ export async function archiveRun(projectRoot, featureName, options = {}) {
204
204
  compoundEntries: retroGate.compoundEntries
205
205
  };
206
206
  await ensureDir(archivePath);
207
- await fs.rename(artifactsDir, archiveArtifactsPath);
208
- await ensureDir(artifactsDir);
209
- const archiveStatePath = path.join(archivePath, "state");
210
- const snapshottedStateFiles = await snapshotStateDirectory(projectRoot, archiveStatePath);
211
- const resetState = createInitialFlowState();
212
- await writeFlowState(projectRoot, resetState, { allowReset: true });
213
- await resetCarryoverStateFiles(projectRoot, resetState.activeRunId);
207
+ // Drop an `.archive-in-progress` sentinel immediately so that a crash
208
+ // between the artifact rename and the final manifest write leaves a
209
+ // recoverable marker (doctor surfaces these; re-running archive on an
210
+ // orphan attempts to complete or roll back). The sentinel is removed
211
+ // only after the manifest lands successfully.
212
+ const sentinelPath = path.join(archivePath, ".archive-in-progress");
214
213
  const archivedAt = new Date().toISOString();
215
- const manifest = {
216
- version: 1,
217
- archiveId,
218
- archivedAt,
219
- featureName: feature,
220
- activeFeature,
221
- sourceRunId: sourceState.activeRunId,
222
- sourceCurrentStage: sourceState.currentStage,
223
- sourceCompletedStages: sourceState.completedStages,
224
- snapshottedStateFiles,
225
- retro: retroSummary
226
- };
227
- await writeFileSafe(path.join(archivePath, "archive-manifest.json"), `${JSON.stringify(manifest, null, 2)}\n`);
228
- const knowledgeStats = await readKnowledgeStats(projectRoot);
229
- await syncActiveFeatureSnapshot(projectRoot);
230
- return {
231
- archiveId,
232
- archivePath,
233
- archivedAt,
234
- featureName: feature,
235
- activeFeature,
236
- resetState,
237
- snapshottedStateFiles,
238
- knowledge: knowledgeStats,
239
- retro: retroSummary
240
- };
214
+ await writeFileSafe(sentinelPath, `${JSON.stringify({ archiveId, startedAt: archivedAt, sourceRunId: sourceState.activeRunId }, null, 2)}\n`);
215
+ let artifactsMoved = false;
216
+ try {
217
+ await fs.rename(artifactsDir, archiveArtifactsPath);
218
+ artifactsMoved = true;
219
+ await ensureDir(artifactsDir);
220
+ const archiveStatePath = path.join(archivePath, "state");
221
+ const snapshottedStateFiles = await snapshotStateDirectory(projectRoot, archiveStatePath);
222
+ const resetState = createInitialFlowState();
223
+ await writeFlowState(projectRoot, resetState, { allowReset: true });
224
+ await resetCarryoverStateFiles(projectRoot, resetState.activeRunId);
225
+ const manifest = {
226
+ version: 1,
227
+ archiveId,
228
+ archivedAt,
229
+ featureName: feature,
230
+ activeFeature,
231
+ sourceRunId: sourceState.activeRunId,
232
+ sourceCurrentStage: sourceState.currentStage,
233
+ sourceCompletedStages: sourceState.completedStages,
234
+ snapshottedStateFiles,
235
+ retro: retroSummary
236
+ };
237
+ await writeFileSafe(path.join(archivePath, "archive-manifest.json"), `${JSON.stringify(manifest, null, 2)}\n`);
238
+ // Manifest landed — sentinel is no longer needed.
239
+ await fs.unlink(sentinelPath).catch(() => undefined);
240
+ const knowledgeStats = await readKnowledgeStats(projectRoot);
241
+ await syncActiveFeatureSnapshot(projectRoot);
242
+ return {
243
+ archiveId,
244
+ archivePath,
245
+ archivedAt,
246
+ featureName: feature,
247
+ activeFeature,
248
+ resetState,
249
+ snapshottedStateFiles,
250
+ knowledge: knowledgeStats,
251
+ retro: retroSummary
252
+ };
253
+ }
254
+ catch (err) {
255
+ // Best-effort rollback: if artifacts were moved but the subsequent
256
+ // steps failed, put artifacts back so the user is not left without
257
+ // a working run. The sentinel is intentionally left behind for
258
+ // inspection; doctor surfaces it.
259
+ if (artifactsMoved) {
260
+ try {
261
+ await fs.rm(artifactsDir, { recursive: true, force: true });
262
+ await fs.rename(archiveArtifactsPath, artifactsDir);
263
+ }
264
+ catch {
265
+ // Rollback failed — sentinel + orphaned archive dir will be
266
+ // surfaced by doctor and can be reconciled manually.
267
+ }
268
+ }
269
+ throw err;
270
+ }
241
271
  }
242
272
  const KNOWLEDGE_SOFT_THRESHOLD = 50;
243
273
  async function readKnowledgeStats(projectRoot) {
@@ -1,9 +1,10 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
- import { COMMAND_FILE_ORDER, RUNTIME_ROOT } from "./constants.js";
3
+ import { RUNTIME_ROOT } from "./constants.js";
4
4
  import { canTransition, createInitialCloseoutState, createInitialFlowState, isFlowTrack, skippedStagesForTrack, SHIP_SUBSTATES } from "./flow-state.js";
5
5
  import { ensureFeatureSystem, syncActiveFeatureSnapshot } from "./feature-system.js";
6
6
  import { ensureDir, exists, withDirectoryLock, writeFileSafe } from "./fs-utils.js";
7
+ import { FLOW_STAGES } from "./types.js";
7
8
  export class InvalidStageTransitionError extends Error {
8
9
  from;
9
10
  to;
@@ -17,12 +18,19 @@ export class InvalidStageTransitionError extends Error {
17
18
  const FLOW_STATE_REL_PATH = `${RUNTIME_ROOT}/state/flow-state.json`;
18
19
  const RUNS_DIR_REL_PATH = `${RUNTIME_ROOT}/runs`;
19
20
  const ACTIVE_ARTIFACTS_REL_PATH = `${RUNTIME_ROOT}/artifacts`;
20
- const FLOW_STAGE_SET = new Set(COMMAND_FILE_ORDER);
21
+ const FLOW_STAGE_SET = new Set(FLOW_STAGES);
21
22
  function validateFlowTransition(prev, next) {
22
23
  if (prev.activeRunId !== next.activeRunId) {
23
24
  // New run — only reset paths may change the runId, but those set allowReset.
24
25
  throw new InvalidStageTransitionError(prev.currentStage, next.currentStage, `cannot change activeRunId from "${prev.activeRunId}" to "${next.activeRunId}" without allowReset.`);
25
26
  }
27
+ // Track is immutable within a single run: stage schemas, gate sets, and
28
+ // cross-stage reads all branch on track. Silently flipping the track
29
+ // mid-run would let completed stages satisfy one gate tier and the
30
+ // current stage re-read the catalog under a different tier.
31
+ if (prev.track !== next.track) {
32
+ throw new InvalidStageTransitionError(prev.currentStage, next.currentStage, `cannot change track from "${prev.track}" to "${next.track}" mid-run (activeRunId="${prev.activeRunId}"). Archive the run and start a new one to switch tracks.`);
33
+ }
26
34
  for (const completed of prev.completedStages) {
27
35
  if (!next.completedStages.includes(completed)) {
28
36
  throw new InvalidStageTransitionError(prev.currentStage, next.currentStage, `completedStages must be monotonic: stage "${completed}" was previously completed but is missing from the new state.`);
@@ -85,7 +93,7 @@ function sanitizeGuardEvidence(value) {
85
93
  function sanitizeStageGateCatalog(value, fallback) {
86
94
  const uniqueStrings = (items) => [...new Set(items)];
87
95
  const next = {};
88
- for (const stage of COMMAND_FILE_ORDER) {
96
+ for (const stage of FLOW_STAGES) {
89
97
  const base = fallback[stage];
90
98
  next[stage] = {
91
99
  required: [...base.required],
@@ -100,7 +108,7 @@ function sanitizeStageGateCatalog(value, fallback) {
100
108
  return next;
101
109
  }
102
110
  const rawCatalog = value;
103
- for (const stage of COMMAND_FILE_ORDER) {
111
+ for (const stage of FLOW_STAGES) {
104
112
  const rawStage = rawCatalog[stage];
105
113
  if (!rawStage || typeof rawStage !== "object" || Array.isArray(rawStage)) {
106
114
  continue;
package/dist/tdd-cycle.js CHANGED
@@ -1,6 +1,10 @@
1
1
  export function parseTddCycleLog(text) {
2
2
  const out = [];
3
- for (const raw of text.split(/\r?\n/)) {
3
+ // Strip a leading UTF-8 BOM on the whole blob so the first line parses
4
+ // cleanly; `trim()` handles BOM on subsequent lines through the same
5
+ // codepath (empty/whitespace-only lines are skipped).
6
+ const normalized = text.charCodeAt(0) === 0xfeff ? text.slice(1) : text;
7
+ for (const raw of normalized.split(/\r?\n/)) {
4
8
  const line = raw.trim();
5
9
  if (!line)
6
10
  continue;
@@ -100,6 +104,7 @@ export function validateTddCycleOrder(entries, options = {}) {
100
104
  }
101
105
  if (state !== "green_done") {
102
106
  issues.push(`slice ${slice}: refactor logged before green`);
107
+ continue;
103
108
  }
104
109
  state = "need_red";
105
110
  }
package/dist/types.d.ts CHANGED
@@ -109,7 +109,7 @@ export interface TddPathConfig {
109
109
  export interface CompoundConfig {
110
110
  recurrenceThreshold?: number;
111
111
  }
112
- export interface VibyConfig {
112
+ export interface CclawConfig {
113
113
  version: string;
114
114
  flowVersion: string;
115
115
  harnesses: HarnessId[];
@@ -141,6 +141,7 @@ export interface VibyConfig {
141
141
  /**
142
142
  * Legacy alias for test-side path detection in workflow-guard.
143
143
  * Prefer `tdd.testPathPatterns` in new configs.
144
+ * @deprecated Use `tdd.testPathPatterns` instead.
144
145
  */
145
146
  tddTestGlobs?: string[];
146
147
  /** Path-pattern routing for TDD test/production write classification. */
@@ -173,6 +174,10 @@ export interface VibyConfig {
173
174
  */
174
175
  sliceReview?: SliceReviewConfig;
175
176
  }
177
+ /**
178
+ * @deprecated Use `CclawConfig` instead.
179
+ */
180
+ export type VibyConfig = CclawConfig;
176
181
  export interface TransitionRule {
177
182
  from: FlowStage;
178
183
  to: FlowStage;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "cclaw-cli",
3
- "version": "0.47.0",
3
+ "version": "0.48.1",
4
4
  "description": "Installer-first flow toolkit for coding agents",
5
5
  "type": "module",
6
6
  "bin": {
@@ -21,6 +21,7 @@
21
21
  "test": "vitest run",
22
22
  "test:watch": "vitest",
23
23
  "test:coverage": "vitest run --coverage",
24
+ "test:mutation": "stryker run",
24
25
  "smoke:runtime": "npm run build && node scripts/smoke-init.mjs",
25
26
  "lint:hooks": "npm run build && node scripts/lint-generated-hooks.mjs",
26
27
  "build:harness-docs": "npm run build && node scripts/build-harness-docs.mjs",
@@ -44,6 +45,8 @@
44
45
  "yaml": "^2.8.1"
45
46
  },
46
47
  "devDependencies": {
48
+ "@stryker-mutator/core": "^9.6.1",
49
+ "@stryker-mutator/vitest-runner": "^9.6.1",
47
50
  "@types/node": "^24.7.2",
48
51
  "@vitest/coverage-v8": "^3.2.4",
49
52
  "typescript": "^5.9.3",